author     John Baldwin <jhb@FreeBSD.org>  2023-08-22 04:02:29 +0000
committer  John Baldwin <jhb@FreeBSD.org>  2023-08-22 04:02:29 +0000
commit     74d73bb743c759e6d4d67435d383d501585c4680 (patch)
tree       3b1b00dc54ec20cf1958f7063e6bdf9772f2c271 /secure/lib
parent     f3cac6c020ba474b11432eb9b29c91519962488a (diff)
libcrypto: Generate new files added in OpenSSL 3.0.
Reviewed by:            gallatin, ngie, emaste
Differential Revision:  https://reviews.freebsd.org/D41538
Diffstat (limited to 'secure/lib')
-rw-r--r--  secure/lib/libcrypto/arch/amd64/aes-x86_64.S    2680
-rw-r--r--  secure/lib/libcrypto/arch/amd64/bsaes-x86_64.S  2619
-rw-r--r--  secure/lib/libcrypto/arch/i386/aes-586.S        6644
3 files changed, 11943 insertions, 0 deletions
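For orientation (not part of the commit itself): the generated amd64 file below defines OpenSSL's long-standing low-level AES entry points, AES_set_encrypt_key, AES_set_decrypt_key, AES_encrypt, AES_decrypt, and AES_cbc_encrypt. A minimal C sketch of driving them through the stable <openssl/aes.h> prototypes follows; the key, IV, and plaintext are placeholder values, and since this API is deprecated in OpenSSL 3.0 the suppression macro is needed for a clean build (roughly: cc test.c -lcrypto).

#define OPENSSL_SUPPRESS_DEPRECATED	/* AES_* is deprecated in OpenSSL 3.0 */
#include <openssl/aes.h>
#include <string.h>

int
main(void)
{
	unsigned char key[16] = { 0 };	/* 128-bit key (placeholder) */
	unsigned char iv[16] = { 0 };	/* CBC IV (placeholder) */
	unsigned char in[16] = "16-byte block..";
	unsigned char out[16], back[16];
	AES_KEY enc, dec;

	/* Key schedule; the bit count selects 10/12/14 rounds. */
	AES_set_encrypt_key(key, 128, &enc);
	AES_set_decrypt_key(key, 128, &dec);

	/* Single-block round trip via AES_encrypt/AES_decrypt. */
	AES_encrypt(in, out, &enc);
	AES_decrypt(out, back, &dec);

	/* CBC over a 16-byte multiple; the IV is updated in place. */
	AES_cbc_encrypt(in, out, sizeof(in), &enc, iv, AES_ENCRYPT);

	return (memcmp(in, back, sizeof(in)) != 0);
}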
diff --git a/secure/lib/libcrypto/arch/amd64/aes-x86_64.S b/secure/lib/libcrypto/arch/amd64/aes-x86_64.S
new file mode 100644
index 000000000000..fc375184a20a
--- /dev/null
+++ b/secure/lib/libcrypto/arch/amd64/aes-x86_64.S
@@ -0,0 +1,2680 @@
+/* Do not modify. This file is auto-generated from aes-x86_64.pl. */
+.text
+.type _x86_64_AES_encrypt,@function
+.align 16
+_x86_64_AES_encrypt:
+.cfi_startproc
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp .Lenc_loop
+.align 16
+.Lenc_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ shrl $16,%ecx
+ movzbl %ah,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movl 12(%r15),%edx
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rdi,8),%r12d
+ xorl 1(%r14,%rbp,8),%r8d
+
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz .Lenc_loop
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl 2(%r14,%rsi,8),%r10d
+ movzbl 2(%r14,%rdi,8),%r11d
+ movzbl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl 2(%r14,%rsi,8),%r8d
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $0x0000ff00,%edi
+ andl $0x0000ff00,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%ecx
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ shrl $16,%edx
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+
+ andl $0x0000ff00,%esi
+ andl $0x0000ff00,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $0x00ff0000,%esi
+ andl $0x00ff0000,%edi
+ andl $0x00ff0000,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 2(%r14,%rbp,8),%ebp
+
+ andl $0x00ff0000,%esi
+ andl $0xff000000,%edi
+ andl $0xff000000,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movl 16+12(%r15),%edx
+ movl 2(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 16+0(%r15),%eax
+
+ andl $0xff000000,%esi
+ andl $0xff000000,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
+.cfi_endproc
+.size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt
+.type _x86_64_AES_encrypt_compact,@function
+.align 16
+_x86_64_AES_encrypt_compact:
+.cfi_startproc
+ leaq 128(%r14),%r8
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp .Lenc_loop_compact
+.align 16
+.Lenc_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl %dl,%r8d
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ shrl $16,%ecx
+ movzbl %dh,%ebp
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+ movzbl (%r14,%r8,1),%r8d
+
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl %ah,%esi
+ movzbl (%r14,%rdi,1),%r13d
+ movzbl %cl,%edi
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+
+ shll $8,%r9d
+ shrl $16,%edx
+ shll $8,%r13d
+ xorl %r9d,%r10d
+ shrl $16,%eax
+ movzbl %dl,%r9d
+ shrl $16,%ebx
+ xorl %r13d,%r11d
+ shll $8,%ebp
+ movzbl %al,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %ebp,%r12d
+
+ shll $8,%esi
+ movzbl %bl,%ebp
+ shll $16,%edi
+ xorl %esi,%r8d
+ movzbl (%r14,%r9,1),%r9d
+ movzbl %dh,%esi
+ movzbl (%r14,%r13,1),%r13d
+ xorl %edi,%r10d
+
+ shrl $8,%ecx
+ movzbl %ah,%edi
+ shll $16,%r9d
+ shrl $8,%ebx
+ shll $16,%r13d
+ xorl %r9d,%r11d
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rcx,1),%edx
+ movzbl (%r14,%rbx,1),%ecx
+
+ shll $16,%ebp
+ xorl %r13d,%r12d
+ shll $24,%esi
+ xorl %ebp,%r8d
+ shll $24,%edi
+ xorl %esi,%r10d
+ shll $24,%edx
+ xorl %edi,%r11d
+ shll $24,%ecx
+ movl %r10d,%eax
+ movl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je .Lenc_compact_done
+ movl $0x80808080,%r10d
+ movl $0x80808080,%r11d
+ andl %eax,%r10d
+ andl %ebx,%r11d
+ movl %r10d,%esi
+ movl %r11d,%edi
+ shrl $7,%r10d
+ leal (%rax,%rax,1),%r8d
+ shrl $7,%r11d
+ leal (%rbx,%rbx,1),%r9d
+ subl %r10d,%esi
+ subl %r11d,%edi
+ andl $0xfefefefe,%r8d
+ andl $0xfefefefe,%r9d
+ andl $0x1b1b1b1b,%esi
+ andl $0x1b1b1b1b,%edi
+ movl %eax,%r10d
+ movl %ebx,%r11d
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl $0x80808080,%r12d
+ roll $24,%eax
+ movl $0x80808080,%ebp
+ roll $24,%ebx
+ andl %ecx,%r12d
+ andl %edx,%ebp
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl %r12d,%esi
+ rorl $16,%r10d
+ movl %ebp,%edi
+ rorl $16,%r11d
+ leal (%rcx,%rcx,1),%r8d
+ shrl $7,%r12d
+ xorl %r10d,%eax
+ shrl $7,%ebp
+ xorl %r11d,%ebx
+ rorl $8,%r10d
+ leal (%rdx,%rdx,1),%r9d
+ rorl $8,%r11d
+ subl %r12d,%esi
+ subl %ebp,%edi
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+
+ andl $0xfefefefe,%r8d
+ andl $0xfefefefe,%r9d
+ andl $0x1b1b1b1b,%esi
+ andl $0x1b1b1b1b,%edi
+ movl %ecx,%r12d
+ movl %edx,%ebp
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ rorl $16,%r12d
+ xorl %r8d,%ecx
+ rorl $16,%ebp
+ xorl %r9d,%edx
+ roll $24,%ecx
+ movl 0(%r14),%esi
+ roll $24,%edx
+ xorl %r8d,%ecx
+ movl 64(%r14),%edi
+ xorl %r9d,%edx
+ movl 128(%r14),%r8d
+ xorl %r12d,%ecx
+ rorl $8,%r12d
+ xorl %ebp,%edx
+ rorl $8,%ebp
+ xorl %r12d,%ecx
+ movl 192(%r14),%r9d
+ xorl %ebp,%edx
+ jmp .Lenc_loop_compact
+.align 16
+.Lenc_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+.cfi_endproc
+.size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
+.globl AES_encrypt
+.type AES_encrypt,@function
+.align 16
+.globl asm_AES_encrypt
+.hidden asm_AES_encrypt
+asm_AES_encrypt:
+AES_encrypt:
+.cfi_startproc
+.byte 243,15,30,250
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+
+
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $0x3c0,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %rax,24(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x18,0x06,0x23,0x08
+.Lenc_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq .LAES_Te+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $0x300,%rbp
+ leaq (%r14,%rbp,1),%r14
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Lenc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size AES_encrypt,.-AES_encrypt
+.type _x86_64_AES_decrypt,@function
+.align 16
+_x86_64_AES_decrypt:
+.cfi_startproc
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp .Ldec_loop
+.align 16
+.Ldec_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %bh,%esi
+ shrl $16,%eax
+ movzbl %ch,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ movl 12(%r15),%edx
+ movzbl %ah,%ebp
+ xorl 1(%r14,%rsi,8),%r12d
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rbp,8),%r8d
+
+ xorl %r10d,%eax
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r12d,%ecx
+ xorl %r11d,%ebx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz .Ldec_loop
+ leaq 2048(%r14),%r14
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl (%r14,%rsi,1),%r10d
+ movzbl (%r14,%rdi,1),%r11d
+ movzbl (%r14,%rbp,1),%r12d
+
+ movzbl %dl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movzbl (%r14,%rsi,1),%r8d
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $8,%edi
+ shll $8,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%edx
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ shrl $16,%eax
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+
+ shll $8,%esi
+ shll $8,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $16,%edi
+ shll $16,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $24,%edi
+ shll $24,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movl 16+12(%r15),%edx
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movl 16+0(%r15),%eax
+
+ shll $24,%esi
+ shll $24,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ leaq -2048(%r14),%r14
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
+.cfi_endproc
+.size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt
+.type _x86_64_AES_decrypt_compact,@function
+.align 16
+_x86_64_AES_decrypt_compact:
+.cfi_startproc
+ leaq 128(%r14),%r8
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp .Ldec_loop_compact
+
+.align 16
+.Ldec_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl %dl,%r8d
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ shrl $16,%edx
+ movzbl %bh,%ebp
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+ movzbl (%r14,%r8,1),%r8d
+
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl %ch,%esi
+ movzbl (%r14,%rdi,1),%r13d
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+
+ shrl $16,%ecx
+ shll $8,%r13d
+ shll $8,%r9d
+ movzbl %cl,%edi
+ shrl $16,%eax
+ xorl %r9d,%r10d
+ shrl $16,%ebx
+ movzbl %dl,%r9d
+
+ shll $8,%ebp
+ xorl %r13d,%r11d
+ shll $8,%esi
+ movzbl %al,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %ebp,%r12d
+ movzbl %bl,%ebp
+
+ shll $16,%edi
+ xorl %esi,%r8d
+ movzbl (%r14,%r9,1),%r9d
+ movzbl %bh,%esi
+ movzbl (%r14,%rbp,1),%ebp
+ xorl %edi,%r10d
+ movzbl (%r14,%r13,1),%r13d
+ movzbl %ch,%edi
+
+ shll $16,%ebp
+ shll $16,%r9d
+ shll $16,%r13d
+ xorl %ebp,%r8d
+ movzbl %dh,%ebp
+ xorl %r9d,%r11d
+ shrl $8,%eax
+ xorl %r13d,%r12d
+
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%ebx
+ movzbl (%r14,%rbp,1),%ecx
+ movzbl (%r14,%rax,1),%edx
+
+ movl %r10d,%eax
+ shll $24,%esi
+ shll $24,%ebx
+ shll $24,%ecx
+ xorl %esi,%eax
+ shll $24,%edx
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je .Ldec_compact_done
+
+ movq 256+0(%r14),%rsi
+ shlq $32,%rbx
+ shlq $32,%rdx
+ movq 256+8(%r14),%rdi
+ orq %rbx,%rax
+ orq %rdx,%rcx
+ movq 256+16(%r14),%rbp
+ movq %rsi,%r9
+ movq %rsi,%r12
+ andq %rax,%r9
+ andq %rcx,%r12
+ movq %r9,%rbx
+ movq %r12,%rdx
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r8
+ xorq %rdx,%r11
+ movq %rsi,%r10
+ movq %rsi,%r13
+
+ andq %r8,%r10
+ andq %r11,%r13
+ movq %r10,%rbx
+ movq %r13,%rdx
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r9
+ xorq %rdx,%r12
+ movq %rsi,%r10
+ movq %rsi,%r13
+
+ andq %r9,%r10
+ andq %r12,%r13
+ movq %r10,%rbx
+ movq %r13,%rdx
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ shrq $32,%rbx
+ xorq %r13,%r12
+ shrq $32,%rdx
+ xorq %r8,%r10
+ roll $8,%eax
+ xorq %r11,%r13
+ roll $8,%ecx
+ xorq %r9,%r10
+ roll $8,%ebx
+ xorq %r12,%r13
+
+ roll $8,%edx
+ xorl %r10d,%eax
+ shrq $32,%r10
+ xorl %r13d,%ecx
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ roll $24,%r8d
+ movq %r11,%r13
+ roll $24,%r11d
+ shrq $32,%r10
+ xorl %r8d,%eax
+ shrq $32,%r13
+ xorl %r11d,%ecx
+ roll $24,%r10d
+ movq %r9,%r8
+ roll $24,%r13d
+ movq %r12,%r11
+ shrq $32,%r8
+ xorl %r10d,%ebx
+ shrq $32,%r11
+ xorl %r13d,%edx
+
+ movq 0(%r14),%rsi
+ roll $16,%r9d
+ movq 64(%r14),%rdi
+ roll $16,%r12d
+ movq 128(%r14),%rbp
+ roll $16,%r8d
+ movq 192(%r14),%r10
+ xorl %r9d,%eax
+ roll $16,%r11d
+ xorl %r12d,%ecx
+ movq 256(%r14),%r13
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ jmp .Ldec_loop_compact
+.align 16
+.Ldec_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+.cfi_endproc
+.size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
+.globl AES_decrypt
+.type AES_decrypt,@function
+.align 16
+.globl asm_AES_decrypt
+.hidden asm_AES_decrypt
+asm_AES_decrypt:
+AES_decrypt:
+.cfi_startproc
+.byte 243,15,30,250
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
+ pushq %rbx
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_offset %r15,-56
+
+
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $0x3c0,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %rax,24(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x18,0x06,0x23,0x08
+.Ldec_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq .LAES_Td+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $0x300,%rbp
+ leaq (%r14,%rbp,1),%r14
+ shrq $3,%rbp
+ addq %rbp,%r14
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+.cfi_def_cfa %rsi,8
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
+.Ldec_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size AES_decrypt,.-AES_decrypt
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
+.align 16
+AES_set_encrypt_key:
+.cfi_startproc
+.byte 243,15,30,250
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ subq $8,%rsp
+.cfi_adjust_cfa_offset 8
+.Lenc_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+
+ movq 40(%rsp),%rbp
+.cfi_restore %rbp
+ movq 48(%rsp),%rbx
+.cfi_restore %rbx
+ addq $56,%rsp
+.cfi_adjust_cfa_offset -56
+.Lenc_key_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size AES_set_encrypt_key,.-AES_set_encrypt_key
+
+.type _x86_64_AES_set_encrypt_key,@function
+.align 16
+_x86_64_AES_set_encrypt_key:
+.cfi_startproc
+ movl %esi,%ecx
+ movq %rdi,%rsi
+ movq %rdx,%rdi
+
+ testq $-1,%rsi
+ jz .Lbadpointer
+ testq $-1,%rdi
+ jz .Lbadpointer
+
+ leaq .LAES_Te(%rip),%rbp
+ leaq 2048+128(%rbp),%rbp
+
+
+ movl 0-128(%rbp),%eax
+ movl 32-128(%rbp),%ebx
+ movl 64-128(%rbp),%r8d
+ movl 96-128(%rbp),%edx
+ movl 128-128(%rbp),%eax
+ movl 160-128(%rbp),%ebx
+ movl 192-128(%rbp),%r8d
+ movl 224-128(%rbp),%edx
+
+ cmpl $128,%ecx
+ je .L10rounds
+ cmpl $192,%ecx
+ je .L12rounds
+ cmpl $256,%ecx
+ je .L14rounds
+ movq $-2,%rax
+ jmp .Lexit
+
+.L10rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rdx,8(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L10shortcut
+.align 4
+.L10loop:
+ movl 0(%rdi),%eax
+ movl 12(%rdi),%edx
+.L10shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,16(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,20(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,24(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,28(%rdi)
+ addl $1,%ecx
+ leaq 16(%rdi),%rdi
+ cmpl $10,%ecx
+ jl .L10loop
+
+ movl $10,80(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.L12rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rdx,16(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L12shortcut
+.align 4
+.L12loop:
+ movl 0(%rdi),%eax
+ movl 20(%rdi),%edx
+.L12shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,24(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,28(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,32(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,36(%rdi)
+
+ cmpl $7,%ecx
+ je .L12break
+ addl $1,%ecx
+
+ xorl 16(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ leaq 24(%rdi),%rdi
+ jmp .L12loop
+.L12break:
+ movl $12,72(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.L14rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rcx
+ movq 24(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,16(%rdi)
+ movq %rdx,24(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L14shortcut
+.align 4
+.L14loop:
+ movl 0(%rdi),%eax
+ movl 28(%rdi),%edx
+.L14shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,32(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,36(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ cmpl $6,%ecx
+ je .L14break
+ addl $1,%ecx
+
+ movl %eax,%edx
+ movl 16(%rdi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movl %eax,48(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,52(%rdi)
+ xorl 24(%rdi),%eax
+ movl %eax,56(%rdi)
+ xorl 28(%rdi),%eax
+ movl %eax,60(%rdi)
+
+ leaq 32(%rdi),%rdi
+ jmp .L14loop
+.L14break:
+ movl $14,48(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.Lbadpointer:
+ movq $-1,%rax
+.Lexit:
+.byte 0xf3,0xc3
+.cfi_endproc
+.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
+.align 16
+AES_set_decrypt_key:
+.cfi_startproc
+.byte 243,15,30,250
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ pushq %rdx
+.cfi_adjust_cfa_offset 8
+.Ldec_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+ movq (%rsp),%r8
+ cmpl $0,%eax
+ jne .Labort
+
+ movl 240(%r8),%r14d
+ xorq %rdi,%rdi
+ leaq (%rdi,%r14,4),%rcx
+ movq %r8,%rsi
+ leaq (%r8,%rcx,4),%rdi
+.align 4
+.Linvert:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 0(%rdi),%rcx
+ movq 8(%rdi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,0(%rsi)
+ movq %rdx,8(%rsi)
+ leaq 16(%rsi),%rsi
+ leaq -16(%rdi),%rdi
+ cmpq %rsi,%rdi
+ jne .Linvert
+
+ leaq .LAES_Te+2048+1024(%rip),%rax
+
+ movq 40(%rax),%rsi
+ movq 48(%rax),%rdi
+ movq 56(%rax),%rbp
+
+ movq %r8,%r15
+ subl $1,%r14d
+.align 4
+.Lpermute:
+ leaq 16(%r15),%r15
+ movq 0(%r15),%rax
+ movq 8(%r15),%rcx
+ movq %rsi,%r9
+ movq %rsi,%r12
+ andq %rax,%r9
+ andq %rcx,%r12
+ movq %r9,%rbx
+ movq %r12,%rdx
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r8
+ xorq %rdx,%r11
+ movq %rsi,%r10
+ movq %rsi,%r13
+
+ andq %r8,%r10
+ andq %r11,%r13
+ movq %r10,%rbx
+ movq %r13,%rdx
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r9
+ xorq %rdx,%r12
+ movq %rsi,%r10
+ movq %rsi,%r13
+
+ andq %r9,%r10
+ andq %r12,%r13
+ movq %r10,%rbx
+ movq %r13,%rdx
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ shrq $32,%rbx
+ xorq %r13,%r12
+ shrq $32,%rdx
+ xorq %r8,%r10
+ roll $8,%eax
+ xorq %r11,%r13
+ roll $8,%ecx
+ xorq %r9,%r10
+ roll $8,%ebx
+ xorq %r12,%r13
+
+ roll $8,%edx
+ xorl %r10d,%eax
+ shrq $32,%r10
+ xorl %r13d,%ecx
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ roll $24,%r8d
+ movq %r11,%r13
+ roll $24,%r11d
+ shrq $32,%r10
+ xorl %r8d,%eax
+ shrq $32,%r13
+ xorl %r11d,%ecx
+ roll $24,%r10d
+ movq %r9,%r8
+ roll $24,%r13d
+ movq %r12,%r11
+ shrq $32,%r8
+ xorl %r10d,%ebx
+ shrq $32,%r11
+ xorl %r13d,%edx
+
+
+ roll $16,%r9d
+
+ roll $16,%r12d
+
+ roll $16,%r8d
+
+ xorl %r9d,%eax
+ roll $16,%r11d
+ xorl %r12d,%ecx
+
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ movl %eax,0(%r15)
+ movl %ebx,4(%r15)
+ movl %ecx,8(%r15)
+ movl %edx,12(%r15)
+ subl $1,%r14d
+ jnz .Lpermute
+
+ xorq %rax,%rax
+.Labort:
+ movq 8(%rsp),%r15
+.cfi_restore %r15
+ movq 16(%rsp),%r14
+.cfi_restore %r14
+ movq 24(%rsp),%r13
+.cfi_restore %r13
+ movq 32(%rsp),%r12
+.cfi_restore %r12
+ movq 40(%rsp),%rbp
+.cfi_restore %rbp
+ movq 48(%rsp),%rbx
+.cfi_restore %rbx
+ addq $56,%rsp
+.cfi_adjust_cfa_offset -56
+.Ldec_key_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size AES_set_decrypt_key,.-AES_set_decrypt_key
+.globl AES_cbc_encrypt
+.type AES_cbc_encrypt,@function
+.align 16
+
+.globl asm_AES_cbc_encrypt
+.hidden asm_AES_cbc_encrypt
+asm_AES_cbc_encrypt:
+AES_cbc_encrypt:
+.cfi_startproc
+.byte 243,15,30,250
+ cmpq $0,%rdx
+ je .Lcbc_epilogue
+ pushfq
+
+
+.cfi_adjust_cfa_offset 8
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-32
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-40
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-48
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-56
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-64
+.Lcbc_prologue:
+
+ cld
+ movl %r9d,%r9d
+
+ leaq .LAES_Te(%rip),%r14
+ leaq .LAES_Td(%rip),%r10
+ cmpq $0,%r9
+ cmoveq %r10,%r14
+
+.cfi_remember_state
+ movl OPENSSL_ia32cap_P(%rip),%r10d
+ cmpq $512,%rdx
+ jb .Lcbc_slow_prologue
+ testq $15,%rdx
+ jnz .Lcbc_slow_prologue
+ btl $28,%r10d
+ jc .Lcbc_slow_prologue
+
+
+ leaq -88-248(%rsp),%r15
+ andq $-64,%r15
+
+
+ movq %r14,%r10
+ leaq 2304(%r14),%r11
+ movq %r15,%r12
+ andq $0xFFF,%r10
+ andq $0xFFF,%r11
+ andq $0xFFF,%r12
+
+ cmpq %r11,%r12
+ jb .Lcbc_te_break_out
+ subq %r11,%r12
+ subq %r12,%r15
+ jmp .Lcbc_te_ok
+.Lcbc_te_break_out:
+ subq %r10,%r12
+ andq $0xFFF,%r12
+ addq $320,%r12
+ subq %r12,%r15
+.align 4
+.Lcbc_te_ok:
+
+ xchgq %rsp,%r15
+.cfi_def_cfa_register %r15
+
+ movq %r15,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40
+.Lcbc_fast_body:
+ movq %rdi,24(%rsp)
+ movq %rsi,32(%rsp)
+ movq %rdx,40(%rsp)
+ movq %rcx,48(%rsp)
+ movq %r8,56(%rsp)
+ movl $0,80+240(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+
+ movl 240(%r15),%eax
+
+ movq %r15,%r10
+ subq %r14,%r10
+ andq $0xfff,%r10
+ cmpq $2304,%r10
+ jb .Lcbc_do_ecopy
+ cmpq $4096-248,%r10
+ jb .Lcbc_skip_ecopy
+.align 4
+.Lcbc_do_ecopy:
+ movq %r15,%rsi
+ leaq 80(%rsp),%rdi
+ leaq 80(%rsp),%r15
+ movl $30,%ecx
+.long 0x90A548F3
+ movl %eax,(%rdi)
+.Lcbc_skip_ecopy:
+ movq %r15,0(%rsp)
+
+ movl $18,%ecx
+.align 4
+.Lcbc_prefetch_te:
+ movq 0(%r14),%r10
+ movq 32(%r14),%r11
+ movq 64(%r14),%r12
+ movq 96(%r14),%r13
+ leaq 128(%r14),%r14
+ subl $1,%ecx
+ jnz .Lcbc_prefetch_te
+ leaq -2304(%r14),%r14
+
+ cmpq $0,%rbx
+ je .LFAST_DECRYPT
+
+
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+
+.align 4
+.Lcbc_fast_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_encrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ movq %r10,40(%rsp)
+ jnz .Lcbc_fast_enc_loop
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp .Lcbc_fast_cleanup
+
+
+.align 16
+.LFAST_DECRYPT:
+ cmpq %r8,%r9
+ je .Lcbc_fast_dec_in_place
+
+ movq %rbp,64(%rsp)
+.align 4
+.Lcbc_fast_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 64(%rsp),%rbp
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0(%rbp),%eax
+ xorl 4(%rbp),%ebx
+ xorl 8(%rbp),%ecx
+ xorl 12(%rbp),%edx
+ movq %r8,%rbp
+
+ subq $16,%r10
+ movq %r10,40(%rsp)
+ movq %rbp,64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jnz .Lcbc_fast_dec_loop
+ movq 56(%rsp),%r12
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0(%r12)
+ movq %r11,8(%r12)
+ jmp .Lcbc_fast_cleanup
+
+.align 16
+.Lcbc_fast_dec_in_place:
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0+64(%rsp)
+ movq %r11,8+64(%rsp)
+.align 4
+.Lcbc_fast_dec_in_place_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jz .Lcbc_fast_dec_in_place_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ movq %r10,40(%rsp)
+ jmp .Lcbc_fast_dec_in_place_loop
+.Lcbc_fast_dec_in_place_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+.align 4
+.Lcbc_fast_cleanup:
+ cmpl $0,80+240(%rsp)
+ leaq 80(%rsp),%rdi
+ je .Lcbc_exit
+ movl $30,%ecx
+ xorq %rax,%rax
+.long 0x90AB48F3
+
+ jmp .Lcbc_exit
+
+
+.align 16
+.Lcbc_slow_prologue:
+.cfi_restore_state
+
+ leaq -88(%rsp),%rbp
+ andq $-64,%rbp
+
+ leaq -88-63(%rcx),%r10
+ subq %rbp,%r10
+ negq %r10
+ andq $0x3c0,%r10
+ subq %r10,%rbp
+
+ xchgq %rsp,%rbp
+.cfi_def_cfa_register %rbp
+
+ movq %rbp,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40
+.Lcbc_slow_body:
+
+
+
+
+ movq %r8,56(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+ movq %rdx,%r10
+
+ movl 240(%r15),%eax
+ movq %r15,0(%rsp)
+ shll $4,%eax
+ leaq (%r15,%rax,1),%rax
+ movq %rax,8(%rsp)
+
+
+ leaq 2048(%r14),%r14
+ leaq 768-8(%rsp),%rax
+ subq %r14,%rax
+ andq $0x300,%rax
+ leaq (%r14,%rax,1),%r14
+
+ cmpq $0,%rbx
+ je .LSLOW_DECRYPT
+
+
+ testq $-16,%r10
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+ jz .Lcbc_slow_enc_tail
+
+.align 4
+.Lcbc_slow_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ jnz .Lcbc_slow_enc_loop
+ testq $15,%r10
+ jnz .Lcbc_slow_enc_tail
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp .Lcbc_exit
+
+.align 4
+.Lcbc_slow_enc_tail:
+ movq %rax,%r11
+ movq %rcx,%r12
+ movq %r10,%rcx
+ movq %r8,%rsi
+ movq %r9,%rdi
+.long 0x9066A4F3
+ movq $16,%rcx
+ subq %r10,%rcx
+ xorq %rax,%rax
+.long 0x9066AAF3
+ movq %r9,%r8
+ movq $16,%r10
+ movq %r11,%rax
+ movq %r12,%rcx
+ jmp .Lcbc_slow_enc_loop
+
+.align 16
+.LSLOW_DECRYPT:
+ shrq $3,%rax
+ addq %rax,%r14
+
+ movq 0(%rbp),%r11
+ movq 8(%rbp),%r12
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+.align 4
+.Lcbc_slow_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jc .Lcbc_slow_dec_partial
+ jz .Lcbc_slow_dec_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jmp .Lcbc_slow_dec_loop
+.Lcbc_slow_dec_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ jmp .Lcbc_exit
+
+.align 4
+.Lcbc_slow_dec_partial:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0+64(%rsp)
+ movl %ebx,4+64(%rsp)
+ movl %ecx,8+64(%rsp)
+ movl %edx,12+64(%rsp)
+
+ movq %r9,%rdi
+ leaq 64(%rsp),%rsi
+ leaq 16(%r10),%rcx
+.long 0x9066A4F3
+ jmp .Lcbc_exit
+
+.align 16
+.Lcbc_exit:
+ movq 16(%rsp),%rsi
+.cfi_def_cfa %rsi,64
+ movq (%rsi),%r15
+.cfi_restore %r15
+ movq 8(%rsi),%r14
+.cfi_restore %r14
+ movq 16(%rsi),%r13
+.cfi_restore %r13
+ movq 24(%rsi),%r12
+.cfi_restore %r12
+ movq 32(%rsi),%rbp
+.cfi_restore %rbp
+ movq 40(%rsi),%rbx
+.cfi_restore %rbx
+ leaq 48(%rsi),%rsp
+.cfi_def_cfa %rsp,16
+.Lcbc_popfq:
+ popfq
+
+
+.cfi_adjust_cfa_offset -8
+.Lcbc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size AES_cbc_encrypt,.-AES_cbc_encrypt
+.align 64
+.LAES_Te:
+.long 0xa56363c6,0xa56363c6
+.long 0x847c7cf8,0x847c7cf8
+.long 0x997777ee,0x997777ee
+.long 0x8d7b7bf6,0x8d7b7bf6
+.long 0x0df2f2ff,0x0df2f2ff
+.long 0xbd6b6bd6,0xbd6b6bd6
+.long 0xb16f6fde,0xb16f6fde
+.long 0x54c5c591,0x54c5c591
+.long 0x50303060,0x50303060
+.long 0x03010102,0x03010102
+.long 0xa96767ce,0xa96767ce
+.long 0x7d2b2b56,0x7d2b2b56
+.long 0x19fefee7,0x19fefee7
+.long 0x62d7d7b5,0x62d7d7b5
+.long 0xe6abab4d,0xe6abab4d
+.long 0x9a7676ec,0x9a7676ec
+.long 0x45caca8f,0x45caca8f
+.long 0x9d82821f,0x9d82821f
+.long 0x40c9c989,0x40c9c989
+.long 0x877d7dfa,0x877d7dfa
+.long 0x15fafaef,0x15fafaef
+.long 0xeb5959b2,0xeb5959b2
+.long 0xc947478e,0xc947478e
+.long 0x0bf0f0fb,0x0bf0f0fb
+.long 0xecadad41,0xecadad41
+.long 0x67d4d4b3,0x67d4d4b3
+.long 0xfda2a25f,0xfda2a25f
+.long 0xeaafaf45,0xeaafaf45
+.long 0xbf9c9c23,0xbf9c9c23
+.long 0xf7a4a453,0xf7a4a453
+.long 0x967272e4,0x967272e4
+.long 0x5bc0c09b,0x5bc0c09b
+.long 0xc2b7b775,0xc2b7b775
+.long 0x1cfdfde1,0x1cfdfde1
+.long 0xae93933d,0xae93933d
+.long 0x6a26264c,0x6a26264c
+.long 0x5a36366c,0x5a36366c
+.long 0x413f3f7e,0x413f3f7e
+.long 0x02f7f7f5,0x02f7f7f5
+.long 0x4fcccc83,0x4fcccc83
+.long 0x5c343468,0x5c343468
+.long 0xf4a5a551,0xf4a5a551
+.long 0x34e5e5d1,0x34e5e5d1
+.long 0x08f1f1f9,0x08f1f1f9
+.long 0x937171e2,0x937171e2
+.long 0x73d8d8ab,0x73d8d8ab
+.long 0x53313162,0x53313162
+.long 0x3f15152a,0x3f15152a
+.long 0x0c040408,0x0c040408
+.long 0x52c7c795,0x52c7c795
+.long 0x65232346,0x65232346
+.long 0x5ec3c39d,0x5ec3c39d
+.long 0x28181830,0x28181830
+.long 0xa1969637,0xa1969637
+.long 0x0f05050a,0x0f05050a
+.long 0xb59a9a2f,0xb59a9a2f
+.long 0x0907070e,0x0907070e
+.long 0x36121224,0x36121224
+.long 0x9b80801b,0x9b80801b
+.long 0x3de2e2df,0x3de2e2df
+.long 0x26ebebcd,0x26ebebcd
+.long 0x6927274e,0x6927274e
+.long 0xcdb2b27f,0xcdb2b27f
+.long 0x9f7575ea,0x9f7575ea
+.long 0x1b090912,0x1b090912
+.long 0x9e83831d,0x9e83831d
+.long 0x742c2c58,0x742c2c58
+.long 0x2e1a1a34,0x2e1a1a34
+.long 0x2d1b1b36,0x2d1b1b36
+.long 0xb26e6edc,0xb26e6edc
+.long 0xee5a5ab4,0xee5a5ab4
+.long 0xfba0a05b,0xfba0a05b
+.long 0xf65252a4,0xf65252a4
+.long 0x4d3b3b76,0x4d3b3b76
+.long 0x61d6d6b7,0x61d6d6b7
+.long 0xceb3b37d,0xceb3b37d
+.long 0x7b292952,0x7b292952
+.long 0x3ee3e3dd,0x3ee3e3dd
+.long 0x712f2f5e,0x712f2f5e
+.long 0x97848413,0x97848413
+.long 0xf55353a6,0xf55353a6
+.long 0x68d1d1b9,0x68d1d1b9
+.long 0x00000000,0x00000000
+.long 0x2cededc1,0x2cededc1
+.long 0x60202040,0x60202040
+.long 0x1ffcfce3,0x1ffcfce3
+.long 0xc8b1b179,0xc8b1b179
+.long 0xed5b5bb6,0xed5b5bb6
+.long 0xbe6a6ad4,0xbe6a6ad4
+.long 0x46cbcb8d,0x46cbcb8d
+.long 0xd9bebe67,0xd9bebe67
+.long 0x4b393972,0x4b393972
+.long 0xde4a4a94,0xde4a4a94
+.long 0xd44c4c98,0xd44c4c98
+.long 0xe85858b0,0xe85858b0
+.long 0x4acfcf85,0x4acfcf85
+.long 0x6bd0d0bb,0x6bd0d0bb
+.long 0x2aefefc5,0x2aefefc5
+.long 0xe5aaaa4f,0xe5aaaa4f
+.long 0x16fbfbed,0x16fbfbed
+.long 0xc5434386,0xc5434386
+.long 0xd74d4d9a,0xd74d4d9a
+.long 0x55333366,0x55333366
+.long 0x94858511,0x94858511
+.long 0xcf45458a,0xcf45458a
+.long 0x10f9f9e9,0x10f9f9e9
+.long 0x06020204,0x06020204
+.long 0x817f7ffe,0x817f7ffe
+.long 0xf05050a0,0xf05050a0
+.long 0x443c3c78,0x443c3c78
+.long 0xba9f9f25,0xba9f9f25
+.long 0xe3a8a84b,0xe3a8a84b
+.long 0xf35151a2,0xf35151a2
+.long 0xfea3a35d,0xfea3a35d
+.long 0xc0404080,0xc0404080
+.long 0x8a8f8f05,0x8a8f8f05
+.long 0xad92923f,0xad92923f
+.long 0xbc9d9d21,0xbc9d9d21
+.long 0x48383870,0x48383870
+.long 0x04f5f5f1,0x04f5f5f1
+.long 0xdfbcbc63,0xdfbcbc63
+.long 0xc1b6b677,0xc1b6b677
+.long 0x75dadaaf,0x75dadaaf
+.long 0x63212142,0x63212142
+.long 0x30101020,0x30101020
+.long 0x1affffe5,0x1affffe5
+.long 0x0ef3f3fd,0x0ef3f3fd
+.long 0x6dd2d2bf,0x6dd2d2bf
+.long 0x4ccdcd81,0x4ccdcd81
+.long 0x140c0c18,0x140c0c18
+.long 0x35131326,0x35131326
+.long 0x2fececc3,0x2fececc3
+.long 0xe15f5fbe,0xe15f5fbe
+.long 0xa2979735,0xa2979735
+.long 0xcc444488,0xcc444488
+.long 0x3917172e,0x3917172e
+.long 0x57c4c493,0x57c4c493
+.long 0xf2a7a755,0xf2a7a755
+.long 0x827e7efc,0x827e7efc
+.long 0x473d3d7a,0x473d3d7a
+.long 0xac6464c8,0xac6464c8
+.long 0xe75d5dba,0xe75d5dba
+.long 0x2b191932,0x2b191932
+.long 0x957373e6,0x957373e6
+.long 0xa06060c0,0xa06060c0
+.long 0x98818119,0x98818119
+.long 0xd14f4f9e,0xd14f4f9e
+.long 0x7fdcdca3,0x7fdcdca3
+.long 0x66222244,0x66222244
+.long 0x7e2a2a54,0x7e2a2a54
+.long 0xab90903b,0xab90903b
+.long 0x8388880b,0x8388880b
+.long 0xca46468c,0xca46468c
+.long 0x29eeeec7,0x29eeeec7
+.long 0xd3b8b86b,0xd3b8b86b
+.long 0x3c141428,0x3c141428
+.long 0x79dedea7,0x79dedea7
+.long 0xe25e5ebc,0xe25e5ebc
+.long 0x1d0b0b16,0x1d0b0b16
+.long 0x76dbdbad,0x76dbdbad
+.long 0x3be0e0db,0x3be0e0db
+.long 0x56323264,0x56323264
+.long 0x4e3a3a74,0x4e3a3a74
+.long 0x1e0a0a14,0x1e0a0a14
+.long 0xdb494992,0xdb494992
+.long 0x0a06060c,0x0a06060c
+.long 0x6c242448,0x6c242448
+.long 0xe45c5cb8,0xe45c5cb8
+.long 0x5dc2c29f,0x5dc2c29f
+.long 0x6ed3d3bd,0x6ed3d3bd
+.long 0xefacac43,0xefacac43
+.long 0xa66262c4,0xa66262c4
+.long 0xa8919139,0xa8919139
+.long 0xa4959531,0xa4959531
+.long 0x37e4e4d3,0x37e4e4d3
+.long 0x8b7979f2,0x8b7979f2
+.long 0x32e7e7d5,0x32e7e7d5
+.long 0x43c8c88b,0x43c8c88b
+.long 0x5937376e,0x5937376e
+.long 0xb76d6dda,0xb76d6dda
+.long 0x8c8d8d01,0x8c8d8d01
+.long 0x64d5d5b1,0x64d5d5b1
+.long 0xd24e4e9c,0xd24e4e9c
+.long 0xe0a9a949,0xe0a9a949
+.long 0xb46c6cd8,0xb46c6cd8
+.long 0xfa5656ac,0xfa5656ac
+.long 0x07f4f4f3,0x07f4f4f3
+.long 0x25eaeacf,0x25eaeacf
+.long 0xaf6565ca,0xaf6565ca
+.long 0x8e7a7af4,0x8e7a7af4
+.long 0xe9aeae47,0xe9aeae47
+.long 0x18080810,0x18080810
+.long 0xd5baba6f,0xd5baba6f
+.long 0x887878f0,0x887878f0
+.long 0x6f25254a,0x6f25254a
+.long 0x722e2e5c,0x722e2e5c
+.long 0x241c1c38,0x241c1c38
+.long 0xf1a6a657,0xf1a6a657
+.long 0xc7b4b473,0xc7b4b473
+.long 0x51c6c697,0x51c6c697
+.long 0x23e8e8cb,0x23e8e8cb
+.long 0x7cdddda1,0x7cdddda1
+.long 0x9c7474e8,0x9c7474e8
+.long 0x211f1f3e,0x211f1f3e
+.long 0xdd4b4b96,0xdd4b4b96
+.long 0xdcbdbd61,0xdcbdbd61
+.long 0x868b8b0d,0x868b8b0d
+.long 0x858a8a0f,0x858a8a0f
+.long 0x907070e0,0x907070e0
+.long 0x423e3e7c,0x423e3e7c
+.long 0xc4b5b571,0xc4b5b571
+.long 0xaa6666cc,0xaa6666cc
+.long 0xd8484890,0xd8484890
+.long 0x05030306,0x05030306
+.long 0x01f6f6f7,0x01f6f6f7
+.long 0x120e0e1c,0x120e0e1c
+.long 0xa36161c2,0xa36161c2
+.long 0x5f35356a,0x5f35356a
+.long 0xf95757ae,0xf95757ae
+.long 0xd0b9b969,0xd0b9b969
+.long 0x91868617,0x91868617
+.long 0x58c1c199,0x58c1c199
+.long 0x271d1d3a,0x271d1d3a
+.long 0xb99e9e27,0xb99e9e27
+.long 0x38e1e1d9,0x38e1e1d9
+.long 0x13f8f8eb,0x13f8f8eb
+.long 0xb398982b,0xb398982b
+.long 0x33111122,0x33111122
+.long 0xbb6969d2,0xbb6969d2
+.long 0x70d9d9a9,0x70d9d9a9
+.long 0x898e8e07,0x898e8e07
+.long 0xa7949433,0xa7949433
+.long 0xb69b9b2d,0xb69b9b2d
+.long 0x221e1e3c,0x221e1e3c
+.long 0x92878715,0x92878715
+.long 0x20e9e9c9,0x20e9e9c9
+.long 0x49cece87,0x49cece87
+.long 0xff5555aa,0xff5555aa
+.long 0x78282850,0x78282850
+.long 0x7adfdfa5,0x7adfdfa5
+.long 0x8f8c8c03,0x8f8c8c03
+.long 0xf8a1a159,0xf8a1a159
+.long 0x80898909,0x80898909
+.long 0x170d0d1a,0x170d0d1a
+.long 0xdabfbf65,0xdabfbf65
+.long 0x31e6e6d7,0x31e6e6d7
+.long 0xc6424284,0xc6424284
+.long 0xb86868d0,0xb86868d0
+.long 0xc3414182,0xc3414182
+.long 0xb0999929,0xb0999929
+.long 0x772d2d5a,0x772d2d5a
+.long 0x110f0f1e,0x110f0f1e
+.long 0xcbb0b07b,0xcbb0b07b
+.long 0xfc5454a8,0xfc5454a8
+.long 0xd6bbbb6d,0xd6bbbb6d
+.long 0x3a16162c,0x3a16162c
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.long 0x00000001, 0x00000002, 0x00000004, 0x00000008
+.long 0x00000010, 0x00000020, 0x00000040, 0x00000080
+.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
+.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
+.align 64
+.LAES_Td:
+.long 0x50a7f451,0x50a7f451
+.long 0x5365417e,0x5365417e
+.long 0xc3a4171a,0xc3a4171a
+.long 0x965e273a,0x965e273a
+.long 0xcb6bab3b,0xcb6bab3b
+.long 0xf1459d1f,0xf1459d1f
+.long 0xab58faac,0xab58faac
+.long 0x9303e34b,0x9303e34b
+.long 0x55fa3020,0x55fa3020
+.long 0xf66d76ad,0xf66d76ad
+.long 0x9176cc88,0x9176cc88
+.long 0x254c02f5,0x254c02f5
+.long 0xfcd7e54f,0xfcd7e54f
+.long 0xd7cb2ac5,0xd7cb2ac5
+.long 0x80443526,0x80443526
+.long 0x8fa362b5,0x8fa362b5
+.long 0x495ab1de,0x495ab1de
+.long 0x671bba25,0x671bba25
+.long 0x980eea45,0x980eea45
+.long 0xe1c0fe5d,0xe1c0fe5d
+.long 0x02752fc3,0x02752fc3
+.long 0x12f04c81,0x12f04c81
+.long 0xa397468d,0xa397468d
+.long 0xc6f9d36b,0xc6f9d36b
+.long 0xe75f8f03,0xe75f8f03
+.long 0x959c9215,0x959c9215
+.long 0xeb7a6dbf,0xeb7a6dbf
+.long 0xda595295,0xda595295
+.long 0x2d83bed4,0x2d83bed4
+.long 0xd3217458,0xd3217458
+.long 0x2969e049,0x2969e049
+.long 0x44c8c98e,0x44c8c98e
+.long 0x6a89c275,0x6a89c275
+.long 0x78798ef4,0x78798ef4
+.long 0x6b3e5899,0x6b3e5899
+.long 0xdd71b927,0xdd71b927
+.long 0xb64fe1be,0xb64fe1be
+.long 0x17ad88f0,0x17ad88f0
+.long 0x66ac20c9,0x66ac20c9
+.long 0xb43ace7d,0xb43ace7d
+.long 0x184adf63,0x184adf63
+.long 0x82311ae5,0x82311ae5
+.long 0x60335197,0x60335197
+.long 0x457f5362,0x457f5362
+.long 0xe07764b1,0xe07764b1
+.long 0x84ae6bbb,0x84ae6bbb
+.long 0x1ca081fe,0x1ca081fe
+.long 0x942b08f9,0x942b08f9
+.long 0x58684870,0x58684870
+.long 0x19fd458f,0x19fd458f
+.long 0x876cde94,0x876cde94
+.long 0xb7f87b52,0xb7f87b52
+.long 0x23d373ab,0x23d373ab
+.long 0xe2024b72,0xe2024b72
+.long 0x578f1fe3,0x578f1fe3
+.long 0x2aab5566,0x2aab5566
+.long 0x0728ebb2,0x0728ebb2
+.long 0x03c2b52f,0x03c2b52f
+.long 0x9a7bc586,0x9a7bc586
+.long 0xa50837d3,0xa50837d3
+.long 0xf2872830,0xf2872830
+.long 0xb2a5bf23,0xb2a5bf23
+.long 0xba6a0302,0xba6a0302
+.long 0x5c8216ed,0x5c8216ed
+.long 0x2b1ccf8a,0x2b1ccf8a
+.long 0x92b479a7,0x92b479a7
+.long 0xf0f207f3,0xf0f207f3
+.long 0xa1e2694e,0xa1e2694e
+.long 0xcdf4da65,0xcdf4da65
+.long 0xd5be0506,0xd5be0506
+.long 0x1f6234d1,0x1f6234d1
+.long 0x8afea6c4,0x8afea6c4
+.long 0x9d532e34,0x9d532e34
+.long 0xa055f3a2,0xa055f3a2
+.long 0x32e18a05,0x32e18a05
+.long 0x75ebf6a4,0x75ebf6a4
+.long 0x39ec830b,0x39ec830b
+.long 0xaaef6040,0xaaef6040
+.long 0x069f715e,0x069f715e
+.long 0x51106ebd,0x51106ebd
+.long 0xf98a213e,0xf98a213e
+.long 0x3d06dd96,0x3d06dd96
+.long 0xae053edd,0xae053edd
+.long 0x46bde64d,0x46bde64d
+.long 0xb58d5491,0xb58d5491
+.long 0x055dc471,0x055dc471
+.long 0x6fd40604,0x6fd40604
+.long 0xff155060,0xff155060
+.long 0x24fb9819,0x24fb9819
+.long 0x97e9bdd6,0x97e9bdd6
+.long 0xcc434089,0xcc434089
+.long 0x779ed967,0x779ed967
+.long 0xbd42e8b0,0xbd42e8b0
+.long 0x888b8907,0x888b8907
+.long 0x385b19e7,0x385b19e7
+.long 0xdbeec879,0xdbeec879
+.long 0x470a7ca1,0x470a7ca1
+.long 0xe90f427c,0xe90f427c
+.long 0xc91e84f8,0xc91e84f8
+.long 0x00000000,0x00000000
+.long 0x83868009,0x83868009
+.long 0x48ed2b32,0x48ed2b32
+.long 0xac70111e,0xac70111e
+.long 0x4e725a6c,0x4e725a6c
+.long 0xfbff0efd,0xfbff0efd
+.long 0x5638850f,0x5638850f
+.long 0x1ed5ae3d,0x1ed5ae3d
+.long 0x27392d36,0x27392d36
+.long 0x64d90f0a,0x64d90f0a
+.long 0x21a65c68,0x21a65c68
+.long 0xd1545b9b,0xd1545b9b
+.long 0x3a2e3624,0x3a2e3624
+.long 0xb1670a0c,0xb1670a0c
+.long 0x0fe75793,0x0fe75793
+.long 0xd296eeb4,0xd296eeb4
+.long 0x9e919b1b,0x9e919b1b
+.long 0x4fc5c080,0x4fc5c080
+.long 0xa220dc61,0xa220dc61
+.long 0x694b775a,0x694b775a
+.long 0x161a121c,0x161a121c
+.long 0x0aba93e2,0x0aba93e2
+.long 0xe52aa0c0,0xe52aa0c0
+.long 0x43e0223c,0x43e0223c
+.long 0x1d171b12,0x1d171b12
+.long 0x0b0d090e,0x0b0d090e
+.long 0xadc78bf2,0xadc78bf2
+.long 0xb9a8b62d,0xb9a8b62d
+.long 0xc8a91e14,0xc8a91e14
+.long 0x8519f157,0x8519f157
+.long 0x4c0775af,0x4c0775af
+.long 0xbbdd99ee,0xbbdd99ee
+.long 0xfd607fa3,0xfd607fa3
+.long 0x9f2601f7,0x9f2601f7
+.long 0xbcf5725c,0xbcf5725c
+.long 0xc53b6644,0xc53b6644
+.long 0x347efb5b,0x347efb5b
+.long 0x7629438b,0x7629438b
+.long 0xdcc623cb,0xdcc623cb
+.long 0x68fcedb6,0x68fcedb6
+.long 0x63f1e4b8,0x63f1e4b8
+.long 0xcadc31d7,0xcadc31d7
+.long 0x10856342,0x10856342
+.long 0x40229713,0x40229713
+.long 0x2011c684,0x2011c684
+.long 0x7d244a85,0x7d244a85
+.long 0xf83dbbd2,0xf83dbbd2
+.long 0x1132f9ae,0x1132f9ae
+.long 0x6da129c7,0x6da129c7
+.long 0x4b2f9e1d,0x4b2f9e1d
+.long 0xf330b2dc,0xf330b2dc
+.long 0xec52860d,0xec52860d
+.long 0xd0e3c177,0xd0e3c177
+.long 0x6c16b32b,0x6c16b32b
+.long 0x99b970a9,0x99b970a9
+.long 0xfa489411,0xfa489411
+.long 0x2264e947,0x2264e947
+.long 0xc48cfca8,0xc48cfca8
+.long 0x1a3ff0a0,0x1a3ff0a0
+.long 0xd82c7d56,0xd82c7d56
+.long 0xef903322,0xef903322
+.long 0xc74e4987,0xc74e4987
+.long 0xc1d138d9,0xc1d138d9
+.long 0xfea2ca8c,0xfea2ca8c
+.long 0x360bd498,0x360bd498
+.long 0xcf81f5a6,0xcf81f5a6
+.long 0x28de7aa5,0x28de7aa5
+.long 0x268eb7da,0x268eb7da
+.long 0xa4bfad3f,0xa4bfad3f
+.long 0xe49d3a2c,0xe49d3a2c
+.long 0x0d927850,0x0d927850
+.long 0x9bcc5f6a,0x9bcc5f6a
+.long 0x62467e54,0x62467e54
+.long 0xc2138df6,0xc2138df6
+.long 0xe8b8d890,0xe8b8d890
+.long 0x5ef7392e,0x5ef7392e
+.long 0xf5afc382,0xf5afc382
+.long 0xbe805d9f,0xbe805d9f
+.long 0x7c93d069,0x7c93d069
+.long 0xa92dd56f,0xa92dd56f
+.long 0xb31225cf,0xb31225cf
+.long 0x3b99acc8,0x3b99acc8
+.long 0xa77d1810,0xa77d1810
+.long 0x6e639ce8,0x6e639ce8
+.long 0x7bbb3bdb,0x7bbb3bdb
+.long 0x097826cd,0x097826cd
+.long 0xf418596e,0xf418596e
+.long 0x01b79aec,0x01b79aec
+.long 0xa89a4f83,0xa89a4f83
+.long 0x656e95e6,0x656e95e6
+.long 0x7ee6ffaa,0x7ee6ffaa
+.long 0x08cfbc21,0x08cfbc21
+.long 0xe6e815ef,0xe6e815ef
+.long 0xd99be7ba,0xd99be7ba
+.long 0xce366f4a,0xce366f4a
+.long 0xd4099fea,0xd4099fea
+.long 0xd67cb029,0xd67cb029
+.long 0xafb2a431,0xafb2a431
+.long 0x31233f2a,0x31233f2a
+.long 0x3094a5c6,0x3094a5c6
+.long 0xc066a235,0xc066a235
+.long 0x37bc4e74,0x37bc4e74
+.long 0xa6ca82fc,0xa6ca82fc
+.long 0xb0d090e0,0xb0d090e0
+.long 0x15d8a733,0x15d8a733
+.long 0x4a9804f1,0x4a9804f1
+.long 0xf7daec41,0xf7daec41
+.long 0x0e50cd7f,0x0e50cd7f
+.long 0x2ff69117,0x2ff69117
+.long 0x8dd64d76,0x8dd64d76
+.long 0x4db0ef43,0x4db0ef43
+.long 0x544daacc,0x544daacc
+.long 0xdf0496e4,0xdf0496e4
+.long 0xe3b5d19e,0xe3b5d19e
+.long 0x1b886a4c,0x1b886a4c
+.long 0xb81f2cc1,0xb81f2cc1
+.long 0x7f516546,0x7f516546
+.long 0x04ea5e9d,0x04ea5e9d
+.long 0x5d358c01,0x5d358c01
+.long 0x737487fa,0x737487fa
+.long 0x2e410bfb,0x2e410bfb
+.long 0x5a1d67b3,0x5a1d67b3
+.long 0x52d2db92,0x52d2db92
+.long 0x335610e9,0x335610e9
+.long 0x1347d66d,0x1347d66d
+.long 0x8c61d79a,0x8c61d79a
+.long 0x7a0ca137,0x7a0ca137
+.long 0x8e14f859,0x8e14f859
+.long 0x893c13eb,0x893c13eb
+.long 0xee27a9ce,0xee27a9ce
+.long 0x35c961b7,0x35c961b7
+.long 0xede51ce1,0xede51ce1
+.long 0x3cb1477a,0x3cb1477a
+.long 0x59dfd29c,0x59dfd29c
+.long 0x3f73f255,0x3f73f255
+.long 0x79ce1418,0x79ce1418
+.long 0xbf37c773,0xbf37c773
+.long 0xeacdf753,0xeacdf753
+.long 0x5baafd5f,0x5baafd5f
+.long 0x146f3ddf,0x146f3ddf
+.long 0x86db4478,0x86db4478
+.long 0x81f3afca,0x81f3afca
+.long 0x3ec468b9,0x3ec468b9
+.long 0x2c342438,0x2c342438
+.long 0x5f40a3c2,0x5f40a3c2
+.long 0x72c31d16,0x72c31d16
+.long 0x0c25e2bc,0x0c25e2bc
+.long 0x8b493c28,0x8b493c28
+.long 0x41950dff,0x41950dff
+.long 0x7101a839,0x7101a839
+.long 0xdeb30c08,0xdeb30c08
+.long 0x9ce4b4d8,0x9ce4b4d8
+.long 0x90c15664,0x90c15664
+.long 0x6184cb7b,0x6184cb7b
+.long 0x70b632d5,0x70b632d5
+.long 0x745c6c48,0x745c6c48
+.long 0x4257b8d0,0x4257b8d0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
+ .section ".note.gnu.property", "a"
+ .p2align 3
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ # "GNU" encoded with .byte, since .asciz isn't supported
+ # on Solaris.
+ .byte 0x47
+ .byte 0x4e
+ .byte 0x55
+ .byte 0
+1:
+ .p2align 3
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 3
+3:
+ .p2align 3
+4:
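The ".note.gnu.property" block that closes the file above emits an ELF note marking the object as CET-compatible; the same trailer is appended to each generated file in this commit. A rough C view of its layout follows — a sketch only: the struct and field names are ours, the constants are the standard ELF gABI / x86-64 psABI values, and the .p2align 3 padding between fields is omitted.

#include <stdint.h>

/* Illustrative layout of the note emitted above (padding omitted).
 * "GNU" is spelled out with .byte in the assembly only because the
 * Solaris assembler lacks .asciz. */
struct gnu_property_note {
	uint32_t namesz;     /* 1f - 0f == 4 (strlen("GNU") + NUL)           */
	uint32_t descsz;     /* 4f - 1f: size of the property payload        */
	uint32_t type;       /* 5 == NT_GNU_PROPERTY_TYPE_0                  */
	char     name[4];    /* "GNU\0"                                      */
	uint32_t pr_type;    /* 0xc0000002 == GNU_PROPERTY_X86_FEATURE_1_AND */
	uint32_t pr_datasz;  /* 3f - 2f == 4                                 */
	uint32_t pr_data;    /* 3 == IBT | SHSTK                             */
};

The payload value 3 advertises indirect-branch tracking and shadow-stack support, which is why the exported entry points in the next file (e.g. ossl_bsaes_cbc_encrypt) begin with the endbr64 encoding .byte 243,15,30,250.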
diff --git a/secure/lib/libcrypto/arch/amd64/bsaes-x86_64.S b/secure/lib/libcrypto/arch/amd64/bsaes-x86_64.S
new file mode 100644
index 000000000000..77c4e61df0e9
--- /dev/null
+++ b/secure/lib/libcrypto/arch/amd64/bsaes-x86_64.S
@@ -0,0 +1,2619 @@
+/* Do not modify. This file is auto-generated from bsaes-x86_64.pl. */
+.text
+
+
+
+
+.type _bsaes_encrypt8,@function
+.align 64
+_bsaes_encrypt8:
+.cfi_startproc
+ leaq .LBS0(%rip),%r11
+
+ movdqa (%rax),%xmm8
+ leaq 16(%rax),%rax
+ movdqa 80(%r11),%xmm7
+ pxor %xmm8,%xmm15
+ pxor %xmm8,%xmm0
+ pxor %xmm8,%xmm1
+ pxor %xmm8,%xmm2
+.byte 102,68,15,56,0,255
+.byte 102,15,56,0,199
+ pxor %xmm8,%xmm3
+ pxor %xmm8,%xmm4
+.byte 102,15,56,0,207
+.byte 102,15,56,0,215
+ pxor %xmm8,%xmm5
+ pxor %xmm8,%xmm6
+.byte 102,15,56,0,223
+.byte 102,15,56,0,231
+.byte 102,15,56,0,239
+.byte 102,15,56,0,247
+_bsaes_encrypt8_bitslice:
+ movdqa 0(%r11),%xmm7
+ movdqa 16(%r11),%xmm8
+ movdqa %xmm5,%xmm9
+ psrlq $1,%xmm5
+ movdqa %xmm3,%xmm10
+ psrlq $1,%xmm3
+ pxor %xmm6,%xmm5
+ pxor %xmm4,%xmm3
+ pand %xmm7,%xmm5
+ pand %xmm7,%xmm3
+ pxor %xmm5,%xmm6
+ psllq $1,%xmm5
+ pxor %xmm3,%xmm4
+ psllq $1,%xmm3
+ pxor %xmm9,%xmm5
+ pxor %xmm10,%xmm3
+ movdqa %xmm1,%xmm9
+ psrlq $1,%xmm1
+ movdqa %xmm15,%xmm10
+ psrlq $1,%xmm15
+ pxor %xmm2,%xmm1
+ pxor %xmm0,%xmm15
+ pand %xmm7,%xmm1
+ pand %xmm7,%xmm15
+ pxor %xmm1,%xmm2
+ psllq $1,%xmm1
+ pxor %xmm15,%xmm0
+ psllq $1,%xmm15
+ pxor %xmm9,%xmm1
+ pxor %xmm10,%xmm15
+ movdqa 32(%r11),%xmm7
+ movdqa %xmm4,%xmm9
+ psrlq $2,%xmm4
+ movdqa %xmm3,%xmm10
+ psrlq $2,%xmm3
+ pxor %xmm6,%xmm4
+ pxor %xmm5,%xmm3
+ pand %xmm8,%xmm4
+ pand %xmm8,%xmm3
+ pxor %xmm4,%xmm6
+ psllq $2,%xmm4
+ pxor %xmm3,%xmm5
+ psllq $2,%xmm3
+ pxor %xmm9,%xmm4
+ pxor %xmm10,%xmm3
+ movdqa %xmm0,%xmm9
+ psrlq $2,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $2,%xmm15
+ pxor %xmm2,%xmm0
+ pxor %xmm1,%xmm15
+ pand %xmm8,%xmm0
+ pand %xmm8,%xmm15
+ pxor %xmm0,%xmm2
+ psllq $2,%xmm0
+ pxor %xmm15,%xmm1
+ psllq $2,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa %xmm2,%xmm9
+ psrlq $4,%xmm2
+ movdqa %xmm1,%xmm10
+ psrlq $4,%xmm1
+ pxor %xmm6,%xmm2
+ pxor %xmm5,%xmm1
+ pand %xmm7,%xmm2
+ pand %xmm7,%xmm1
+ pxor %xmm2,%xmm6
+ psllq $4,%xmm2
+ pxor %xmm1,%xmm5
+ psllq $4,%xmm1
+ pxor %xmm9,%xmm2
+ pxor %xmm10,%xmm1
+ movdqa %xmm0,%xmm9
+ psrlq $4,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $4,%xmm15
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm15
+ pand %xmm7,%xmm0
+ pand %xmm7,%xmm15
+ pxor %xmm0,%xmm4
+ psllq $4,%xmm0
+ pxor %xmm15,%xmm3
+ psllq $4,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ decl %r10d
+ jmp .Lenc_sbox
+.align 16
+.Lenc_loop:
+ pxor 0(%rax),%xmm15
+ pxor 16(%rax),%xmm0
+ pxor 32(%rax),%xmm1
+ pxor 48(%rax),%xmm2
+.byte 102,68,15,56,0,255
+.byte 102,15,56,0,199
+ pxor 64(%rax),%xmm3
+ pxor 80(%rax),%xmm4
+.byte 102,15,56,0,207
+.byte 102,15,56,0,215
+ pxor 96(%rax),%xmm5
+ pxor 112(%rax),%xmm6
+.byte 102,15,56,0,223
+.byte 102,15,56,0,231
+.byte 102,15,56,0,239
+.byte 102,15,56,0,247
+ leaq 128(%rax),%rax
+.Lenc_sbox:
+ pxor %xmm5,%xmm4
+ pxor %xmm0,%xmm1
+ pxor %xmm15,%xmm2
+ pxor %xmm1,%xmm5
+ pxor %xmm15,%xmm4
+
+ pxor %xmm2,%xmm5
+ pxor %xmm6,%xmm2
+ pxor %xmm4,%xmm6
+ pxor %xmm3,%xmm2
+ pxor %xmm4,%xmm3
+ pxor %xmm0,%xmm2
+
+ pxor %xmm6,%xmm1
+ pxor %xmm4,%xmm0
+ movdqa %xmm6,%xmm10
+ movdqa %xmm0,%xmm9
+ movdqa %xmm4,%xmm8
+ movdqa %xmm1,%xmm12
+ movdqa %xmm5,%xmm11
+
+ pxor %xmm3,%xmm10
+ pxor %xmm1,%xmm9
+ pxor %xmm2,%xmm8
+ movdqa %xmm10,%xmm13
+ pxor %xmm3,%xmm12
+ movdqa %xmm9,%xmm7
+ pxor %xmm15,%xmm11
+ movdqa %xmm10,%xmm14
+
+ por %xmm8,%xmm9
+ por %xmm11,%xmm10
+ pxor %xmm7,%xmm14
+ pand %xmm11,%xmm13
+ pxor %xmm8,%xmm11
+ pand %xmm8,%xmm7
+ pand %xmm11,%xmm14
+ movdqa %xmm2,%xmm11
+ pxor %xmm15,%xmm11
+ pand %xmm11,%xmm12
+ pxor %xmm12,%xmm10
+ pxor %xmm12,%xmm9
+ movdqa %xmm6,%xmm12
+ movdqa %xmm4,%xmm11
+ pxor %xmm0,%xmm12
+ pxor %xmm5,%xmm11
+ movdqa %xmm12,%xmm8
+ pand %xmm11,%xmm12
+ por %xmm11,%xmm8
+ pxor %xmm12,%xmm7
+ pxor %xmm14,%xmm10
+ pxor %xmm13,%xmm9
+ pxor %xmm14,%xmm8
+ movdqa %xmm1,%xmm11
+ pxor %xmm13,%xmm7
+ movdqa %xmm3,%xmm12
+ pxor %xmm13,%xmm8
+ movdqa %xmm0,%xmm13
+ pand %xmm2,%xmm11
+ movdqa %xmm6,%xmm14
+ pand %xmm15,%xmm12
+ pand %xmm4,%xmm13
+ por %xmm5,%xmm14
+ pxor %xmm11,%xmm10
+ pxor %xmm12,%xmm9
+ pxor %xmm13,%xmm8
+ pxor %xmm14,%xmm7
+
+
+
+
+
+ movdqa %xmm10,%xmm11
+ pand %xmm8,%xmm10
+ pxor %xmm9,%xmm11
+
+ movdqa %xmm7,%xmm13
+ movdqa %xmm11,%xmm14
+ pxor %xmm10,%xmm13
+ pand %xmm13,%xmm14
+
+ movdqa %xmm8,%xmm12
+ pxor %xmm9,%xmm14
+ pxor %xmm7,%xmm12
+
+ pxor %xmm9,%xmm10
+
+ pand %xmm10,%xmm12
+
+ movdqa %xmm13,%xmm9
+ pxor %xmm7,%xmm12
+
+ pxor %xmm12,%xmm9
+ pxor %xmm12,%xmm8
+
+ pand %xmm7,%xmm9
+
+ pxor %xmm9,%xmm13
+ pxor %xmm9,%xmm8
+
+ pand %xmm14,%xmm13
+
+ pxor %xmm11,%xmm13
+ movdqa %xmm5,%xmm11
+ movdqa %xmm4,%xmm7
+ movdqa %xmm14,%xmm9
+ pxor %xmm13,%xmm9
+ pand %xmm5,%xmm9
+ pxor %xmm4,%xmm5
+ pand %xmm14,%xmm4
+ pand %xmm13,%xmm5
+ pxor %xmm4,%xmm5
+ pxor %xmm9,%xmm4
+ pxor %xmm15,%xmm11
+ pxor %xmm2,%xmm7
+ pxor %xmm12,%xmm14
+ pxor %xmm8,%xmm13
+ movdqa %xmm14,%xmm10
+ movdqa %xmm12,%xmm9
+ pxor %xmm13,%xmm10
+ pxor %xmm8,%xmm9
+ pand %xmm11,%xmm10
+ pand %xmm15,%xmm9
+ pxor %xmm7,%xmm11
+ pxor %xmm2,%xmm15
+ pand %xmm14,%xmm7
+ pand %xmm12,%xmm2
+ pand %xmm13,%xmm11
+ pand %xmm8,%xmm15
+ pxor %xmm11,%xmm7
+ pxor %xmm2,%xmm15
+ pxor %xmm10,%xmm11
+ pxor %xmm9,%xmm2
+ pxor %xmm11,%xmm5
+ pxor %xmm11,%xmm15
+ pxor %xmm7,%xmm4
+ pxor %xmm7,%xmm2
+
+ movdqa %xmm6,%xmm11
+ movdqa %xmm0,%xmm7
+ pxor %xmm3,%xmm11
+ pxor %xmm1,%xmm7
+ movdqa %xmm14,%xmm10
+ movdqa %xmm12,%xmm9
+ pxor %xmm13,%xmm10
+ pxor %xmm8,%xmm9
+ pand %xmm11,%xmm10
+ pand %xmm3,%xmm9
+ pxor %xmm7,%xmm11
+ pxor %xmm1,%xmm3
+ pand %xmm14,%xmm7
+ pand %xmm12,%xmm1
+ pand %xmm13,%xmm11
+ pand %xmm8,%xmm3
+ pxor %xmm11,%xmm7
+ pxor %xmm1,%xmm3
+ pxor %xmm10,%xmm11
+ pxor %xmm9,%xmm1
+ pxor %xmm12,%xmm14
+ pxor %xmm8,%xmm13
+ movdqa %xmm14,%xmm10
+ pxor %xmm13,%xmm10
+ pand %xmm6,%xmm10
+ pxor %xmm0,%xmm6
+ pand %xmm14,%xmm0
+ pand %xmm13,%xmm6
+ pxor %xmm0,%xmm6
+ pxor %xmm10,%xmm0
+ pxor %xmm11,%xmm6
+ pxor %xmm11,%xmm3
+ pxor %xmm7,%xmm0
+ pxor %xmm7,%xmm1
+ pxor %xmm15,%xmm6
+ pxor %xmm5,%xmm0
+ pxor %xmm6,%xmm3
+ pxor %xmm15,%xmm5
+ pxor %xmm0,%xmm15
+
+ pxor %xmm4,%xmm0
+ pxor %xmm1,%xmm4
+ pxor %xmm2,%xmm1
+ pxor %xmm4,%xmm2
+ pxor %xmm4,%xmm3
+
+ pxor %xmm2,%xmm5
+ decl %r10d
+ jl .Lenc_done
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
+ pxor %xmm7,%xmm15
+ pshufd $0x93,%xmm3,%xmm9
+ pxor %xmm8,%xmm0
+ pshufd $0x93,%xmm5,%xmm10
+ pxor %xmm9,%xmm3
+ pshufd $0x93,%xmm2,%xmm11
+ pxor %xmm10,%xmm5
+ pshufd $0x93,%xmm6,%xmm12
+ pxor %xmm11,%xmm2
+ pshufd $0x93,%xmm1,%xmm13
+ pxor %xmm12,%xmm6
+ pshufd $0x93,%xmm4,%xmm14
+ pxor %xmm13,%xmm1
+ pxor %xmm14,%xmm4
+
+ pxor %xmm15,%xmm8
+ pxor %xmm4,%xmm7
+ pxor %xmm4,%xmm8
+ pshufd $0x4E,%xmm15,%xmm15
+ pxor %xmm0,%xmm9
+ pshufd $0x4E,%xmm0,%xmm0
+ pxor %xmm2,%xmm12
+ pxor %xmm7,%xmm15
+ pxor %xmm6,%xmm13
+ pxor %xmm8,%xmm0
+ pxor %xmm5,%xmm11
+ pshufd $0x4E,%xmm2,%xmm7
+ pxor %xmm1,%xmm14
+ pshufd $0x4E,%xmm6,%xmm8
+ pxor %xmm3,%xmm10
+ pshufd $0x4E,%xmm5,%xmm2
+ pxor %xmm4,%xmm10
+ pshufd $0x4E,%xmm4,%xmm6
+ pxor %xmm4,%xmm11
+ pshufd $0x4E,%xmm1,%xmm5
+ pxor %xmm11,%xmm7
+ pshufd $0x4E,%xmm3,%xmm1
+ pxor %xmm12,%xmm8
+ pxor %xmm10,%xmm2
+ pxor %xmm14,%xmm6
+ pxor %xmm13,%xmm5
+ movdqa %xmm7,%xmm3
+ pxor %xmm9,%xmm1
+ movdqa %xmm8,%xmm4
+ movdqa 48(%r11),%xmm7
+ jnz .Lenc_loop
+ movdqa 64(%r11),%xmm7
+ jmp .Lenc_loop
+.align 16
+.Lenc_done:
+ movdqa 0(%r11),%xmm7
+ movdqa 16(%r11),%xmm8
+ movdqa %xmm1,%xmm9
+ psrlq $1,%xmm1
+ movdqa %xmm2,%xmm10
+ psrlq $1,%xmm2
+ pxor %xmm4,%xmm1
+ pxor %xmm6,%xmm2
+ pand %xmm7,%xmm1
+ pand %xmm7,%xmm2
+ pxor %xmm1,%xmm4
+ psllq $1,%xmm1
+ pxor %xmm2,%xmm6
+ psllq $1,%xmm2
+ pxor %xmm9,%xmm1
+ pxor %xmm10,%xmm2
+ movdqa %xmm3,%xmm9
+ psrlq $1,%xmm3
+ movdqa %xmm15,%xmm10
+ psrlq $1,%xmm15
+ pxor %xmm5,%xmm3
+ pxor %xmm0,%xmm15
+ pand %xmm7,%xmm3
+ pand %xmm7,%xmm15
+ pxor %xmm3,%xmm5
+ psllq $1,%xmm3
+ pxor %xmm15,%xmm0
+ psllq $1,%xmm15
+ pxor %xmm9,%xmm3
+ pxor %xmm10,%xmm15
+ movdqa 32(%r11),%xmm7
+ movdqa %xmm6,%xmm9
+ psrlq $2,%xmm6
+ movdqa %xmm2,%xmm10
+ psrlq $2,%xmm2
+ pxor %xmm4,%xmm6
+ pxor %xmm1,%xmm2
+ pand %xmm8,%xmm6
+ pand %xmm8,%xmm2
+ pxor %xmm6,%xmm4
+ psllq $2,%xmm6
+ pxor %xmm2,%xmm1
+ psllq $2,%xmm2
+ pxor %xmm9,%xmm6
+ pxor %xmm10,%xmm2
+ movdqa %xmm0,%xmm9
+ psrlq $2,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $2,%xmm15
+ pxor %xmm5,%xmm0
+ pxor %xmm3,%xmm15
+ pand %xmm8,%xmm0
+ pand %xmm8,%xmm15
+ pxor %xmm0,%xmm5
+ psllq $2,%xmm0
+ pxor %xmm15,%xmm3
+ psllq $2,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa %xmm5,%xmm9
+ psrlq $4,%xmm5
+ movdqa %xmm3,%xmm10
+ psrlq $4,%xmm3
+ pxor %xmm4,%xmm5
+ pxor %xmm1,%xmm3
+ pand %xmm7,%xmm5
+ pand %xmm7,%xmm3
+ pxor %xmm5,%xmm4
+ psllq $4,%xmm5
+ pxor %xmm3,%xmm1
+ psllq $4,%xmm3
+ pxor %xmm9,%xmm5
+ pxor %xmm10,%xmm3
+ movdqa %xmm0,%xmm9
+ psrlq $4,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $4,%xmm15
+ pxor %xmm6,%xmm0
+ pxor %xmm2,%xmm15
+ pand %xmm7,%xmm0
+ pand %xmm7,%xmm15
+ pxor %xmm0,%xmm6
+ psllq $4,%xmm0
+ pxor %xmm15,%xmm2
+ psllq $4,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa (%rax),%xmm7
+ pxor %xmm7,%xmm3
+ pxor %xmm7,%xmm5
+ pxor %xmm7,%xmm2
+ pxor %xmm7,%xmm6
+ pxor %xmm7,%xmm1
+ pxor %xmm7,%xmm4
+ pxor %xmm7,%xmm15
+ pxor %xmm7,%xmm0
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _bsaes_encrypt8,.-_bsaes_encrypt8
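The long movdqa/psrlq/pxor/pand/psllq runs at the entry and exit of _bsaes_encrypt8 (and of _bsaes_decrypt8 below) are a bit-matrix transposition: they convert eight 128-bit blocks into bit-sliced form and back. Each seven-instruction group is one swap-move step, interleaved in pairs to hide latency. A minimal scalar sketch of that step (the macro name is ours, not OpenSSL's):

#include <stdint.h>

/* Swap the bits of a and b selected by mask m, at bit distance n.
 * This is the movdqa/psrlq/pxor/pand/pxor/psllq/pxor pattern above. */
#define SWAPMOVE(a, b, n, m) do {                  \
	uint64_t t = ((((a) >> (n)) ^ (b)) & (m)); \
	(b) ^= t;                                  \
	(a) ^= (t << (n));                         \
} while (0)

The three rounds of swaps use n = 1 with mask 0x5555... (.LBS0), n = 2 with 0x3333... (.LBS1), and n = 4 with 0x0f0f... (.LBS2), the constants defined in _bsaes_const at the end of this file.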
+
+.type _bsaes_decrypt8,@function
+.align 64
+_bsaes_decrypt8:
+.cfi_startproc
+ leaq .LBS0(%rip),%r11
+
+ movdqa (%rax),%xmm8
+ leaq 16(%rax),%rax
+ movdqa -48(%r11),%xmm7
+ pxor %xmm8,%xmm15
+ pxor %xmm8,%xmm0
+ pxor %xmm8,%xmm1
+ pxor %xmm8,%xmm2
+.byte 102,68,15,56,0,255
+.byte 102,15,56,0,199
+ pxor %xmm8,%xmm3
+ pxor %xmm8,%xmm4
+.byte 102,15,56,0,207
+.byte 102,15,56,0,215
+ pxor %xmm8,%xmm5
+ pxor %xmm8,%xmm6
+.byte 102,15,56,0,223
+.byte 102,15,56,0,231
+.byte 102,15,56,0,239
+.byte 102,15,56,0,247
+ movdqa 0(%r11),%xmm7
+ movdqa 16(%r11),%xmm8
+ movdqa %xmm5,%xmm9
+ psrlq $1,%xmm5
+ movdqa %xmm3,%xmm10
+ psrlq $1,%xmm3
+ pxor %xmm6,%xmm5
+ pxor %xmm4,%xmm3
+ pand %xmm7,%xmm5
+ pand %xmm7,%xmm3
+ pxor %xmm5,%xmm6
+ psllq $1,%xmm5
+ pxor %xmm3,%xmm4
+ psllq $1,%xmm3
+ pxor %xmm9,%xmm5
+ pxor %xmm10,%xmm3
+ movdqa %xmm1,%xmm9
+ psrlq $1,%xmm1
+ movdqa %xmm15,%xmm10
+ psrlq $1,%xmm15
+ pxor %xmm2,%xmm1
+ pxor %xmm0,%xmm15
+ pand %xmm7,%xmm1
+ pand %xmm7,%xmm15
+ pxor %xmm1,%xmm2
+ psllq $1,%xmm1
+ pxor %xmm15,%xmm0
+ psllq $1,%xmm15
+ pxor %xmm9,%xmm1
+ pxor %xmm10,%xmm15
+ movdqa 32(%r11),%xmm7
+ movdqa %xmm4,%xmm9
+ psrlq $2,%xmm4
+ movdqa %xmm3,%xmm10
+ psrlq $2,%xmm3
+ pxor %xmm6,%xmm4
+ pxor %xmm5,%xmm3
+ pand %xmm8,%xmm4
+ pand %xmm8,%xmm3
+ pxor %xmm4,%xmm6
+ psllq $2,%xmm4
+ pxor %xmm3,%xmm5
+ psllq $2,%xmm3
+ pxor %xmm9,%xmm4
+ pxor %xmm10,%xmm3
+ movdqa %xmm0,%xmm9
+ psrlq $2,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $2,%xmm15
+ pxor %xmm2,%xmm0
+ pxor %xmm1,%xmm15
+ pand %xmm8,%xmm0
+ pand %xmm8,%xmm15
+ pxor %xmm0,%xmm2
+ psllq $2,%xmm0
+ pxor %xmm15,%xmm1
+ psllq $2,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa %xmm2,%xmm9
+ psrlq $4,%xmm2
+ movdqa %xmm1,%xmm10
+ psrlq $4,%xmm1
+ pxor %xmm6,%xmm2
+ pxor %xmm5,%xmm1
+ pand %xmm7,%xmm2
+ pand %xmm7,%xmm1
+ pxor %xmm2,%xmm6
+ psllq $4,%xmm2
+ pxor %xmm1,%xmm5
+ psllq $4,%xmm1
+ pxor %xmm9,%xmm2
+ pxor %xmm10,%xmm1
+ movdqa %xmm0,%xmm9
+ psrlq $4,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $4,%xmm15
+ pxor %xmm4,%xmm0
+ pxor %xmm3,%xmm15
+ pand %xmm7,%xmm0
+ pand %xmm7,%xmm15
+ pxor %xmm0,%xmm4
+ psllq $4,%xmm0
+ pxor %xmm15,%xmm3
+ psllq $4,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ decl %r10d
+ jmp .Ldec_sbox
+.align 16
+.Ldec_loop:
+ pxor 0(%rax),%xmm15
+ pxor 16(%rax),%xmm0
+ pxor 32(%rax),%xmm1
+ pxor 48(%rax),%xmm2
+.byte 102,68,15,56,0,255
+.byte 102,15,56,0,199
+ pxor 64(%rax),%xmm3
+ pxor 80(%rax),%xmm4
+.byte 102,15,56,0,207
+.byte 102,15,56,0,215
+ pxor 96(%rax),%xmm5
+ pxor 112(%rax),%xmm6
+.byte 102,15,56,0,223
+.byte 102,15,56,0,231
+.byte 102,15,56,0,239
+.byte 102,15,56,0,247
+ leaq 128(%rax),%rax
+.Ldec_sbox:
+ pxor %xmm3,%xmm2
+
+ pxor %xmm6,%xmm3
+ pxor %xmm6,%xmm1
+ pxor %xmm3,%xmm5
+ pxor %xmm5,%xmm6
+ pxor %xmm6,%xmm0
+
+ pxor %xmm0,%xmm15
+ pxor %xmm4,%xmm1
+ pxor %xmm15,%xmm2
+ pxor %xmm15,%xmm4
+ pxor %xmm2,%xmm0
+ movdqa %xmm2,%xmm10
+ movdqa %xmm6,%xmm9
+ movdqa %xmm0,%xmm8
+ movdqa %xmm3,%xmm12
+ movdqa %xmm4,%xmm11
+
+ pxor %xmm15,%xmm10
+ pxor %xmm3,%xmm9
+ pxor %xmm5,%xmm8
+ movdqa %xmm10,%xmm13
+ pxor %xmm15,%xmm12
+ movdqa %xmm9,%xmm7
+ pxor %xmm1,%xmm11
+ movdqa %xmm10,%xmm14
+
+ por %xmm8,%xmm9
+ por %xmm11,%xmm10
+ pxor %xmm7,%xmm14
+ pand %xmm11,%xmm13
+ pxor %xmm8,%xmm11
+ pand %xmm8,%xmm7
+ pand %xmm11,%xmm14
+ movdqa %xmm5,%xmm11
+ pxor %xmm1,%xmm11
+ pand %xmm11,%xmm12
+ pxor %xmm12,%xmm10
+ pxor %xmm12,%xmm9
+ movdqa %xmm2,%xmm12
+ movdqa %xmm0,%xmm11
+ pxor %xmm6,%xmm12
+ pxor %xmm4,%xmm11
+ movdqa %xmm12,%xmm8
+ pand %xmm11,%xmm12
+ por %xmm11,%xmm8
+ pxor %xmm12,%xmm7
+ pxor %xmm14,%xmm10
+ pxor %xmm13,%xmm9
+ pxor %xmm14,%xmm8
+ movdqa %xmm3,%xmm11
+ pxor %xmm13,%xmm7
+ movdqa %xmm15,%xmm12
+ pxor %xmm13,%xmm8
+ movdqa %xmm6,%xmm13
+ pand %xmm5,%xmm11
+ movdqa %xmm2,%xmm14
+ pand %xmm1,%xmm12
+ pand %xmm0,%xmm13
+ por %xmm4,%xmm14
+ pxor %xmm11,%xmm10
+ pxor %xmm12,%xmm9
+ pxor %xmm13,%xmm8
+ pxor %xmm14,%xmm7
+
+
+
+
+
+ movdqa %xmm10,%xmm11
+ pand %xmm8,%xmm10
+ pxor %xmm9,%xmm11
+
+ movdqa %xmm7,%xmm13
+ movdqa %xmm11,%xmm14
+ pxor %xmm10,%xmm13
+ pand %xmm13,%xmm14
+
+ movdqa %xmm8,%xmm12
+ pxor %xmm9,%xmm14
+ pxor %xmm7,%xmm12
+
+ pxor %xmm9,%xmm10
+
+ pand %xmm10,%xmm12
+
+ movdqa %xmm13,%xmm9
+ pxor %xmm7,%xmm12
+
+ pxor %xmm12,%xmm9
+ pxor %xmm12,%xmm8
+
+ pand %xmm7,%xmm9
+
+ pxor %xmm9,%xmm13
+ pxor %xmm9,%xmm8
+
+ pand %xmm14,%xmm13
+
+ pxor %xmm11,%xmm13
+ movdqa %xmm4,%xmm11
+ movdqa %xmm0,%xmm7
+ movdqa %xmm14,%xmm9
+ pxor %xmm13,%xmm9
+ pand %xmm4,%xmm9
+ pxor %xmm0,%xmm4
+ pand %xmm14,%xmm0
+ pand %xmm13,%xmm4
+ pxor %xmm0,%xmm4
+ pxor %xmm9,%xmm0
+ pxor %xmm1,%xmm11
+ pxor %xmm5,%xmm7
+ pxor %xmm12,%xmm14
+ pxor %xmm8,%xmm13
+ movdqa %xmm14,%xmm10
+ movdqa %xmm12,%xmm9
+ pxor %xmm13,%xmm10
+ pxor %xmm8,%xmm9
+ pand %xmm11,%xmm10
+ pand %xmm1,%xmm9
+ pxor %xmm7,%xmm11
+ pxor %xmm5,%xmm1
+ pand %xmm14,%xmm7
+ pand %xmm12,%xmm5
+ pand %xmm13,%xmm11
+ pand %xmm8,%xmm1
+ pxor %xmm11,%xmm7
+ pxor %xmm5,%xmm1
+ pxor %xmm10,%xmm11
+ pxor %xmm9,%xmm5
+ pxor %xmm11,%xmm4
+ pxor %xmm11,%xmm1
+ pxor %xmm7,%xmm0
+ pxor %xmm7,%xmm5
+
+ movdqa %xmm2,%xmm11
+ movdqa %xmm6,%xmm7
+ pxor %xmm15,%xmm11
+ pxor %xmm3,%xmm7
+ movdqa %xmm14,%xmm10
+ movdqa %xmm12,%xmm9
+ pxor %xmm13,%xmm10
+ pxor %xmm8,%xmm9
+ pand %xmm11,%xmm10
+ pand %xmm15,%xmm9
+ pxor %xmm7,%xmm11
+ pxor %xmm3,%xmm15
+ pand %xmm14,%xmm7
+ pand %xmm12,%xmm3
+ pand %xmm13,%xmm11
+ pand %xmm8,%xmm15
+ pxor %xmm11,%xmm7
+ pxor %xmm3,%xmm15
+ pxor %xmm10,%xmm11
+ pxor %xmm9,%xmm3
+ pxor %xmm12,%xmm14
+ pxor %xmm8,%xmm13
+ movdqa %xmm14,%xmm10
+ pxor %xmm13,%xmm10
+ pand %xmm2,%xmm10
+ pxor %xmm6,%xmm2
+ pand %xmm14,%xmm6
+ pand %xmm13,%xmm2
+ pxor %xmm6,%xmm2
+ pxor %xmm10,%xmm6
+ pxor %xmm11,%xmm2
+ pxor %xmm11,%xmm15
+ pxor %xmm7,%xmm6
+ pxor %xmm7,%xmm3
+ pxor %xmm6,%xmm0
+ pxor %xmm4,%xmm5
+
+ pxor %xmm0,%xmm3
+ pxor %xmm6,%xmm1
+ pxor %xmm6,%xmm4
+ pxor %xmm1,%xmm3
+ pxor %xmm15,%xmm6
+ pxor %xmm4,%xmm3
+ pxor %xmm5,%xmm2
+ pxor %xmm0,%xmm5
+ pxor %xmm3,%xmm2
+
+ pxor %xmm15,%xmm3
+ pxor %xmm2,%xmm6
+ decl %r10d
+ jl .Ldec_done
+
+ pshufd $0x4E,%xmm15,%xmm7
+ pshufd $0x4E,%xmm2,%xmm13
+ pxor %xmm15,%xmm7
+ pshufd $0x4E,%xmm4,%xmm14
+ pxor %xmm2,%xmm13
+ pshufd $0x4E,%xmm0,%xmm8
+ pxor %xmm4,%xmm14
+ pshufd $0x4E,%xmm5,%xmm9
+ pxor %xmm0,%xmm8
+ pshufd $0x4E,%xmm3,%xmm10
+ pxor %xmm5,%xmm9
+ pxor %xmm13,%xmm15
+ pxor %xmm13,%xmm0
+ pshufd $0x4E,%xmm1,%xmm11
+ pxor %xmm3,%xmm10
+ pxor %xmm7,%xmm5
+ pxor %xmm8,%xmm3
+ pshufd $0x4E,%xmm6,%xmm12
+ pxor %xmm1,%xmm11
+ pxor %xmm14,%xmm0
+ pxor %xmm9,%xmm1
+ pxor %xmm6,%xmm12
+
+ pxor %xmm14,%xmm5
+ pxor %xmm13,%xmm3
+ pxor %xmm13,%xmm1
+ pxor %xmm10,%xmm6
+ pxor %xmm11,%xmm2
+ pxor %xmm14,%xmm1
+ pxor %xmm14,%xmm6
+ pxor %xmm12,%xmm4
+ pshufd $0x93,%xmm15,%xmm7
+ pshufd $0x93,%xmm0,%xmm8
+ pxor %xmm7,%xmm15
+ pshufd $0x93,%xmm5,%xmm9
+ pxor %xmm8,%xmm0
+ pshufd $0x93,%xmm3,%xmm10
+ pxor %xmm9,%xmm5
+ pshufd $0x93,%xmm1,%xmm11
+ pxor %xmm10,%xmm3
+ pshufd $0x93,%xmm6,%xmm12
+ pxor %xmm11,%xmm1
+ pshufd $0x93,%xmm2,%xmm13
+ pxor %xmm12,%xmm6
+ pshufd $0x93,%xmm4,%xmm14
+ pxor %xmm13,%xmm2
+ pxor %xmm14,%xmm4
+
+ pxor %xmm15,%xmm8
+ pxor %xmm4,%xmm7
+ pxor %xmm4,%xmm8
+ pshufd $0x4E,%xmm15,%xmm15
+ pxor %xmm0,%xmm9
+ pshufd $0x4E,%xmm0,%xmm0
+ pxor %xmm1,%xmm12
+ pxor %xmm7,%xmm15
+ pxor %xmm6,%xmm13
+ pxor %xmm8,%xmm0
+ pxor %xmm3,%xmm11
+ pshufd $0x4E,%xmm1,%xmm7
+ pxor %xmm2,%xmm14
+ pshufd $0x4E,%xmm6,%xmm8
+ pxor %xmm5,%xmm10
+ pshufd $0x4E,%xmm3,%xmm1
+ pxor %xmm4,%xmm10
+ pshufd $0x4E,%xmm4,%xmm6
+ pxor %xmm4,%xmm11
+ pshufd $0x4E,%xmm2,%xmm3
+ pxor %xmm11,%xmm7
+ pshufd $0x4E,%xmm5,%xmm2
+ pxor %xmm12,%xmm8
+ pxor %xmm1,%xmm10
+ pxor %xmm14,%xmm6
+ pxor %xmm3,%xmm13
+ movdqa %xmm7,%xmm3
+ pxor %xmm9,%xmm2
+ movdqa %xmm13,%xmm5
+ movdqa %xmm8,%xmm4
+ movdqa %xmm2,%xmm1
+ movdqa %xmm10,%xmm2
+ movdqa -16(%r11),%xmm7
+ jnz .Ldec_loop
+ movdqa -32(%r11),%xmm7
+ jmp .Ldec_loop
+.align 16
+.Ldec_done:
+ movdqa 0(%r11),%xmm7
+ movdqa 16(%r11),%xmm8
+ movdqa %xmm2,%xmm9
+ psrlq $1,%xmm2
+ movdqa %xmm1,%xmm10
+ psrlq $1,%xmm1
+ pxor %xmm4,%xmm2
+ pxor %xmm6,%xmm1
+ pand %xmm7,%xmm2
+ pand %xmm7,%xmm1
+ pxor %xmm2,%xmm4
+ psllq $1,%xmm2
+ pxor %xmm1,%xmm6
+ psllq $1,%xmm1
+ pxor %xmm9,%xmm2
+ pxor %xmm10,%xmm1
+ movdqa %xmm5,%xmm9
+ psrlq $1,%xmm5
+ movdqa %xmm15,%xmm10
+ psrlq $1,%xmm15
+ pxor %xmm3,%xmm5
+ pxor %xmm0,%xmm15
+ pand %xmm7,%xmm5
+ pand %xmm7,%xmm15
+ pxor %xmm5,%xmm3
+ psllq $1,%xmm5
+ pxor %xmm15,%xmm0
+ psllq $1,%xmm15
+ pxor %xmm9,%xmm5
+ pxor %xmm10,%xmm15
+ movdqa 32(%r11),%xmm7
+ movdqa %xmm6,%xmm9
+ psrlq $2,%xmm6
+ movdqa %xmm1,%xmm10
+ psrlq $2,%xmm1
+ pxor %xmm4,%xmm6
+ pxor %xmm2,%xmm1
+ pand %xmm8,%xmm6
+ pand %xmm8,%xmm1
+ pxor %xmm6,%xmm4
+ psllq $2,%xmm6
+ pxor %xmm1,%xmm2
+ psllq $2,%xmm1
+ pxor %xmm9,%xmm6
+ pxor %xmm10,%xmm1
+ movdqa %xmm0,%xmm9
+ psrlq $2,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $2,%xmm15
+ pxor %xmm3,%xmm0
+ pxor %xmm5,%xmm15
+ pand %xmm8,%xmm0
+ pand %xmm8,%xmm15
+ pxor %xmm0,%xmm3
+ psllq $2,%xmm0
+ pxor %xmm15,%xmm5
+ psllq $2,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa %xmm3,%xmm9
+ psrlq $4,%xmm3
+ movdqa %xmm5,%xmm10
+ psrlq $4,%xmm5
+ pxor %xmm4,%xmm3
+ pxor %xmm2,%xmm5
+ pand %xmm7,%xmm3
+ pand %xmm7,%xmm5
+ pxor %xmm3,%xmm4
+ psllq $4,%xmm3
+ pxor %xmm5,%xmm2
+ psllq $4,%xmm5
+ pxor %xmm9,%xmm3
+ pxor %xmm10,%xmm5
+ movdqa %xmm0,%xmm9
+ psrlq $4,%xmm0
+ movdqa %xmm15,%xmm10
+ psrlq $4,%xmm15
+ pxor %xmm6,%xmm0
+ pxor %xmm1,%xmm15
+ pand %xmm7,%xmm0
+ pand %xmm7,%xmm15
+ pxor %xmm0,%xmm6
+ psllq $4,%xmm0
+ pxor %xmm15,%xmm1
+ psllq $4,%xmm15
+ pxor %xmm9,%xmm0
+ pxor %xmm10,%xmm15
+ movdqa (%rax),%xmm7
+ pxor %xmm7,%xmm5
+ pxor %xmm7,%xmm3
+ pxor %xmm7,%xmm1
+ pxor %xmm7,%xmm6
+ pxor %xmm7,%xmm2
+ pxor %xmm7,%xmm4
+ pxor %xmm7,%xmm15
+ pxor %xmm7,%xmm0
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _bsaes_decrypt8,.-_bsaes_decrypt8
+.type _bsaes_key_convert,@function
+.align 16
+_bsaes_key_convert:
+.cfi_startproc
+ leaq .Lmasks(%rip),%r11
+ movdqu (%rcx),%xmm7
+ leaq 16(%rcx),%rcx
+ movdqa 0(%r11),%xmm0
+ movdqa 16(%r11),%xmm1
+ movdqa 32(%r11),%xmm2
+ movdqa 48(%r11),%xmm3
+ movdqa 64(%r11),%xmm4
+ pcmpeqd %xmm5,%xmm5
+
+ movdqu (%rcx),%xmm6
+ movdqa %xmm7,(%rax)
+ leaq 16(%rax),%rax
+ decl %r10d
+ jmp .Lkey_loop
+.align 16
+.Lkey_loop:
+.byte 102,15,56,0,244
+
+ movdqa %xmm0,%xmm8
+ movdqa %xmm1,%xmm9
+
+ pand %xmm6,%xmm8
+ pand %xmm6,%xmm9
+ movdqa %xmm2,%xmm10
+ pcmpeqb %xmm0,%xmm8
+ psllq $4,%xmm0
+ movdqa %xmm3,%xmm11
+ pcmpeqb %xmm1,%xmm9
+ psllq $4,%xmm1
+
+ pand %xmm6,%xmm10
+ pand %xmm6,%xmm11
+ movdqa %xmm0,%xmm12
+ pcmpeqb %xmm2,%xmm10
+ psllq $4,%xmm2
+ movdqa %xmm1,%xmm13
+ pcmpeqb %xmm3,%xmm11
+ psllq $4,%xmm3
+
+ movdqa %xmm2,%xmm14
+ movdqa %xmm3,%xmm15
+ pxor %xmm5,%xmm8
+ pxor %xmm5,%xmm9
+
+ pand %xmm6,%xmm12
+ pand %xmm6,%xmm13
+ movdqa %xmm8,0(%rax)
+ pcmpeqb %xmm0,%xmm12
+ psrlq $4,%xmm0
+ movdqa %xmm9,16(%rax)
+ pcmpeqb %xmm1,%xmm13
+ psrlq $4,%xmm1
+ leaq 16(%rcx),%rcx
+
+ pand %xmm6,%xmm14
+ pand %xmm6,%xmm15
+ movdqa %xmm10,32(%rax)
+ pcmpeqb %xmm2,%xmm14
+ psrlq $4,%xmm2
+ movdqa %xmm11,48(%rax)
+ pcmpeqb %xmm3,%xmm15
+ psrlq $4,%xmm3
+ movdqu (%rcx),%xmm6
+
+ pxor %xmm5,%xmm13
+ pxor %xmm5,%xmm14
+ movdqa %xmm12,64(%rax)
+ movdqa %xmm13,80(%rax)
+ movdqa %xmm14,96(%rax)
+ movdqa %xmm15,112(%rax)
+ leaq 128(%rax),%rax
+ decl %r10d
+ jnz .Lkey_loop
+
+ movdqa 80(%r11),%xmm7
+
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size _bsaes_key_convert,.-_bsaes_key_convert
+
+.globl ossl_bsaes_cbc_encrypt
+.type ossl_bsaes_cbc_encrypt,@function
+.align 16
+ossl_bsaes_cbc_encrypt:
+.cfi_startproc
+.byte 243,15,30,250
+ cmpl $0,%r9d
+ jne asm_AES_cbc_encrypt
+ cmpq $128,%rdx
+ jb asm_AES_cbc_encrypt
+
+ movq %rsp,%rax
+.Lcbc_dec_prologue:
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
+ movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
+ movl 240(%rcx),%eax
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %rdx,%r14
+ movq %rcx,%r15
+ movq %r8,%rbx
+ shrq $4,%r14
+
+ movl %eax,%edx
+ shlq $7,%rax
+ subq $96,%rax
+ subq %rax,%rsp
+
+ movq %rsp,%rax
+ movq %r15,%rcx
+ movl %edx,%r10d
+ call _bsaes_key_convert
+ pxor (%rsp),%xmm7
+ movdqa %xmm6,(%rax)
+ movdqa %xmm7,(%rsp)
+
+ movdqu (%rbx),%xmm14
+ subq $8,%r14
+.Lcbc_dec_loop:
+ movdqu 0(%r12),%xmm15
+ movdqu 16(%r12),%xmm0
+ movdqu 32(%r12),%xmm1
+ movdqu 48(%r12),%xmm2
+ movdqu 64(%r12),%xmm3
+ movdqu 80(%r12),%xmm4
+ movq %rsp,%rax
+ movdqu 96(%r12),%xmm5
+ movl %edx,%r10d
+ movdqu 112(%r12),%xmm6
+ movdqa %xmm14,32(%rbp)
+
+ call _bsaes_decrypt8
+
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm5
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm3
+ movdqu 64(%r12),%xmm11
+ pxor %xmm10,%xmm1
+ movdqu 80(%r12),%xmm12
+ pxor %xmm11,%xmm6
+ movdqu 96(%r12),%xmm13
+ pxor %xmm12,%xmm2
+ movdqu 112(%r12),%xmm14
+ pxor %xmm13,%xmm4
+ movdqu %xmm15,0(%r13)
+ leaq 128(%r12),%r12
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ movdqu %xmm1,64(%r13)
+ movdqu %xmm6,80(%r13)
+ movdqu %xmm2,96(%r13)
+ movdqu %xmm4,112(%r13)
+ leaq 128(%r13),%r13
+ subq $8,%r14
+ jnc .Lcbc_dec_loop
+
+ addq $8,%r14
+ jz .Lcbc_dec_done
+
+ movdqu 0(%r12),%xmm15
+ movq %rsp,%rax
+ movl %edx,%r10d
+ cmpq $2,%r14
+ jb .Lcbc_dec_one
+ movdqu 16(%r12),%xmm0
+ je .Lcbc_dec_two
+ movdqu 32(%r12),%xmm1
+ cmpq $4,%r14
+ jb .Lcbc_dec_three
+ movdqu 48(%r12),%xmm2
+ je .Lcbc_dec_four
+ movdqu 64(%r12),%xmm3
+ cmpq $6,%r14
+ jb .Lcbc_dec_five
+ movdqu 80(%r12),%xmm4
+ je .Lcbc_dec_six
+ movdqu 96(%r12),%xmm5
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm5
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm3
+ movdqu 64(%r12),%xmm11
+ pxor %xmm10,%xmm1
+ movdqu 80(%r12),%xmm12
+ pxor %xmm11,%xmm6
+ movdqu 96(%r12),%xmm14
+ pxor %xmm12,%xmm2
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ movdqu %xmm1,64(%r13)
+ movdqu %xmm6,80(%r13)
+ movdqu %xmm2,96(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_six:
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm5
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm3
+ movdqu 64(%r12),%xmm11
+ pxor %xmm10,%xmm1
+ movdqu 80(%r12),%xmm14
+ pxor %xmm11,%xmm6
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ movdqu %xmm1,64(%r13)
+ movdqu %xmm6,80(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_five:
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm5
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm3
+ movdqu 64(%r12),%xmm14
+ pxor %xmm10,%xmm1
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ movdqu %xmm1,64(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_four:
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm5
+ movdqu 48(%r12),%xmm14
+ pxor %xmm9,%xmm3
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_three:
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm0
+ movdqu 32(%r12),%xmm14
+ pxor %xmm8,%xmm5
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_two:
+ movdqa %xmm14,32(%rbp)
+ call _bsaes_decrypt8
+ pxor 32(%rbp),%xmm15
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm14
+ pxor %xmm7,%xmm0
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ jmp .Lcbc_dec_done
+.align 16
+.Lcbc_dec_one:
+ leaq (%r12),%rdi
+ leaq 32(%rbp),%rsi
+ leaq (%r15),%rdx
+ call asm_AES_decrypt
+ pxor 32(%rbp),%xmm14
+ movdqu %xmm14,(%r13)
+ movdqa %xmm15,%xmm14
+
+.Lcbc_dec_done:
+ movdqu %xmm14,(%rbx)
+ leaq (%rsp),%rax
+ pxor %xmm0,%xmm0
+.Lcbc_dec_bzero:
+ movdqa %xmm0,0(%rax)
+ movdqa %xmm0,16(%rax)
+ leaq 32(%rax),%rax
+ cmpq %rax,%rbp
+ ja .Lcbc_dec_bzero
+
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lcbc_dec_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ossl_bsaes_cbc_encrypt,.-ossl_bsaes_cbc_encrypt
+
+.globl ossl_bsaes_ctr32_encrypt_blocks
+.type ossl_bsaes_ctr32_encrypt_blocks,@function
+.align 16
+ossl_bsaes_ctr32_encrypt_blocks:
+.cfi_startproc
+.byte 243,15,30,250
+ movq %rsp,%rax
+.Lctr_enc_prologue:
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
+ movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
+ movdqu (%r8),%xmm0
+ movl 240(%rcx),%eax
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %rdx,%r14
+ movq %rcx,%r15
+ movdqa %xmm0,32(%rbp)
+ cmpq $8,%rdx
+ jb .Lctr_enc_short
+
+ movl %eax,%ebx
+ shlq $7,%rax
+ subq $96,%rax
+ subq %rax,%rsp
+
+ movq %rsp,%rax
+ movq %r15,%rcx
+ movl %ebx,%r10d
+ call _bsaes_key_convert
+ pxor %xmm6,%xmm7
+ movdqa %xmm7,(%rax)
+
+ movdqa (%rsp),%xmm8
+ leaq .LADD1(%rip),%r11
+ movdqa 32(%rbp),%xmm15
+ movdqa -32(%r11),%xmm7
+.byte 102,68,15,56,0,199
+.byte 102,68,15,56,0,255
+ movdqa %xmm8,(%rsp)
+ jmp .Lctr_enc_loop
+.align 16
+.Lctr_enc_loop:
+ movdqa %xmm15,32(%rbp)
+ movdqa %xmm15,%xmm0
+ movdqa %xmm15,%xmm1
+ paddd 0(%r11),%xmm0
+ movdqa %xmm15,%xmm2
+ paddd 16(%r11),%xmm1
+ movdqa %xmm15,%xmm3
+ paddd 32(%r11),%xmm2
+ movdqa %xmm15,%xmm4
+ paddd 48(%r11),%xmm3
+ movdqa %xmm15,%xmm5
+ paddd 64(%r11),%xmm4
+ movdqa %xmm15,%xmm6
+ paddd 80(%r11),%xmm5
+ paddd 96(%r11),%xmm6
+
+
+
+ movdqa (%rsp),%xmm8
+ leaq 16(%rsp),%rax
+ movdqa -16(%r11),%xmm7
+ pxor %xmm8,%xmm15
+ pxor %xmm8,%xmm0
+ pxor %xmm8,%xmm1
+ pxor %xmm8,%xmm2
+.byte 102,68,15,56,0,255
+.byte 102,15,56,0,199
+ pxor %xmm8,%xmm3
+ pxor %xmm8,%xmm4
+.byte 102,15,56,0,207
+.byte 102,15,56,0,215
+ pxor %xmm8,%xmm5
+ pxor %xmm8,%xmm6
+.byte 102,15,56,0,223
+.byte 102,15,56,0,231
+.byte 102,15,56,0,239
+.byte 102,15,56,0,247
+ leaq .LBS0(%rip),%r11
+ movl %ebx,%r10d
+
+ call _bsaes_encrypt8_bitslice
+
+ subq $8,%r14
+ jc .Lctr_enc_loop_done
+
+ movdqu 0(%r12),%xmm7
+ movdqu 16(%r12),%xmm8
+ movdqu 32(%r12),%xmm9
+ movdqu 48(%r12),%xmm10
+ movdqu 64(%r12),%xmm11
+ movdqu 80(%r12),%xmm12
+ movdqu 96(%r12),%xmm13
+ movdqu 112(%r12),%xmm14
+ leaq 128(%r12),%r12
+ pxor %xmm15,%xmm7
+ movdqa 32(%rbp),%xmm15
+ pxor %xmm8,%xmm0
+ movdqu %xmm7,0(%r13)
+ pxor %xmm9,%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor %xmm10,%xmm5
+ movdqu %xmm3,32(%r13)
+ pxor %xmm11,%xmm2
+ movdqu %xmm5,48(%r13)
+ pxor %xmm12,%xmm6
+ movdqu %xmm2,64(%r13)
+ pxor %xmm13,%xmm1
+ movdqu %xmm6,80(%r13)
+ pxor %xmm14,%xmm4
+ movdqu %xmm1,96(%r13)
+ leaq .LADD1(%rip),%r11
+ movdqu %xmm4,112(%r13)
+ leaq 128(%r13),%r13
+ paddd 112(%r11),%xmm15
+ jnz .Lctr_enc_loop
+
+ jmp .Lctr_enc_done
+.align 16
+.Lctr_enc_loop_done:
+ addq $8,%r14
+ movdqu 0(%r12),%xmm7
+ pxor %xmm7,%xmm15
+ movdqu %xmm15,0(%r13)
+ cmpq $2,%r14
+ jb .Lctr_enc_done
+ movdqu 16(%r12),%xmm8
+ pxor %xmm8,%xmm0
+ movdqu %xmm0,16(%r13)
+ je .Lctr_enc_done
+ movdqu 32(%r12),%xmm9
+ pxor %xmm9,%xmm3
+ movdqu %xmm3,32(%r13)
+ cmpq $4,%r14
+ jb .Lctr_enc_done
+ movdqu 48(%r12),%xmm10
+ pxor %xmm10,%xmm5
+ movdqu %xmm5,48(%r13)
+ je .Lctr_enc_done
+ movdqu 64(%r12),%xmm11
+ pxor %xmm11,%xmm2
+ movdqu %xmm2,64(%r13)
+ cmpq $6,%r14
+ jb .Lctr_enc_done
+ movdqu 80(%r12),%xmm12
+ pxor %xmm12,%xmm6
+ movdqu %xmm6,80(%r13)
+ je .Lctr_enc_done
+ movdqu 96(%r12),%xmm13
+ pxor %xmm13,%xmm1
+ movdqu %xmm1,96(%r13)
+ jmp .Lctr_enc_done
+
+.align 16
+.Lctr_enc_short:
+ leaq 32(%rbp),%rdi
+ leaq 48(%rbp),%rsi
+ leaq (%r15),%rdx
+ call asm_AES_encrypt
+ movdqu (%r12),%xmm0
+ leaq 16(%r12),%r12
+ movl 44(%rbp),%eax
+ bswapl %eax
+ pxor 48(%rbp),%xmm0
+ incl %eax
+ movdqu %xmm0,(%r13)
+ bswapl %eax
+ leaq 16(%r13),%r13
+ movl %eax,44(%rsp)
+ decq %r14
+ jnz .Lctr_enc_short
+
+.Lctr_enc_done:
+ leaq (%rsp),%rax
+ pxor %xmm0,%xmm0
+.Lctr_enc_bzero:
+ movdqa %xmm0,0(%rax)
+ movdqa %xmm0,16(%rax)
+ leaq 32(%rax),%rax
+ cmpq %rax,%rbp
+ ja .Lctr_enc_bzero
+
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lctr_enc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ossl_bsaes_ctr32_encrypt_blocks,.-ossl_bsaes_ctr32_encrypt_blocks
+.globl ossl_bsaes_xts_encrypt
+.type ossl_bsaes_xts_encrypt,@function
+.align 16
+ossl_bsaes_xts_encrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.Lxts_enc_prologue:
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
+ movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %rdx,%r14
+ movq %rcx,%r15
+
+ leaq (%r9),%rdi
+ leaq 32(%rbp),%rsi
+ leaq (%r8),%rdx
+ call asm_AES_encrypt
+
+ movl 240(%r15),%eax
+ movq %r14,%rbx
+
+ movl %eax,%edx
+ shlq $7,%rax
+ subq $96,%rax
+ subq %rax,%rsp
+
+ movq %rsp,%rax
+ movq %r15,%rcx
+ movl %edx,%r10d
+ call _bsaes_key_convert
+ pxor %xmm6,%xmm7
+ movdqa %xmm7,(%rax)
+
+ andq $-16,%r14
+ subq $0x80,%rsp
+ movdqa 32(%rbp),%xmm6
+
+ pxor %xmm14,%xmm14
+ movdqa .Lxts_magic(%rip),%xmm12
+ pcmpgtd %xmm6,%xmm14
+
+ subq $0x80,%r14
+ jc .Lxts_enc_short
+ jmp .Lxts_enc_loop
+
+.align 16
+.Lxts_enc_loop:
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm15
+ movdqa %xmm6,0(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm0
+ movdqa %xmm6,16(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 0(%r12),%xmm7
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm1
+ movdqa %xmm6,32(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm15
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm2
+ movdqa %xmm6,48(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm0
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm3
+ movdqa %xmm6,64(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm4
+ movdqa %xmm6,80(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 64(%r12),%xmm11
+ pxor %xmm10,%xmm2
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm5
+ movdqa %xmm6,96(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 80(%r12),%xmm12
+ pxor %xmm11,%xmm3
+ movdqu 96(%r12),%xmm13
+ pxor %xmm12,%xmm4
+ movdqu 112(%r12),%xmm14
+ leaq 128(%r12),%r12
+ movdqa %xmm6,112(%rsp)
+ pxor %xmm13,%xmm5
+ leaq 128(%rsp),%rax
+ pxor %xmm14,%xmm6
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm5
+ movdqu %xmm3,32(%r13)
+ pxor 64(%rsp),%xmm2
+ movdqu %xmm5,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm2,64(%r13)
+ pxor 96(%rsp),%xmm1
+ movdqu %xmm6,80(%r13)
+ pxor 112(%rsp),%xmm4
+ movdqu %xmm1,96(%r13)
+ movdqu %xmm4,112(%r13)
+ leaq 128(%r13),%r13
+
+ movdqa 112(%rsp),%xmm6
+ pxor %xmm14,%xmm14
+ movdqa .Lxts_magic(%rip),%xmm12
+ pcmpgtd %xmm6,%xmm14
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+
+ subq $0x80,%r14
+ jnc .Lxts_enc_loop
+
+.Lxts_enc_short:
+ addq $0x80,%r14
+ jz .Lxts_enc_done
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm15
+ movdqa %xmm6,0(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm0
+ movdqa %xmm6,16(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 0(%r12),%xmm7
+ cmpq $16,%r14
+ je .Lxts_enc_1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm1
+ movdqa %xmm6,32(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 16(%r12),%xmm8
+ cmpq $32,%r14
+ je .Lxts_enc_2
+ pxor %xmm7,%xmm15
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm2
+ movdqa %xmm6,48(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 32(%r12),%xmm9
+ cmpq $48,%r14
+ je .Lxts_enc_3
+ pxor %xmm8,%xmm0
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm3
+ movdqa %xmm6,64(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 48(%r12),%xmm10
+ cmpq $64,%r14
+ je .Lxts_enc_4
+ pxor %xmm9,%xmm1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm4
+ movdqa %xmm6,80(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 64(%r12),%xmm11
+ cmpq $80,%r14
+ je .Lxts_enc_5
+ pxor %xmm10,%xmm2
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm5
+ movdqa %xmm6,96(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 80(%r12),%xmm12
+ cmpq $96,%r14
+ je .Lxts_enc_6
+ pxor %xmm11,%xmm3
+ movdqu 96(%r12),%xmm13
+ pxor %xmm12,%xmm4
+ movdqa %xmm6,112(%rsp)
+ leaq 112(%r12),%r12
+ pxor %xmm13,%xmm5
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm5
+ movdqu %xmm3,32(%r13)
+ pxor 64(%rsp),%xmm2
+ movdqu %xmm5,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm2,64(%r13)
+ pxor 96(%rsp),%xmm1
+ movdqu %xmm6,80(%r13)
+ movdqu %xmm1,96(%r13)
+ leaq 112(%r13),%r13
+
+ movdqa 112(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_6:
+ pxor %xmm11,%xmm3
+ leaq 96(%r12),%r12
+ pxor %xmm12,%xmm4
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm5
+ movdqu %xmm3,32(%r13)
+ pxor 64(%rsp),%xmm2
+ movdqu %xmm5,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm2,64(%r13)
+ movdqu %xmm6,80(%r13)
+ leaq 96(%r13),%r13
+
+ movdqa 96(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_5:
+ pxor %xmm10,%xmm2
+ leaq 80(%r12),%r12
+ pxor %xmm11,%xmm3
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm5
+ movdqu %xmm3,32(%r13)
+ pxor 64(%rsp),%xmm2
+ movdqu %xmm5,48(%r13)
+ movdqu %xmm2,64(%r13)
+ leaq 80(%r13),%r13
+
+ movdqa 80(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_4:
+ pxor %xmm9,%xmm1
+ leaq 64(%r12),%r12
+ pxor %xmm10,%xmm2
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm5
+ movdqu %xmm3,32(%r13)
+ movdqu %xmm5,48(%r13)
+ leaq 64(%r13),%r13
+
+ movdqa 64(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_3:
+ pxor %xmm8,%xmm0
+ leaq 48(%r12),%r12
+ pxor %xmm9,%xmm1
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm3
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm3,32(%r13)
+ leaq 48(%r13),%r13
+
+ movdqa 48(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_2:
+ pxor %xmm7,%xmm15
+ leaq 32(%r12),%r12
+ pxor %xmm8,%xmm0
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_encrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ leaq 32(%r13),%r13
+
+ movdqa 32(%rsp),%xmm6
+ jmp .Lxts_enc_done
+.align 16
+.Lxts_enc_1:
+ pxor %xmm15,%xmm7
+ leaq 16(%r12),%r12
+ movdqa %xmm7,32(%rbp)
+ leaq 32(%rbp),%rdi
+ leaq 32(%rbp),%rsi
+ leaq (%r15),%rdx
+ call asm_AES_encrypt
+ pxor 32(%rbp),%xmm15
+
+
+
+
+
+ movdqu %xmm15,0(%r13)
+ leaq 16(%r13),%r13
+
+ movdqa 16(%rsp),%xmm6
+
+.Lxts_enc_done:
+ andl $15,%ebx
+ jz .Lxts_enc_ret
+ movq %r13,%rdx
+
+.Lxts_enc_steal:
+ movzbl (%r12),%eax
+ movzbl -16(%rdx),%ecx
+ leaq 1(%r12),%r12
+ movb %al,-16(%rdx)
+ movb %cl,0(%rdx)
+ leaq 1(%rdx),%rdx
+ subl $1,%ebx
+ jnz .Lxts_enc_steal
+
+ movdqu -16(%r13),%xmm15
+ leaq 32(%rbp),%rdi
+ pxor %xmm6,%xmm15
+ leaq 32(%rbp),%rsi
+ movdqa %xmm15,32(%rbp)
+ leaq (%r15),%rdx
+ call asm_AES_encrypt
+ pxor 32(%rbp),%xmm6
+ movdqu %xmm6,-16(%r13)
+
+.Lxts_enc_ret:
+ leaq (%rsp),%rax
+ pxor %xmm0,%xmm0
+.Lxts_enc_bzero:
+ movdqa %xmm0,0(%rax)
+ movdqa %xmm0,16(%rax)
+ leaq 32(%rax),%rax
+ cmpq %rax,%rbp
+ ja .Lxts_enc_bzero
+
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lxts_enc_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ossl_bsaes_xts_encrypt,.-ossl_bsaes_xts_encrypt
+
+.globl ossl_bsaes_xts_decrypt
+.type ossl_bsaes_xts_decrypt,@function
+.align 16
+ossl_bsaes_xts_decrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.Lxts_dec_prologue:
+ pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
+ pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
+ pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
+ pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
+ pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
+ pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
+ leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
+ movq %rsp,%rbp
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %rdx,%r14
+ movq %rcx,%r15
+
+ leaq (%r9),%rdi
+ leaq 32(%rbp),%rsi
+ leaq (%r8),%rdx
+ call asm_AES_encrypt
+
+ movl 240(%r15),%eax
+ movq %r14,%rbx
+
+ movl %eax,%edx
+ shlq $7,%rax
+ subq $96,%rax
+ subq %rax,%rsp
+
+ movq %rsp,%rax
+ movq %r15,%rcx
+ movl %edx,%r10d
+ call _bsaes_key_convert
+ pxor (%rsp),%xmm7
+ movdqa %xmm6,(%rax)
+ movdqa %xmm7,(%rsp)
+
+ xorl %eax,%eax
+ andq $-16,%r14
+ testl $15,%ebx
+ setnz %al
+ shlq $4,%rax
+ subq %rax,%r14
+
+ subq $0x80,%rsp
+ movdqa 32(%rbp),%xmm6
+
+ pxor %xmm14,%xmm14
+ movdqa .Lxts_magic(%rip),%xmm12
+ pcmpgtd %xmm6,%xmm14
+
+ subq $0x80,%r14
+ jc .Lxts_dec_short
+ jmp .Lxts_dec_loop
+
+.align 16
+.Lxts_dec_loop:
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm15
+ movdqa %xmm6,0(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm0
+ movdqa %xmm6,16(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 0(%r12),%xmm7
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm1
+ movdqa %xmm6,32(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 16(%r12),%xmm8
+ pxor %xmm7,%xmm15
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm2
+ movdqa %xmm6,48(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 32(%r12),%xmm9
+ pxor %xmm8,%xmm0
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm3
+ movdqa %xmm6,64(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 48(%r12),%xmm10
+ pxor %xmm9,%xmm1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm4
+ movdqa %xmm6,80(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 64(%r12),%xmm11
+ pxor %xmm10,%xmm2
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm5
+ movdqa %xmm6,96(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 80(%r12),%xmm12
+ pxor %xmm11,%xmm3
+ movdqu 96(%r12),%xmm13
+ pxor %xmm12,%xmm4
+ movdqu 112(%r12),%xmm14
+ leaq 128(%r12),%r12
+ movdqa %xmm6,112(%rsp)
+ pxor %xmm13,%xmm5
+ leaq 128(%rsp),%rax
+ pxor %xmm14,%xmm6
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm3
+ movdqu %xmm5,32(%r13)
+ pxor 64(%rsp),%xmm1
+ movdqu %xmm3,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm1,64(%r13)
+ pxor 96(%rsp),%xmm2
+ movdqu %xmm6,80(%r13)
+ pxor 112(%rsp),%xmm4
+ movdqu %xmm2,96(%r13)
+ movdqu %xmm4,112(%r13)
+ leaq 128(%r13),%r13
+
+ movdqa 112(%rsp),%xmm6
+ pxor %xmm14,%xmm14
+ movdqa .Lxts_magic(%rip),%xmm12
+ pcmpgtd %xmm6,%xmm14
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+
+ subq $0x80,%r14
+ jnc .Lxts_dec_loop
+
+.Lxts_dec_short:
+ addq $0x80,%r14
+ jz .Lxts_dec_done
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm15
+ movdqa %xmm6,0(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm0
+ movdqa %xmm6,16(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 0(%r12),%xmm7
+ cmpq $16,%r14
+ je .Lxts_dec_1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm1
+ movdqa %xmm6,32(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 16(%r12),%xmm8
+ cmpq $32,%r14
+ je .Lxts_dec_2
+ pxor %xmm7,%xmm15
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm2
+ movdqa %xmm6,48(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 32(%r12),%xmm9
+ cmpq $48,%r14
+ je .Lxts_dec_3
+ pxor %xmm8,%xmm0
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm3
+ movdqa %xmm6,64(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 48(%r12),%xmm10
+ cmpq $64,%r14
+ je .Lxts_dec_4
+ pxor %xmm9,%xmm1
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm4
+ movdqa %xmm6,80(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 64(%r12),%xmm11
+ cmpq $80,%r14
+ je .Lxts_dec_5
+ pxor %xmm10,%xmm2
+ pshufd $0x13,%xmm14,%xmm13
+ pxor %xmm14,%xmm14
+ movdqa %xmm6,%xmm5
+ movdqa %xmm6,96(%rsp)
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ pcmpgtd %xmm6,%xmm14
+ pxor %xmm13,%xmm6
+ movdqu 80(%r12),%xmm12
+ cmpq $96,%r14
+ je .Lxts_dec_6
+ pxor %xmm11,%xmm3
+ movdqu 96(%r12),%xmm13
+ pxor %xmm12,%xmm4
+ movdqa %xmm6,112(%rsp)
+ leaq 112(%r12),%r12
+ pxor %xmm13,%xmm5
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm3
+ movdqu %xmm5,32(%r13)
+ pxor 64(%rsp),%xmm1
+ movdqu %xmm3,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm1,64(%r13)
+ pxor 96(%rsp),%xmm2
+ movdqu %xmm6,80(%r13)
+ movdqu %xmm2,96(%r13)
+ leaq 112(%r13),%r13
+
+ movdqa 112(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_6:
+ pxor %xmm11,%xmm3
+ leaq 96(%r12),%r12
+ pxor %xmm12,%xmm4
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm3
+ movdqu %xmm5,32(%r13)
+ pxor 64(%rsp),%xmm1
+ movdqu %xmm3,48(%r13)
+ pxor 80(%rsp),%xmm6
+ movdqu %xmm1,64(%r13)
+ movdqu %xmm6,80(%r13)
+ leaq 96(%r13),%r13
+
+ movdqa 96(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_5:
+ pxor %xmm10,%xmm2
+ leaq 80(%r12),%r12
+ pxor %xmm11,%xmm3
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm3
+ movdqu %xmm5,32(%r13)
+ pxor 64(%rsp),%xmm1
+ movdqu %xmm3,48(%r13)
+ movdqu %xmm1,64(%r13)
+ leaq 80(%r13),%r13
+
+ movdqa 80(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_4:
+ pxor %xmm9,%xmm1
+ leaq 64(%r12),%r12
+ pxor %xmm10,%xmm2
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ pxor 48(%rsp),%xmm3
+ movdqu %xmm5,32(%r13)
+ movdqu %xmm3,48(%r13)
+ leaq 64(%r13),%r13
+
+ movdqa 64(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_3:
+ pxor %xmm8,%xmm0
+ leaq 48(%r12),%r12
+ pxor %xmm9,%xmm1
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ pxor 32(%rsp),%xmm5
+ movdqu %xmm0,16(%r13)
+ movdqu %xmm5,32(%r13)
+ leaq 48(%r13),%r13
+
+ movdqa 48(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_2:
+ pxor %xmm7,%xmm15
+ leaq 32(%r12),%r12
+ pxor %xmm8,%xmm0
+ leaq 128(%rsp),%rax
+ movl %edx,%r10d
+
+ call _bsaes_decrypt8
+
+ pxor 0(%rsp),%xmm15
+ pxor 16(%rsp),%xmm0
+ movdqu %xmm15,0(%r13)
+ movdqu %xmm0,16(%r13)
+ leaq 32(%r13),%r13
+
+ movdqa 32(%rsp),%xmm6
+ jmp .Lxts_dec_done
+.align 16
+.Lxts_dec_1:
+ pxor %xmm15,%xmm7
+ leaq 16(%r12),%r12
+ movdqa %xmm7,32(%rbp)
+ leaq 32(%rbp),%rdi
+ leaq 32(%rbp),%rsi
+ leaq (%r15),%rdx
+ call asm_AES_decrypt
+ pxor 32(%rbp),%xmm15
+
+
+
+
+
+ movdqu %xmm15,0(%r13)
+ leaq 16(%r13),%r13
+
+ movdqa 16(%rsp),%xmm6
+
+.Lxts_dec_done:
+ andl $15,%ebx
+ jz .Lxts_dec_ret
+
+ pxor %xmm14,%xmm14
+ movdqa .Lxts_magic(%rip),%xmm12
+ pcmpgtd %xmm6,%xmm14
+ pshufd $0x13,%xmm14,%xmm13
+ movdqa %xmm6,%xmm5
+ paddq %xmm6,%xmm6
+ pand %xmm12,%xmm13
+ movdqu (%r12),%xmm15
+ pxor %xmm13,%xmm6
+
+ leaq 32(%rbp),%rdi
+ pxor %xmm6,%xmm15
+ leaq 32(%rbp),%rsi
+ movdqa %xmm15,32(%rbp)
+ leaq (%r15),%rdx
+ call asm_AES_decrypt
+ pxor 32(%rbp),%xmm6
+ movq %r13,%rdx
+ movdqu %xmm6,(%r13)
+
+.Lxts_dec_steal:
+ movzbl 16(%r12),%eax
+ movzbl (%rdx),%ecx
+ leaq 1(%r12),%r12
+ movb %al,(%rdx)
+ movb %cl,16(%rdx)
+ leaq 1(%rdx),%rdx
+ subl $1,%ebx
+ jnz .Lxts_dec_steal
+
+ movdqu (%r13),%xmm15
+ leaq 32(%rbp),%rdi
+ pxor %xmm5,%xmm15
+ leaq 32(%rbp),%rsi
+ movdqa %xmm15,32(%rbp)
+ leaq (%r15),%rdx
+ call asm_AES_decrypt
+ pxor 32(%rbp),%xmm5
+ movdqu %xmm5,(%r13)
+
+.Lxts_dec_ret:
+ leaq (%rsp),%rax
+ pxor %xmm0,%xmm0
+.Lxts_dec_bzero:
+ movdqa %xmm0,0(%rax)
+ movdqa %xmm0,16(%rax)
+ leaq 32(%rax),%rax
+ cmpq %rax,%rbp
+ ja .Lxts_dec_bzero
+
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
+.Lxts_dec_epilogue:
+ .byte 0xf3,0xc3
+.cfi_endproc
+.size ossl_bsaes_xts_decrypt,.-ossl_bsaes_xts_decrypt
+.type _bsaes_const,@object
+.align 64
+_bsaes_const:
+.LM0ISR:
+.quad 0x0a0e0206070b0f03, 0x0004080c0d010509
+.LISRM0:
+.quad 0x01040b0e0205080f, 0x0306090c00070a0d
+.LISR:
+.quad 0x0504070602010003, 0x0f0e0d0c080b0a09
+.LBS0:
+.quad 0x5555555555555555, 0x5555555555555555
+.LBS1:
+.quad 0x3333333333333333, 0x3333333333333333
+.LBS2:
+.quad 0x0f0f0f0f0f0f0f0f, 0x0f0f0f0f0f0f0f0f
+.LSR:
+.quad 0x0504070600030201, 0x0f0e0d0c0a09080b
+.LSRM0:
+.quad 0x0304090e00050a0f, 0x01060b0c0207080d
+.LM0SR:
+.quad 0x0a0e02060f03070b, 0x0004080c05090d01
+.LSWPUP:
+.quad 0x0706050403020100, 0x0c0d0e0f0b0a0908
+.LSWPUPM0SR:
+.quad 0x0a0d02060c03070b, 0x0004080f05090e01
+.LADD1:
+.quad 0x0000000000000000, 0x0000000100000000
+.LADD2:
+.quad 0x0000000000000000, 0x0000000200000000
+.LADD3:
+.quad 0x0000000000000000, 0x0000000300000000
+.LADD4:
+.quad 0x0000000000000000, 0x0000000400000000
+.LADD5:
+.quad 0x0000000000000000, 0x0000000500000000
+.LADD6:
+.quad 0x0000000000000000, 0x0000000600000000
+.LADD7:
+.quad 0x0000000000000000, 0x0000000700000000
+.LADD8:
+.quad 0x0000000000000000, 0x0000000800000000
+.Lxts_magic:
+.long 0x87,0,1,0
+.Lmasks:
+.quad 0x0101010101010101, 0x0101010101010101
+.quad 0x0202020202020202, 0x0202020202020202
+.quad 0x0404040404040404, 0x0404040404040404
+.quad 0x0808080808080808, 0x0808080808080808
+.LM0:
+.quad 0x02060a0e03070b0f, 0x0004080c0105090d
+.L63:
+.quad 0x6363636363636363, 0x6363636363636363
+.byte 66,105,116,45,115,108,105,99,101,100,32,65,69,83,32,102,111,114,32,120,56,54,95,54,52,47,83,83,83,69,51,44,32,69,109,105,108,105,97,32,75,195,164,115,112,101,114,44,32,80,101,116,101,114,32,83,99,104,119,97,98,101,44,32,65,110,100,121,32,80,111,108,121,97,107,111,118,0
+.align 64
+.size _bsaes_const,.-_bsaes_const
+ .section ".note.gnu.property", "a"
+ .p2align 3
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ # "GNU" encoded with .byte, since .asciz isn't supported
+ # on Solaris.
+ .byte 0x47
+ .byte 0x4e
+ .byte 0x55
+ .byte 0
+1:
+ .p2align 3
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 3
+3:
+ .p2align 3
+4:
diff --git a/secure/lib/libcrypto/arch/i386/aes-586.S b/secure/lib/libcrypto/arch/i386/aes-586.S
new file mode 100644
index 000000000000..861ee21e8400
--- /dev/null
+++ b/secure/lib/libcrypto/arch/i386/aes-586.S
@@ -0,0 +1,6644 @@
+/* Do not modify. This file is auto-generated from aes-586.pl. */
+#ifdef PIC
+.text
+.type _x86_AES_encrypt_compact,@function
+.align 16
+_x86_AES_encrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L000loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %ecx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %ecx,%edi
+ xorl %esi,%ecx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%ecx
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%ecx
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %edx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %edx,%edi
+ xorl %esi,%edx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%edx
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%edx
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %eax,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %eax,%edi
+ xorl %esi,%eax
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%eax
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%eax
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %ebx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %ebx,%edi
+ xorl %esi,%ebx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%ebx
+ xorl %edi,%esi
+ xorl %esi,%ebx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L000loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.type _sse_AES_encrypt_compact,@function
+.align 16
+_sse_AES_encrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 16
+.L001loop:
+ pshufw $8,%mm0,%mm1
+ pshufw $13,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movl %edi,20(%esp)
+ movzbl %al,%esi
+ movzbl %ah,%edx
+ pshufw $13,%mm0,%mm2
+ movzbl -128(%ebp,%esi,1),%ecx
+ movzbl %bl,%edi
+ movzbl -128(%ebp,%edx,1),%edx
+ shrl $16,%eax
+ shll $8,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $16,%esi
+ pshufw $8,%mm4,%mm6
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $24,%esi
+ shrl $16,%ebx
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $8,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ movd %mm2,%eax
+ movd %ecx,%mm0
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %ah,%edi
+ shll $16,%ecx
+ movd %mm6,%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ shll $8,%esi
+ shrl $16,%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shrl $16,%eax
+ movd %ecx,%mm1
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %ah,%edi
+ shll $16,%ecx
+ andl $255,%eax
+ orl %esi,%ecx
+ punpckldq %mm1,%mm0
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ andl $255,%ebx
+ movzbl -128(%ebp,%eax,1),%eax
+ orl %esi,%ecx
+ shll $16,%eax
+ movzbl -128(%ebp,%edi,1),%esi
+ orl %eax,%edx
+ shll $8,%esi
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %esi,%ecx
+ orl %ebx,%edx
+ movl 20(%esp),%edi
+ movd %ecx,%mm4
+ movd %edx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L002out
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pcmpgtb %mm0,%mm3
+ pcmpgtb %mm4,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ pshufw $177,%mm0,%mm2
+ pshufw $177,%mm4,%mm6
+ paddb %mm0,%mm0
+ paddb %mm4,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pshufw $177,%mm2,%mm3
+ pshufw $177,%mm6,%mm7
+ pxor %mm0,%mm1
+ pxor %mm4,%mm5
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm3,%mm2
+ movq %mm7,%mm6
+ pslld $8,%mm3
+ pslld $8,%mm7
+ psrld $24,%mm2
+ psrld $24,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ psrld $8,%mm1
+ psrld $8,%mm5
+ movl -128(%ebp),%eax
+ pslld $24,%mm3
+ pslld $24,%mm7
+ movl -64(%ebp),%ebx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl (%ebp),%ecx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L001loop
+.align 16
+.L002out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.type _x86_AES_encrypt,@function
+.align 16
+_x86_AES_encrypt:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L003loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl (%ebp,%esi,8),%esi
+ movzbl %ch,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movzbl %bh,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ movl (%ebp,%edx,8),%edx
+ movzbl %ah,%eax
+ xorl 3(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ xorl 1(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L003loop
+ movl %eax,%esi
+ andl $255,%esi
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %bh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %ch,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %dh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl 2(%ebp,%edx,8),%edx
+ andl $255,%edx
+ movzbl %ah,%eax
+ movl (%ebp,%eax,8),%eax
+ andl $65280,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movl (%ebp,%ebx,8),%ebx
+ andl $16711680,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movl 2(%ebp,%ecx,8),%ecx
+ andl $4278190080,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Te:
+.long 2774754246,2774754246
+.long 2222750968,2222750968
+.long 2574743534,2574743534
+.long 2373680118,2373680118
+.long 234025727,234025727
+.long 3177933782,3177933782
+.long 2976870366,2976870366
+.long 1422247313,1422247313
+.long 1345335392,1345335392
+.long 50397442,50397442
+.long 2842126286,2842126286
+.long 2099981142,2099981142
+.long 436141799,436141799
+.long 1658312629,1658312629
+.long 3870010189,3870010189
+.long 2591454956,2591454956
+.long 1170918031,1170918031
+.long 2642575903,2642575903
+.long 1086966153,1086966153
+.long 2273148410,2273148410
+.long 368769775,368769775
+.long 3948501426,3948501426
+.long 3376891790,3376891790
+.long 200339707,200339707
+.long 3970805057,3970805057
+.long 1742001331,1742001331
+.long 4255294047,4255294047
+.long 3937382213,3937382213
+.long 3214711843,3214711843
+.long 4154762323,4154762323
+.long 2524082916,2524082916
+.long 1539358875,1539358875
+.long 3266819957,3266819957
+.long 486407649,486407649
+.long 2928907069,2928907069
+.long 1780885068,1780885068
+.long 1513502316,1513502316
+.long 1094664062,1094664062
+.long 49805301,49805301
+.long 1338821763,1338821763
+.long 1546925160,1546925160
+.long 4104496465,4104496465
+.long 887481809,887481809
+.long 150073849,150073849
+.long 2473685474,2473685474
+.long 1943591083,1943591083
+.long 1395732834,1395732834
+.long 1058346282,1058346282
+.long 201589768,201589768
+.long 1388824469,1388824469
+.long 1696801606,1696801606
+.long 1589887901,1589887901
+.long 672667696,672667696
+.long 2711000631,2711000631
+.long 251987210,251987210
+.long 3046808111,3046808111
+.long 151455502,151455502
+.long 907153956,907153956
+.long 2608889883,2608889883
+.long 1038279391,1038279391
+.long 652995533,652995533
+.long 1764173646,1764173646
+.long 3451040383,3451040383
+.long 2675275242,2675275242
+.long 453576978,453576978
+.long 2659418909,2659418909
+.long 1949051992,1949051992
+.long 773462580,773462580
+.long 756751158,756751158
+.long 2993581788,2993581788
+.long 3998898868,3998898868
+.long 4221608027,4221608027
+.long 4132590244,4132590244
+.long 1295727478,1295727478
+.long 1641469623,1641469623
+.long 3467883389,3467883389
+.long 2066295122,2066295122
+.long 1055122397,1055122397
+.long 1898917726,1898917726
+.long 2542044179,2542044179
+.long 4115878822,4115878822
+.long 1758581177,1758581177
+.long 0,0
+.long 753790401,753790401
+.long 1612718144,1612718144
+.long 536673507,536673507
+.long 3367088505,3367088505
+.long 3982187446,3982187446
+.long 3194645204,3194645204
+.long 1187761037,1187761037
+.long 3653156455,3653156455
+.long 1262041458,1262041458
+.long 3729410708,3729410708
+.long 3561770136,3561770136
+.long 3898103984,3898103984
+.long 1255133061,1255133061
+.long 1808847035,1808847035
+.long 720367557,720367557
+.long 3853167183,3853167183
+.long 385612781,385612781
+.long 3309519750,3309519750
+.long 3612167578,3612167578
+.long 1429418854,1429418854
+.long 2491778321,2491778321
+.long 3477423498,3477423498
+.long 284817897,284817897
+.long 100794884,100794884
+.long 2172616702,2172616702
+.long 4031795360,4031795360
+.long 1144798328,1144798328
+.long 3131023141,3131023141
+.long 3819481163,3819481163
+.long 4082192802,4082192802
+.long 4272137053,4272137053
+.long 3225436288,3225436288
+.long 2324664069,2324664069
+.long 2912064063,2912064063
+.long 3164445985,3164445985
+.long 1211644016,1211644016
+.long 83228145,83228145
+.long 3753688163,3753688163
+.long 3249976951,3249976951
+.long 1977277103,1977277103
+.long 1663115586,1663115586
+.long 806359072,806359072
+.long 452984805,452984805
+.long 250868733,250868733
+.long 1842533055,1842533055
+.long 1288555905,1288555905
+.long 336333848,336333848
+.long 890442534,890442534
+.long 804056259,804056259
+.long 3781124030,3781124030
+.long 2727843637,2727843637
+.long 3427026056,3427026056
+.long 957814574,957814574
+.long 1472513171,1472513171
+.long 4071073621,4071073621
+.long 2189328124,2189328124
+.long 1195195770,1195195770
+.long 2892260552,2892260552
+.long 3881655738,3881655738
+.long 723065138,723065138
+.long 2507371494,2507371494
+.long 2690670784,2690670784
+.long 2558624025,2558624025
+.long 3511635870,3511635870
+.long 2145180835,2145180835
+.long 1713513028,1713513028
+.long 2116692564,2116692564
+.long 2878378043,2878378043
+.long 2206763019,2206763019
+.long 3393603212,3393603212
+.long 703524551,703524551
+.long 3552098411,3552098411
+.long 1007948840,1007948840
+.long 2044649127,2044649127
+.long 3797835452,3797835452
+.long 487262998,487262998
+.long 1994120109,1994120109
+.long 1004593371,1004593371
+.long 1446130276,1446130276
+.long 1312438900,1312438900
+.long 503974420,503974420
+.long 3679013266,3679013266
+.long 168166924,168166924
+.long 1814307912,1814307912
+.long 3831258296,3831258296
+.long 1573044895,1573044895
+.long 1859376061,1859376061
+.long 4021070915,4021070915
+.long 2791465668,2791465668
+.long 2828112185,2828112185
+.long 2761266481,2761266481
+.long 937747667,937747667
+.long 2339994098,2339994098
+.long 854058965,854058965
+.long 1137232011,1137232011
+.long 1496790894,1496790894
+.long 3077402074,3077402074
+.long 2358086913,2358086913
+.long 1691735473,1691735473
+.long 3528347292,3528347292
+.long 3769215305,3769215305
+.long 3027004632,3027004632
+.long 4199962284,4199962284
+.long 133494003,133494003
+.long 636152527,636152527
+.long 2942657994,2942657994
+.long 2390391540,2390391540
+.long 3920539207,3920539207
+.long 403179536,403179536
+.long 3585784431,3585784431
+.long 2289596656,2289596656
+.long 1864705354,1864705354
+.long 1915629148,1915629148
+.long 605822008,605822008
+.long 4054230615,4054230615
+.long 3350508659,3350508659
+.long 1371981463,1371981463
+.long 602466507,602466507
+.long 2094914977,2094914977
+.long 2624877800,2624877800
+.long 555687742,555687742
+.long 3712699286,3712699286
+.long 3703422305,3703422305
+.long 2257292045,2257292045
+.long 2240449039,2240449039
+.long 2423288032,2423288032
+.long 1111375484,1111375484
+.long 3300242801,3300242801
+.long 2858837708,2858837708
+.long 3628615824,3628615824
+.long 84083462,84083462
+.long 32962295,32962295
+.long 302911004,302911004
+.long 2741068226,2741068226
+.long 1597322602,1597322602
+.long 4183250862,4183250862
+.long 3501832553,3501832553
+.long 2441512471,2441512471
+.long 1489093017,1489093017
+.long 656219450,656219450
+.long 3114180135,3114180135
+.long 954327513,954327513
+.long 335083755,335083755
+.long 3013122091,3013122091
+.long 856756514,856756514
+.long 3144247762,3144247762
+.long 1893325225,1893325225
+.long 2307821063,2307821063
+.long 2811532339,2811532339
+.long 3063651117,3063651117
+.long 572399164,572399164
+.long 2458355477,2458355477
+.long 552200649,552200649
+.long 1238290055,1238290055
+.long 4283782570,4283782570
+.long 2015897680,2015897680
+.long 2061492133,2061492133
+.long 2408352771,2408352771
+.long 4171342169,4171342169
+.long 2156497161,2156497161
+.long 386731290,386731290
+.long 3669999461,3669999461
+.long 837215959,837215959
+.long 3326231172,3326231172
+.long 3093850320,3093850320
+.long 3275833730,3275833730
+.long 2962856233,2962856233
+.long 1999449434,1999449434
+.long 286199582,286199582
+.long 3417354363,3417354363
+.long 4233385128,4233385128
+.long 3602627437,3602627437
+.long 974525996,974525996
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.long 1,2,4,8
+.long 16,32,64,128
+.long 27,54,0,0
+.long 0,0,0,0
+.size _x86_AES_encrypt,.-_x86_AES_encrypt
+.globl AES_encrypt
+.type AES_encrypt,@function
+.align 16
+AES_encrypt:
+.L_AES_encrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L004pic_point
+.L004pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P-.L004pic_point(%ebp),%eax
+ leal .LAES_Te-.L004pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc .L005x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L005x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_encrypt,.-.L_AES_encrypt_begin
+.type _x86_AES_decrypt_compact,@function
+.align 16
+_x86_AES_decrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L006loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%eax
+ subl %edi,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %esi,%eax
+ movl $2155905152,%edi
+ andl %eax,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ecx,%eax
+ xorl %esi,%ebx
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ecx,%ebx
+ roll $8,%ecx
+ xorl %esi,%ebp
+ xorl %eax,%ecx
+ xorl %ebp,%eax
+ xorl %ebx,%ecx
+ xorl %ebp,%ebx
+ roll $24,%eax
+ xorl %ebp,%ecx
+ roll $16,%ebx
+ xorl %eax,%ecx
+ roll $8,%ebp
+ xorl %ebx,%ecx
+ movl 4(%esp),%eax
+ xorl %ebp,%ecx
+ movl %ecx,12(%esp)
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %esi,%ebx
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %edx,%ebx
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %edx,%ecx
+ roll $8,%edx
+ xorl %esi,%ebp
+ xorl %ebx,%edx
+ xorl %ebp,%ebx
+ xorl %ecx,%edx
+ xorl %ebp,%ecx
+ roll $24,%ebx
+ xorl %ebp,%edx
+ roll $16,%ecx
+ xorl %ebx,%edx
+ roll $8,%ebp
+ xorl %ecx,%edx
+ movl 8(%esp),%ebx
+ xorl %ebp,%edx
+ movl %edx,16(%esp)
+ movl $2155905152,%edi
+ andl %eax,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %eax,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %eax,%edx
+ roll $8,%eax
+ xorl %esi,%ebp
+ xorl %ecx,%eax
+ xorl %ebp,%ecx
+ xorl %edx,%eax
+ xorl %ebp,%edx
+ roll $24,%ecx
+ xorl %ebp,%eax
+ roll $16,%edx
+ xorl %ecx,%eax
+ roll $8,%ebp
+ xorl %edx,%eax
+ xorl %ebp,%eax
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ebx,%edx
+ roll $8,%ebx
+ xorl %esi,%ebp
+ xorl %ecx,%ebx
+ xorl %ebp,%ecx
+ xorl %edx,%ebx
+ xorl %ebp,%edx
+ roll $24,%ecx
+ xorl %ebp,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%ebp
+ xorl %edx,%ebx
+ movl 12(%esp),%ecx
+ xorl %ebp,%ebx
+ movl 16(%esp),%edx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L006loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
+.type _sse_AES_decrypt_compact,@function
+.align 16
+_sse_AES_decrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 16
+.L007loop:
+ pshufw $12,%mm0,%mm1
+ pshufw $9,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movl %edi,20(%esp)
+ movzbl %al,%esi
+ movzbl %ah,%edx
+ pshufw $6,%mm0,%mm2
+ movzbl -128(%ebp,%esi,1),%ecx
+ movzbl %bl,%edi
+ movzbl -128(%ebp,%edx,1),%edx
+ shrl $16,%eax
+ shll $8,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $16,%esi
+ pshufw $3,%mm4,%mm6
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $24,%esi
+ shrl $16,%ebx
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $8,%esi
+ movd %mm2,%eax
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ shll $16,%esi
+ movd %mm6,%ebx
+ movd %ecx,%mm0
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %al,%edi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $16,%esi
+ shrl $16,%eax
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shrl $16,%ebx
+ shll $8,%esi
+ movd %edx,%mm1
+ movzbl -128(%ebp,%edi,1),%edx
+ movzbl %bh,%edi
+ shll $24,%edx
+ andl $255,%ebx
+ orl %esi,%edx
+ punpckldq %mm1,%mm0
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $8,%esi
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ orl %ebx,%edx
+ shll $16,%esi
+ movzbl -128(%ebp,%eax,1),%eax
+ orl %esi,%edx
+ shll $24,%eax
+ orl %eax,%ecx
+ movl 20(%esp),%edi
+ movd %edx,%mm4
+ movd %ecx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L008out
+ movq %mm0,%mm3
+ movq %mm4,%mm7
+ pshufw $228,%mm0,%mm2
+ pshufw $228,%mm4,%mm6
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pshufw $177,%mm0,%mm0
+ pshufw $177,%mm4,%mm4
+ pslld $8,%mm2
+ pslld $8,%mm6
+ psrld $8,%mm3
+ psrld $8,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pslld $16,%mm2
+ pslld $16,%mm6
+ psrld $16,%mm3
+ psrld $16,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movq 8(%esp),%mm3
+ pxor %mm2,%mm2
+ pxor %mm6,%mm6
+ pcmpgtb %mm1,%mm2
+ pcmpgtb %mm5,%mm6
+ pand %mm3,%mm2
+ pand %mm3,%mm6
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm2,%mm1
+ pxor %mm6,%mm5
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq %mm1,%mm2
+ movq %mm5,%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pslld $24,%mm3
+ pslld $24,%mm7
+ psrld $8,%mm2
+ psrld $8,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pshufw $177,%mm1,%mm3
+ pshufw $177,%mm5,%mm7
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ pshufw $177,%mm1,%mm2
+ pshufw $177,%mm5,%mm6
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pslld $8,%mm1
+ pslld $8,%mm5
+ psrld $8,%mm3
+ psrld $8,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl -128(%ebp),%eax
+ pslld $16,%mm1
+ pslld $16,%mm5
+ movl -64(%ebp),%ebx
+ psrld $16,%mm3
+ psrld $16,%mm7
+ movl (%ebp),%ecx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L007loop
+.align 16
+.L008out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.type _x86_AES_decrypt,@function
+.align 16
+_x86_AES_decrypt:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L009loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %ah,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl (%ebp,%edx,8),%edx
+ movzbl %ch,%ecx
+ xorl 3(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ xorl 1(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L009loop
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+ leal -128(%ebp),%ebp
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl (%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl (%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl (%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl (%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ leal -2048(%ebp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Td:
+.long 1353184337,1353184337
+.long 1399144830,1399144830
+.long 3282310938,3282310938
+.long 2522752826,2522752826
+.long 3412831035,3412831035
+.long 4047871263,4047871263
+.long 2874735276,2874735276
+.long 2466505547,2466505547
+.long 1442459680,1442459680
+.long 4134368941,4134368941
+.long 2440481928,2440481928
+.long 625738485,625738485
+.long 4242007375,4242007375
+.long 3620416197,3620416197
+.long 2151953702,2151953702
+.long 2409849525,2409849525
+.long 1230680542,1230680542
+.long 1729870373,1729870373
+.long 2551114309,2551114309
+.long 3787521629,3787521629
+.long 41234371,41234371
+.long 317738113,317738113
+.long 2744600205,2744600205
+.long 3338261355,3338261355
+.long 3881799427,3881799427
+.long 2510066197,2510066197
+.long 3950669247,3950669247
+.long 3663286933,3663286933
+.long 763608788,763608788
+.long 3542185048,3542185048
+.long 694804553,694804553
+.long 1154009486,1154009486
+.long 1787413109,1787413109
+.long 2021232372,2021232372
+.long 1799248025,1799248025
+.long 3715217703,3715217703
+.long 3058688446,3058688446
+.long 397248752,397248752
+.long 1722556617,1722556617
+.long 3023752829,3023752829
+.long 407560035,407560035
+.long 2184256229,2184256229
+.long 1613975959,1613975959
+.long 1165972322,1165972322
+.long 3765920945,3765920945
+.long 2226023355,2226023355
+.long 480281086,480281086
+.long 2485848313,2485848313
+.long 1483229296,1483229296
+.long 436028815,436028815
+.long 2272059028,2272059028
+.long 3086515026,3086515026
+.long 601060267,601060267
+.long 3791801202,3791801202
+.long 1468997603,1468997603
+.long 715871590,715871590
+.long 120122290,120122290
+.long 63092015,63092015
+.long 2591802758,2591802758
+.long 2768779219,2768779219
+.long 4068943920,4068943920
+.long 2997206819,2997206819
+.long 3127509762,3127509762
+.long 1552029421,1552029421
+.long 723308426,723308426
+.long 2461301159,2461301159
+.long 4042393587,4042393587
+.long 2715969870,2715969870
+.long 3455375973,3455375973
+.long 3586000134,3586000134
+.long 526529745,526529745
+.long 2331944644,2331944644
+.long 2639474228,2639474228
+.long 2689987490,2689987490
+.long 853641733,853641733
+.long 1978398372,1978398372
+.long 971801355,971801355
+.long 2867814464,2867814464
+.long 111112542,111112542
+.long 1360031421,1360031421
+.long 4186579262,4186579262
+.long 1023860118,1023860118
+.long 2919579357,2919579357
+.long 1186850381,1186850381
+.long 3045938321,3045938321
+.long 90031217,90031217
+.long 1876166148,1876166148
+.long 4279586912,4279586912
+.long 620468249,620468249
+.long 2548678102,2548678102
+.long 3426959497,3426959497
+.long 2006899047,2006899047
+.long 3175278768,3175278768
+.long 2290845959,2290845959
+.long 945494503,945494503
+.long 3689859193,3689859193
+.long 1191869601,1191869601
+.long 3910091388,3910091388
+.long 3374220536,3374220536
+.long 0,0
+.long 2206629897,2206629897
+.long 1223502642,1223502642
+.long 2893025566,2893025566
+.long 1316117100,1316117100
+.long 4227796733,4227796733
+.long 1446544655,1446544655
+.long 517320253,517320253
+.long 658058550,658058550
+.long 1691946762,1691946762
+.long 564550760,564550760
+.long 3511966619,3511966619
+.long 976107044,976107044
+.long 2976320012,2976320012
+.long 266819475,266819475
+.long 3533106868,3533106868
+.long 2660342555,2660342555
+.long 1338359936,1338359936
+.long 2720062561,2720062561
+.long 1766553434,1766553434
+.long 370807324,370807324
+.long 179999714,179999714
+.long 3844776128,3844776128
+.long 1138762300,1138762300
+.long 488053522,488053522
+.long 185403662,185403662
+.long 2915535858,2915535858
+.long 3114841645,3114841645
+.long 3366526484,3366526484
+.long 2233069911,2233069911
+.long 1275557295,1275557295
+.long 3151862254,3151862254
+.long 4250959779,4250959779
+.long 2670068215,2670068215
+.long 3170202204,3170202204
+.long 3309004356,3309004356
+.long 880737115,880737115
+.long 1982415755,1982415755
+.long 3703972811,3703972811
+.long 1761406390,1761406390
+.long 1676797112,1676797112
+.long 3403428311,3403428311
+.long 277177154,277177154
+.long 1076008723,1076008723
+.long 538035844,538035844
+.long 2099530373,2099530373
+.long 4164795346,4164795346
+.long 288553390,288553390
+.long 1839278535,1839278535
+.long 1261411869,1261411869
+.long 4080055004,4080055004
+.long 3964831245,3964831245
+.long 3504587127,3504587127
+.long 1813426987,1813426987
+.long 2579067049,2579067049
+.long 4199060497,4199060497
+.long 577038663,577038663
+.long 3297574056,3297574056
+.long 440397984,440397984
+.long 3626794326,3626794326
+.long 4019204898,4019204898
+.long 3343796615,3343796615
+.long 3251714265,3251714265
+.long 4272081548,4272081548
+.long 906744984,906744984
+.long 3481400742,3481400742
+.long 685669029,685669029
+.long 646887386,646887386
+.long 2764025151,2764025151
+.long 3835509292,3835509292
+.long 227702864,227702864
+.long 2613862250,2613862250
+.long 1648787028,1648787028
+.long 3256061430,3256061430
+.long 3904428176,3904428176
+.long 1593260334,1593260334
+.long 4121936770,4121936770
+.long 3196083615,3196083615
+.long 2090061929,2090061929
+.long 2838353263,2838353263
+.long 3004310991,3004310991
+.long 999926984,999926984
+.long 2809993232,2809993232
+.long 1852021992,1852021992
+.long 2075868123,2075868123
+.long 158869197,158869197
+.long 4095236462,4095236462
+.long 28809964,28809964
+.long 2828685187,2828685187
+.long 1701746150,1701746150
+.long 2129067946,2129067946
+.long 147831841,147831841
+.long 3873969647,3873969647
+.long 3650873274,3650873274
+.long 3459673930,3459673930
+.long 3557400554,3557400554
+.long 3598495785,3598495785
+.long 2947720241,2947720241
+.long 824393514,824393514
+.long 815048134,815048134
+.long 3227951669,3227951669
+.long 935087732,935087732
+.long 2798289660,2798289660
+.long 2966458592,2966458592
+.long 366520115,366520115
+.long 1251476721,1251476721
+.long 4158319681,4158319681
+.long 240176511,240176511
+.long 804688151,804688151
+.long 2379631990,2379631990
+.long 1303441219,1303441219
+.long 1414376140,1414376140
+.long 3741619940,3741619940
+.long 3820343710,3820343710
+.long 461924940,461924940
+.long 3089050817,3089050817
+.long 2136040774,2136040774
+.long 82468509,82468509
+.long 1563790337,1563790337
+.long 1937016826,1937016826
+.long 776014843,776014843
+.long 1511876531,1511876531
+.long 1389550482,1389550482
+.long 861278441,861278441
+.long 323475053,323475053
+.long 2355222426,2355222426
+.long 2047648055,2047648055
+.long 2383738969,2383738969
+.long 2302415851,2302415851
+.long 3995576782,3995576782
+.long 902390199,902390199
+.long 3991215329,3991215329
+.long 1018251130,1018251130
+.long 1507840668,1507840668
+.long 1064563285,1064563285
+.long 2043548696,2043548696
+.long 3208103795,3208103795
+.long 3939366739,3939366739
+.long 1537932639,1537932639
+.long 342834655,342834655
+.long 2262516856,2262516856
+.long 2180231114,2180231114
+.long 1053059257,1053059257
+.long 741614648,741614648
+.long 1598071746,1598071746
+.long 1925389590,1925389590
+.long 203809468,203809468
+.long 2336832552,2336832552
+.long 1100287487,1100287487
+.long 1895934009,1895934009
+.long 3736275976,3736275976
+.long 2632234200,2632234200
+.long 2428589668,2428589668
+.long 1636092795,1636092795
+.long 1890988757,1890988757
+.long 1952214088,1952214088
+.long 1113045200,1113045200
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.size _x86_AES_decrypt,.-_x86_AES_decrypt
+.globl AES_decrypt
+.type AES_decrypt,@function
+.align 16
+AES_decrypt:
+.L_AES_decrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L010pic_point
+.L010pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P-.L010pic_point(%ebp),%eax
+ leal .LAES_Td-.L010pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc .L011x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L011x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_decrypt,.-.L_AES_decrypt_begin
+.globl AES_cbc_encrypt
+.type AES_cbc_encrypt,@function
+.align 16
+AES_cbc_encrypt:
+.L_AES_cbc_encrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je .L012drop_out
+ call .L013pic_point
+.L013pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P-.L013pic_point(%ebp),%eax
+ cmpl $0,40(%esp)
+ leal .LAES_Te-.L013pic_point(%ebp),%ebp
+ jne .L014picked_te
+ leal .LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+ pushfl
+ cld
+ cmpl $512,%ecx
+ jb .L015slow_way
+ testl $15,%ecx
+ jnz .L015slow_way
+ btl $28,(%eax)
+ jc .L015slow_way
+ leal -324(%esp),%esi
+ andl $-64,%esi
+ movl %ebp,%eax
+ leal 2304(%ebp),%ebx
+ movl %esi,%edx
+ andl $4095,%eax
+ andl $4095,%ebx
+ andl $4095,%edx
+ cmpl %ebx,%edx
+ jb .L016tbl_break_out
+ subl %ebx,%edx
+ subl %edx,%esi
+ jmp .L017tbl_ok
+.align 4
+.L016tbl_break_out:
+ subl %eax,%edx
+ andl $4095,%edx
+ addl $384,%edx
+ subl %edx,%esi
+.align 4
+.L017tbl_ok:
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 12(%edx),%edi
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl $0,316(%esp)
+ movl %edi,%ebx
+ movl $61,%ecx
+ subl %ebp,%ebx
+ movl %edi,%esi
+ andl $4095,%ebx
+ leal 76(%esp),%edi
+ cmpl $2304,%ebx
+ jb .L018do_copy
+ cmpl $3852,%ebx
+ jb .L019skip_copy
+.align 4
+.L018do_copy:
+ movl %edi,44(%esp)
+.long 2784229001
+.L019skip_copy:
+ movl $16,%edi
+.align 4
+.L020prefetch_tbl:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%esi
+ leal 128(%ebp),%ebp
+ subl $1,%edi
+ jnz .L020prefetch_tbl
+ subl $2048,%ebp
+ movl 32(%esp),%esi
+ movl 48(%esp),%edi
+ cmpl $0,%edx
+ je .L021fast_decrypt
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 16
+.L022fast_enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%esi
+ movl 40(%esp),%ecx
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L022fast_enc_loop
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L023skip_ezero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
+.long 2884892297
+.L023skip_ezero:
+ movl 28(%esp),%esp
+ popfl
+.L012drop_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L021fast_decrypt:
+ cmpl 36(%esp),%esi
+ je .L024fast_dec_in_place
+ movl %edi,52(%esp)
+.align 4
+.align 16
+.L025fast_dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 52(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 36(%esp),%edi
+ movl 32(%esp),%esi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl %esi,52(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edi
+ movl %edi,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L025fast_dec_loop
+ movl 52(%esp),%edi
+ movl 48(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp .L026fast_dec_out
+.align 16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 48(%esp),%edi
+ movl 36(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L027fast_dec_in_place_loop
+.align 4
+.L026fast_dec_out:
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L028skip_dzero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
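+/* rep stosl again (same encoded bytes): zero the stack key copy on the decrypt path */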
+.long 2884892297
+.L028skip_dzero:
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L015slow_way:
+ movl (%eax),%eax
+ movl 36(%esp),%edi
+ leal -80(%esp),%esi
+ andl $-64,%esi
+ leal -143(%edi),%ebx
+ subl %esi,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esi
+ leal 768(%esi),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl %eax,52(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl %esi,%edi
+ movl %eax,%esi
+ cmpl $0,%edx
+ je .L029slow_decrypt
+ cmpl $16,%ecx
+ movl %ebx,%edx
+ jb .L030slow_enc_tail
+ btl $25,52(%esp)
+ jnc .L031slow_enc_x86
+ movq (%edi),%mm0
+ movq 8(%edi),%mm4
+.align 16
+.L032slow_enc_loop_sse:
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl 40(%esp),%ecx
+ movq %mm0,(%edi)
+ movq %mm4,8(%edi)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L032slow_enc_loop_sse
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L031slow_enc_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 4
+.L033slow_enc_loop_x86:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L033slow_enc_loop_x86
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L030slow_enc_tail:
+ emms
+ movl %edx,%edi
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je .L034enc_in_place
+.align 4
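+/* .long 0xa4f3f689 = mov %esi,%esi; rep movsb: copy the ecx remaining input bytes */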
+.long 2767451785
+ jmp .L035enc_skip_in_place
+.L034enc_in_place:
+ leal (%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 4
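+/* .long 0xaaf3f689 = mov %esi,%esi; rep stosb: zero-pad the tail up to the 16-byte block */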
+.long 2868115081
+ movl 48(%esp),%edi
+ movl %edx,%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,40(%esp)
+ jmp .L033slow_enc_loop_x86
+.align 16
+.L029slow_decrypt:
+ btl $25,52(%esp)
+ jnc .L036slow_dec_loop_x86
+.align 4
+.L037slow_dec_loop_sse:
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_decrypt_compact
+ movl 32(%esp),%esi
+ leal 60(%esp),%eax
+ movl 36(%esp),%ebx
+ movl 40(%esp),%ecx
+ movl 48(%esp),%edi
+ movq (%esi),%mm1
+ movq 8(%esi),%mm5
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movq %mm1,(%edi)
+ movq %mm5,8(%edi)
+ subl $16,%ecx
+ jc .L038slow_dec_partial_sse
+ movq %mm0,(%ebx)
+ movq %mm4,8(%ebx)
+ leal 16(%ebx),%ebx
+ movl %ebx,36(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ movl %ecx,40(%esp)
+ jnz .L037slow_dec_loop_sse
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L038slow_dec_partial_sse:
+ movq %mm0,(%eax)
+ movq %mm4,8(%eax)
+ emms
+ addl $16,%ecx
+ movl %ebx,%edi
+ movl %eax,%esi
+.align 4
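+/* rep movsb: move the partially decrypted block out of the stack buffer */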
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L036slow_dec_loop_x86:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt_compact
+ movl 48(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc .L039slow_dec_partial_x86
+ movl %esi,40(%esp)
+ movl 36(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ jnz .L036slow_dec_loop_x86
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L039slow_dec_partial_x86:
+ leal 60(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 32(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl 36(%esp),%edi
+ leal 60(%esp),%esi
+.align 4
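+/* rep movsb: copy the ecx-byte partial plaintext from the stack buffer to the output */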
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
+.type _x86_AES_set_encrypt_key,@function
+.align 16
+_x86_AES_set_encrypt_key:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 24(%esp),%esi
+ movl 32(%esp),%edi
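+/* testl $-1,reg sets ZF only when the register is zero: reject NULL key or schedule pointers */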
+ testl $-1,%esi
+ jz .L040badpointer
+ testl $-1,%edi
+ jz .L040badpointer
+ call .L041pic_point
+.L041pic_point:
+ popl %ebp
+ leal .LAES_Te-.L041pic_point(%ebp),%ebp
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ movl 28(%esp),%ecx
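+/* 28(%esp) is the key size in bits: 128/192/256 map to 10/12/14 rounds, anything else returns -2 */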
+ cmpl $128,%ecx
+ je .L04210rounds
+ cmpl $192,%ecx
+ je .L04312rounds
+ cmpl $256,%ecx
+ je .L04414rounds
+ movl $-2,%eax
+ jmp .L045exit
+.L04210rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ xorl %ecx,%ecx
+ jmp .L04610shortcut
+.align 4
+.L04710loop:
+ movl (%edi),%eax
+ movl 12(%edi),%edx
+.L04610shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,16(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,20(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,24(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,28(%edi)
+ incl %ecx
+ addl $16,%edi
+ cmpl $10,%ecx
+ jl .L04710loop
+ movl $10,80(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04312rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%ecx
+ movl 20(%esi),%edx
+ movl %ecx,16(%edi)
+ movl %edx,20(%edi)
+ xorl %ecx,%ecx
+ jmp .L04812shortcut
+.align 4
+.L04912loop:
+ movl (%edi),%eax
+ movl 20(%edi),%edx
+.L04812shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,24(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,28(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,32(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,36(%edi)
+ cmpl $7,%ecx
+ je .L05012break
+ incl %ecx
+ xorl 16(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,44(%edi)
+ addl $24,%edi
+ jmp .L04912loop
+.L05012break:
+ movl $12,72(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04414rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ movl %eax,16(%edi)
+ movl %ebx,20(%edi)
+ movl %ecx,24(%edi)
+ movl %edx,28(%edi)
+ xorl %ecx,%ecx
+ jmp .L05114shortcut
+.align 4
+.L05214loop:
+ movl 28(%edi),%edx
+.L05114shortcut:
+ movl (%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,32(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,36(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,44(%edi)
+ cmpl $6,%ecx
+ je .L05314break
+ incl %ecx
+ movl %eax,%edx
+ movl 16(%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movl %eax,48(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,52(%edi)
+ xorl 24(%edi),%eax
+ movl %eax,56(%edi)
+ xorl 28(%edi),%eax
+ movl %eax,60(%edi)
+ addl $32,%edi
+ jmp .L05214loop
+.L05314break:
+ movl $14,48(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L040badpointer:
+ movl $-1,%eax
+.L045exit:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size _x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
+.align 16
+AES_set_encrypt_key:
+.L_AES_set_encrypt_key_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ call _x86_AES_set_encrypt_key
+ ret
+.size AES_set_encrypt_key,.-.L_AES_set_encrypt_key_begin
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
+.align 16
+AES_set_decrypt_key:
+.L_AES_set_decrypt_key_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ call _x86_AES_set_encrypt_key
+ cmpl $0,%eax
+ je .L054proceed
+ ret
+.L054proceed:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%esi
+ movl 240(%esi),%ecx
+ leal (,%ecx,4),%ecx
+ leal (%esi,%ecx,4),%edi
+.align 4
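+/* walk inward from both ends, swapping round keys so their order is reversed */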
+.L055invert:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl (%edi),%ecx
+ movl 4(%edi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,(%esi)
+ movl %edx,4(%esi)
+ movl 8(%esi),%eax
+ movl 12(%esi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,8(%edi)
+ movl %ebx,12(%edi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ addl $16,%esi
+ subl $16,%edi
+ cmpl %edi,%esi
+ jne .L055invert
+ movl 28(%esp),%edi
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,28(%esp)
+ movl 16(%edi),%eax
+.align 4
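+/* apply InvMixColumns to each inner round key; 0x80808080, 0xfefefefe and 0x1b1b1b1b are the GF(2^8) doubling masks */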
+.L056permute:
+ addl $16,%edi
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %esi,%ebx
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %eax,%ebx
+ xorl %esi,%ecx
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %eax,%ecx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ roll $8,%eax
+ xorl %esi,%edx
+ movl 4(%edi),%ebp
+ xorl %ebx,%eax
+ xorl %edx,%ebx
+ xorl %ecx,%eax
+ roll $24,%ebx
+ xorl %edx,%ecx
+ xorl %edx,%eax
+ roll $16,%ecx
+ xorl %ebx,%eax
+ roll $8,%edx
+ xorl %ecx,%eax
+ movl %ebp,%ebx
+ xorl %edx,%eax
+ movl %eax,(%edi)
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %ebx,%edx
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ roll $8,%ebx
+ xorl %esi,%eax
+ movl 8(%edi),%ebp
+ xorl %ecx,%ebx
+ xorl %eax,%ecx
+ xorl %edx,%ebx
+ roll $24,%ecx
+ xorl %eax,%edx
+ xorl %eax,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%eax
+ xorl %edx,%ebx
+ movl %ebp,%ecx
+ xorl %eax,%ebx
+ movl %ebx,4(%edi)
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %esi,%edx
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %ecx,%edx
+ xorl %esi,%eax
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %ecx,%eax
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ roll $8,%ecx
+ xorl %esi,%ebx
+ movl 12(%edi),%ebp
+ xorl %edx,%ecx
+ xorl %ebx,%edx
+ xorl %eax,%ecx
+ roll $24,%edx
+ xorl %ebx,%eax
+ xorl %ebx,%ecx
+ roll $16,%eax
+ xorl %edx,%ecx
+ roll $8,%ebx
+ xorl %eax,%ecx
+ movl %ebp,%edx
+ xorl %ebx,%ecx
+ movl %ecx,8(%edi)
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %esi,%eax
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %edx,%eax
+ xorl %esi,%ebx
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %edx,%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ roll $8,%edx
+ xorl %esi,%ecx
+ movl 16(%edi),%ebp
+ xorl %eax,%edx
+ xorl %ecx,%eax
+ xorl %ebx,%edx
+ roll $24,%eax
+ xorl %ecx,%ebx
+ xorl %ecx,%edx
+ roll $16,%ebx
+ xorl %eax,%edx
+ roll $8,%ecx
+ xorl %ebx,%edx
+ movl %ebp,%eax
+ xorl %ecx,%edx
+ movl %edx,12(%edi)
+ cmpl 28(%esp),%edi
+ jb .L056permute
+ xorl %eax,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_set_decrypt_key,.-.L_AES_set_decrypt_key_begin
+.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
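+/* the three .byte lines above spell "AES for x86, CRYPTOGAMS by <appro@openssl.org>" */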
+.comm OPENSSL_ia32cap_P,16,4
+
+ .section ".note.gnu.property", "a"
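+ /* NT_GNU_PROPERTY_TYPE_0 note: property 0xc0000002 (X86_FEATURE_1_AND) with value 3 advertises IBT and SHSTK */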
+ .p2align 2
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ .asciz "GNU"
+1:
+ .p2align 2
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 3
+3:
+ .p2align 2
+4:
+#else
+.text
+.type _x86_AES_encrypt_compact,@function
+.align 16
+_x86_AES_encrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
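+/* eight spaced dummy loads pull the 256-byte compact S-box into cache before any key-dependent lookup */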
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L000loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %ecx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %ecx,%edi
+ xorl %esi,%ecx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%ecx
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%ecx
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %edx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %edx,%edi
+ xorl %esi,%edx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%edx
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%edx
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %eax,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %eax,%edi
+ xorl %esi,%eax
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%eax
+ xorl %edi,%esi
+ movl $2155905152,%ebp
+ xorl %esi,%eax
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%edi
+ movl %ebp,%esi
+ shrl $7,%ebp
+ andl $4278124286,%edi
+ subl %ebp,%esi
+ movl %ebx,%ebp
+ andl $454761243,%esi
+ rorl $16,%ebp
+ xorl %edi,%esi
+ movl %ebx,%edi
+ xorl %esi,%ebx
+ rorl $24,%edi
+ xorl %ebp,%esi
+ roll $24,%ebx
+ xorl %edi,%esi
+ xorl %esi,%ebx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L000loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.type _sse_AES_encrypt_compact,@function
+.align 16
+_sse_AES_encrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
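+/* park 0x1b1b1b1b at 8(%esp): the AES xtime reduction mask used by the MMX MixColumns below */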
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
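+/* each MMX round below unpacks the state into eax/ebx via pshufw/movd for byte-wise S-box lookups */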
+.align 16
+.L001loop:
+ pshufw $8,%mm0,%mm1
+ pshufw $13,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movl %edi,20(%esp)
+ movzbl %al,%esi
+ movzbl %ah,%edx
+ pshufw $13,%mm0,%mm2
+ movzbl -128(%ebp,%esi,1),%ecx
+ movzbl %bl,%edi
+ movzbl -128(%ebp,%edx,1),%edx
+ shrl $16,%eax
+ shll $8,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $16,%esi
+ pshufw $8,%mm4,%mm6
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $24,%esi
+ shrl $16,%ebx
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $8,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ movd %mm2,%eax
+ movd %ecx,%mm0
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %ah,%edi
+ shll $16,%ecx
+ movd %mm6,%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ shll $8,%esi
+ shrl $16,%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shrl $16,%eax
+ movd %ecx,%mm1
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %ah,%edi
+ shll $16,%ecx
+ andl $255,%eax
+ orl %esi,%ecx
+ punpckldq %mm1,%mm0
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ andl $255,%ebx
+ movzbl -128(%ebp,%eax,1),%eax
+ orl %esi,%ecx
+ shll $16,%eax
+ movzbl -128(%ebp,%edi,1),%esi
+ orl %eax,%edx
+ shll $8,%esi
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %esi,%ecx
+ orl %ebx,%edx
+ movl 20(%esp),%edi
+ movd %ecx,%mm4
+ movd %edx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L002out
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pcmpgtb %mm0,%mm3
+ pcmpgtb %mm4,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ pshufw $177,%mm0,%mm2
+ pshufw $177,%mm4,%mm6
+ paddb %mm0,%mm0
+ paddb %mm4,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pshufw $177,%mm2,%mm3
+ pshufw $177,%mm6,%mm7
+ pxor %mm0,%mm1
+ pxor %mm4,%mm5
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm3,%mm2
+ movq %mm7,%mm6
+ pslld $8,%mm3
+ pslld $8,%mm7
+ psrld $24,%mm2
+ psrld $24,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ psrld $8,%mm1
+ psrld $8,%mm5
+ movl -128(%ebp),%eax
+ pslld $24,%mm3
+ pslld $24,%mm7
+ movl -64(%ebp),%ebx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl (%ebp),%ecx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L001loop
+.align 16
+.L002out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.type _x86_AES_encrypt,@function
+.align 16
+_x86_AES_encrypt:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L003loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl (%ebp,%esi,8),%esi
+ movzbl %ch,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movzbl %bh,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ movl (%ebp,%edx,8),%edx
+ movzbl %ah,%eax
+ xorl 3(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ xorl 1(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L003loop
+ movl %eax,%esi
+ andl $255,%esi
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %bh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %ch,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %dh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl 2(%ebp,%edx,8),%edx
+ andl $255,%edx
+ movzbl %ah,%eax
+ movl (%ebp,%eax,8),%eax
+ andl $65280,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movl (%ebp,%ebx,8),%ebx
+ andl $16711680,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movl 2(%ebp,%ecx,8),%ecx
+ andl $4278190080,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Te:
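+/* Te: every entry is stored twice so the rotated views can be loaded from byte offsets 1, 2 and 3 */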
+.long 2774754246,2774754246
+.long 2222750968,2222750968
+.long 2574743534,2574743534
+.long 2373680118,2373680118
+.long 234025727,234025727
+.long 3177933782,3177933782
+.long 2976870366,2976870366
+.long 1422247313,1422247313
+.long 1345335392,1345335392
+.long 50397442,50397442
+.long 2842126286,2842126286
+.long 2099981142,2099981142
+.long 436141799,436141799
+.long 1658312629,1658312629
+.long 3870010189,3870010189
+.long 2591454956,2591454956
+.long 1170918031,1170918031
+.long 2642575903,2642575903
+.long 1086966153,1086966153
+.long 2273148410,2273148410
+.long 368769775,368769775
+.long 3948501426,3948501426
+.long 3376891790,3376891790
+.long 200339707,200339707
+.long 3970805057,3970805057
+.long 1742001331,1742001331
+.long 4255294047,4255294047
+.long 3937382213,3937382213
+.long 3214711843,3214711843
+.long 4154762323,4154762323
+.long 2524082916,2524082916
+.long 1539358875,1539358875
+.long 3266819957,3266819957
+.long 486407649,486407649
+.long 2928907069,2928907069
+.long 1780885068,1780885068
+.long 1513502316,1513502316
+.long 1094664062,1094664062
+.long 49805301,49805301
+.long 1338821763,1338821763
+.long 1546925160,1546925160
+.long 4104496465,4104496465
+.long 887481809,887481809
+.long 150073849,150073849
+.long 2473685474,2473685474
+.long 1943591083,1943591083
+.long 1395732834,1395732834
+.long 1058346282,1058346282
+.long 201589768,201589768
+.long 1388824469,1388824469
+.long 1696801606,1696801606
+.long 1589887901,1589887901
+.long 672667696,672667696
+.long 2711000631,2711000631
+.long 251987210,251987210
+.long 3046808111,3046808111
+.long 151455502,151455502
+.long 907153956,907153956
+.long 2608889883,2608889883
+.long 1038279391,1038279391
+.long 652995533,652995533
+.long 1764173646,1764173646
+.long 3451040383,3451040383
+.long 2675275242,2675275242
+.long 453576978,453576978
+.long 2659418909,2659418909
+.long 1949051992,1949051992
+.long 773462580,773462580
+.long 756751158,756751158
+.long 2993581788,2993581788
+.long 3998898868,3998898868
+.long 4221608027,4221608027
+.long 4132590244,4132590244
+.long 1295727478,1295727478
+.long 1641469623,1641469623
+.long 3467883389,3467883389
+.long 2066295122,2066295122
+.long 1055122397,1055122397
+.long 1898917726,1898917726
+.long 2542044179,2542044179
+.long 4115878822,4115878822
+.long 1758581177,1758581177
+.long 0,0
+.long 753790401,753790401
+.long 1612718144,1612718144
+.long 536673507,536673507
+.long 3367088505,3367088505
+.long 3982187446,3982187446
+.long 3194645204,3194645204
+.long 1187761037,1187761037
+.long 3653156455,3653156455
+.long 1262041458,1262041458
+.long 3729410708,3729410708
+.long 3561770136,3561770136
+.long 3898103984,3898103984
+.long 1255133061,1255133061
+.long 1808847035,1808847035
+.long 720367557,720367557
+.long 3853167183,3853167183
+.long 385612781,385612781
+.long 3309519750,3309519750
+.long 3612167578,3612167578
+.long 1429418854,1429418854
+.long 2491778321,2491778321
+.long 3477423498,3477423498
+.long 284817897,284817897
+.long 100794884,100794884
+.long 2172616702,2172616702
+.long 4031795360,4031795360
+.long 1144798328,1144798328
+.long 3131023141,3131023141
+.long 3819481163,3819481163
+.long 4082192802,4082192802
+.long 4272137053,4272137053
+.long 3225436288,3225436288
+.long 2324664069,2324664069
+.long 2912064063,2912064063
+.long 3164445985,3164445985
+.long 1211644016,1211644016
+.long 83228145,83228145
+.long 3753688163,3753688163
+.long 3249976951,3249976951
+.long 1977277103,1977277103
+.long 1663115586,1663115586
+.long 806359072,806359072
+.long 452984805,452984805
+.long 250868733,250868733
+.long 1842533055,1842533055
+.long 1288555905,1288555905
+.long 336333848,336333848
+.long 890442534,890442534
+.long 804056259,804056259
+.long 3781124030,3781124030
+.long 2727843637,2727843637
+.long 3427026056,3427026056
+.long 957814574,957814574
+.long 1472513171,1472513171
+.long 4071073621,4071073621
+.long 2189328124,2189328124
+.long 1195195770,1195195770
+.long 2892260552,2892260552
+.long 3881655738,3881655738
+.long 723065138,723065138
+.long 2507371494,2507371494
+.long 2690670784,2690670784
+.long 2558624025,2558624025
+.long 3511635870,3511635870
+.long 2145180835,2145180835
+.long 1713513028,1713513028
+.long 2116692564,2116692564
+.long 2878378043,2878378043
+.long 2206763019,2206763019
+.long 3393603212,3393603212
+.long 703524551,703524551
+.long 3552098411,3552098411
+.long 1007948840,1007948840
+.long 2044649127,2044649127
+.long 3797835452,3797835452
+.long 487262998,487262998
+.long 1994120109,1994120109
+.long 1004593371,1004593371
+.long 1446130276,1446130276
+.long 1312438900,1312438900
+.long 503974420,503974420
+.long 3679013266,3679013266
+.long 168166924,168166924
+.long 1814307912,1814307912
+.long 3831258296,3831258296
+.long 1573044895,1573044895
+.long 1859376061,1859376061
+.long 4021070915,4021070915
+.long 2791465668,2791465668
+.long 2828112185,2828112185
+.long 2761266481,2761266481
+.long 937747667,937747667
+.long 2339994098,2339994098
+.long 854058965,854058965
+.long 1137232011,1137232011
+.long 1496790894,1496790894
+.long 3077402074,3077402074
+.long 2358086913,2358086913
+.long 1691735473,1691735473
+.long 3528347292,3528347292
+.long 3769215305,3769215305
+.long 3027004632,3027004632
+.long 4199962284,4199962284
+.long 133494003,133494003
+.long 636152527,636152527
+.long 2942657994,2942657994
+.long 2390391540,2390391540
+.long 3920539207,3920539207
+.long 403179536,403179536
+.long 3585784431,3585784431
+.long 2289596656,2289596656
+.long 1864705354,1864705354
+.long 1915629148,1915629148
+.long 605822008,605822008
+.long 4054230615,4054230615
+.long 3350508659,3350508659
+.long 1371981463,1371981463
+.long 602466507,602466507
+.long 2094914977,2094914977
+.long 2624877800,2624877800
+.long 555687742,555687742
+.long 3712699286,3712699286
+.long 3703422305,3703422305
+.long 2257292045,2257292045
+.long 2240449039,2240449039
+.long 2423288032,2423288032
+.long 1111375484,1111375484
+.long 3300242801,3300242801
+.long 2858837708,2858837708
+.long 3628615824,3628615824
+.long 84083462,84083462
+.long 32962295,32962295
+.long 302911004,302911004
+.long 2741068226,2741068226
+.long 1597322602,1597322602
+.long 4183250862,4183250862
+.long 3501832553,3501832553
+.long 2441512471,2441512471
+.long 1489093017,1489093017
+.long 656219450,656219450
+.long 3114180135,3114180135
+.long 954327513,954327513
+.long 335083755,335083755
+.long 3013122091,3013122091
+.long 856756514,856756514
+.long 3144247762,3144247762
+.long 1893325225,1893325225
+.long 2307821063,2307821063
+.long 2811532339,2811532339
+.long 3063651117,3063651117
+.long 572399164,572399164
+.long 2458355477,2458355477
+.long 552200649,552200649
+.long 1238290055,1238290055
+.long 4283782570,4283782570
+.long 2015897680,2015897680
+.long 2061492133,2061492133
+.long 2408352771,2408352771
+.long 4171342169,4171342169
+.long 2156497161,2156497161
+.long 386731290,386731290
+.long 3669999461,3669999461
+.long 837215959,837215959
+.long 3326231172,3326231172
+.long 3093850320,3093850320
+.long 3275833730,3275833730
+.long 2962856233,2962856233
+.long 1999449434,1999449434
+.long 286199582,286199582
+.long 3417354363,3417354363
+.long 4233385128,4233385128
+.long 3602627437,3602627437
+.long 974525996,974525996
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
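+/* end of the four S-box copies; the .long words below are the key-schedule round constants (rcon) */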
+.long 1,2,4,8
+.long 16,32,64,128
+.long 27,54,0,0
+.long 0,0,0,0
+.size _x86_AES_encrypt,.-_x86_AES_encrypt
+.globl AES_encrypt
+.type AES_encrypt,@function
+.align 16
+AES_encrypt:
+.L_AES_encrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L004pic_point
+.L004pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ leal .LAES_Te-.L004pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
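+/* ia32cap bit 25 (SSE) selects the MMX-based compact path */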
+ btl $25,(%eax)
+ jnc .L005x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L005x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_encrypt,.-.L_AES_encrypt_begin
+.type _x86_AES_decrypt_compact,@function
+.align 16
+_x86_AES_decrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
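+/* as in the encrypt path: warm the 256-byte lookup table before any data-dependent access */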
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L006loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%eax
+ subl %edi,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %esi,%eax
+ movl $2155905152,%edi
+ andl %eax,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ecx,%eax
+ xorl %esi,%ebx
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ecx,%ebx
+ roll $8,%ecx
+ xorl %esi,%ebp
+ xorl %eax,%ecx
+ xorl %ebp,%eax
+ xorl %ebx,%ecx
+ xorl %ebp,%ebx
+ roll $24,%eax
+ xorl %ebp,%ecx
+ roll $16,%ebx
+ xorl %eax,%ecx
+ roll $8,%ebp
+ xorl %ebx,%ecx
+ movl 4(%esp),%eax
+ xorl %ebp,%ecx
+ movl %ecx,12(%esp)
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %esi,%ebx
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %edx,%ebx
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %edx,%ecx
+ roll $8,%edx
+ xorl %esi,%ebp
+ xorl %ebx,%edx
+ xorl %ebp,%ebx
+ xorl %ecx,%edx
+ xorl %ebp,%ecx
+ roll $24,%ebx
+ xorl %ebp,%edx
+ roll $16,%ecx
+ xorl %ebx,%edx
+ roll $8,%ebp
+ xorl %ecx,%edx
+ movl 8(%esp),%ebx
+ xorl %ebp,%edx
+ movl %edx,16(%esp)
+ movl $2155905152,%edi
+ andl %eax,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %eax,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %eax,%edx
+ roll $8,%eax
+ xorl %esi,%ebp
+ xorl %ecx,%eax
+ xorl %ebp,%ecx
+ xorl %edx,%eax
+ xorl %ebp,%edx
+ roll $24,%ecx
+ xorl %ebp,%eax
+ roll $16,%edx
+ xorl %ecx,%eax
+ roll $8,%ebp
+ xorl %edx,%eax
+ xorl %ebp,%eax
+ movl $2155905152,%edi
+ andl %ebx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%edi
+ andl %ecx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%edi
+ andl %edx,%edi
+ movl %edi,%esi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ebx,%edx
+ roll $8,%ebx
+ xorl %esi,%ebp
+ xorl %ecx,%ebx
+ xorl %ebp,%ecx
+ xorl %edx,%ebx
+ xorl %ebp,%edx
+ roll $24,%ecx
+ xorl %ebp,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%ebp
+ xorl %edx,%ebx
+ movl 12(%esp),%ecx
+ xorl %ebp,%ebx
+ movl 16(%esp),%edx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L006loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
+.type _sse_AES_decrypt_compact,@function
+.align 16
+_sse_AES_decrypt_compact:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 16
+.L007loop:
+ pshufw $12,%mm0,%mm1
+ pshufw $9,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movl %edi,20(%esp)
+ movzbl %al,%esi
+ movzbl %ah,%edx
+ pshufw $6,%mm0,%mm2
+ movzbl -128(%ebp,%esi,1),%ecx
+ movzbl %bl,%edi
+ movzbl -128(%ebp,%edx,1),%edx
+ shrl $16,%eax
+ shll $8,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $16,%esi
+ pshufw $3,%mm4,%mm6
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $24,%esi
+ shrl $16,%ebx
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $8,%esi
+ movd %mm2,%eax
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ shll $16,%esi
+ movd %mm6,%ebx
+ movd %ecx,%mm0
+ movzbl -128(%ebp,%edi,1),%ecx
+ movzbl %al,%edi
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bl,%edi
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %ah,%edi
+ shll $16,%esi
+ shrl $16,%eax
+ orl %esi,%edx
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %bh,%edi
+ shrl $16,%ebx
+ shll $8,%esi
+ movd %edx,%mm1
+ movzbl -128(%ebp,%edi,1),%edx
+ movzbl %bh,%edi
+ shll $24,%edx
+ andl $255,%ebx
+ orl %esi,%edx
+ punpckldq %mm1,%mm0
+ movzbl -128(%ebp,%edi,1),%esi
+ movzbl %al,%edi
+ shll $8,%esi
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %esi,%ecx
+ movzbl -128(%ebp,%edi,1),%esi
+ orl %ebx,%edx
+ shll $16,%esi
+ movzbl -128(%ebp,%eax,1),%eax
+ orl %esi,%edx
+ shll $24,%eax
+ orl %eax,%ecx
+ movl 20(%esp),%edi
+ movd %edx,%mm4
+ movd %ecx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L008out
+ movq %mm0,%mm3
+ movq %mm4,%mm7
+ pshufw $228,%mm0,%mm2
+ pshufw $228,%mm4,%mm6
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pshufw $177,%mm0,%mm0
+ pshufw $177,%mm4,%mm4
+ pslld $8,%mm2
+ pslld $8,%mm6
+ psrld $8,%mm3
+ psrld $8,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pslld $16,%mm2
+ pslld $16,%mm6
+ psrld $16,%mm3
+ psrld $16,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movq 8(%esp),%mm3
+ pxor %mm2,%mm2
+ pxor %mm6,%mm6
+ pcmpgtb %mm1,%mm2
+ pcmpgtb %mm5,%mm6
+ pand %mm3,%mm2
+ pand %mm3,%mm6
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm2,%mm1
+ pxor %mm6,%mm5
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq %mm1,%mm2
+ movq %mm5,%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pslld $24,%mm3
+ pslld $24,%mm7
+ psrld $8,%mm2
+ psrld $8,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pshufw $177,%mm1,%mm3
+ pshufw $177,%mm5,%mm7
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ pshufw $177,%mm1,%mm2
+ pshufw $177,%mm5,%mm6
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pslld $8,%mm1
+ pslld $8,%mm5
+ psrld $8,%mm3
+ psrld $8,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl -128(%ebp),%eax
+ pslld $16,%mm1
+ pslld $16,%mm5
+ movl -64(%ebp),%ebx
+ psrld $16,%mm3
+ psrld $16,%mm7
+ movl (%ebp),%ecx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L007loop
+.align 16
+.L008out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.type _x86_AES_decrypt,@function
+.align 16
+_x86_AES_decrypt:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L009loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %ah,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl (%ebp,%edx,8),%edx
+ movzbl %ch,%ecx
+ xorl 3(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ xorl 1(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L009loop
+ leal 2176(%ebp),%ebp
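+/* the loads below appear to warm the 256-byte inverse S-box kept 2048 bytes past .LAES_Td before the final round */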
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+ leal -128(%ebp),%ebp
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl (%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl (%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl (%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl (%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ leal -2048(%ebp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Td:
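+/* Td: decryption T-table with the same duplicated-entry layout as .LAES_Te */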
+.long 1353184337,1353184337
+.long 1399144830,1399144830
+.long 3282310938,3282310938
+.long 2522752826,2522752826
+.long 3412831035,3412831035
+.long 4047871263,4047871263
+.long 2874735276,2874735276
+.long 2466505547,2466505547
+.long 1442459680,1442459680
+.long 4134368941,4134368941
+.long 2440481928,2440481928
+.long 625738485,625738485
+.long 4242007375,4242007375
+.long 3620416197,3620416197
+.long 2151953702,2151953702
+.long 2409849525,2409849525
+.long 1230680542,1230680542
+.long 1729870373,1729870373
+.long 2551114309,2551114309
+.long 3787521629,3787521629
+.long 41234371,41234371
+.long 317738113,317738113
+.long 2744600205,2744600205
+.long 3338261355,3338261355
+.long 3881799427,3881799427
+.long 2510066197,2510066197
+.long 3950669247,3950669247
+.long 3663286933,3663286933
+.long 763608788,763608788
+.long 3542185048,3542185048
+.long 694804553,694804553
+.long 1154009486,1154009486
+.long 1787413109,1787413109
+.long 2021232372,2021232372
+.long 1799248025,1799248025
+.long 3715217703,3715217703
+.long 3058688446,3058688446
+.long 397248752,397248752
+.long 1722556617,1722556617
+.long 3023752829,3023752829
+.long 407560035,407560035
+.long 2184256229,2184256229
+.long 1613975959,1613975959
+.long 1165972322,1165972322
+.long 3765920945,3765920945
+.long 2226023355,2226023355
+.long 480281086,480281086
+.long 2485848313,2485848313
+.long 1483229296,1483229296
+.long 436028815,436028815
+.long 2272059028,2272059028
+.long 3086515026,3086515026
+.long 601060267,601060267
+.long 3791801202,3791801202
+.long 1468997603,1468997603
+.long 715871590,715871590
+.long 120122290,120122290
+.long 63092015,63092015
+.long 2591802758,2591802758
+.long 2768779219,2768779219
+.long 4068943920,4068943920
+.long 2997206819,2997206819
+.long 3127509762,3127509762
+.long 1552029421,1552029421
+.long 723308426,723308426
+.long 2461301159,2461301159
+.long 4042393587,4042393587
+.long 2715969870,2715969870
+.long 3455375973,3455375973
+.long 3586000134,3586000134
+.long 526529745,526529745
+.long 2331944644,2331944644
+.long 2639474228,2639474228
+.long 2689987490,2689987490
+.long 853641733,853641733
+.long 1978398372,1978398372
+.long 971801355,971801355
+.long 2867814464,2867814464
+.long 111112542,111112542
+.long 1360031421,1360031421
+.long 4186579262,4186579262
+.long 1023860118,1023860118
+.long 2919579357,2919579357
+.long 1186850381,1186850381
+.long 3045938321,3045938321
+.long 90031217,90031217
+.long 1876166148,1876166148
+.long 4279586912,4279586912
+.long 620468249,620468249
+.long 2548678102,2548678102
+.long 3426959497,3426959497
+.long 2006899047,2006899047
+.long 3175278768,3175278768
+.long 2290845959,2290845959
+.long 945494503,945494503
+.long 3689859193,3689859193
+.long 1191869601,1191869601
+.long 3910091388,3910091388
+.long 3374220536,3374220536
+.long 0,0
+.long 2206629897,2206629897
+.long 1223502642,1223502642
+.long 2893025566,2893025566
+.long 1316117100,1316117100
+.long 4227796733,4227796733
+.long 1446544655,1446544655
+.long 517320253,517320253
+.long 658058550,658058550
+.long 1691946762,1691946762
+.long 564550760,564550760
+.long 3511966619,3511966619
+.long 976107044,976107044
+.long 2976320012,2976320012
+.long 266819475,266819475
+.long 3533106868,3533106868
+.long 2660342555,2660342555
+.long 1338359936,1338359936
+.long 2720062561,2720062561
+.long 1766553434,1766553434
+.long 370807324,370807324
+.long 179999714,179999714
+.long 3844776128,3844776128
+.long 1138762300,1138762300
+.long 488053522,488053522
+.long 185403662,185403662
+.long 2915535858,2915535858
+.long 3114841645,3114841645
+.long 3366526484,3366526484
+.long 2233069911,2233069911
+.long 1275557295,1275557295
+.long 3151862254,3151862254
+.long 4250959779,4250959779
+.long 2670068215,2670068215
+.long 3170202204,3170202204
+.long 3309004356,3309004356
+.long 880737115,880737115
+.long 1982415755,1982415755
+.long 3703972811,3703972811
+.long 1761406390,1761406390
+.long 1676797112,1676797112
+.long 3403428311,3403428311
+.long 277177154,277177154
+.long 1076008723,1076008723
+.long 538035844,538035844
+.long 2099530373,2099530373
+.long 4164795346,4164795346
+.long 288553390,288553390
+.long 1839278535,1839278535
+.long 1261411869,1261411869
+.long 4080055004,4080055004
+.long 3964831245,3964831245
+.long 3504587127,3504587127
+.long 1813426987,1813426987
+.long 2579067049,2579067049
+.long 4199060497,4199060497
+.long 577038663,577038663
+.long 3297574056,3297574056
+.long 440397984,440397984
+.long 3626794326,3626794326
+.long 4019204898,4019204898
+.long 3343796615,3343796615
+.long 3251714265,3251714265
+.long 4272081548,4272081548
+.long 906744984,906744984
+.long 3481400742,3481400742
+.long 685669029,685669029
+.long 646887386,646887386
+.long 2764025151,2764025151
+.long 3835509292,3835509292
+.long 227702864,227702864
+.long 2613862250,2613862250
+.long 1648787028,1648787028
+.long 3256061430,3256061430
+.long 3904428176,3904428176
+.long 1593260334,1593260334
+.long 4121936770,4121936770
+.long 3196083615,3196083615
+.long 2090061929,2090061929
+.long 2838353263,2838353263
+.long 3004310991,3004310991
+.long 999926984,999926984
+.long 2809993232,2809993232
+.long 1852021992,1852021992
+.long 2075868123,2075868123
+.long 158869197,158869197
+.long 4095236462,4095236462
+.long 28809964,28809964
+.long 2828685187,2828685187
+.long 1701746150,1701746150
+.long 2129067946,2129067946
+.long 147831841,147831841
+.long 3873969647,3873969647
+.long 3650873274,3650873274
+.long 3459673930,3459673930
+.long 3557400554,3557400554
+.long 3598495785,3598495785
+.long 2947720241,2947720241
+.long 824393514,824393514
+.long 815048134,815048134
+.long 3227951669,3227951669
+.long 935087732,935087732
+.long 2798289660,2798289660
+.long 2966458592,2966458592
+.long 366520115,366520115
+.long 1251476721,1251476721
+.long 4158319681,4158319681
+.long 240176511,240176511
+.long 804688151,804688151
+.long 2379631990,2379631990
+.long 1303441219,1303441219
+.long 1414376140,1414376140
+.long 3741619940,3741619940
+.long 3820343710,3820343710
+.long 461924940,461924940
+.long 3089050817,3089050817
+.long 2136040774,2136040774
+.long 82468509,82468509
+.long 1563790337,1563790337
+.long 1937016826,1937016826
+.long 776014843,776014843
+.long 1511876531,1511876531
+.long 1389550482,1389550482
+.long 861278441,861278441
+.long 323475053,323475053
+.long 2355222426,2355222426
+.long 2047648055,2047648055
+.long 2383738969,2383738969
+.long 2302415851,2302415851
+.long 3995576782,3995576782
+.long 902390199,902390199
+.long 3991215329,3991215329
+.long 1018251130,1018251130
+.long 1507840668,1507840668
+.long 1064563285,1064563285
+.long 2043548696,2043548696
+.long 3208103795,3208103795
+.long 3939366739,3939366739
+.long 1537932639,1537932639
+.long 342834655,342834655
+.long 2262516856,2262516856
+.long 2180231114,2180231114
+.long 1053059257,1053059257
+.long 741614648,741614648
+.long 1598071746,1598071746
+.long 1925389590,1925389590
+.long 203809468,203809468
+.long 2336832552,2336832552
+.long 1100287487,1100287487
+.long 1895934009,1895934009
+.long 3736275976,3736275976
+.long 2632234200,2632234200
+.long 2428589668,2428589668
+.long 1636092795,1636092795
+.long 1890988757,1890988757
+.long 1952214088,1952214088
+.long 1113045200,1113045200
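+/* Td4: the AES inverse S-box (0x52,0x09,0x6a,0xd5,...), emitted as four
+   identical 256-byte copies; AES_decrypt's "andl $768" arithmetic selects
+   one copy from a stack-derived offset, presumably to vary which cache
+   lines the byte lookups touch */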
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.size _x86_AES_decrypt,.-_x86_AES_decrypt
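+/* void AES_decrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key) */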
+.globl AES_decrypt
+.type AES_decrypt,@function
+.align 16
+AES_decrypt:
+.L_AES_decrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
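+/* f3 0f 1e fb: endbr32, the CET indirect-branch landing pad */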
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L010pic_point
+.L010pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ leal .LAES_Td-.L010pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc .L011x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L011x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_decrypt,.-.L_AES_decrypt_begin
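+/* void AES_cbc_encrypt(const unsigned char *in, unsigned char *out,
+   size_t length, const AES_KEY *key, unsigned char *ivec, const int enc) */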
+.globl AES_cbc_encrypt
+.type AES_cbc_encrypt,@function
+.align 16
+AES_cbc_encrypt:
+.L_AES_cbc_encrypt_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je .L012drop_out
+ call .L013pic_point
+.L013pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ cmpl $0,40(%esp)
+ leal .LAES_Te-.L013pic_point(%ebp),%ebp
+ jne .L014picked_te
+ leal .LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+ pushfl
+ cld
+ cmpl $512,%ecx
+ jb .L015slow_way
+ testl $15,%ecx
+ jnz .L015slow_way
+ btl $28,(%eax)
+ jc .L015slow_way
+ leal -324(%esp),%esi
+ andl $-64,%esi
+ movl %ebp,%eax
+ leal 2304(%ebp),%ebx
+ movl %esi,%edx
+ andl $4095,%eax
+ andl $4095,%ebx
+ andl $4095,%edx
+ cmpl %ebx,%edx
+ jb .L016tbl_break_out
+ subl %ebx,%edx
+ subl %edx,%esi
+ jmp .L017tbl_ok
+.align 4
+.L016tbl_break_out:
+ subl %eax,%edx
+ andl $4095,%edx
+ addl $384,%edx
+ subl %edx,%esi
+.align 4
+.L017tbl_ok:
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 12(%edx),%edi
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl $0,316(%esp)
+ movl %edi,%ebx
+ movl $61,%ecx
+ subl %ebp,%ebx
+ movl %edi,%esi
+ andl $4095,%ebx
+ leal 76(%esp),%edi
+ cmpl $2304,%ebx
+ jb .L018do_copy
+ cmpl $3852,%ebx
+ jb .L019skip_copy
+.align 4
+.L018do_copy:
+ movl %edi,44(%esp)
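+/* the .long below inline-encodes 89 f6 f3 a5 (mov %esi,%esi; rep movsl):
+   copy the 61-dword key schedule onto the stack */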
+.long 2784229001
+.L019skip_copy:
+ movl $16,%edi
+.align 4
+.L020prefetch_tbl:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%esi
+ leal 128(%ebp),%ebp
+ subl $1,%edi
+ jnz .L020prefetch_tbl
+ subl $2048,%ebp
+ movl 32(%esp),%esi
+ movl 48(%esp),%edi
+ cmpl $0,%edx
+ je .L021fast_decrypt
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 16
+.L022fast_enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%esi
+ movl 40(%esp),%ecx
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L022fast_enc_loop
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L023skip_ezero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
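+/* inline 89 f6 f3 ab (mov %esi,%esi; rep stosl): wipe the 60-dword
+   on-stack key schedule copy */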
+.long 2884892297
+.L023skip_ezero:
+ movl 28(%esp),%esp
+ popfl
+.L012drop_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L021fast_decrypt:
+ cmpl 36(%esp),%esi
+ je .L024fast_dec_in_place
+ movl %edi,52(%esp)
+.align 4
+.align 16
+.L025fast_dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 52(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 36(%esp),%edi
+ movl 32(%esp),%esi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl %esi,52(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edi
+ movl %edi,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L025fast_dec_loop
+ movl 52(%esp),%edi
+ movl 48(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp .L026fast_dec_out
+.align 16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 48(%esp),%edi
+ movl 36(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L027fast_dec_in_place_loop
+.align 4
+.L026fast_dec_out:
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L028skip_dzero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
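+/* inline rep stosl again: wipe the on-stack key schedule copy */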
+.long 2884892297
+.L028skip_dzero:
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L015slow_way:
+ movl (%eax),%eax
+ movl 36(%esp),%edi
+ leal -80(%esp),%esi
+ andl $-64,%esi
+ leal -143(%edi),%ebx
+ subl %esi,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esi
+ leal 768(%esi),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl %eax,52(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl %esi,%edi
+ movl %eax,%esi
+ cmpl $0,%edx
+ je .L029slow_decrypt
+ cmpl $16,%ecx
+ movl %ebx,%edx
+ jb .L030slow_enc_tail
+ btl $25,52(%esp)
+ jnc .L031slow_enc_x86
+ movq (%edi),%mm0
+ movq 8(%edi),%mm4
+.align 16
+.L032slow_enc_loop_sse:
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl 40(%esp),%ecx
+ movq %mm0,(%edi)
+ movq %mm4,8(%edi)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L032slow_enc_loop_sse
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L031slow_enc_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 4
+.L033slow_enc_loop_x86:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L033slow_enc_loop_x86
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L030slow_enc_tail:
+ emms
+ movl %edx,%edi
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je .L034enc_in_place
+.align 4
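+/* inline 89 f6 f3 a4 (mov %esi,%esi; rep movsb): copy the partial
+   input block to the output buffer */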
+.long 2767451785
+ jmp .L035enc_skip_in_place
+.L034enc_in_place:
+ leal (%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 4
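+/* inline 89 f6 f3 aa (mov %esi,%esi; rep stosb): zero-pad the tail
+   to a full 16-byte block */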
+.long 2868115081
+ movl 48(%esp),%edi
+ movl %edx,%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,40(%esp)
+ jmp .L033slow_enc_loop_x86
+.align 16
+.L029slow_decrypt:
+ btl $25,52(%esp)
+ jnc .L036slow_dec_loop_x86
+.align 4
+.L037slow_dec_loop_sse:
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_decrypt_compact
+ movl 32(%esp),%esi
+ leal 60(%esp),%eax
+ movl 36(%esp),%ebx
+ movl 40(%esp),%ecx
+ movl 48(%esp),%edi
+ movq (%esi),%mm1
+ movq 8(%esi),%mm5
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movq %mm1,(%edi)
+ movq %mm5,8(%edi)
+ subl $16,%ecx
+ jc .L038slow_dec_partial_sse
+ movq %mm0,(%ebx)
+ movq %mm4,8(%ebx)
+ leal 16(%ebx),%ebx
+ movl %ebx,36(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ movl %ecx,40(%esp)
+ jnz .L037slow_dec_loop_sse
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L038slow_dec_partial_sse:
+ movq %mm0,(%eax)
+ movq %mm4,8(%eax)
+ emms
+ addl $16,%ecx
+ movl %ebx,%edi
+ movl %eax,%esi
+.align 4
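+/* inline rep movsb: copy the decrypted partial block to the output */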
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L036slow_dec_loop_x86:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt_compact
+ movl 48(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc .L039slow_dec_partial_x86
+ movl %esi,40(%esp)
+ movl 36(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ jnz .L036slow_dec_loop_x86
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L039slow_dec_partial_x86:
+ leal 60(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 32(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl 36(%esp),%edi
+ leal 60(%esp),%esi
+.align 4
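+/* inline rep movsb: copy out the final partial block */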
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
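+/* common key-expansion body shared by AES_set_encrypt_key and
+   AES_set_decrypt_key; returns 0 on success, -1 for a NULL pointer,
+   -2 for a key length other than 128/192/256 bits */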
+.type _x86_AES_set_encrypt_key,@function
+.align 16
+_x86_AES_set_encrypt_key:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 24(%esp),%esi
+ movl 32(%esp),%edi
+ testl $-1,%esi
+ jz .L040badpointer
+ testl $-1,%edi
+ jz .L040badpointer
+ call .L041pic_point
+.L041pic_point:
+ popl %ebp
+ leal .LAES_Te-.L041pic_point(%ebp),%ebp
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ movl 28(%esp),%ecx
+ cmpl $128,%ecx
+ je .L04210rounds
+ cmpl $192,%ecx
+ je .L04312rounds
+ cmpl $256,%ecx
+ je .L04414rounds
+ movl $-2,%eax
+ jmp .L045exit
+.L04210rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ xorl %ecx,%ecx
+ jmp .L04610shortcut
+.align 4
+.L04710loop:
+ movl (%edi),%eax
+ movl 12(%edi),%edx
+.L04610shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,16(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,20(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,24(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,28(%edi)
+ incl %ecx
+ addl $16,%edi
+ cmpl $10,%ecx
+ jl .L04710loop
+ movl $10,80(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04312rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%ecx
+ movl 20(%esi),%edx
+ movl %ecx,16(%edi)
+ movl %edx,20(%edi)
+ xorl %ecx,%ecx
+ jmp .L04812shortcut
+.align 4
+.L04912loop:
+ movl (%edi),%eax
+ movl 20(%edi),%edx
+.L04812shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,24(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,28(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,32(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,36(%edi)
+ cmpl $7,%ecx
+ je .L05012break
+ incl %ecx
+ xorl 16(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,44(%edi)
+ addl $24,%edi
+ jmp .L04912loop
+.L05012break:
+ movl $12,72(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04414rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ movl %eax,16(%edi)
+ movl %ebx,20(%edi)
+ movl %ecx,24(%edi)
+ movl %edx,28(%edi)
+ xorl %ecx,%ecx
+ jmp .L05114shortcut
+.align 4
+.L05214loop:
+ movl 28(%edi),%edx
+.L05114shortcut:
+ movl (%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,32(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,36(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,44(%edi)
+ cmpl $6,%ecx
+ je .L05314break
+ incl %ecx
+ movl %eax,%edx
+ movl 16(%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movl %eax,48(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,52(%edi)
+ xorl 24(%edi),%eax
+ movl %eax,56(%edi)
+ xorl 28(%edi),%eax
+ movl %eax,60(%edi)
+ addl $32,%edi
+ jmp .L05214loop
+.L05314break:
+ movl $14,48(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L040badpointer:
+ movl $-1,%eax
+.L045exit:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size _x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
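+/* int AES_set_encrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key) */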
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
+.align 16
+AES_set_encrypt_key:
+.L_AES_set_encrypt_key_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ call _x86_AES_set_encrypt_key
+ ret
+.size AES_set_encrypt_key,.-.L_AES_set_encrypt_key_begin
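+/* int AES_set_decrypt_key(const unsigned char *userKey, const int bits, AES_KEY *key) */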
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
+.align 16
+AES_set_decrypt_key:
+.L_AES_set_decrypt_key_begin:
+ #ifdef __CET__
+
+.byte 243,15,30,251
+ #endif
+
+ call _x86_AES_set_encrypt_key
+ cmpl $0,%eax
+ je .L054proceed
+ ret
+.L054proceed:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%esi
+ movl 240(%esi),%ecx
+ leal (,%ecx,4),%ecx
+ leal (%esi,%ecx,4),%edi
+.align 4
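+/* reverse the round keys in place: swap 16-byte entries from both ends
+   until the pointers meet */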
+.L055invert:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl (%edi),%ecx
+ movl 4(%edi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,(%esi)
+ movl %edx,4(%esi)
+ movl 8(%esi),%eax
+ movl 12(%esi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,8(%edi)
+ movl %ebx,12(%edi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ addl $16,%esi
+ subl $16,%edi
+ cmpl %edi,%esi
+ jne .L055invert
+ movl 28(%esp),%edi
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,28(%esp)
+ movl 16(%edi),%eax
+.align 4
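+/* apply InvMixColumns to the inner round keys (all but the first and
+   last). Each xtime step doubles four bytes at once: mask the high bits
+   (0x80808080), shift left, drop the carried-out bits (0xfefefefe), and
+   fold in the AES polynomial 0x1b per overflowing byte (0x1b1b1b1b). */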
+.L056permute:
+ addl $16,%edi
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %esi,%ebx
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %eax,%ebx
+ xorl %esi,%ecx
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %eax,%ecx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ roll $8,%eax
+ xorl %esi,%edx
+ movl 4(%edi),%ebp
+ xorl %ebx,%eax
+ xorl %edx,%ebx
+ xorl %ecx,%eax
+ roll $24,%ebx
+ xorl %edx,%ecx
+ xorl %edx,%eax
+ roll $16,%ecx
+ xorl %ebx,%eax
+ roll $8,%edx
+ xorl %ecx,%eax
+ movl %ebp,%ebx
+ xorl %edx,%eax
+ movl %eax,(%edi)
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %esi,%ecx
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %esi,%edx
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %ebx,%edx
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ roll $8,%ebx
+ xorl %esi,%eax
+ movl 8(%edi),%ebp
+ xorl %ecx,%ebx
+ xorl %eax,%ecx
+ xorl %edx,%ebx
+ roll $24,%ecx
+ xorl %eax,%edx
+ xorl %eax,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%eax
+ xorl %edx,%ebx
+ movl %ebp,%ecx
+ xorl %eax,%ebx
+ movl %ebx,4(%edi)
+ movl $2155905152,%ebp
+ andl %ecx,%ebp
+ leal (%ecx,%ecx,1),%edx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %esi,%edx
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %ecx,%edx
+ xorl %esi,%eax
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %ecx,%eax
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ roll $8,%ecx
+ xorl %esi,%ebx
+ movl 12(%edi),%ebp
+ xorl %edx,%ecx
+ xorl %ebx,%edx
+ xorl %eax,%ecx
+ roll $24,%edx
+ xorl %ebx,%eax
+ xorl %ebx,%ecx
+ roll $16,%eax
+ xorl %edx,%ecx
+ roll $8,%ebx
+ xorl %eax,%ecx
+ movl %ebp,%edx
+ xorl %ebx,%ecx
+ movl %ecx,8(%edi)
+ movl $2155905152,%ebp
+ andl %edx,%ebp
+ leal (%edx,%edx,1),%eax
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %esi,%eax
+ movl $2155905152,%ebp
+ andl %eax,%ebp
+ leal (%eax,%eax,1),%ebx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %edx,%eax
+ xorl %esi,%ebx
+ movl $2155905152,%ebp
+ andl %ebx,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ movl %ebp,%esi
+ shrl $7,%ebp
+ xorl %edx,%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ roll $8,%edx
+ xorl %esi,%ecx
+ movl 16(%edi),%ebp
+ xorl %eax,%edx
+ xorl %ecx,%eax
+ xorl %ebx,%edx
+ roll $24,%eax
+ xorl %ecx,%ebx
+ xorl %ecx,%edx
+ roll $16,%ebx
+ xorl %eax,%edx
+ roll $8,%ecx
+ xorl %ebx,%edx
+ movl %ebp,%eax
+ xorl %ecx,%edx
+ movl %edx,12(%edi)
+ cmpl 28(%esp),%edi
+ jb .L056permute
+ xorl %eax,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_set_decrypt_key,.-.L_AES_set_decrypt_key_begin
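+/* ASCII: "AES for x86, CRYPTOGAMS by <appro@openssl.org>" */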
+.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm OPENSSL_ia32cap_P,16,4
+
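+/* .note.gnu.property advertising GNU_PROPERTY_X86_FEATURE_1_AND
+   (type 0xc0000002) with value 3, i.e. IBT and SHSTK support */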
+ .section ".note.gnu.property", "a"
+ .p2align 2
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ .asciz "GNU"
+1:
+ .p2align 2
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 3
+3:
+ .p2align 2
+4:
+#endif