author     Bert Belder <bertbelder@gmail.com>   2012-09-08 02:43:18 +0200
committer  isaacs <i@izs.me>                    2012-09-25 08:51:04 -0700
commit     823e807b5ba627b183623240148f9c8c0703506e (patch)
tree       fa6860eaa2c12983fe101344f309e47903c638f1 /deps
parent     62c3879cfac81df48e63353d084080d928dda10f (diff)
download   android-node-v8-823e807b5ba627b183623240148f9c8c0703506e.tar.gz
           android-node-v8-823e807b5ba627b183623240148f9c8c0703506e.tar.bz2
           android-node-v8-823e807b5ba627b183623240148f9c8c0703506e.zip
openssl: add generated asm code
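
Beyond the one-line message, the commit carries no description: these .s/.asm files are the pre-generated output of OpenSSL's perl assembly generators, checked in directly rather than produced at build time. The first file in the diff below defines the classic libcrypto AES entry points (AES_set_encrypt_key, AES_set_decrypt_key, AES_encrypt, AES_decrypt, AES_cbc_encrypt). For orientation only, a minimal C sketch of that API as declared in <openssl/aes.h>; this is an illustration, not part of the commit:

#include <openssl/aes.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
    /* Arbitrary 128-bit key and one 16-byte block. */
    static const unsigned char key[16] = {0};
    unsigned char in[16] = "0123456789abcde";   /* 15 chars + NUL = 16 bytes */
    unsigned char out[16], back[16];
    AES_KEY enc, dec;

    /* Expands the key; the asm below reads the round count at offset 240
     * of AES_KEY (see "movl 240(%r15),%r13d"). */
    if (AES_set_encrypt_key(key, 128, &enc) != 0 ||
        AES_set_decrypt_key(key, 128, &dec) != 0)
        return 1;

    AES_encrypt(in, out, &enc);     /* single-block primitive defined below */
    AES_decrypt(out, back, &dec);

    printf("round trip %s\n", memcmp(in, back, sizeof in) == 0 ? "ok" : "failed");
    return 0;
}

AES_cbc_encrypt(in, out, len, &enc, iv, AES_ENCRYPT) would exercise the CBC entry point that closes the file.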
Diffstat (limited to 'deps')
-rw-r--r--  deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s  2545
-rw-r--r--  deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s  172
-rw-r--r--  deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s  1844
-rw-r--r--  deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s  671
-rw-r--r--  deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s  430
-rw-r--r--  deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s  1283
-rw-r--r--  deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s  1971
-rw-r--r--  deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s  859
-rw-r--r--  deps/openssl/asm/x64-elf-gas/x86_64cpuid.s  194
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s  2545
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s  172
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s  1844
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s  671
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s  430
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s  1283
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s  1971
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s  859
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s  195
-rw-r--r--  deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm  2885
-rw-r--r--  deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm  293
-rw-r--r--  deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm  2108
-rw-r--r--  deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm  781
-rw-r--r--  deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm  586
-rw-r--r--  deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm  1394
-rw-r--r--  deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm  2085
-rw-r--r--  deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm  972
-rw-r--r--  deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm  186
-rw-r--r--  deps/openssl/asm/x86-elf-gas/aes/aes-586.s  3234
-rw-r--r--  deps/openssl/asm/x86-elf-gas/bf/bf-686.s  864
-rw-r--r--  deps/openssl/asm/x86-elf-gas/bn/x86-mont.s  338
-rw-r--r--  deps/openssl/asm/x86-elf-gas/bn/x86.s  2114
-rw-r--r--  deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s  2375
-rw-r--r--  deps/openssl/asm/x86-elf-gas/cast/cast-586.s  933
-rw-r--r--  deps/openssl/asm/x86-elf-gas/des/crypt586.s  875
-rw-r--r--  deps/openssl/asm/x86-elf-gas/des/des-586.s  1837
-rw-r--r--  deps/openssl/asm/x86-elf-gas/md5/md5-586.s  679
-rw-r--r--  deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s  230
-rw-r--r--  deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s  564
-rw-r--r--  deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s  1965
-rw-r--r--  deps/openssl/asm/x86-elf-gas/sha/sha1-586.s  1442
-rw-r--r--  deps/openssl/asm/x86-elf-gas/sha/sha256-586.s  261
-rw-r--r--  deps/openssl/asm/x86-elf-gas/sha/sha512-586.s  563
-rw-r--r--  deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s  1105
-rw-r--r--  deps/openssl/asm/x86-elf-gas/x86cpuid.s  279
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/aes/aes-586.s  3194
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/bf/bf-686.s  897
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s  336
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/bn/x86.s  2385
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s  2353
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/cast/cast-586.s  967
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/des/crypt586.s  891
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/des/des-586.s  1873
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/md5/md5-586.s  745
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s  224
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s  563
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s  2123
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s  1520
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s  259
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s  561
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s  1103
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/x86cpuid.s  261
-rw-r--r--  deps/openssl/asm/x86-win32-masm/aes/aes-586.asm  3222
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bf/bf-686.asm  907
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm  348
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bn/x86.asm  2116
-rw-r--r--  deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm  2367
-rw-r--r--  deps/openssl/asm/x86-win32-masm/cast/cast-586.asm  950
-rw-r--r--  deps/openssl/asm/x86-win32-masm/des/crypt586.asm  909
-rw-r--r--  deps/openssl/asm/x86-win32-masm/des/des-586.asm  1878
-rw-r--r--  deps/openssl/asm/x86-win32-masm/md5/md5-586.asm  693
-rw-r--r--  deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm  239
-rw-r--r--  deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm  573
-rw-r--r--  deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm  1976
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm  1452
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm  271
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm  573
-rw-r--r--  deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm  1122
-rw-r--r--  deps/openssl/asm/x86-win32-masm/x86cpuid.asm  277
78 files changed, 91015 insertions, 0 deletions
diff --git a/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s b/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
new file mode 100644
index 0000000000..d7feffbfa5
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
@@ -0,0 +1,2545 @@
+.text
+
+.type _x86_64_AES_encrypt,@function
+.align 16
+_x86_64_AES_encrypt:
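+# Inner block transform (conventions visible in the code): state in
+# %eax..%edx, round-key pointer in %r15, .LAES_Te table base in %r14.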
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp .Lenc_loop
+.align 16
+.Lenc_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ shrl $16,%ecx
+ movzbl %ah,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movl 12(%r15),%edx
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rdi,8),%r12d
+ xorl 1(%r14,%rbp,8),%r8d
+
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz .Lenc_loop
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl 2(%r14,%rsi,8),%r10d
+ movzbl 2(%r14,%rdi,8),%r11d
+ movzbl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl 2(%r14,%rsi,8),%r8d
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $65280,%edi
+ andl $65280,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%ecx
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ shrl $16,%edx
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+
+ andl $65280,%esi
+ andl $65280,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $16711680,%esi
+ andl $16711680,%edi
+ andl $16711680,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 2(%r14,%rbp,8),%ebp
+
+ andl $16711680,%esi
+ andl $4278190080,%edi
+ andl $4278190080,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movl 16+12(%r15),%edx
+ movl 2(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 16+0(%r15),%eax
+
+ andl $4278190080,%esi
+ andl $4278190080,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
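+# (the two bytes above encode "rep ret"; the same pattern closes every
+# routine in this file)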
+
+.size _x86_64_AES_encrypt,.-_x86_64_AES_encrypt
+.type _x86_64_AES_encrypt_compact,@function
+.align 16
+_x86_64_AES_encrypt_compact:
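+# Compact variant: indexes only the 256-byte S-box at %r14 instead of
+# the 2KB Te table. The eight discarded loads below touch the S-box at
+# 32-byte strides, pulling it into cache up front (a smaller and less
+# timing-sensitive footprint than the full table path).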
+ leaq 128(%r14),%r8
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp .Lenc_loop_compact
+.align 16
+.Lenc_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+
+ movzbl %dl,%r8d
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl (%r14,%r8,1),%r8d
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl (%r14,%rdi,1),%r13d
+
+ movzbl %dh,%ebp
+ movzbl %ah,%esi
+ shrl $16,%ecx
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ shrl $16,%edx
+
+ movzbl %cl,%edi
+ shll $8,%r9d
+ shll $8,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %r9d,%r10d
+ xorl %r13d,%r11d
+
+ movzbl %dl,%r9d
+ shrl $16,%eax
+ shrl $16,%ebx
+ movzbl %al,%r13d
+ shll $8,%ebp
+ shll $8,%esi
+ movzbl (%r14,%r9,1),%r9d
+ movzbl (%r14,%r13,1),%r13d
+ xorl %ebp,%r12d
+ xorl %esi,%r8d
+
+ movzbl %bl,%ebp
+ movzbl %dh,%esi
+ shll $16,%edi
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ xorl %edi,%r10d
+
+ movzbl %ah,%edi
+ shrl $8,%ecx
+ shrl $8,%ebx
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rcx,1),%edx
+ movzbl (%r14,%rbx,1),%ecx
+ shll $16,%r9d
+ shll $16,%r13d
+ shll $16,%ebp
+ xorl %r9d,%r11d
+ xorl %r13d,%r12d
+ xorl %ebp,%r8d
+
+ shll $24,%esi
+ shll $24,%edi
+ shll $24,%edx
+ xorl %esi,%r10d
+ shll $24,%ecx
+ xorl %edi,%r11d
+ movl %r10d,%eax
+ movl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je .Lenc_compact_done
+ movl %eax,%esi
+ movl %ebx,%edi
+ andl $2155905152,%esi
+ andl $2155905152,%edi
+ movl %esi,%r10d
+ movl %edi,%r11d
+ shrl $7,%r10d
+ leal (%rax,%rax,1),%r8d
+ shrl $7,%r11d
+ leal (%rbx,%rbx,1),%r9d
+ subl %r10d,%esi
+ subl %r11d,%edi
+ andl $4278124286,%r8d
+ andl $4278124286,%r9d
+ andl $454761243,%esi
+ andl $454761243,%edi
+ movl %eax,%r10d
+ movl %ebx,%r11d
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl %ecx,%esi
+ movl %edx,%edi
+ roll $24,%eax
+ roll $24,%ebx
+ andl $2155905152,%esi
+ andl $2155905152,%edi
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl %esi,%r12d
+ movl %edi,%ebp
+ rorl $16,%r10d
+ rorl $16,%r11d
+ shrl $7,%r12d
+ leal (%rcx,%rcx,1),%r8d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ shrl $7,%ebp
+ leal (%rdx,%rdx,1),%r9d
+ rorl $8,%r10d
+ rorl $8,%r11d
+ subl %r12d,%esi
+ subl %ebp,%edi
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+
+ andl $4278124286,%r8d
+ andl $4278124286,%r9d
+ andl $454761243,%esi
+ andl $454761243,%edi
+ movl %ecx,%r12d
+ movl %edx,%ebp
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+ roll $24,%ecx
+ roll $24,%edx
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+ movl 0(%r14),%esi
+ rorl $16,%r12d
+ rorl $16,%ebp
+ movl 64(%r14),%edi
+ xorl %r12d,%ecx
+ xorl %ebp,%edx
+ movl 128(%r14),%r8d
+ rorl $8,%r12d
+ rorl $8,%ebp
+ movl 192(%r14),%r9d
+ xorl %r12d,%ecx
+ xorl %ebp,%edx
+ jmp .Lenc_loop_compact
+.align 16
+.Lenc_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+
+.size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
+.globl AES_encrypt
+.type AES_encrypt,@function
+.align 16
+AES_encrypt:
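+# Public entry (SysV AMD64 ABI): %rdi = in, %rsi = out, %rdx = key.
+# Saves callee-saved registers, carves an aligned scratch frame, then
+# delegates to the compact transform above.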
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+
+ movq %rsp,%r10
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $960,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %r10,24(%rsp)
+.Lenc_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq .LAES_Te+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $768,%rbp
+ leaq (%r14,%rbp,1),%r14
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lenc_epilogue:
+ .byte 0xf3,0xc3
+.size AES_encrypt,.-AES_encrypt
+.type _x86_64_AES_decrypt,@function
+.align 16
+_x86_64_AES_decrypt:
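+# Decrypt counterpart of _x86_64_AES_encrypt: same conventions, with
+# %r14 pointing at the .LAES_Td tables.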
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp .Ldec_loop
+.align 16
+.Ldec_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %bh,%esi
+ shrl $16,%eax
+ movzbl %ch,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ movl 12(%r15),%edx
+ movzbl %ah,%ebp
+ xorl 1(%r14,%rsi,8),%r12d
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rbp,8),%r8d
+
+ xorl %r10d,%eax
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r12d,%ecx
+ xorl %r11d,%ebx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz .Ldec_loop
+ leaq 2048(%r14),%r14
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl (%r14,%rsi,1),%r10d
+ movzbl (%r14,%rdi,1),%r11d
+ movzbl (%r14,%rbp,1),%r12d
+
+ movzbl %dl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movzbl (%r14,%rsi,1),%r8d
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $8,%edi
+ shll $8,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%edx
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ shrl $16,%eax
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+
+ shll $8,%esi
+ shll $8,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $16,%edi
+ shll $16,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $24,%edi
+ shll $24,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movl 16+12(%r15),%edx
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movl 16+0(%r15),%eax
+
+ shll $24,%esi
+ shll $24,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ leaq -2048(%r14),%r14
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
+
+.size _x86_64_AES_decrypt,.-_x86_64_AES_decrypt
+.type _x86_64_AES_decrypt_compact,@function
+.align 16
+_x86_64_AES_decrypt_compact:
+ leaq 128(%r14),%r8
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp .Ldec_loop_compact
+
+.align 16
+.Ldec_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+
+ movzbl %dl,%r8d
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl (%r14,%r8,1),%r8d
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl (%r14,%rdi,1),%r13d
+
+ movzbl %bh,%ebp
+ movzbl %ch,%esi
+ shrl $16,%ecx
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ shrl $16,%edx
+
+ movzbl %cl,%edi
+ shll $8,%r9d
+ shll $8,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %r9d,%r10d
+ xorl %r13d,%r11d
+
+ movzbl %dl,%r9d
+ shrl $16,%eax
+ shrl $16,%ebx
+ movzbl %al,%r13d
+ shll $8,%ebp
+ shll $8,%esi
+ movzbl (%r14,%r9,1),%r9d
+ movzbl (%r14,%r13,1),%r13d
+ xorl %ebp,%r12d
+ xorl %esi,%r8d
+
+ movzbl %bl,%ebp
+ movzbl %bh,%esi
+ shll $16,%edi
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ xorl %edi,%r10d
+
+ movzbl %ch,%edi
+ shll $16,%r9d
+ shll $16,%r13d
+ movzbl (%r14,%rdi,1),%ebx
+ xorl %r9d,%r11d
+ xorl %r13d,%r12d
+
+ movzbl %dh,%edi
+ shrl $8,%eax
+ shll $16,%ebp
+ movzbl (%r14,%rdi,1),%ecx
+ movzbl (%r14,%rax,1),%edx
+ xorl %ebp,%r8d
+
+ shll $24,%esi
+ shll $24,%ebx
+ shll $24,%ecx
+ xorl %esi,%r10d
+ shll $24,%edx
+ xorl %r11d,%ebx
+ movl %r10d,%eax
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je .Ldec_compact_done
+
+ movq 256+0(%r14),%rsi
+ shlq $32,%rbx
+ shlq $32,%rdx
+ movq 256+8(%r14),%rdi
+ orq %rbx,%rax
+ orq %rdx,%rcx
+ movq 256+16(%r14),%rbp
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r8,%rbx
+ xorq %r11,%rdx
+ movq %rbx,%r8
+ movq %rdx,%r11
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r9,%rbx
+ xorq %r12,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ xorq %r13,%r12
+ shrq $32,%rbx
+ shrq $32,%rdx
+ xorq %r8,%r10
+ xorq %r11,%r13
+ roll $8,%eax
+ roll $8,%ecx
+ xorq %r9,%r10
+ xorq %r12,%r13
+
+ roll $8,%ebx
+ roll $8,%edx
+ xorl %r10d,%eax
+ xorl %r13d,%ecx
+ shrq $32,%r10
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ movq %r11,%r13
+ shrq $32,%r10
+ shrq $32,%r13
+ roll $24,%r8d
+ roll $24,%r11d
+ roll $24,%r10d
+ roll $24,%r13d
+ xorl %r8d,%eax
+ xorl %r11d,%ecx
+ movq %r9,%r8
+ movq %r12,%r11
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq 0(%r14),%rsi
+ shrq $32,%r8
+ shrq $32,%r11
+ movq 64(%r14),%rdi
+ roll $16,%r9d
+ roll $16,%r12d
+ movq 128(%r14),%rbp
+ roll $16,%r8d
+ roll $16,%r11d
+ movq 192(%r14),%r10
+ xorl %r9d,%eax
+ xorl %r12d,%ecx
+ movq 256(%r14),%r13
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ jmp .Ldec_loop_compact
+.align 16
+.Ldec_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+
+.size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
+.globl AES_decrypt
+.type AES_decrypt,@function
+.align 16
+AES_decrypt:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+
+ movq %rsp,%r10
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $960,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %r10,24(%rsp)
+.Ldec_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq .LAES_Td+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $768,%rbp
+ leaq (%r14,%rbp,1),%r14
+ shrq $3,%rbp
+ addq %rbp,%r14
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Ldec_epilogue:
+ .byte 0xf3,0xc3
+.size AES_decrypt,.-AES_decrypt
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
+.align 16
+AES_set_encrypt_key:
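+# Public entry: %rdi = user key, %esi = bits (128/192/256), %rdx =
+# AES_KEY out. %rax returns 0 on success, -1 for a null pointer, -2
+# for an unsupported bit length (set in the inner routine below).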
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ subq $8,%rsp
+.Lenc_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+
+ movq 8(%rsp),%r15
+ movq 16(%rsp),%r14
+ movq 24(%rsp),%r13
+ movq 32(%rsp),%r12
+ movq 40(%rsp),%rbp
+ movq 48(%rsp),%rbx
+ addq $56,%rsp
+.Lenc_key_epilogue:
+ .byte 0xf3,0xc3
+.size AES_set_encrypt_key,.-AES_set_encrypt_key
+
+.type _x86_64_AES_set_encrypt_key,@function
+.align 16
+_x86_64_AES_set_encrypt_key:
+ movl %esi,%ecx
+ movq %rdi,%rsi
+ movq %rdx,%rdi
+
+ testq $-1,%rsi
+ jz .Lbadpointer
+ testq $-1,%rdi
+ jz .Lbadpointer
+
+ leaq .LAES_Te(%rip),%rbp
+ leaq 2048+128(%rbp),%rbp
+
+
+ movl 0-128(%rbp),%eax
+ movl 32-128(%rbp),%ebx
+ movl 64-128(%rbp),%r8d
+ movl 96-128(%rbp),%edx
+ movl 128-128(%rbp),%eax
+ movl 160-128(%rbp),%ebx
+ movl 192-128(%rbp),%r8d
+ movl 224-128(%rbp),%edx
+
+ cmpl $128,%ecx
+ je .L10rounds
+ cmpl $192,%ecx
+ je .L12rounds
+ cmpl $256,%ecx
+ je .L14rounds
+ movq $-2,%rax
+ jmp .Lexit
+
+.L10rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rdx,8(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L10shortcut
+.align 4
+.L10loop:
+ movl 0(%rdi),%eax
+ movl 12(%rdi),%edx
+.L10shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,16(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,20(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,24(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,28(%rdi)
+ addl $1,%ecx
+ leaq 16(%rdi),%rdi
+ cmpl $10,%ecx
+ jl .L10loop
+
+ movl $10,80(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.L12rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rdx,16(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L12shortcut
+.align 4
+.L12loop:
+ movl 0(%rdi),%eax
+ movl 20(%rdi),%edx
+.L12shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,24(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,28(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,32(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,36(%rdi)
+
+ cmpl $7,%ecx
+ je .L12break
+ addl $1,%ecx
+
+ xorl 16(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ leaq 24(%rdi),%rdi
+ jmp .L12loop
+.L12break:
+ movl $12,72(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.L14rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rcx
+ movq 24(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,16(%rdi)
+ movq %rdx,24(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp .L14shortcut
+.align 4
+.L14loop:
+ movl 0(%rdi),%eax
+ movl 28(%rdi),%edx
+.L14shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,32(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,36(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ cmpl $6,%ecx
+ je .L14break
+ addl $1,%ecx
+
+ movl %eax,%edx
+ movl 16(%rdi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movl %eax,48(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,52(%rdi)
+ xorl 24(%rdi),%eax
+ movl %eax,56(%rdi)
+ xorl 28(%rdi),%eax
+ movl %eax,60(%rdi)
+
+ leaq 32(%rdi),%rdi
+ jmp .L14loop
+.L14break:
+ movl $14,48(%rdi)
+ xorq %rax,%rax
+ jmp .Lexit
+
+.Lbadpointer:
+ movq $-1,%rax
+.Lexit:
+.byte 0xf3,0xc3
+
+.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
+.align 16
+AES_set_decrypt_key:
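+# Builds the encryption schedule, then converts it for decryption:
+# .Linvert swaps the round keys end-for-end and .Lpermute runs GF(2^8)
+# doubling chains over each key (the usual InvMixColumns transform of
+# the equivalent inverse cipher).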
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ pushq %rdx
+.Ldec_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+ movq (%rsp),%r8
+ cmpl $0,%eax
+ jne .Labort
+
+ movl 240(%r8),%r14d
+ xorq %rdi,%rdi
+ leaq (%rdi,%r14,4),%rcx
+ movq %r8,%rsi
+ leaq (%r8,%rcx,4),%rdi
+.align 4
+.Linvert:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 0(%rdi),%rcx
+ movq 8(%rdi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,0(%rsi)
+ movq %rdx,8(%rsi)
+ leaq 16(%rsi),%rsi
+ leaq -16(%rdi),%rdi
+ cmpq %rsi,%rdi
+ jne .Linvert
+
+ leaq .LAES_Te+2048+1024(%rip),%rax
+
+ movq 40(%rax),%rsi
+ movq 48(%rax),%rdi
+ movq 56(%rax),%rbp
+
+ movq %r8,%r15
+ subl $1,%r14d
+.align 4
+.Lpermute:
+ leaq 16(%r15),%r15
+ movq 0(%r15),%rax
+ movq 8(%r15),%rcx
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r8,%rbx
+ xorq %r11,%rdx
+ movq %rbx,%r8
+ movq %rdx,%r11
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r9,%rbx
+ xorq %r12,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ xorq %r13,%r12
+ shrq $32,%rbx
+ shrq $32,%rdx
+ xorq %r8,%r10
+ xorq %r11,%r13
+ roll $8,%eax
+ roll $8,%ecx
+ xorq %r9,%r10
+ xorq %r12,%r13
+
+ roll $8,%ebx
+ roll $8,%edx
+ xorl %r10d,%eax
+ xorl %r13d,%ecx
+ shrq $32,%r10
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ movq %r11,%r13
+ shrq $32,%r10
+ shrq $32,%r13
+ roll $24,%r8d
+ roll $24,%r11d
+ roll $24,%r10d
+ roll $24,%r13d
+ xorl %r8d,%eax
+ xorl %r11d,%ecx
+ movq %r9,%r8
+ movq %r12,%r11
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+
+ shrq $32,%r8
+ shrq $32,%r11
+
+ roll $16,%r9d
+ roll $16,%r12d
+
+ roll $16,%r8d
+ roll $16,%r11d
+
+ xorl %r9d,%eax
+ xorl %r12d,%ecx
+
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ movl %eax,0(%r15)
+ movl %ebx,4(%r15)
+ movl %ecx,8(%r15)
+ movl %edx,12(%r15)
+ subl $1,%r14d
+ jnz .Lpermute
+
+ xorq %rax,%rax
+.Labort:
+ movq 8(%rsp),%r15
+ movq 16(%rsp),%r14
+ movq 24(%rsp),%r13
+ movq 32(%rsp),%r12
+ movq 40(%rsp),%rbp
+ movq 48(%rsp),%rbx
+ addq $56,%rsp
+.Ldec_key_epilogue:
+ .byte 0xf3,0xc3
+.size AES_set_decrypt_key,.-AES_set_decrypt_key
+.globl AES_cbc_encrypt
+.type AES_cbc_encrypt,@function
+.align 16
+
+AES_cbc_encrypt:
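+# Public entry: %rdi = in, %rsi = out, %rdx = length, %rcx = key,
+# %r8 = ivec, %r9d = enc flag. Lengths of at least 512 bytes that are
+# a multiple of 16 take the table-driven fast path; anything else
+# drops to the compact slow path.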
+ cmpq $0,%rdx
+ je .Lcbc_epilogue
+ pushfq
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+.Lcbc_prologue:
+
+ cld
+ movl %r9d,%r9d
+
+ leaq .LAES_Te(%rip),%r14
+ cmpq $0,%r9
+ jne .Lcbc_picked_te
+ leaq .LAES_Td(%rip),%r14
+.Lcbc_picked_te:
+
+ movl OPENSSL_ia32cap_P(%rip),%r10d
+ cmpq $512,%rdx
+ jb .Lcbc_slow_prologue
+ testq $15,%rdx
+ jnz .Lcbc_slow_prologue
+
+
+
+
+ leaq -88-248(%rsp),%r15
+ andq $-64,%r15
+
+
+ movq %r14,%r10
+ leaq 2304(%r14),%r11
+ movq %r15,%r12
+ andq $4095,%r10
+ andq $4095,%r11
+ andq $4095,%r12
+
+ cmpq %r11,%r12
+ jb .Lcbc_te_break_out
+ subq %r11,%r12
+ subq %r12,%r15
+ jmp .Lcbc_te_ok
+.Lcbc_te_break_out:
+ subq %r10,%r12
+ andq $4095,%r12
+ addq $320,%r12
+ subq %r12,%r15
+.align 4
+.Lcbc_te_ok:
+
+ xchgq %rsp,%r15
+
+ movq %r15,16(%rsp)
+.Lcbc_fast_body:
+ movq %rdi,24(%rsp)
+ movq %rsi,32(%rsp)
+ movq %rdx,40(%rsp)
+ movq %rcx,48(%rsp)
+ movq %r8,56(%rsp)
+ movl $0,80+240(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+
+ movl 240(%r15),%eax
+
+ movq %r15,%r10
+ subq %r14,%r10
+ andq $4095,%r10
+ cmpq $2304,%r10
+ jb .Lcbc_do_ecopy
+ cmpq $4096-248,%r10
+ jb .Lcbc_skip_ecopy
+.align 4
+.Lcbc_do_ecopy:
+ movq %r15,%rsi
+ leaq 80(%rsp),%rdi
+ leaq 80(%rsp),%r15
+ movl $30,%ecx
+.long 0x90A548F3
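+# 0x90A548F3 = "rep movsq; nop": copies the 240-byte key schedule
+# (%ecx = 30 quadwords) onto the stack.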
+
+ movl %eax,(%rdi)
+.Lcbc_skip_ecopy:
+ movq %r15,0(%rsp)
+
+ movl $18,%ecx
+.align 4
+.Lcbc_prefetch_te:
+ movq 0(%r14),%r10
+ movq 32(%r14),%r11
+ movq 64(%r14),%r12
+ movq 96(%r14),%r13
+ leaq 128(%r14),%r14
+ subl $1,%ecx
+ jnz .Lcbc_prefetch_te
+ leaq -2304(%r14),%r14
+
+ cmpq $0,%rbx
+ je .LFAST_DECRYPT
+
+
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+
+.align 4
+.Lcbc_fast_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_encrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ movq %r10,40(%rsp)
+ jnz .Lcbc_fast_enc_loop
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp .Lcbc_fast_cleanup
+
+
+.align 16
+.LFAST_DECRYPT:
+ cmpq %r8,%r9
+ je .Lcbc_fast_dec_in_place
+
+ movq %rbp,64(%rsp)
+.align 4
+.Lcbc_fast_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 64(%rsp),%rbp
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0(%rbp),%eax
+ xorl 4(%rbp),%ebx
+ xorl 8(%rbp),%ecx
+ xorl 12(%rbp),%edx
+ movq %r8,%rbp
+
+ subq $16,%r10
+ movq %r10,40(%rsp)
+ movq %rbp,64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jnz .Lcbc_fast_dec_loop
+ movq 56(%rsp),%r12
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0(%r12)
+ movq %r11,8(%r12)
+ jmp .Lcbc_fast_cleanup
+
+.align 16
+.Lcbc_fast_dec_in_place:
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0+64(%rsp)
+ movq %r11,8+64(%rsp)
+.align 4
+.Lcbc_fast_dec_in_place_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jz .Lcbc_fast_dec_in_place_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ movq %r10,40(%rsp)
+ jmp .Lcbc_fast_dec_in_place_loop
+.Lcbc_fast_dec_in_place_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+.align 4
+.Lcbc_fast_cleanup:
+ cmpl $0,80+240(%rsp)
+ leaq 80(%rsp),%rdi
+ je .Lcbc_exit
+ movl $30,%ecx
+ xorq %rax,%rax
+.long 0x90AB48F3
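+# 0x90AB48F3 = "rep stosq; nop": zeroes the on-stack key-schedule copy
+# before returning.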
+
+
+ jmp .Lcbc_exit
+
+
+.align 16
+.Lcbc_slow_prologue:
+
+ leaq -88(%rsp),%rbp
+ andq $-64,%rbp
+
+ leaq -88-63(%rcx),%r10
+ subq %rbp,%r10
+ negq %r10
+ andq $960,%r10
+ subq %r10,%rbp
+
+ xchgq %rsp,%rbp
+
+ movq %rbp,16(%rsp)
+.Lcbc_slow_body:
+
+
+
+
+ movq %r8,56(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+ movq %rdx,%r10
+
+ movl 240(%r15),%eax
+ movq %r15,0(%rsp)
+ shll $4,%eax
+ leaq (%r15,%rax,1),%rax
+ movq %rax,8(%rsp)
+
+
+ leaq 2048(%r14),%r14
+ leaq 768-8(%rsp),%rax
+ subq %r14,%rax
+ andq $768,%rax
+ leaq (%r14,%rax,1),%r14
+
+ cmpq $0,%rbx
+ je .LSLOW_DECRYPT
+
+
+ testq $-16,%r10
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+ jz .Lcbc_slow_enc_tail
+
+
+.align 4
+.Lcbc_slow_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ jnz .Lcbc_slow_enc_loop
+ testq $15,%r10
+ jnz .Lcbc_slow_enc_tail
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp .Lcbc_exit
+
+.align 4
+.Lcbc_slow_enc_tail:
+ movq %rax,%r11
+ movq %rcx,%r12
+ movq %r10,%rcx
+ movq %r8,%rsi
+ movq %r9,%rdi
+.long 0x9066A4F3
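+# 0x9066A4F3 = "rep movsb; nop": copies the partial tail block.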
+
+ movq $16,%rcx
+ subq %r10,%rcx
+ xorq %rax,%rax
+.long 0x9066AAF3
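+# 0x9066AAF3 = "rep stosb; nop": zero-pads the tail to a full 16-byte
+# block before the final encryption pass.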
+
+ movq %r9,%r8
+ movq $16,%r10
+ movq %r11,%rax
+ movq %r12,%rcx
+ jmp .Lcbc_slow_enc_loop
+
+
+.align 16
+.LSLOW_DECRYPT:
+ shrq $3,%rax
+ addq %rax,%r14
+
+ movq 0(%rbp),%r11
+ movq 8(%rbp),%r12
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+.align 4
+.Lcbc_slow_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jc .Lcbc_slow_dec_partial
+ jz .Lcbc_slow_dec_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jmp .Lcbc_slow_dec_loop
+.Lcbc_slow_dec_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ jmp .Lcbc_exit
+
+.align 4
+.Lcbc_slow_dec_partial:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0+64(%rsp)
+ movl %ebx,4+64(%rsp)
+ movl %ecx,8+64(%rsp)
+ movl %edx,12+64(%rsp)
+
+ movq %r9,%rdi
+ leaq 64(%rsp),%rsi
+ leaq 16(%r10),%rcx
+.long 0x9066A4F3
+
+ jmp .Lcbc_exit
+
+.align 16
+.Lcbc_exit:
+ movq 16(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lcbc_popfq:
+ popfq
+.Lcbc_epilogue:
+ .byte 0xf3,0xc3
+.size AES_cbc_encrypt,.-AES_cbc_encrypt
+.align 64
+.LAES_Te:
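+# Encryption tables (Te). Each word is stored twice, 8 bytes per entry,
+# so 4-byte loads at entry offsets 0-3 yield its four byte rotations.
+# The .byte blocks that follow hold four copies of the S-box, then the
+# rcon round constants and the GF(2^8) bit masks used by the compact
+# code.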
+.long 0xa56363c6,0xa56363c6
+.long 0x847c7cf8,0x847c7cf8
+.long 0x997777ee,0x997777ee
+.long 0x8d7b7bf6,0x8d7b7bf6
+.long 0x0df2f2ff,0x0df2f2ff
+.long 0xbd6b6bd6,0xbd6b6bd6
+.long 0xb16f6fde,0xb16f6fde
+.long 0x54c5c591,0x54c5c591
+.long 0x50303060,0x50303060
+.long 0x03010102,0x03010102
+.long 0xa96767ce,0xa96767ce
+.long 0x7d2b2b56,0x7d2b2b56
+.long 0x19fefee7,0x19fefee7
+.long 0x62d7d7b5,0x62d7d7b5
+.long 0xe6abab4d,0xe6abab4d
+.long 0x9a7676ec,0x9a7676ec
+.long 0x45caca8f,0x45caca8f
+.long 0x9d82821f,0x9d82821f
+.long 0x40c9c989,0x40c9c989
+.long 0x877d7dfa,0x877d7dfa
+.long 0x15fafaef,0x15fafaef
+.long 0xeb5959b2,0xeb5959b2
+.long 0xc947478e,0xc947478e
+.long 0x0bf0f0fb,0x0bf0f0fb
+.long 0xecadad41,0xecadad41
+.long 0x67d4d4b3,0x67d4d4b3
+.long 0xfda2a25f,0xfda2a25f
+.long 0xeaafaf45,0xeaafaf45
+.long 0xbf9c9c23,0xbf9c9c23
+.long 0xf7a4a453,0xf7a4a453
+.long 0x967272e4,0x967272e4
+.long 0x5bc0c09b,0x5bc0c09b
+.long 0xc2b7b775,0xc2b7b775
+.long 0x1cfdfde1,0x1cfdfde1
+.long 0xae93933d,0xae93933d
+.long 0x6a26264c,0x6a26264c
+.long 0x5a36366c,0x5a36366c
+.long 0x413f3f7e,0x413f3f7e
+.long 0x02f7f7f5,0x02f7f7f5
+.long 0x4fcccc83,0x4fcccc83
+.long 0x5c343468,0x5c343468
+.long 0xf4a5a551,0xf4a5a551
+.long 0x34e5e5d1,0x34e5e5d1
+.long 0x08f1f1f9,0x08f1f1f9
+.long 0x937171e2,0x937171e2
+.long 0x73d8d8ab,0x73d8d8ab
+.long 0x53313162,0x53313162
+.long 0x3f15152a,0x3f15152a
+.long 0x0c040408,0x0c040408
+.long 0x52c7c795,0x52c7c795
+.long 0x65232346,0x65232346
+.long 0x5ec3c39d,0x5ec3c39d
+.long 0x28181830,0x28181830
+.long 0xa1969637,0xa1969637
+.long 0x0f05050a,0x0f05050a
+.long 0xb59a9a2f,0xb59a9a2f
+.long 0x0907070e,0x0907070e
+.long 0x36121224,0x36121224
+.long 0x9b80801b,0x9b80801b
+.long 0x3de2e2df,0x3de2e2df
+.long 0x26ebebcd,0x26ebebcd
+.long 0x6927274e,0x6927274e
+.long 0xcdb2b27f,0xcdb2b27f
+.long 0x9f7575ea,0x9f7575ea
+.long 0x1b090912,0x1b090912
+.long 0x9e83831d,0x9e83831d
+.long 0x742c2c58,0x742c2c58
+.long 0x2e1a1a34,0x2e1a1a34
+.long 0x2d1b1b36,0x2d1b1b36
+.long 0xb26e6edc,0xb26e6edc
+.long 0xee5a5ab4,0xee5a5ab4
+.long 0xfba0a05b,0xfba0a05b
+.long 0xf65252a4,0xf65252a4
+.long 0x4d3b3b76,0x4d3b3b76
+.long 0x61d6d6b7,0x61d6d6b7
+.long 0xceb3b37d,0xceb3b37d
+.long 0x7b292952,0x7b292952
+.long 0x3ee3e3dd,0x3ee3e3dd
+.long 0x712f2f5e,0x712f2f5e
+.long 0x97848413,0x97848413
+.long 0xf55353a6,0xf55353a6
+.long 0x68d1d1b9,0x68d1d1b9
+.long 0x00000000,0x00000000
+.long 0x2cededc1,0x2cededc1
+.long 0x60202040,0x60202040
+.long 0x1ffcfce3,0x1ffcfce3
+.long 0xc8b1b179,0xc8b1b179
+.long 0xed5b5bb6,0xed5b5bb6
+.long 0xbe6a6ad4,0xbe6a6ad4
+.long 0x46cbcb8d,0x46cbcb8d
+.long 0xd9bebe67,0xd9bebe67
+.long 0x4b393972,0x4b393972
+.long 0xde4a4a94,0xde4a4a94
+.long 0xd44c4c98,0xd44c4c98
+.long 0xe85858b0,0xe85858b0
+.long 0x4acfcf85,0x4acfcf85
+.long 0x6bd0d0bb,0x6bd0d0bb
+.long 0x2aefefc5,0x2aefefc5
+.long 0xe5aaaa4f,0xe5aaaa4f
+.long 0x16fbfbed,0x16fbfbed
+.long 0xc5434386,0xc5434386
+.long 0xd74d4d9a,0xd74d4d9a
+.long 0x55333366,0x55333366
+.long 0x94858511,0x94858511
+.long 0xcf45458a,0xcf45458a
+.long 0x10f9f9e9,0x10f9f9e9
+.long 0x06020204,0x06020204
+.long 0x817f7ffe,0x817f7ffe
+.long 0xf05050a0,0xf05050a0
+.long 0x443c3c78,0x443c3c78
+.long 0xba9f9f25,0xba9f9f25
+.long 0xe3a8a84b,0xe3a8a84b
+.long 0xf35151a2,0xf35151a2
+.long 0xfea3a35d,0xfea3a35d
+.long 0xc0404080,0xc0404080
+.long 0x8a8f8f05,0x8a8f8f05
+.long 0xad92923f,0xad92923f
+.long 0xbc9d9d21,0xbc9d9d21
+.long 0x48383870,0x48383870
+.long 0x04f5f5f1,0x04f5f5f1
+.long 0xdfbcbc63,0xdfbcbc63
+.long 0xc1b6b677,0xc1b6b677
+.long 0x75dadaaf,0x75dadaaf
+.long 0x63212142,0x63212142
+.long 0x30101020,0x30101020
+.long 0x1affffe5,0x1affffe5
+.long 0x0ef3f3fd,0x0ef3f3fd
+.long 0x6dd2d2bf,0x6dd2d2bf
+.long 0x4ccdcd81,0x4ccdcd81
+.long 0x140c0c18,0x140c0c18
+.long 0x35131326,0x35131326
+.long 0x2fececc3,0x2fececc3
+.long 0xe15f5fbe,0xe15f5fbe
+.long 0xa2979735,0xa2979735
+.long 0xcc444488,0xcc444488
+.long 0x3917172e,0x3917172e
+.long 0x57c4c493,0x57c4c493
+.long 0xf2a7a755,0xf2a7a755
+.long 0x827e7efc,0x827e7efc
+.long 0x473d3d7a,0x473d3d7a
+.long 0xac6464c8,0xac6464c8
+.long 0xe75d5dba,0xe75d5dba
+.long 0x2b191932,0x2b191932
+.long 0x957373e6,0x957373e6
+.long 0xa06060c0,0xa06060c0
+.long 0x98818119,0x98818119
+.long 0xd14f4f9e,0xd14f4f9e
+.long 0x7fdcdca3,0x7fdcdca3
+.long 0x66222244,0x66222244
+.long 0x7e2a2a54,0x7e2a2a54
+.long 0xab90903b,0xab90903b
+.long 0x8388880b,0x8388880b
+.long 0xca46468c,0xca46468c
+.long 0x29eeeec7,0x29eeeec7
+.long 0xd3b8b86b,0xd3b8b86b
+.long 0x3c141428,0x3c141428
+.long 0x79dedea7,0x79dedea7
+.long 0xe25e5ebc,0xe25e5ebc
+.long 0x1d0b0b16,0x1d0b0b16
+.long 0x76dbdbad,0x76dbdbad
+.long 0x3be0e0db,0x3be0e0db
+.long 0x56323264,0x56323264
+.long 0x4e3a3a74,0x4e3a3a74
+.long 0x1e0a0a14,0x1e0a0a14
+.long 0xdb494992,0xdb494992
+.long 0x0a06060c,0x0a06060c
+.long 0x6c242448,0x6c242448
+.long 0xe45c5cb8,0xe45c5cb8
+.long 0x5dc2c29f,0x5dc2c29f
+.long 0x6ed3d3bd,0x6ed3d3bd
+.long 0xefacac43,0xefacac43
+.long 0xa66262c4,0xa66262c4
+.long 0xa8919139,0xa8919139
+.long 0xa4959531,0xa4959531
+.long 0x37e4e4d3,0x37e4e4d3
+.long 0x8b7979f2,0x8b7979f2
+.long 0x32e7e7d5,0x32e7e7d5
+.long 0x43c8c88b,0x43c8c88b
+.long 0x5937376e,0x5937376e
+.long 0xb76d6dda,0xb76d6dda
+.long 0x8c8d8d01,0x8c8d8d01
+.long 0x64d5d5b1,0x64d5d5b1
+.long 0xd24e4e9c,0xd24e4e9c
+.long 0xe0a9a949,0xe0a9a949
+.long 0xb46c6cd8,0xb46c6cd8
+.long 0xfa5656ac,0xfa5656ac
+.long 0x07f4f4f3,0x07f4f4f3
+.long 0x25eaeacf,0x25eaeacf
+.long 0xaf6565ca,0xaf6565ca
+.long 0x8e7a7af4,0x8e7a7af4
+.long 0xe9aeae47,0xe9aeae47
+.long 0x18080810,0x18080810
+.long 0xd5baba6f,0xd5baba6f
+.long 0x887878f0,0x887878f0
+.long 0x6f25254a,0x6f25254a
+.long 0x722e2e5c,0x722e2e5c
+.long 0x241c1c38,0x241c1c38
+.long 0xf1a6a657,0xf1a6a657
+.long 0xc7b4b473,0xc7b4b473
+.long 0x51c6c697,0x51c6c697
+.long 0x23e8e8cb,0x23e8e8cb
+.long 0x7cdddda1,0x7cdddda1
+.long 0x9c7474e8,0x9c7474e8
+.long 0x211f1f3e,0x211f1f3e
+.long 0xdd4b4b96,0xdd4b4b96
+.long 0xdcbdbd61,0xdcbdbd61
+.long 0x868b8b0d,0x868b8b0d
+.long 0x858a8a0f,0x858a8a0f
+.long 0x907070e0,0x907070e0
+.long 0x423e3e7c,0x423e3e7c
+.long 0xc4b5b571,0xc4b5b571
+.long 0xaa6666cc,0xaa6666cc
+.long 0xd8484890,0xd8484890
+.long 0x05030306,0x05030306
+.long 0x01f6f6f7,0x01f6f6f7
+.long 0x120e0e1c,0x120e0e1c
+.long 0xa36161c2,0xa36161c2
+.long 0x5f35356a,0x5f35356a
+.long 0xf95757ae,0xf95757ae
+.long 0xd0b9b969,0xd0b9b969
+.long 0x91868617,0x91868617
+.long 0x58c1c199,0x58c1c199
+.long 0x271d1d3a,0x271d1d3a
+.long 0xb99e9e27,0xb99e9e27
+.long 0x38e1e1d9,0x38e1e1d9
+.long 0x13f8f8eb,0x13f8f8eb
+.long 0xb398982b,0xb398982b
+.long 0x33111122,0x33111122
+.long 0xbb6969d2,0xbb6969d2
+.long 0x70d9d9a9,0x70d9d9a9
+.long 0x898e8e07,0x898e8e07
+.long 0xa7949433,0xa7949433
+.long 0xb69b9b2d,0xb69b9b2d
+.long 0x221e1e3c,0x221e1e3c
+.long 0x92878715,0x92878715
+.long 0x20e9e9c9,0x20e9e9c9
+.long 0x49cece87,0x49cece87
+.long 0xff5555aa,0xff5555aa
+.long 0x78282850,0x78282850
+.long 0x7adfdfa5,0x7adfdfa5
+.long 0x8f8c8c03,0x8f8c8c03
+.long 0xf8a1a159,0xf8a1a159
+.long 0x80898909,0x80898909
+.long 0x170d0d1a,0x170d0d1a
+.long 0xdabfbf65,0xdabfbf65
+.long 0x31e6e6d7,0x31e6e6d7
+.long 0xc6424284,0xc6424284
+.long 0xb86868d0,0xb86868d0
+.long 0xc3414182,0xc3414182
+.long 0xb0999929,0xb0999929
+.long 0x772d2d5a,0x772d2d5a
+.long 0x110f0f1e,0x110f0f1e
+.long 0xcbb0b07b,0xcbb0b07b
+.long 0xfc5454a8,0xfc5454a8
+.long 0xd6bbbb6d,0xd6bbbb6d
+.long 0x3a16162c,0x3a16162c
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.long 0x00000001, 0x00000002, 0x00000004, 0x00000008
+.long 0x00000010, 0x00000020, 0x00000040, 0x00000080
+.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
+.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
+.align 64
+.LAES_Td:
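+# Decryption tables (Td), same doubled-word layout; followed by copies
+# of the inverse S-box and the GF(2^8) masks.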
+.long 0x50a7f451,0x50a7f451
+.long 0x5365417e,0x5365417e
+.long 0xc3a4171a,0xc3a4171a
+.long 0x965e273a,0x965e273a
+.long 0xcb6bab3b,0xcb6bab3b
+.long 0xf1459d1f,0xf1459d1f
+.long 0xab58faac,0xab58faac
+.long 0x9303e34b,0x9303e34b
+.long 0x55fa3020,0x55fa3020
+.long 0xf66d76ad,0xf66d76ad
+.long 0x9176cc88,0x9176cc88
+.long 0x254c02f5,0x254c02f5
+.long 0xfcd7e54f,0xfcd7e54f
+.long 0xd7cb2ac5,0xd7cb2ac5
+.long 0x80443526,0x80443526
+.long 0x8fa362b5,0x8fa362b5
+.long 0x495ab1de,0x495ab1de
+.long 0x671bba25,0x671bba25
+.long 0x980eea45,0x980eea45
+.long 0xe1c0fe5d,0xe1c0fe5d
+.long 0x02752fc3,0x02752fc3
+.long 0x12f04c81,0x12f04c81
+.long 0xa397468d,0xa397468d
+.long 0xc6f9d36b,0xc6f9d36b
+.long 0xe75f8f03,0xe75f8f03
+.long 0x959c9215,0x959c9215
+.long 0xeb7a6dbf,0xeb7a6dbf
+.long 0xda595295,0xda595295
+.long 0x2d83bed4,0x2d83bed4
+.long 0xd3217458,0xd3217458
+.long 0x2969e049,0x2969e049
+.long 0x44c8c98e,0x44c8c98e
+.long 0x6a89c275,0x6a89c275
+.long 0x78798ef4,0x78798ef4
+.long 0x6b3e5899,0x6b3e5899
+.long 0xdd71b927,0xdd71b927
+.long 0xb64fe1be,0xb64fe1be
+.long 0x17ad88f0,0x17ad88f0
+.long 0x66ac20c9,0x66ac20c9
+.long 0xb43ace7d,0xb43ace7d
+.long 0x184adf63,0x184adf63
+.long 0x82311ae5,0x82311ae5
+.long 0x60335197,0x60335197
+.long 0x457f5362,0x457f5362
+.long 0xe07764b1,0xe07764b1
+.long 0x84ae6bbb,0x84ae6bbb
+.long 0x1ca081fe,0x1ca081fe
+.long 0x942b08f9,0x942b08f9
+.long 0x58684870,0x58684870
+.long 0x19fd458f,0x19fd458f
+.long 0x876cde94,0x876cde94
+.long 0xb7f87b52,0xb7f87b52
+.long 0x23d373ab,0x23d373ab
+.long 0xe2024b72,0xe2024b72
+.long 0x578f1fe3,0x578f1fe3
+.long 0x2aab5566,0x2aab5566
+.long 0x0728ebb2,0x0728ebb2
+.long 0x03c2b52f,0x03c2b52f
+.long 0x9a7bc586,0x9a7bc586
+.long 0xa50837d3,0xa50837d3
+.long 0xf2872830,0xf2872830
+.long 0xb2a5bf23,0xb2a5bf23
+.long 0xba6a0302,0xba6a0302
+.long 0x5c8216ed,0x5c8216ed
+.long 0x2b1ccf8a,0x2b1ccf8a
+.long 0x92b479a7,0x92b479a7
+.long 0xf0f207f3,0xf0f207f3
+.long 0xa1e2694e,0xa1e2694e
+.long 0xcdf4da65,0xcdf4da65
+.long 0xd5be0506,0xd5be0506
+.long 0x1f6234d1,0x1f6234d1
+.long 0x8afea6c4,0x8afea6c4
+.long 0x9d532e34,0x9d532e34
+.long 0xa055f3a2,0xa055f3a2
+.long 0x32e18a05,0x32e18a05
+.long 0x75ebf6a4,0x75ebf6a4
+.long 0x39ec830b,0x39ec830b
+.long 0xaaef6040,0xaaef6040
+.long 0x069f715e,0x069f715e
+.long 0x51106ebd,0x51106ebd
+.long 0xf98a213e,0xf98a213e
+.long 0x3d06dd96,0x3d06dd96
+.long 0xae053edd,0xae053edd
+.long 0x46bde64d,0x46bde64d
+.long 0xb58d5491,0xb58d5491
+.long 0x055dc471,0x055dc471
+.long 0x6fd40604,0x6fd40604
+.long 0xff155060,0xff155060
+.long 0x24fb9819,0x24fb9819
+.long 0x97e9bdd6,0x97e9bdd6
+.long 0xcc434089,0xcc434089
+.long 0x779ed967,0x779ed967
+.long 0xbd42e8b0,0xbd42e8b0
+.long 0x888b8907,0x888b8907
+.long 0x385b19e7,0x385b19e7
+.long 0xdbeec879,0xdbeec879
+.long 0x470a7ca1,0x470a7ca1
+.long 0xe90f427c,0xe90f427c
+.long 0xc91e84f8,0xc91e84f8
+.long 0x00000000,0x00000000
+.long 0x83868009,0x83868009
+.long 0x48ed2b32,0x48ed2b32
+.long 0xac70111e,0xac70111e
+.long 0x4e725a6c,0x4e725a6c
+.long 0xfbff0efd,0xfbff0efd
+.long 0x5638850f,0x5638850f
+.long 0x1ed5ae3d,0x1ed5ae3d
+.long 0x27392d36,0x27392d36
+.long 0x64d90f0a,0x64d90f0a
+.long 0x21a65c68,0x21a65c68
+.long 0xd1545b9b,0xd1545b9b
+.long 0x3a2e3624,0x3a2e3624
+.long 0xb1670a0c,0xb1670a0c
+.long 0x0fe75793,0x0fe75793
+.long 0xd296eeb4,0xd296eeb4
+.long 0x9e919b1b,0x9e919b1b
+.long 0x4fc5c080,0x4fc5c080
+.long 0xa220dc61,0xa220dc61
+.long 0x694b775a,0x694b775a
+.long 0x161a121c,0x161a121c
+.long 0x0aba93e2,0x0aba93e2
+.long 0xe52aa0c0,0xe52aa0c0
+.long 0x43e0223c,0x43e0223c
+.long 0x1d171b12,0x1d171b12
+.long 0x0b0d090e,0x0b0d090e
+.long 0xadc78bf2,0xadc78bf2
+.long 0xb9a8b62d,0xb9a8b62d
+.long 0xc8a91e14,0xc8a91e14
+.long 0x8519f157,0x8519f157
+.long 0x4c0775af,0x4c0775af
+.long 0xbbdd99ee,0xbbdd99ee
+.long 0xfd607fa3,0xfd607fa3
+.long 0x9f2601f7,0x9f2601f7
+.long 0xbcf5725c,0xbcf5725c
+.long 0xc53b6644,0xc53b6644
+.long 0x347efb5b,0x347efb5b
+.long 0x7629438b,0x7629438b
+.long 0xdcc623cb,0xdcc623cb
+.long 0x68fcedb6,0x68fcedb6
+.long 0x63f1e4b8,0x63f1e4b8
+.long 0xcadc31d7,0xcadc31d7
+.long 0x10856342,0x10856342
+.long 0x40229713,0x40229713
+.long 0x2011c684,0x2011c684
+.long 0x7d244a85,0x7d244a85
+.long 0xf83dbbd2,0xf83dbbd2
+.long 0x1132f9ae,0x1132f9ae
+.long 0x6da129c7,0x6da129c7
+.long 0x4b2f9e1d,0x4b2f9e1d
+.long 0xf330b2dc,0xf330b2dc
+.long 0xec52860d,0xec52860d
+.long 0xd0e3c177,0xd0e3c177
+.long 0x6c16b32b,0x6c16b32b
+.long 0x99b970a9,0x99b970a9
+.long 0xfa489411,0xfa489411
+.long 0x2264e947,0x2264e947
+.long 0xc48cfca8,0xc48cfca8
+.long 0x1a3ff0a0,0x1a3ff0a0
+.long 0xd82c7d56,0xd82c7d56
+.long 0xef903322,0xef903322
+.long 0xc74e4987,0xc74e4987
+.long 0xc1d138d9,0xc1d138d9
+.long 0xfea2ca8c,0xfea2ca8c
+.long 0x360bd498,0x360bd498
+.long 0xcf81f5a6,0xcf81f5a6
+.long 0x28de7aa5,0x28de7aa5
+.long 0x268eb7da,0x268eb7da
+.long 0xa4bfad3f,0xa4bfad3f
+.long 0xe49d3a2c,0xe49d3a2c
+.long 0x0d927850,0x0d927850
+.long 0x9bcc5f6a,0x9bcc5f6a
+.long 0x62467e54,0x62467e54
+.long 0xc2138df6,0xc2138df6
+.long 0xe8b8d890,0xe8b8d890
+.long 0x5ef7392e,0x5ef7392e
+.long 0xf5afc382,0xf5afc382
+.long 0xbe805d9f,0xbe805d9f
+.long 0x7c93d069,0x7c93d069
+.long 0xa92dd56f,0xa92dd56f
+.long 0xb31225cf,0xb31225cf
+.long 0x3b99acc8,0x3b99acc8
+.long 0xa77d1810,0xa77d1810
+.long 0x6e639ce8,0x6e639ce8
+.long 0x7bbb3bdb,0x7bbb3bdb
+.long 0x097826cd,0x097826cd
+.long 0xf418596e,0xf418596e
+.long 0x01b79aec,0x01b79aec
+.long 0xa89a4f83,0xa89a4f83
+.long 0x656e95e6,0x656e95e6
+.long 0x7ee6ffaa,0x7ee6ffaa
+.long 0x08cfbc21,0x08cfbc21
+.long 0xe6e815ef,0xe6e815ef
+.long 0xd99be7ba,0xd99be7ba
+.long 0xce366f4a,0xce366f4a
+.long 0xd4099fea,0xd4099fea
+.long 0xd67cb029,0xd67cb029
+.long 0xafb2a431,0xafb2a431
+.long 0x31233f2a,0x31233f2a
+.long 0x3094a5c6,0x3094a5c6
+.long 0xc066a235,0xc066a235
+.long 0x37bc4e74,0x37bc4e74
+.long 0xa6ca82fc,0xa6ca82fc
+.long 0xb0d090e0,0xb0d090e0
+.long 0x15d8a733,0x15d8a733
+.long 0x4a9804f1,0x4a9804f1
+.long 0xf7daec41,0xf7daec41
+.long 0x0e50cd7f,0x0e50cd7f
+.long 0x2ff69117,0x2ff69117
+.long 0x8dd64d76,0x8dd64d76
+.long 0x4db0ef43,0x4db0ef43
+.long 0x544daacc,0x544daacc
+.long 0xdf0496e4,0xdf0496e4
+.long 0xe3b5d19e,0xe3b5d19e
+.long 0x1b886a4c,0x1b886a4c
+.long 0xb81f2cc1,0xb81f2cc1
+.long 0x7f516546,0x7f516546
+.long 0x04ea5e9d,0x04ea5e9d
+.long 0x5d358c01,0x5d358c01
+.long 0x737487fa,0x737487fa
+.long 0x2e410bfb,0x2e410bfb
+.long 0x5a1d67b3,0x5a1d67b3
+.long 0x52d2db92,0x52d2db92
+.long 0x335610e9,0x335610e9
+.long 0x1347d66d,0x1347d66d
+.long 0x8c61d79a,0x8c61d79a
+.long 0x7a0ca137,0x7a0ca137
+.long 0x8e14f859,0x8e14f859
+.long 0x893c13eb,0x893c13eb
+.long 0xee27a9ce,0xee27a9ce
+.long 0x35c961b7,0x35c961b7
+.long 0xede51ce1,0xede51ce1
+.long 0x3cb1477a,0x3cb1477a
+.long 0x59dfd29c,0x59dfd29c
+.long 0x3f73f255,0x3f73f255
+.long 0x79ce1418,0x79ce1418
+.long 0xbf37c773,0xbf37c773
+.long 0xeacdf753,0xeacdf753
+.long 0x5baafd5f,0x5baafd5f
+.long 0x146f3ddf,0x146f3ddf
+.long 0x86db4478,0x86db4478
+.long 0x81f3afca,0x81f3afca
+.long 0x3ec468b9,0x3ec468b9
+.long 0x2c342438,0x2c342438
+.long 0x5f40a3c2,0x5f40a3c2
+.long 0x72c31d16,0x72c31d16
+.long 0x0c25e2bc,0x0c25e2bc
+.long 0x8b493c28,0x8b493c28
+.long 0x41950dff,0x41950dff
+.long 0x7101a839,0x7101a839
+.long 0xdeb30c08,0xdeb30c08
+.long 0x9ce4b4d8,0x9ce4b4d8
+.long 0x90c15664,0x90c15664
+.long 0x6184cb7b,0x6184cb7b
+.long 0x70b632d5,0x70b632d5
+.long 0x745c6c48,0x745c6c48
+.long 0x4257b8d0,0x4257b8d0
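+# Inverse S-box (Td4) as bytes, four copies, each closed by the
+# 0x80808080/0xfefefefe/0x1b1b1b1b masks for packed GF(2^8) doubling.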
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
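
The 256-byte table repeated four times above is the AES inverse S-box, and each copy is closed by the masks 0x80808080, 0xfefefefe and 0x1b1b1b1b. Those three constants are the usual ingredients of "xtime" (multiplication by x in GF(2^8)) applied to four packed bytes at once; a minimal C sketch of that trick, assuming this is how the decrypt path consumes them (the function name is illustrative, not OpenSSL's):

    #include <stdint.h>

    /* Multiply each byte of w by x in GF(2^8), all four lanes at once,
     * using the three mask constants stored after each S-box copy. */
    static uint32_t xtime4(uint32_t w)
    {
        uint32_t hi = w & 0x80808080;        /* lanes about to overflow */
        uint32_t r  = (w + w) & 0xfefefefe;  /* per-byte left shift     */
        /* each overflowing lane turns 0x80 into 0x7f, masked to 0x1b,
         * which folds the AES polynomial 0x11b back in */
        return r ^ ((hi - (hi >> 7)) & 0x1b1b1b1b);
    }
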
diff --git a/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s b/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
new file mode 100644
index 0000000000..2dbcffc59d
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
@@ -0,0 +1,172 @@
+.text
+
+
+.globl bn_mul_mont
+.type bn_mul_mont,@function
+.align 16
+bn_mul_mont:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+ movl %r9d,%r9d
+ leaq 2(%r9),%r10
+ movq %rsp,%r11
+ negq %r10
+ leaq (%rsp,%r10,8),%rsp
+ andq $-1024,%rsp
+
+ movq %r11,8(%rsp,%r9,8)
+.Lprologue:
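+# %r12 = bp, %r8 = n0 = -np[0]^-1 mod 2^64; %r14 indexes words of bp
+# (outer loop), %r15 indexes words of ap/np (inner loop).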
+ movq %rdx,%r12
+
+ movq (%r8),%r8
+
+ xorq %r14,%r14
+ xorq %r15,%r15
+
+ movq (%r12),%rbx
+ movq (%rsi),%rax
+ mulq %rbx
+ movq %rax,%r10
+ movq %rdx,%r11
+
+ imulq %r8,%rax
+ movq %rax,%rbp
+
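+# %rbp = m = tp[0]*n0 mod 2^64, chosen so that tp + m*np ends in a
+# zero word; the mulq below starts accumulating m*np.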
+ mulq (%rcx)
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq %rdx,%r13
+
+ leaq 1(%r15),%r15
+.L1st:
+ movq (%rsi,%r15,8),%rax
+ mulq %rbx
+ addq %r11,%rax
+ adcq $0,%rdx
+ movq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ movq %rdx,%r11
+
+ mulq %rbp
+ addq %r13,%rax
+ leaq 1(%r15),%r15
+ adcq $0,%rdx
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq %rax,-16(%rsp,%r15,8)
+ cmpq %r9,%r15
+ movq %rdx,%r13
+ jl .L1st
+
+ xorq %rdx,%rdx
+ addq %r11,%r13
+ adcq $0,%rdx
+ movq %r13,-8(%rsp,%r9,8)
+ movq %rdx,(%rsp,%r9,8)
+
+ leaq 1(%r14),%r14
+.align 4
+.Louter:
+ xorq %r15,%r15
+
+ movq (%r12,%r14,8),%rbx
+ movq (%rsi),%rax
+ mulq %rbx
+ addq (%rsp),%rax
+ adcq $0,%rdx
+ movq %rax,%r10
+ movq %rdx,%r11
+
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx,%r15,8)
+ addq %r10,%rax
+ movq 8(%rsp),%r10
+ adcq $0,%rdx
+ movq %rdx,%r13
+
+ leaq 1(%r15),%r15
+.align 4
+.Linner:
+ movq (%rsi,%r15,8),%rax
+ mulq %rbx
+ addq %r11,%rax
+ adcq $0,%rdx
+ addq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ adcq $0,%rdx
+ movq %rdx,%r11
+
+ mulq %rbp
+ addq %r13,%rax
+ leaq 1(%r15),%r15
+ adcq $0,%rdx
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq (%rsp,%r15,8),%r10
+ cmpq %r9,%r15
+ movq %rax,-16(%rsp,%r15,8)
+ movq %rdx,%r13
+ jl .Linner
+
+ xorq %rdx,%rdx
+ addq %r11,%r13
+ adcq $0,%rdx
+ addq %r10,%r13
+ adcq $0,%rdx
+ movq %r13,-8(%rsp,%r9,8)
+ movq %rdx,(%rsp,%r9,8)
+
+ leaq 1(%r14),%r14
+ cmpq %r9,%r14
+ jl .Louter
+
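+# tp now holds ap*bp*R^-1 mod np, possibly still >= np: compute tp-np
+# (.Lsub) and select tp or tp-np branch-free via pointer masks (.Lcopy).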
+ leaq (%rsp),%rsi
+ leaq -1(%r9),%r15
+
+ movq (%rsi),%rax
+ xorq %r14,%r14
+ jmp .Lsub
+.align 16
+.Lsub: sbbq (%rcx,%r14,8),%rax
+ movq %rax,(%rdi,%r14,8)
+ decq %r15
+ movq 8(%rsi,%r14,8),%rax
+ leaq 1(%r14),%r14
+ jge .Lsub
+
+ sbbq $0,%rax
+ andq %rax,%rsi
+ notq %rax
+ movq %rdi,%rcx
+ andq %rax,%rcx
+ leaq -1(%r9),%r15
+ orq %rcx,%rsi
+.align 16
+.Lcopy:
+ movq (%rsi,%r15,8),%rax
+ movq %rax,(%rdi,%r15,8)
+ movq %r14,(%rsp,%r15,8)
+ decq %r15
+ jge .Lcopy
+
+ movq 8(%rsp,%r9,8),%rsi
+ movq $1,%rax
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size bn_mul_mont,.-bn_mul_mont
+.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 16
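
bn_mul_mont above is word-serial Montgomery multiplication: it computes rp = ap*bp*2^(-64*num) mod np, with the first pass in .L1st, the remaining num-1 passes in .Louter/.Linner, and the conditional final subtraction in .Lsub/.Lcopy. A hedged C sketch of the same algorithm in CIOS form; it assumes GCC/Clang's unsigned __int128, and all names are illustrative, not OpenSSL's:

    #include <stdint.h>

    typedef unsigned __int128 u128;           /* GCC/Clang extension */

    /* rp = ap*bp*2^(-64*num) mod np.  n0 = -np[0]^-1 mod 2^64 is
     * precomputed by the caller (the value the asm loads via %r8). */
    static void mul_mont(uint64_t *rp, const uint64_t *ap,
                         const uint64_t *bp, const uint64_t *np,
                         uint64_t n0, int num)
    {
        uint64_t tp[num + 2];                 /* scratch; asm uses %rsp */
        for (int j = 0; j < num + 2; j++) tp[j] = 0;

        for (int i = 0; i < num; i++) {
            u128 c = 0;
            for (int j = 0; j < num; j++) {   /* tp += ap * bp[i]      */
                c += (u128)ap[j] * bp[i] + tp[j];
                tp[j] = (uint64_t)c;  c >>= 64;
            }
            c += tp[num];
            tp[num] = (uint64_t)c;  tp[num + 1] = (uint64_t)(c >> 64);

            uint64_t m = tp[0] * n0;          /* zeroes the low word   */
            c = (u128)m * np[0] + tp[0];
            c >>= 64;
            for (int j = 1; j < num; j++) {   /* tp = (tp+m*np)/2^64   */
                c += (u128)m * np[j] + tp[j];
                tp[j - 1] = (uint64_t)c;  c >>= 64;
            }
            c += tp[num];
            tp[num - 1] = (uint64_t)c;
            tp[num] = tp[num + 1] + (uint64_t)(c >> 64);
        }

        uint64_t borrow = 0;                  /* .Lsub: rp = tp - np   */
        for (int j = 0; j < num; j++) {
            u128 d = (u128)tp[j] - np[j] - borrow;
            rp[j] = (uint64_t)d;
            borrow = (uint64_t)(d >> 64) & 1;
        }
        if (borrow > tp[num])                 /* .Lcopy: undo if the   */
            for (int j = 0; j < num; j++)     /* subtraction underflowed */
                rp[j] = tp[j];
    }
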
diff --git a/deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s b/deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s
new file mode 100644
index 0000000000..3a5f4c4230
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/camellia/cmll-x86_64.s
@@ -0,0 +1,1844 @@
+.text
+
+
+
+.globl Camellia_EncryptBlock
+.type Camellia_EncryptBlock,@function
+.align 16
+Camellia_EncryptBlock:
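+# Derive the "grand rounds" count from the key size in %edi:
+# 128-keyBitLength sets carry for 192/256-bit keys, so the adc below
+# leaves 3 for 128-bit keys and 4 otherwise.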
+ movl $128,%eax
+ subl %edi,%eax
+ movl $3,%edi
+ adcl $0,%edi
+ jmp .Lenc_rounds
+.size Camellia_EncryptBlock,.-Camellia_EncryptBlock
+
+.globl Camellia_EncryptBlock_Rounds
+.type Camellia_EncryptBlock_Rounds,@function
+.align 16
+.Lenc_rounds:
+Camellia_EncryptBlock_Rounds:
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+.Lenc_prologue:
+
+
+ movq %rcx,%r13
+ movq %rdx,%r14
+
+ shll $6,%edi
+ leaq .LCamellia_SBOX(%rip),%rbp
+ leaq (%r14,%rdi,1),%r15
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ bswapl %r8d
+ movl 12(%rsi),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_encrypt
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+.Lenc_epilogue:
+ .byte 0xf3,0xc3
+.size Camellia_EncryptBlock_Rounds,.-Camellia_EncryptBlock_Rounds
+
+.type _x86_64_Camellia_encrypt,@function
+.align 16
+_x86_64_Camellia_encrypt:
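+# In: block in %r8d..%r11d, subkey pointer in %r14, end-of-schedule
+# sentinel in %r15, .LCamellia_SBOX base in %rbp.
+# Out: transformed block in %r8d..%r11d.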
+ xorl 0(%r14),%r9d
+ xorl 4(%r14),%r8d
+ xorl 8(%r14),%r11d
+ xorl 12(%r14),%r10d
+.align 16
+.Leloop:
+ movl 16(%r14),%ebx
+ movl 20(%r14),%eax
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 24(%r14),%ebx
+ movl 28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 32(%r14),%ebx
+ movl 36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 40(%r14),%ebx
+ movl 44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 48(%r14),%ebx
+ movl 52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 56(%r14),%ebx
+ movl 60(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 64(%r14),%ebx
+ movl 68(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ leaq 64(%r14),%r14
+ cmpq %r15,%r14
+ movl 8(%r14),%edx
+ movl 12(%r14),%ecx
+ je .Ledone
+
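+# FL/FL^-1 layer between 6-round groups: AND-rotate-XOR on one half,
+# OR-XOR on the other.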
+ andl %r8d,%eax
+ orl %r11d,%edx
+ roll $1,%eax
+ xorl %edx,%r10d
+ xorl %eax,%r9d
+ andl %r10d,%ecx
+ orl %r9d,%ebx
+ roll $1,%ecx
+ xorl %ebx,%r8d
+ xorl %ecx,%r11d
+ jmp .Leloop
+
+.align 16
+.Ledone:
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+
+ movl %eax,%r8d
+ movl %ebx,%r9d
+ movl %ecx,%r10d
+ movl %edx,%r11d
+
+.byte 0xf3,0xc3
+
+.size _x86_64_Camellia_encrypt,.-_x86_64_Camellia_encrypt
+
+
+.globl Camellia_DecryptBlock
+.type Camellia_DecryptBlock,@function
+.align 16
+Camellia_DecryptBlock:
+ movl $128,%eax
+ subl %edi,%eax
+ movl $3,%edi
+ adcl $0,%edi
+ jmp .Ldec_rounds
+.size Camellia_DecryptBlock,.-Camellia_DecryptBlock
+
+.globl Camellia_DecryptBlock_Rounds
+.type Camellia_DecryptBlock_Rounds,@function
+.align 16
+.Ldec_rounds:
+Camellia_DecryptBlock_Rounds:
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+.Ldec_prologue:
+
+
+ movq %rcx,%r13
+ movq %rdx,%r15
+
+ shll $6,%edi
+ leaq .LCamellia_SBOX(%rip),%rbp
+ leaq (%r15,%rdi,1),%r14
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ bswapl %r8d
+ movl 12(%rsi),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_decrypt
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+.Ldec_epilogue:
+ .byte 0xf3,0xc3
+.size Camellia_DecryptBlock_Rounds,.-Camellia_DecryptBlock_Rounds
+
+.type _x86_64_Camellia_decrypt,@function
+.align 16
+_x86_64_Camellia_decrypt:
+ xorl 0(%r14),%r9d
+ xorl 4(%r14),%r8d
+ xorl 8(%r14),%r11d
+ xorl 12(%r14),%r10d
+.align 16
+.Ldloop:
+ movl -8(%r14),%ebx
+ movl -4(%r14),%eax
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -16(%r14),%ebx
+ movl -12(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -24(%r14),%ebx
+ movl -20(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -32(%r14),%ebx
+ movl -28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -40(%r14),%ebx
+ movl -36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -48(%r14),%ebx
+ movl -44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -56(%r14),%ebx
+ movl -52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ leaq -64(%r14),%r14
+ cmpq %r15,%r14
+ movl 0(%r14),%edx
+ movl 4(%r14),%ecx
+ je .Lddone
+
+ andl %r8d,%eax
+ orl %r11d,%edx
+ roll $1,%eax
+ xorl %edx,%r10d
+ xorl %eax,%r9d
+ andl %r10d,%ecx
+ orl %r9d,%ebx
+ roll $1,%ecx
+ xorl %ebx,%r8d
+ xorl %ecx,%r11d
+
+ jmp .Ldloop
+
+.align 16
+.Lddone:
+ xorl %r10d,%ecx
+ xorl %r11d,%edx
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+
+ movl %ecx,%r8d
+ movl %edx,%r9d
+ movl %eax,%r10d
+ movl %ebx,%r11d
+
+.byte 0xf3,0xc3
+
+.size _x86_64_Camellia_decrypt,.-_x86_64_Camellia_decrypt
+.globl Camellia_Ekeygen
+.type Camellia_Ekeygen,@function
+.align 16
+Camellia_Ekeygen:
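+# Camellia_Ekeygen(keyBitLength, rawKey, keyTable): %rdi = 128/192/256,
+# %rsi = key bytes, %rdx = subkey table out; returns the grand-rounds
+# count (3 or 4) in %eax.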
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+.Lkey_prologue:
+
+ movq %rdi,%r15
+ movq %rdx,%r13
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ movl 12(%rsi),%r11d
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+ movl %r9d,0(%r13)
+ movl %r8d,4(%r13)
+ movl %r11d,8(%r13)
+ movl %r10d,12(%r13)
+ cmpq $128,%r15
+ je .L1st128
+
+ movl 16(%rsi),%r8d
+ movl 20(%rsi),%r9d
+ cmpq $192,%r15
+ je .L1st192
+ movl 24(%rsi),%r10d
+ movl 28(%rsi),%r11d
+ jmp .L1st256
+.L1st192:
+ movl %r8d,%r10d
+ movl %r9d,%r11d
+ notl %r10d
+ notl %r11d
+.L1st256:
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+ movl %r9d,32(%r13)
+ movl %r8d,36(%r13)
+ movl %r11d,40(%r13)
+ movl %r10d,44(%r13)
+ xorl 0(%r13),%r9d
+ xorl 4(%r13),%r8d
+ xorl 8(%r13),%r11d
+ xorl 12(%r13),%r10d
+
+.L1st128:
+ leaq .LCamellia_SIGMA(%rip),%r14
+ leaq .LCamellia_SBOX(%rip),%rbp
+
+ movl 0(%r14),%ebx
+ movl 4(%r14),%eax
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 8(%r14),%ebx
+ movl 12(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 16(%r14),%ebx
+ movl 20(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl 0(%r13),%r9d
+ xorl 4(%r13),%r8d
+ xorl 8(%r13),%r11d
+ xorl 12(%r13),%r10d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 24(%r14),%ebx
+ movl 28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 32(%r14),%ebx
+ movl 36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ cmpq $128,%r15
+ jne .L2nd256
+
+ leaq 128(%r13),%r13
+ shlq $32,%r8
+ shlq $32,%r10
+ orq %r9,%r8
+ orq %r11,%r10
+ movq -128(%r13),%rax
+ movq -120(%r13),%rbx
+ movq %r8,-112(%r13)
+ movq %r10,-104(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rax,-96(%r13)
+ movq %rbx,-88(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-80(%r13)
+ movq %r10,-72(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-64(%r13)
+ movq %r10,-56(%r13)
+ movq %rax,%r11
+ shlq $30,%rax
+ movq %rbx,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%rax
+ shlq $30,%rbx
+ orq %r11,%rbx
+ movq %rax,-48(%r13)
+ movq %rbx,-40(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-32(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rbx,-24(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-16(%r13)
+ movq %r10,-8(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,0(%r13)
+ movq %rbx,8(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,16(%r13)
+ movq %rbx,24(%r13)
+ movq %r8,%r11
+ shlq $34,%r8
+ movq %r10,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%r8
+ shlq $34,%r10
+ orq %r11,%r10
+ movq %r8,32(%r13)
+ movq %r10,40(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,48(%r13)
+ movq %rbx,56(%r13)
+ movq %r8,%r11
+ shlq $17,%r8
+ movq %r10,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%r8
+ shlq $17,%r10
+ orq %r11,%r10
+ movq %r8,64(%r13)
+ movq %r10,72(%r13)
+ movl $3,%eax
+ jmp .Ldone
+.align 16
+.L2nd256:
+ movl %r9d,48(%r13)
+ movl %r8d,52(%r13)
+ movl %r11d,56(%r13)
+ movl %r10d,60(%r13)
+ xorl 32(%r13),%r9d
+ xorl 36(%r13),%r8d
+ xorl 40(%r13),%r11d
+ xorl 44(%r13),%r10d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 40(%r14),%ebx
+ movl 44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 48(%r14),%ebx
+ movl 52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ movq 0(%r13),%rax
+ movq 8(%r13),%rbx
+ movq 32(%r13),%rcx
+ movq 40(%r13),%rdx
+ movq 48(%r13),%r14
+ movq 56(%r13),%r15
+ leaq 128(%r13),%r13
+ shlq $32,%r8
+ shlq $32,%r10
+ orq %r9,%r8
+ orq %r11,%r10
+ movq %r8,-112(%r13)
+ movq %r10,-104(%r13)
+ movq %rcx,%r11
+ shlq $15,%rcx
+ movq %rdx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rcx
+ shlq $15,%rdx
+ orq %r11,%rdx
+ movq %rcx,-96(%r13)
+ movq %rdx,-88(%r13)
+ movq %r14,%r11
+ shlq $15,%r14
+ movq %r15,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r14
+ shlq $15,%r15
+ orq %r11,%r15
+ movq %r14,-80(%r13)
+ movq %r15,-72(%r13)
+ movq %rcx,%r11
+ shlq $15,%rcx
+ movq %rdx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rcx
+ shlq $15,%rdx
+ orq %r11,%rdx
+ movq %rcx,-64(%r13)
+ movq %rdx,-56(%r13)
+ movq %r8,%r11
+ shlq $30,%r8
+ movq %r10,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r8
+ shlq $30,%r10
+ orq %r11,%r10
+ movq %r8,-48(%r13)
+ movq %r10,-40(%r13)
+ movq %rax,%r11
+ shlq $45,%rax
+ movq %rbx,%r9
+ shrq $19,%r9
+ shrq $19,%r11
+ orq %r9,%rax
+ shlq $45,%rbx
+ orq %r11,%rbx
+ movq %rax,-32(%r13)
+ movq %rbx,-24(%r13)
+ movq %r14,%r11
+ shlq $30,%r14
+ movq %r15,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r14
+ shlq $30,%r15
+ orq %r11,%r15
+ movq %r14,-16(%r13)
+ movq %r15,-8(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rax,0(%r13)
+ movq %rbx,8(%r13)
+ movq %rcx,%r11
+ shlq $30,%rcx
+ movq %rdx,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%rcx
+ shlq $30,%rdx
+ orq %r11,%rdx
+ movq %rcx,16(%r13)
+ movq %rdx,24(%r13)
+ movq %r8,%r11
+ shlq $30,%r8
+ movq %r10,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r8
+ shlq $30,%r10
+ orq %r11,%r10
+ movq %r8,32(%r13)
+ movq %r10,40(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,48(%r13)
+ movq %rbx,56(%r13)
+ movq %r14,%r11
+ shlq $32,%r14
+ movq %r15,%r9
+ shrq $32,%r9
+ shrq $32,%r11
+ orq %r9,%r14
+ shlq $32,%r15
+ orq %r11,%r15
+ movq %r14,64(%r13)
+ movq %r15,72(%r13)
+ movq %rcx,%r11
+ shlq $34,%rcx
+ movq %rdx,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%rcx
+ shlq $34,%rdx
+ orq %r11,%rdx
+ movq %rcx,80(%r13)
+ movq %rdx,88(%r13)
+ movq %r14,%r11
+ shlq $17,%r14
+ movq %r15,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%r14
+ shlq $17,%r15
+ orq %r11,%r15
+ movq %r14,96(%r13)
+ movq %r15,104(%r13)
+ movq %rax,%r11
+ shlq $34,%rax
+ movq %rbx,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%rax
+ shlq $34,%rbx
+ orq %r11,%rbx
+ movq %rax,112(%r13)
+ movq %rbx,120(%r13)
+ movq %r8,%r11
+ shlq $51,%r8
+ movq %r10,%r9
+ shrq $13,%r9
+ shrq $13,%r11
+ orq %r9,%r8
+ shlq $51,%r10
+ orq %r11,%r10
+ movq %r8,128(%r13)
+ movq %r10,136(%r13)
+ movl $4,%eax
+.Ldone:
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+.Lkey_epilogue:
+ .byte 0xf3,0xc3
+.size Camellia_Ekeygen,.-Camellia_Ekeygen
+.align 64
+.LCamellia_SIGMA:
+.long 0x3bcc908b, 0xa09e667f, 0x4caa73b2, 0xb67ae858
+.long 0xe94f82be, 0xc6ef372f, 0xf1d36f1c, 0x54ff53a5
+.long 0xde682d1d, 0x10e527fa, 0xb3e6c1fd, 0xb05688c2
+.long 0, 0, 0, 0
+.LCamellia_SBOX:
+.long 0x70707000,0x70700070
+.long 0x82828200,0x2c2c002c
+.long 0x2c2c2c00,0xb3b300b3
+.long 0xececec00,0xc0c000c0
+.long 0xb3b3b300,0xe4e400e4
+.long 0x27272700,0x57570057
+.long 0xc0c0c000,0xeaea00ea
+.long 0xe5e5e500,0xaeae00ae
+.long 0xe4e4e400,0x23230023
+.long 0x85858500,0x6b6b006b
+.long 0x57575700,0x45450045
+.long 0x35353500,0xa5a500a5
+.long 0xeaeaea00,0xeded00ed
+.long 0x0c0c0c00,0x4f4f004f
+.long 0xaeaeae00,0x1d1d001d
+.long 0x41414100,0x92920092
+.long 0x23232300,0x86860086
+.long 0xefefef00,0xafaf00af
+.long 0x6b6b6b00,0x7c7c007c
+.long 0x93939300,0x1f1f001f
+.long 0x45454500,0x3e3e003e
+.long 0x19191900,0xdcdc00dc
+.long 0xa5a5a500,0x5e5e005e
+.long 0x21212100,0x0b0b000b
+.long 0xededed00,0xa6a600a6
+.long 0x0e0e0e00,0x39390039
+.long 0x4f4f4f00,0xd5d500d5
+.long 0x4e4e4e00,0x5d5d005d
+.long 0x1d1d1d00,0xd9d900d9
+.long 0x65656500,0x5a5a005a
+.long 0x92929200,0x51510051
+.long 0xbdbdbd00,0x6c6c006c
+.long 0x86868600,0x8b8b008b
+.long 0xb8b8b800,0x9a9a009a
+.long 0xafafaf00,0xfbfb00fb
+.long 0x8f8f8f00,0xb0b000b0
+.long 0x7c7c7c00,0x74740074
+.long 0xebebeb00,0x2b2b002b
+.long 0x1f1f1f00,0xf0f000f0
+.long 0xcecece00,0x84840084
+.long 0x3e3e3e00,0xdfdf00df
+.long 0x30303000,0xcbcb00cb
+.long 0xdcdcdc00,0x34340034
+.long 0x5f5f5f00,0x76760076
+.long 0x5e5e5e00,0x6d6d006d
+.long 0xc5c5c500,0xa9a900a9
+.long 0x0b0b0b00,0xd1d100d1
+.long 0x1a1a1a00,0x04040004
+.long 0xa6a6a600,0x14140014
+.long 0xe1e1e100,0x3a3a003a
+.long 0x39393900,0xdede00de
+.long 0xcacaca00,0x11110011
+.long 0xd5d5d500,0x32320032
+.long 0x47474700,0x9c9c009c
+.long 0x5d5d5d00,0x53530053
+.long 0x3d3d3d00,0xf2f200f2
+.long 0xd9d9d900,0xfefe00fe
+.long 0x01010100,0xcfcf00cf
+.long 0x5a5a5a00,0xc3c300c3
+.long 0xd6d6d600,0x7a7a007a
+.long 0x51515100,0x24240024
+.long 0x56565600,0xe8e800e8
+.long 0x6c6c6c00,0x60600060
+.long 0x4d4d4d00,0x69690069
+.long 0x8b8b8b00,0xaaaa00aa
+.long 0x0d0d0d00,0xa0a000a0
+.long 0x9a9a9a00,0xa1a100a1
+.long 0x66666600,0x62620062
+.long 0xfbfbfb00,0x54540054
+.long 0xcccccc00,0x1e1e001e
+.long 0xb0b0b000,0xe0e000e0
+.long 0x2d2d2d00,0x64640064
+.long 0x74747400,0x10100010
+.long 0x12121200,0x00000000
+.long 0x2b2b2b00,0xa3a300a3
+.long 0x20202000,0x75750075
+.long 0xf0f0f000,0x8a8a008a
+.long 0xb1b1b100,0xe6e600e6
+.long 0x84848400,0x09090009
+.long 0x99999900,0xdddd00dd
+.long 0xdfdfdf00,0x87870087
+.long 0x4c4c4c00,0x83830083
+.long 0xcbcbcb00,0xcdcd00cd
+.long 0xc2c2c200,0x90900090
+.long 0x34343400,0x73730073
+.long 0x7e7e7e00,0xf6f600f6
+.long 0x76767600,0x9d9d009d
+.long 0x05050500,0xbfbf00bf
+.long 0x6d6d6d00,0x52520052
+.long 0xb7b7b700,0xd8d800d8
+.long 0xa9a9a900,0xc8c800c8
+.long 0x31313100,0xc6c600c6
+.long 0xd1d1d100,0x81810081
+.long 0x17171700,0x6f6f006f
+.long 0x04040400,0x13130013
+.long 0xd7d7d700,0x63630063
+.long 0x14141400,0xe9e900e9
+.long 0x58585800,0xa7a700a7
+.long 0x3a3a3a00,0x9f9f009f
+.long 0x61616100,0xbcbc00bc
+.long 0xdedede00,0x29290029
+.long 0x1b1b1b00,0xf9f900f9
+.long 0x11111100,0x2f2f002f
+.long 0x1c1c1c00,0xb4b400b4
+.long 0x32323200,0x78780078
+.long 0x0f0f0f00,0x06060006
+.long 0x9c9c9c00,0xe7e700e7
+.long 0x16161600,0x71710071
+.long 0x53535300,0xd4d400d4
+.long 0x18181800,0xabab00ab
+.long 0xf2f2f200,0x88880088
+.long 0x22222200,0x8d8d008d
+.long 0xfefefe00,0x72720072
+.long 0x44444400,0xb9b900b9
+.long 0xcfcfcf00,0xf8f800f8
+.long 0xb2b2b200,0xacac00ac
+.long 0xc3c3c300,0x36360036
+.long 0xb5b5b500,0x2a2a002a
+.long 0x7a7a7a00,0x3c3c003c
+.long 0x91919100,0xf1f100f1
+.long 0x24242400,0x40400040
+.long 0x08080800,0xd3d300d3
+.long 0xe8e8e800,0xbbbb00bb
+.long 0xa8a8a800,0x43430043
+.long 0x60606000,0x15150015
+.long 0xfcfcfc00,0xadad00ad
+.long 0x69696900,0x77770077
+.long 0x50505000,0x80800080
+.long 0xaaaaaa00,0x82820082
+.long 0xd0d0d000,0xecec00ec
+.long 0xa0a0a000,0x27270027
+.long 0x7d7d7d00,0xe5e500e5
+.long 0xa1a1a100,0x85850085
+.long 0x89898900,0x35350035
+.long 0x62626200,0x0c0c000c
+.long 0x97979700,0x41410041
+.long 0x54545400,0xefef00ef
+.long 0x5b5b5b00,0x93930093
+.long 0x1e1e1e00,0x19190019
+.long 0x95959500,0x21210021
+.long 0xe0e0e000,0x0e0e000e
+.long 0xffffff00,0x4e4e004e
+.long 0x64646400,0x65650065
+.long 0xd2d2d200,0xbdbd00bd
+.long 0x10101000,0xb8b800b8
+.long 0xc4c4c400,0x8f8f008f
+.long 0x00000000,0xebeb00eb
+.long 0x48484800,0xcece00ce
+.long 0xa3a3a300,0x30300030
+.long 0xf7f7f700,0x5f5f005f
+.long 0x75757500,0xc5c500c5
+.long 0xdbdbdb00,0x1a1a001a
+.long 0x8a8a8a00,0xe1e100e1
+.long 0x03030300,0xcaca00ca
+.long 0xe6e6e600,0x47470047
+.long 0xdadada00,0x3d3d003d
+.long 0x09090900,0x01010001
+.long 0x3f3f3f00,0xd6d600d6
+.long 0xdddddd00,0x56560056
+.long 0x94949400,0x4d4d004d
+.long 0x87878700,0x0d0d000d
+.long 0x5c5c5c00,0x66660066
+.long 0x83838300,0xcccc00cc
+.long 0x02020200,0x2d2d002d
+.long 0xcdcdcd00,0x12120012
+.long 0x4a4a4a00,0x20200020
+.long 0x90909000,0xb1b100b1
+.long 0x33333300,0x99990099
+.long 0x73737300,0x4c4c004c
+.long 0x67676700,0xc2c200c2
+.long 0xf6f6f600,0x7e7e007e
+.long 0xf3f3f300,0x05050005
+.long 0x9d9d9d00,0xb7b700b7
+.long 0x7f7f7f00,0x31310031
+.long 0xbfbfbf00,0x17170017
+.long 0xe2e2e200,0xd7d700d7
+.long 0x52525200,0x58580058
+.long 0x9b9b9b00,0x61610061
+.long 0xd8d8d800,0x1b1b001b
+.long 0x26262600,0x1c1c001c
+.long 0xc8c8c800,0x0f0f000f
+.long 0x37373700,0x16160016
+.long 0xc6c6c600,0x18180018
+.long 0x3b3b3b00,0x22220022
+.long 0x81818100,0x44440044
+.long 0x96969600,0xb2b200b2
+.long 0x6f6f6f00,0xb5b500b5
+.long 0x4b4b4b00,0x91910091
+.long 0x13131300,0x08080008
+.long 0xbebebe00,0xa8a800a8
+.long 0x63636300,0xfcfc00fc
+.long 0x2e2e2e00,0x50500050
+.long 0xe9e9e900,0xd0d000d0
+.long 0x79797900,0x7d7d007d
+.long 0xa7a7a700,0x89890089
+.long 0x8c8c8c00,0x97970097
+.long 0x9f9f9f00,0x5b5b005b
+.long 0x6e6e6e00,0x95950095
+.long 0xbcbcbc00,0xffff00ff
+.long 0x8e8e8e00,0xd2d200d2
+.long 0x29292900,0xc4c400c4
+.long 0xf5f5f500,0x48480048
+.long 0xf9f9f900,0xf7f700f7
+.long 0xb6b6b600,0xdbdb00db
+.long 0x2f2f2f00,0x03030003
+.long 0xfdfdfd00,0xdada00da
+.long 0xb4b4b400,0x3f3f003f
+.long 0x59595900,0x94940094
+.long 0x78787800,0x5c5c005c
+.long 0x98989800,0x02020002
+.long 0x06060600,0x4a4a004a
+.long 0x6a6a6a00,0x33330033
+.long 0xe7e7e700,0x67670067
+.long 0x46464600,0xf3f300f3
+.long 0x71717100,0x7f7f007f
+.long 0xbababa00,0xe2e200e2
+.long 0xd4d4d400,0x9b9b009b
+.long 0x25252500,0x26260026
+.long 0xababab00,0x37370037
+.long 0x42424200,0x3b3b003b
+.long 0x88888800,0x96960096
+.long 0xa2a2a200,0x4b4b004b
+.long 0x8d8d8d00,0xbebe00be
+.long 0xfafafa00,0x2e2e002e
+.long 0x72727200,0x79790079
+.long 0x07070700,0x8c8c008c
+.long 0xb9b9b900,0x6e6e006e
+.long 0x55555500,0x8e8e008e
+.long 0xf8f8f800,0xf5f500f5
+.long 0xeeeeee00,0xb6b600b6
+.long 0xacacac00,0xfdfd00fd
+.long 0x0a0a0a00,0x59590059
+.long 0x36363600,0x98980098
+.long 0x49494900,0x6a6a006a
+.long 0x2a2a2a00,0x46460046
+.long 0x68686800,0xbaba00ba
+.long 0x3c3c3c00,0x25250025
+.long 0x38383800,0x42420042
+.long 0xf1f1f100,0xa2a200a2
+.long 0xa4a4a400,0xfafa00fa
+.long 0x40404000,0x07070007
+.long 0x28282800,0x55550055
+.long 0xd3d3d300,0xeeee00ee
+.long 0x7b7b7b00,0x0a0a000a
+.long 0xbbbbbb00,0x49490049
+.long 0xc9c9c900,0x68680068
+.long 0x43434300,0x38380038
+.long 0xc1c1c100,0xa4a400a4
+.long 0x15151500,0x28280028
+.long 0xe3e3e300,0x7b7b007b
+.long 0xadadad00,0xc9c900c9
+.long 0xf4f4f400,0xc1c100c1
+.long 0x77777700,0xe3e300e3
+.long 0xc7c7c700,0xf4f400f4
+.long 0x80808000,0xc7c700c7
+.long 0x9e9e9e00,0x9e9e009e
+.long 0x00e0e0e0,0x38003838
+.long 0x00050505,0x41004141
+.long 0x00585858,0x16001616
+.long 0x00d9d9d9,0x76007676
+.long 0x00676767,0xd900d9d9
+.long 0x004e4e4e,0x93009393
+.long 0x00818181,0x60006060
+.long 0x00cbcbcb,0xf200f2f2
+.long 0x00c9c9c9,0x72007272
+.long 0x000b0b0b,0xc200c2c2
+.long 0x00aeaeae,0xab00abab
+.long 0x006a6a6a,0x9a009a9a
+.long 0x00d5d5d5,0x75007575
+.long 0x00181818,0x06000606
+.long 0x005d5d5d,0x57005757
+.long 0x00828282,0xa000a0a0
+.long 0x00464646,0x91009191
+.long 0x00dfdfdf,0xf700f7f7
+.long 0x00d6d6d6,0xb500b5b5
+.long 0x00272727,0xc900c9c9
+.long 0x008a8a8a,0xa200a2a2
+.long 0x00323232,0x8c008c8c
+.long 0x004b4b4b,0xd200d2d2
+.long 0x00424242,0x90009090
+.long 0x00dbdbdb,0xf600f6f6
+.long 0x001c1c1c,0x07000707
+.long 0x009e9e9e,0xa700a7a7
+.long 0x009c9c9c,0x27002727
+.long 0x003a3a3a,0x8e008e8e
+.long 0x00cacaca,0xb200b2b2
+.long 0x00252525,0x49004949
+.long 0x007b7b7b,0xde00dede
+.long 0x000d0d0d,0x43004343
+.long 0x00717171,0x5c005c5c
+.long 0x005f5f5f,0xd700d7d7
+.long 0x001f1f1f,0xc700c7c7
+.long 0x00f8f8f8,0x3e003e3e
+.long 0x00d7d7d7,0xf500f5f5
+.long 0x003e3e3e,0x8f008f8f
+.long 0x009d9d9d,0x67006767
+.long 0x007c7c7c,0x1f001f1f
+.long 0x00606060,0x18001818
+.long 0x00b9b9b9,0x6e006e6e
+.long 0x00bebebe,0xaf00afaf
+.long 0x00bcbcbc,0x2f002f2f
+.long 0x008b8b8b,0xe200e2e2
+.long 0x00161616,0x85008585
+.long 0x00343434,0x0d000d0d
+.long 0x004d4d4d,0x53005353
+.long 0x00c3c3c3,0xf000f0f0
+.long 0x00727272,0x9c009c9c
+.long 0x00959595,0x65006565
+.long 0x00ababab,0xea00eaea
+.long 0x008e8e8e,0xa300a3a3
+.long 0x00bababa,0xae00aeae
+.long 0x007a7a7a,0x9e009e9e
+.long 0x00b3b3b3,0xec00ecec
+.long 0x00020202,0x80008080
+.long 0x00b4b4b4,0x2d002d2d
+.long 0x00adadad,0x6b006b6b
+.long 0x00a2a2a2,0xa800a8a8
+.long 0x00acacac,0x2b002b2b
+.long 0x00d8d8d8,0x36003636
+.long 0x009a9a9a,0xa600a6a6
+.long 0x00171717,0xc500c5c5
+.long 0x001a1a1a,0x86008686
+.long 0x00353535,0x4d004d4d
+.long 0x00cccccc,0x33003333
+.long 0x00f7f7f7,0xfd00fdfd
+.long 0x00999999,0x66006666
+.long 0x00616161,0x58005858
+.long 0x005a5a5a,0x96009696
+.long 0x00e8e8e8,0x3a003a3a
+.long 0x00242424,0x09000909
+.long 0x00565656,0x95009595
+.long 0x00404040,0x10001010
+.long 0x00e1e1e1,0x78007878
+.long 0x00636363,0xd800d8d8
+.long 0x00090909,0x42004242
+.long 0x00333333,0xcc00cccc
+.long 0x00bfbfbf,0xef00efef
+.long 0x00989898,0x26002626
+.long 0x00979797,0xe500e5e5
+.long 0x00858585,0x61006161
+.long 0x00686868,0x1a001a1a
+.long 0x00fcfcfc,0x3f003f3f
+.long 0x00ececec,0x3b003b3b
+.long 0x000a0a0a,0x82008282
+.long 0x00dadada,0xb600b6b6
+.long 0x006f6f6f,0xdb00dbdb
+.long 0x00535353,0xd400d4d4
+.long 0x00626262,0x98009898
+.long 0x00a3a3a3,0xe800e8e8
+.long 0x002e2e2e,0x8b008b8b
+.long 0x00080808,0x02000202
+.long 0x00afafaf,0xeb00ebeb
+.long 0x00282828,0x0a000a0a
+.long 0x00b0b0b0,0x2c002c2c
+.long 0x00747474,0x1d001d1d
+.long 0x00c2c2c2,0xb000b0b0
+.long 0x00bdbdbd,0x6f006f6f
+.long 0x00363636,0x8d008d8d
+.long 0x00222222,0x88008888
+.long 0x00383838,0x0e000e0e
+.long 0x00646464,0x19001919
+.long 0x001e1e1e,0x87008787
+.long 0x00393939,0x4e004e4e
+.long 0x002c2c2c,0x0b000b0b
+.long 0x00a6a6a6,0xa900a9a9
+.long 0x00303030,0x0c000c0c
+.long 0x00e5e5e5,0x79007979
+.long 0x00444444,0x11001111
+.long 0x00fdfdfd,0x7f007f7f
+.long 0x00888888,0x22002222
+.long 0x009f9f9f,0xe700e7e7
+.long 0x00656565,0x59005959
+.long 0x00878787,0xe100e1e1
+.long 0x006b6b6b,0xda00dada
+.long 0x00f4f4f4,0x3d003d3d
+.long 0x00232323,0xc800c8c8
+.long 0x00484848,0x12001212
+.long 0x00101010,0x04000404
+.long 0x00d1d1d1,0x74007474
+.long 0x00515151,0x54005454
+.long 0x00c0c0c0,0x30003030
+.long 0x00f9f9f9,0x7e007e7e
+.long 0x00d2d2d2,0xb400b4b4
+.long 0x00a0a0a0,0x28002828
+.long 0x00555555,0x55005555
+.long 0x00a1a1a1,0x68006868
+.long 0x00414141,0x50005050
+.long 0x00fafafa,0xbe00bebe
+.long 0x00434343,0xd000d0d0
+.long 0x00131313,0xc400c4c4
+.long 0x00c4c4c4,0x31003131
+.long 0x002f2f2f,0xcb00cbcb
+.long 0x00a8a8a8,0x2a002a2a
+.long 0x00b6b6b6,0xad00adad
+.long 0x003c3c3c,0x0f000f0f
+.long 0x002b2b2b,0xca00caca
+.long 0x00c1c1c1,0x70007070
+.long 0x00ffffff,0xff00ffff
+.long 0x00c8c8c8,0x32003232
+.long 0x00a5a5a5,0x69006969
+.long 0x00202020,0x08000808
+.long 0x00898989,0x62006262
+.long 0x00000000,0x00000000
+.long 0x00909090,0x24002424
+.long 0x00474747,0xd100d1d1
+.long 0x00efefef,0xfb00fbfb
+.long 0x00eaeaea,0xba00baba
+.long 0x00b7b7b7,0xed00eded
+.long 0x00151515,0x45004545
+.long 0x00060606,0x81008181
+.long 0x00cdcdcd,0x73007373
+.long 0x00b5b5b5,0x6d006d6d
+.long 0x00121212,0x84008484
+.long 0x007e7e7e,0x9f009f9f
+.long 0x00bbbbbb,0xee00eeee
+.long 0x00292929,0x4a004a4a
+.long 0x000f0f0f,0xc300c3c3
+.long 0x00b8b8b8,0x2e002e2e
+.long 0x00070707,0xc100c1c1
+.long 0x00040404,0x01000101
+.long 0x009b9b9b,0xe600e6e6
+.long 0x00949494,0x25002525
+.long 0x00212121,0x48004848
+.long 0x00666666,0x99009999
+.long 0x00e6e6e6,0xb900b9b9
+.long 0x00cecece,0xb300b3b3
+.long 0x00ededed,0x7b007b7b
+.long 0x00e7e7e7,0xf900f9f9
+.long 0x003b3b3b,0xce00cece
+.long 0x00fefefe,0xbf00bfbf
+.long 0x007f7f7f,0xdf00dfdf
+.long 0x00c5c5c5,0x71007171
+.long 0x00a4a4a4,0x29002929
+.long 0x00373737,0xcd00cdcd
+.long 0x00b1b1b1,0x6c006c6c
+.long 0x004c4c4c,0x13001313
+.long 0x00919191,0x64006464
+.long 0x006e6e6e,0x9b009b9b
+.long 0x008d8d8d,0x63006363
+.long 0x00767676,0x9d009d9d
+.long 0x00030303,0xc000c0c0
+.long 0x002d2d2d,0x4b004b4b
+.long 0x00dedede,0xb700b7b7
+.long 0x00969696,0xa500a5a5
+.long 0x00262626,0x89008989
+.long 0x007d7d7d,0x5f005f5f
+.long 0x00c6c6c6,0xb100b1b1
+.long 0x005c5c5c,0x17001717
+.long 0x00d3d3d3,0xf400f4f4
+.long 0x00f2f2f2,0xbc00bcbc
+.long 0x004f4f4f,0xd300d3d3
+.long 0x00191919,0x46004646
+.long 0x003f3f3f,0xcf00cfcf
+.long 0x00dcdcdc,0x37003737
+.long 0x00797979,0x5e005e5e
+.long 0x001d1d1d,0x47004747
+.long 0x00525252,0x94009494
+.long 0x00ebebeb,0xfa00fafa
+.long 0x00f3f3f3,0xfc00fcfc
+.long 0x006d6d6d,0x5b005b5b
+.long 0x005e5e5e,0x97009797
+.long 0x00fbfbfb,0xfe00fefe
+.long 0x00696969,0x5a005a5a
+.long 0x00b2b2b2,0xac00acac
+.long 0x00f0f0f0,0x3c003c3c
+.long 0x00313131,0x4c004c4c
+.long 0x000c0c0c,0x03000303
+.long 0x00d4d4d4,0x35003535
+.long 0x00cfcfcf,0xf300f3f3
+.long 0x008c8c8c,0x23002323
+.long 0x00e2e2e2,0xb800b8b8
+.long 0x00757575,0x5d005d5d
+.long 0x00a9a9a9,0x6a006a6a
+.long 0x004a4a4a,0x92009292
+.long 0x00575757,0xd500d5d5
+.long 0x00848484,0x21002121
+.long 0x00111111,0x44004444
+.long 0x00454545,0x51005151
+.long 0x001b1b1b,0xc600c6c6
+.long 0x00f5f5f5,0x7d007d7d
+.long 0x00e4e4e4,0x39003939
+.long 0x000e0e0e,0x83008383
+.long 0x00737373,0xdc00dcdc
+.long 0x00aaaaaa,0xaa00aaaa
+.long 0x00f1f1f1,0x7c007c7c
+.long 0x00dddddd,0x77007777
+.long 0x00595959,0x56005656
+.long 0x00141414,0x05000505
+.long 0x006c6c6c,0x1b001b1b
+.long 0x00929292,0xa400a4a4
+.long 0x00545454,0x15001515
+.long 0x00d0d0d0,0x34003434
+.long 0x00787878,0x1e001e1e
+.long 0x00707070,0x1c001c1c
+.long 0x00e3e3e3,0xf800f8f8
+.long 0x00494949,0x52005252
+.long 0x00808080,0x20002020
+.long 0x00505050,0x14001414
+.long 0x00a7a7a7,0xe900e9e9
+.long 0x00f6f6f6,0xbd00bdbd
+.long 0x00777777,0xdd00dddd
+.long 0x00939393,0xe400e4e4
+.long 0x00868686,0xa100a1a1
+.long 0x00838383,0xe000e0e0
+.long 0x002a2a2a,0x8a008a8a
+.long 0x00c7c7c7,0xf100f1f1
+.long 0x005b5b5b,0xd600d6d6
+.long 0x00e9e9e9,0x7a007a7a
+.long 0x00eeeeee,0xbb00bbbb
+.long 0x008f8f8f,0xe300e3e3
+.long 0x00010101,0x40004040
+.long 0x003d3d3d,0x4f004f4f
+.globl Camellia_cbc_encrypt
+.type Camellia_cbc_encrypt,@function
+.align 16
+Camellia_cbc_encrypt:
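+# Camellia_cbc_encrypt(in, out, len, key, ivec, enc), SysV order
+# %rdi, %rsi, %rdx, %rcx, %r8, %r9d.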
+ cmpq $0,%rdx
+ je .Lcbc_abort
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+.Lcbc_prologue:
+
+ movq %rsp,%rbp
+ subq $64,%rsp
+ andq $-64,%rsp
+
+
+
+ leaq -64-63(%rcx),%r10
+ subq %rsp,%r10
+ negq %r10
+ andq $960,%r10
+ subq %r10,%rsp
+
+
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %r8,%rbx
+ movq %rcx,%r14
+ movl 272(%rcx),%r15d
+
+ movq %r8,40(%rsp)
+ movq %rbp,48(%rsp)
+
+.Lcbc_body:
+ leaq .LCamellia_SBOX(%rip),%rbp
+
+ movl $32,%ecx
+.align 4
+.Lcbc_prefetch_sbox:
+ movq 0(%rbp),%rax
+ movq 32(%rbp),%rsi
+ movq 64(%rbp),%rdi
+ movq 96(%rbp),%r11
+ leaq 128(%rbp),%rbp
+ loop .Lcbc_prefetch_sbox
+ subq $4096,%rbp
+ shlq $6,%r15
+ movq %rdx,%rcx
+ leaq (%r14,%r15,1),%r15
+
+ cmpl $0,%r9d
+ je .LCBC_DECRYPT
+
+ andq $-16,%rdx
+ andq $15,%rcx
+ leaq (%r12,%rdx,1),%rdx
+ movq %r14,0(%rsp)
+ movq %rdx,8(%rsp)
+ movq %rcx,16(%rsp)
+
+ cmpq %r12,%rdx
+ movl 0(%rbx),%r8d
+ movl 4(%rbx),%r9d
+ movl 8(%rbx),%r10d
+ movl 12(%rbx),%r11d
+ je .Lcbc_enc_tail
+ jmp .Lcbc_eloop
+
+.align 16
+.Lcbc_eloop:
+ xorl 0(%r12),%r8d
+ xorl 4(%r12),%r9d
+ xorl 8(%r12),%r10d
+ bswapl %r8d
+ xorl 12(%r12),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_encrypt
+
+ movq 0(%rsp),%r14
+ bswapl %r8d
+ movq 8(%rsp),%rdx
+ bswapl %r9d
+ movq 16(%rsp),%rcx
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ leaq 16(%r12),%r12
+ movl %r11d,12(%r13)
+ cmpq %rdx,%r12
+ leaq 16(%r13),%r13
+ jne .Lcbc_eloop
+
+ cmpq $0,%rcx
+ jne .Lcbc_enc_tail
+
+ movq 40(%rsp),%r13
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+ jmp .Lcbc_done
+
+.align 16
+.Lcbc_enc_tail:
+ xorq %rax,%rax
+ movq %rax,0+24(%rsp)
+ movq %rax,8+24(%rsp)
+ movq %rax,16(%rsp)
+
+.Lcbc_enc_pushf:
+ pushfq
+ cld
+ movq %r12,%rsi
+ leaq 8+24(%rsp),%rdi
+.long 0x9066A4F3
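+# The .long above encodes "rep movsb" plus a two-byte nop: copy the
+# %rcx-byte partial tail into the zero-padded block on the stack.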
+
+ popfq
+.Lcbc_enc_popf:
+
+ leaq 24(%rsp),%r12
+ leaq 16+24(%rsp),%rax
+ movq %rax,8(%rsp)
+ jmp .Lcbc_eloop
+
+
+.align 16
+.LCBC_DECRYPT:
+ xchgq %r14,%r15
+ addq $15,%rdx
+ andq $15,%rcx
+ andq $-16,%rdx
+ movq %r14,0(%rsp)
+ leaq (%r12,%rdx,1),%rdx
+ movq %rdx,8(%rsp)
+ movq %rcx,16(%rsp)
+
+ movq (%rbx),%rax
+ movq 8(%rbx),%rbx
+ jmp .Lcbc_dloop
+.align 16
+.Lcbc_dloop:
+ movl 0(%r12),%r8d
+ movl 4(%r12),%r9d
+ movl 8(%r12),%r10d
+ bswapl %r8d
+ movl 12(%r12),%r11d
+ bswapl %r9d
+ movq %rax,0+24(%rsp)
+ bswapl %r10d
+ movq %rbx,8+24(%rsp)
+ bswapl %r11d
+
+ call _x86_64_Camellia_decrypt
+
+ movq 0(%rsp),%r14
+ movq 8(%rsp),%rdx
+ movq 16(%rsp),%rcx
+
+ bswapl %r8d
+ movq (%r12),%rax
+ bswapl %r9d
+ movq 8(%r12),%rbx
+ bswapl %r10d
+ xorl 0+24(%rsp),%r8d
+ bswapl %r11d
+ xorl 4+24(%rsp),%r9d
+ xorl 8+24(%rsp),%r10d
+ leaq 16(%r12),%r12
+ xorl 12+24(%rsp),%r11d
+ cmpq %rdx,%r12
+ je .Lcbc_ddone
+
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ leaq 16(%r13),%r13
+ jmp .Lcbc_dloop
+
+.align 16
+.Lcbc_ddone:
+ movq 40(%rsp),%rdx
+ cmpq $0,%rcx
+ jne .Lcbc_dec_tail
+
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq %rax,(%rdx)
+ movq %rbx,8(%rdx)
+ jmp .Lcbc_done
+.align 16
+.Lcbc_dec_tail:
+ movl %r8d,0+24(%rsp)
+ movl %r9d,4+24(%rsp)
+ movl %r10d,8+24(%rsp)
+ movl %r11d,12+24(%rsp)
+
+.Lcbc_dec_pushf:
+ pushfq
+ cld
+ leaq 8+24(%rsp),%rsi
+ leaq (%r13),%rdi
+.long 0x9066A4F3
+
+ popfq
+.Lcbc_dec_popf:
+
+ movq %rax,(%rdx)
+ movq %rbx,8(%rdx)
+ jmp .Lcbc_done
+
+.align 16
+.Lcbc_done:
+ movq 48(%rsp),%rcx
+ movq 0(%rcx),%r15
+ movq 8(%rcx),%r14
+ movq 16(%rcx),%r13
+ movq 24(%rcx),%r12
+ movq 32(%rcx),%rbp
+ movq 40(%rcx),%rbx
+ leaq 48(%rcx),%rsp
+.Lcbc_abort:
+ .byte 0xf3,0xc3
+.size Camellia_cbc_encrypt,.-Camellia_cbc_encrypt
+
+.byte 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54,95,54,52,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
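
Nearly every shlq/shrq/orq run in Camellia_Ekeygen above is one primitive: a 128-bit rotate-left of a value split across two 64-bit registers (by 15, 17, 30, 32, 34, 45 or 51 bits), which is how the key schedule's rotations of KL/KR/KA/KB by 15, 30, 45, ... are built up. A minimal C sketch (names are illustrative):

    #include <stdint.h>

    /* Rotate the 128-bit value hi:lo left by n bits, 0 < n < 64 --
     * the shlq n / shrq (64-n) / orq pattern used throughout the
     * key schedule above. */
    static void rotl128(uint64_t *hi, uint64_t *lo, unsigned n)
    {
        uint64_t h = *hi, l = *lo;
        *hi = (h << n) | (l >> (64 - n));
        *lo = (l << n) | (h >> (64 - n));
    }
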
diff --git a/deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s b/deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s
new file mode 100644
index 0000000000..81b0c7a117
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/md5/md5-x86_64.s
@@ -0,0 +1,671 @@
+.text
+
+.align 16
+
+.globl md5_block_asm_data_order
+.type md5_block_asm_data_order,@function
+md5_block_asm_data_order:
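+# md5_block_asm_data_order(ctx, buf, nblocks): %rdi = state (A,B,C,D),
+# %rsi = data, %rdx = number of 64-byte blocks.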
+ pushq %rbp
+ pushq %rbx
+ pushq %r12
+ pushq %r14
+ pushq %r15
+.Lprologue:
+
+
+
+
+ movq %rdi,%rbp
+ shlq $6,%rdx
+ leaq (%rsi,%rdx,1),%rdi
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+
+
+
+
+
+
+
+ cmpq %rdi,%rsi
+ je .Lend
+
+
+
+.Lloop:
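+# Snapshot A..D for the end-of-block additions, then rounds 1-16:
+# F(b,c,d) = ((c^d)&b)^d, per-step rotations 7,12,17,22.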
+ movl %eax,%r8d
+ movl %ebx,%r9d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+ movl 0(%rsi),%r10d
+ movl %edx,%r11d
+ xorl %ecx,%r11d
+ leal -680876936(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 4(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -389564586(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 8(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal 606105819(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 12(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -1044525330(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 16(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal -176418897(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 20(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal 1200080426(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 24(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -1473231341(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 28(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -45705983(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 32(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal 1770035416(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 36(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -1958414417(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 40(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -42063(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 44(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -1990404162(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 48(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal 1804603682(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 52(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -40341101(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 56(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -1502002290(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 60(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal 1236535329(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 0(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
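+# Rounds 17-32: G(b,c,d) = (b&d)|(c&~d), rotations 5,9,14,20.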
+ movl 4(%rsi),%r10d
+ movl %edx,%r11d
+ movl %edx,%r12d
+ notl %r11d
+ leal -165796510(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 24(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -1069501632(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 44(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal 643717713(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 0(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -373897302(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 20(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal -701558691(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 40(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal 38016083(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 60(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal -660478335(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 16(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -405537848(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 36(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal 568446438(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 56(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -1019803690(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 12(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal -187363961(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 32(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal 1163531501(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 52(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal -1444681467(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 8(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -51403784(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 28(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal 1735328473(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 48(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -1926607734(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 0(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
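+# Rounds 33-48: H(b,c,d) = b^c^d, rotations 4,11,16,23.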
+ movl 20(%rsi),%r10d
+ movl %ecx,%r11d
+ leal -378558(%rax,%r10,1),%eax
+ movl 32(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -2022574463(%rdx,%r10,1),%edx
+ movl 44(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal 1839030562(%rcx,%r10,1),%ecx
+ movl 56(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -35309556(%rbx,%r10,1),%ebx
+ movl 4(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal -1530992060(%rax,%r10,1),%eax
+ movl 16(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal 1272893353(%rdx,%r10,1),%edx
+ movl 28(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal -155497632(%rcx,%r10,1),%ecx
+ movl 40(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -1094730640(%rbx,%r10,1),%ebx
+ movl 52(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal 681279174(%rax,%r10,1),%eax
+ movl 0(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -358537222(%rdx,%r10,1),%edx
+ movl 12(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal -722521979(%rcx,%r10,1),%ecx
+ movl 24(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal 76029189(%rbx,%r10,1),%ebx
+ movl 36(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal -640364487(%rax,%r10,1),%eax
+ movl 48(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -421815835(%rdx,%r10,1),%edx
+ movl 60(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal 530742520(%rcx,%r10,1),%ecx
+ movl 8(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -995338651(%rbx,%r10,1),%ebx
+ movl 0(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ movl 0(%rsi),%r10d
+ movl $4294967295,%r11d
+ xorl %edx,%r11d
+ leal -198630844(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 28(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal 1126891415(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 56(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1416354905(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 20(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -57434055(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 48(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal 1700485571(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 12(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -1894986606(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 40(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1051523(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 4(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -2054922799(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 32(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal 1873313359(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 60(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -30611744(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 24(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1560198380(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 52(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal 1309151649(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 16(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal -145523070(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 44(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -1120210379(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 8(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal 718787259(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 36(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -343485551(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 0(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+
+ addl %r8d,%eax
+ addl %r9d,%ebx
+ addl %r14d,%ecx
+ addl %r15d,%edx
+
+
+ addq $64,%rsi
+ cmpq %rdi,%rsi
+ jb .Lloop
+
+
+
+.Lend:
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ movq (%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r12
+ movq 24(%rsp),%rbx
+ movq 32(%rsp),%rbp
+ addq $40,%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size md5_block_asm_data_order,.-md5_block_asm_data_order
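
The unrolled block above is the tail of md5_block_asm_data_order: the G, H and I rounds of MD5 plus the final chaining addition (the addl %r8d,%eax ... addl %r15d,%edx group folds the block result back into the saved state). The signed leal displacements are RFC 1321's T[i] constants read as two's-complement 32-bit values (for example -165796510 == 0xf61e2562 == T[17]), and %r10d always holds the next preloaded message word X[k]. A minimal C sketch of the step the assembly unrolls; ROTL32 and md5_step are illustrative names, not part of this file:

#include <stdint.h>

#define ROTL32(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

/* The four MD5 auxiliary functions from RFC 1321.  In the assembly, the
   notl/andl/orl chains compute G, the xor chains compute H, and the
   movl $4294967295 / orl / xorl pattern computes I. */
static uint32_t F(uint32_t x, uint32_t y, uint32_t z) { return (x & y) | (~x & z); }
static uint32_t G(uint32_t x, uint32_t y, uint32_t z) { return (x & z) | (y & ~z); }
static uint32_t H(uint32_t x, uint32_t y, uint32_t z) { return x ^ y ^ z; }
static uint32_t I(uint32_t x, uint32_t y, uint32_t z) { return y ^ (x | ~z); }

/* One MD5 step: a = b + ROTL(a + f(b,c,d) + X[k] + T[i], s).  The
   assembly folds a + X[k] + T[i] into a single leal. */
static uint32_t md5_step(uint32_t (*f)(uint32_t, uint32_t, uint32_t),
                         uint32_t a, uint32_t b, uint32_t c, uint32_t d,
                         uint32_t xk, uint32_t ti, int s)
{
    return b + ROTL32(a + f(b, c, d) + xk + ti, s);
}

The per-group rotation counts visible in the roll instructions match the specification: 5,9,14,20 for G, 4,11,16,23 for H and 6,10,15,21 for I.
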
diff --git a/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s b/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
new file mode 100644
index 0000000000..1bafefeb02
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
@@ -0,0 +1,430 @@
+.text
+
+
+.globl RC4
+.type RC4,@function
+.align 16
+RC4: orq %rsi,%rsi
+ jne .Lentry
+ .byte 0xf3,0xc3
+.Lentry:
+ pushq %rbx
+ pushq %r12
+ pushq %r13
+.Lprologue:
+
+ addq $8,%rdi
+ movl -8(%rdi),%r8d
+ movl -4(%rdi),%r12d
+ cmpl $-1,256(%rdi)
+ je .LRC4_CHAR
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ testq $-8,%rsi
+ jz .Lloop1
+ jmp .Lloop8
+.align 16
+.Lloop8:
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ rorq $8,%rax
+ subq $8,%rsi
+
+ xorq (%rdx),%rax
+ addq $8,%rdx
+ movq %rax,(%rcx)
+ addq $8,%rcx
+
+ testq $-8,%rsi
+ jnz .Lloop8
+ cmpq $0,%rsi
+ jne .Lloop1
+ jmp .Lexit
+
+.align 16
+.Lloop1:
+ addb %r9b,%r12b
+ movl (%rdi,%r12,4),%r13d
+ movl %r9d,(%rdi,%r12,4)
+ movl %r13d,(%rdi,%r8,4)
+ addb %r13b,%r9b
+ incb %r8b
+ movl (%rdi,%r9,4),%r13d
+ movl (%rdi,%r8,4),%r9d
+ xorb (%rdx),%r13b
+ incq %rdx
+ movb %r13b,(%rcx)
+ incq %rcx
+ decq %rsi
+ jnz .Lloop1
+ jmp .Lexit
+
+.align 16
+.LRC4_CHAR:
+ addb $1,%r8b
+ movzbl (%rdi,%r8,1),%r9d
+ testq $-8,%rsi
+ jz .Lcloop1
+ cmpl $0,260(%rdi)
+ jnz .Lcloop1
+ jmp .Lcloop8
+.align 16
+.Lcloop8:
+ movl (%rdx),%eax
+ movl 4(%rdx),%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne .Lcmov0
+
+ movq %r9,%r11
+.Lcmov0:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne .Lcmov1
+
+ movq %r11,%r9
+.Lcmov1:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne .Lcmov2
+
+ movq %r9,%r11
+.Lcmov2:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne .Lcmov3
+
+ movq %r11,%r9
+.Lcmov3:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne .Lcmov4
+
+ movq %r9,%r11
+.Lcmov4:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne .Lcmov5
+
+ movq %r11,%r9
+.Lcmov5:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne .Lcmov6
+
+ movq %r9,%r11
+.Lcmov6:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne .Lcmov7
+
+ movq %r11,%r9
+.Lcmov7:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ leaq -8(%rsi),%rsi
+ movl %eax,(%rcx)
+ leaq 8(%rdx),%rdx
+ movl %ebx,4(%rcx)
+ leaq 8(%rcx),%rcx
+
+ testq $-8,%rsi
+ jnz .Lcloop8
+ cmpq $0,%rsi
+ jne .Lcloop1
+ jmp .Lexit
+.align 16
+.Lcloop1:
+ addb %r9b,%r12b
+ movzbl (%rdi,%r12,1),%r13d
+ movb %r9b,(%rdi,%r12,1)
+ movb %r13b,(%rdi,%r8,1)
+ addb %r9b,%r13b
+ addb $1,%r8b
+ movzbl %r13b,%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r13,1),%r13d
+ movzbl (%rdi,%r8,1),%r9d
+ xorb (%rdx),%r13b
+ leaq 1(%rdx),%rdx
+ movb %r13b,(%rcx)
+ leaq 1(%rcx),%rcx
+ subq $1,%rsi
+ jnz .Lcloop1
+ jmp .Lexit
+
+.align 16
+.Lexit:
+ subb $1,%r8b
+ movl %r8d,-8(%rdi)
+ movl %r12d,-4(%rdi)
+
+ movq (%rsp),%r13
+ movq 8(%rsp),%r12
+ movq 16(%rsp),%rbx
+ addq $24,%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size RC4,.-RC4
+
+.globl RC4_set_key
+.type RC4_set_key,@function
+.align 16
+RC4_set_key:
+ leaq 8(%rdi),%rdi
+ leaq (%rdx,%rsi,1),%rdx
+ negq %rsi
+ movq %rsi,%rcx
+ xorl %eax,%eax
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+
+ movl OPENSSL_ia32cap_P(%rip),%r8d
+ btl $20,%r8d
+ jnc .Lw1stloop
+ btl $30,%r8d
+ setc %r9b
+ movl %r9d,260(%rdi)
+ jmp .Lc1stloop
+
+.align 16
+.Lw1stloop:
+ movl %eax,(%rdi,%rax,4)
+ addb $1,%al
+ jnc .Lw1stloop
+
+ xorq %r9,%r9
+ xorq %r8,%r8
+.align 16
+.Lw2ndloop:
+ movl (%rdi,%r9,4),%r10d
+ addb (%rdx,%rsi,1),%r8b
+ addb %r10b,%r8b
+ addq $1,%rsi
+ movl (%rdi,%r8,4),%r11d
+ cmovzq %rcx,%rsi
+ movl %r10d,(%rdi,%r8,4)
+ movl %r11d,(%rdi,%r9,4)
+ addb $1,%r9b
+ jnc .Lw2ndloop
+ jmp .Lexit_key
+
+.align 16
+.Lc1stloop:
+ movb %al,(%rdi,%rax,1)
+ addb $1,%al
+ jnc .Lc1stloop
+
+ xorq %r9,%r9
+ xorq %r8,%r8
+.align 16
+.Lc2ndloop:
+ movb (%rdi,%r9,1),%r10b
+ addb (%rdx,%rsi,1),%r8b
+ addb %r10b,%r8b
+ addq $1,%rsi
+ movb (%rdi,%r8,1),%r11b
+ jnz .Lcnowrap
+ movq %rcx,%rsi
+.Lcnowrap:
+ movb %r10b,(%rdi,%r8,1)
+ movb %r11b,(%rdi,%r9,1)
+ addb $1,%r9b
+ jnc .Lc2ndloop
+ movl $-1,256(%rdi)
+
+.align 16
+.Lexit_key:
+ xorl %eax,%eax
+ movl %eax,-8(%rdi)
+ movl %eax,-4(%rdi)
+ .byte 0xf3,0xc3
+.size RC4_set_key,.-RC4_set_key
+
+.globl RC4_options
+.type RC4_options,@function
+.align 16
+RC4_options:
+ leaq .Lopts(%rip),%rax
+ movl OPENSSL_ia32cap_P(%rip),%edx
+ btl $20,%edx
+ jnc .Ldone
+ addq $12,%rax
+ btl $30,%edx
+ jnc .Ldone
+ addq $13,%rax
+.Ldone:
+ .byte 0xf3,0xc3
+.align 64
+.Lopts:
+.byte 114,99,52,40,56,120,44,105,110,116,41,0
+.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
+.size RC4_options,.-RC4_options
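
RC4 above keeps the S-box either as 256 32-bit words (the .Lloop8/.Lloop1 paths) or as 256 bytes (the .LRC4_CHAR/.Lcloop paths); RC4_set_key picks the layout from OPENSSL_ia32cap_P and records the choice in the 256(%rdi) and 260(%rdi) slots that RC4 tests on entry. Stripped of the 8x unrolling, .Lloop1 and the two RC4_set_key loops are the textbook RC4 PRGA and key schedule. A minimal C sketch, assuming illustrative names (rc4_state_t, rc4_set_key_ref, rc4_ref are not OpenSSL's API):

#include <stddef.h>
#include <stdint.h>

typedef struct { uint8_t x, y, S[256]; } rc4_state_t;

/* Key schedule, as in .Lw1stloop/.Lw2ndloop: identity fill, then one
   key-driven swap per position, with the key cycled modulo its length. */
static void rc4_set_key_ref(rc4_state_t *st, size_t len, const uint8_t *key)
{
    unsigned i, j = 0;
    for (i = 0; i < 256; i++)
        st->S[i] = (uint8_t)i;
    for (i = 0; i < 256; i++) {
        j = (j + st->S[i] + key[i % len]) & 0xff;
        uint8_t t = st->S[i]; st->S[i] = st->S[j]; st->S[j] = t;
    }
    st->x = st->y = 0;               /* .Lexit_key zeroes both counters */
}

/* One output byte per iteration, as in .Lloop1: advance x, accumulate y,
   swap S[x]/S[y], then emit S[S[x] + S[y]] xored with the input. */
static void rc4_ref(rc4_state_t *st, size_t len, const uint8_t *in, uint8_t *out)
{
    uint8_t x = st->x, y = st->y;
    while (len--) {
        x = (uint8_t)(x + 1);
        y = (uint8_t)(y + st->S[x]);
        uint8_t t = st->S[x];
        st->S[x] = st->S[y];
        st->S[y] = t;
        *out++ = *in++ ^ st->S[(uint8_t)(st->S[x] + st->S[y])];
    }
    st->x = x; st->y = y;
}

The register use matches this argument order: %rdi is the key structure (advanced past the x/y header by addq $8,%rdi), %rsi the length, %rdx the input and %rcx the output.
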
diff --git a/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s b/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
new file mode 100644
index 0000000000..208c2cdd26
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
@@ -0,0 +1,1283 @@
+.text
+
+.globl sha1_block_data_order
+.type sha1_block_data_order,@function
+.align 16
+sha1_block_data_order:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ movq %rsp,%r11
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %r11,64(%rsp)
+.Lprologue:
+
+ movl 0(%r8),%edx
+ movl 4(%r8),%esi
+ movl 8(%r8),%edi
+ movl 12(%r8),%ebp
+ movl 16(%r8),%r11d
+.align 4
+.Lloop:
+ movl 0(%r9),%eax
+ bswapl %eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 4(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,4(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 8(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,8(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 12(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,12(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 16(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,16(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl %r11d,%ebx
+ movl 20(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,20(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 24(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
+ andl %edi,%ebx
+ movl %eax,24(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 28(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,28(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 32(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,32(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 36(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,36(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 40(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,40(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl %r11d,%ebx
+ movl 44(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,44(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 48(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
+ andl %edi,%ebx
+ movl %eax,48(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 52(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,52(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 56(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,56(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 60(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,60(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ andl %r11d,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ andl %ebp,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1518500249(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ andl %edi,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ andl %esi,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ andl %edx,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 0(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 52(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 4(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 36(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 56(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 8(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 40(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 60(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 12(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 44(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 0(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 16(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 48(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 4(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 20(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 52(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 8(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 24(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 56(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 12(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 28(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 16(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 32(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 20(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 36(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 44(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 4(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 24(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,36(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 40(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 48(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 8(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 28(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,40(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 44(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 52(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 12(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 32(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,44(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 48(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 56(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 16(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 36(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,48(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 52(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 60(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 20(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 40(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,52(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 56(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 0(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 24(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 44(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,56(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 60(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 4(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 28(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 48(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,60(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 0(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 8(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 32(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 52(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,0(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 4(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 12(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 36(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 56(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,4(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 8(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 16(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 40(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 60(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,8(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 12(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 20(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 44(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 0(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,12(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 16(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 24(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 48(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 4(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,16(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 20(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 28(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 52(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 8(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,20(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 24(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 32(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 56(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 12(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,24(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 28(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 36(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 60(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 16(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,28(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 32(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 40(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 0(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 20(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,32(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 36(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 44(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 4(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 24(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,36(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 40(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 48(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 8(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 28(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,40(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 44(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 52(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 12(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 32(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,44(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 48(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 56(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 16(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 36(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,48(%rsp)
+ addl %ebx,%edx
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 52(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 40(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 56(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 44(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 60(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 4(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 48(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ leal -899497514(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ roll $30,%edx
+ addl %ebx,%r11d
+ addl 0(%r8),%r11d
+ addl 4(%r8),%r12d
+ addl 8(%r8),%edx
+ addl 12(%r8),%esi
+ addl 16(%r8),%edi
+ movl %r11d,0(%r8)
+ movl %r12d,4(%r8)
+ movl %edx,8(%r8)
+ movl %esi,12(%r8)
+ movl %edi,16(%r8)
+
+ xchgl %r11d,%edx
+ xchgl %r12d,%esi
+ xchgl %r11d,%edi
+ xchgl %r12d,%ebp
+
+ leaq 64(%r9),%r9
+ subq $1,%r10
+ jnz .Lloop
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r12
+ movq 8(%rsi),%rbp
+ movq 16(%rsi),%rbx
+ leaq 24(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size sha1_block_data_order,.-sha1_block_data_order
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 16
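
sha1_block_data_order above is the 80-round compression fully unrolled: rounds 0-19 use Ch (the andl/xorl pattern), 20-39 and 60-79 use Parity (plain xors), and 40-59 use Maj in the equivalent (b&c)|((b|c)&d) form. The leal displacements are the four K constants as signed 32-bit values (1518500249 == 0x5a827999, -1894007588 == 0x8f1bbcdc, -899497514 == 0xca62c1d6), and the 64 bytes at (%rsp) hold the message schedule as a 16-word circular buffer, which is why each schedule update xors the words at indices t, t+2, t+8 and t+13 mod 16. A compact C sketch of one round for t >= 16 (ROTL32, sha1_f and sha1_round are illustrative names):

#include <stdint.h>

#define ROTL32(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

static const uint32_t K[4] = {
    0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6
};

static uint32_t sha1_f(int t, uint32_t b, uint32_t c, uint32_t d)
{
    if (t < 20) return (b & c) | (~b & d);                  /* Ch     */
    if (t >= 40 && t < 60) return (b & c) | ((b | c) & d);  /* Maj    */
    return b ^ c ^ d;                                       /* Parity */
}

/* One round for t >= 16: update the circular schedule w[16] in place,
   then rotate the five-word state s[0..4] = a,b,c,d,e. */
static void sha1_round(int t, uint32_t s[5], uint32_t w[16])
{
    uint32_t x = w[t & 15] ^ w[(t + 2) & 15] ^ w[(t + 8) & 15] ^ w[(t + 13) & 15];
    w[t & 15] = x = ROTL32(x, 1);
    uint32_t tmp = ROTL32(s[0], 5) + sha1_f(t, s[1], s[2], s[3]) + s[4] + x + K[t / 20];
    s[4] = s[3]; s[3] = s[2]; s[2] = ROTL32(s[1], 30); s[1] = s[0]; s[0] = tmp;
}
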
diff --git a/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s b/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
new file mode 100644
index 0000000000..ddf7b907a9
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
@@ -0,0 +1,1971 @@
+.text
+
+
+.globl sha256_block_data_order
+.type sha256_block_data_order,@function
+.align 16
+sha256_block_data_order:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+.Lprologue:
+
+ leaq K256(%rip),%rbp
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp .Lloop
+
+.align 16
+.Lloop:
+ xorq %rdi,%rdi
+ movl 0(%rsi),%r12d
+ bswapl %r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,0(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ bswapl %r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,4(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ bswapl %r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,8(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ bswapl %r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,12(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ bswapl %r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,16(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ bswapl %r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,20(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ bswapl %r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,24(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ bswapl %r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,28(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ bswapl %r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,32(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ bswapl %r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,36(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ bswapl %r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,40(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ bswapl %r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,44(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ bswapl %r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,48(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ bswapl %r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,52(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ bswapl %r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,56(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ bswapl %r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,60(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ jmp .Lrounds_16_xx
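
The straight-line code above consumed the first 16 message words; the .Lrounds_16_xx loop below runs the same round body with the schedule expansion bolted on. The rotate/shift chains are the FIPS 180 SHA-256 functions: rorl $6, rorl $11 and a further rorl $14 give Sigma1 (rotations 6, 11, 25); rorl $2, $13 plus $9 more give Sigma0 (2, 13, 22); and the shrl $3 with rorl $7 then $11 more (net rotations 7 and 18) and shrl $10 with rorl $17 then $2 more (net 17 and 19) at the top of each expanded round are the schedule sigmas. A minimal C sketch (ROTR32 and next_w are illustrative names; w[16] mirrors the 64 stack bytes at (%rsp)):

#include <stdint.h>

#define ROTR32(x, n) (((x) >> (n)) | ((x) << (32 - (n))))

static uint32_t Sigma0(uint32_t x) { return ROTR32(x, 2) ^ ROTR32(x, 13) ^ ROTR32(x, 22); }
static uint32_t Sigma1(uint32_t x) { return ROTR32(x, 6) ^ ROTR32(x, 11) ^ ROTR32(x, 25); }
static uint32_t sigma0(uint32_t x) { return ROTR32(x, 7) ^ ROTR32(x, 18) ^ (x >> 3); }
static uint32_t sigma1(uint32_t x) { return ROTR32(x, 17) ^ ROTR32(x, 19) ^ (x >> 10); }

/* Ch and Maj exactly as the round body computes them: Ch via the
   xorl/andl/xorl chain, Maj via the orl/andl/orl chain. */
static uint32_t Ch(uint32_t e, uint32_t f, uint32_t g)  { return g ^ (e & (f ^ g)); }
static uint32_t Maj(uint32_t a, uint32_t b, uint32_t c) { return (a & c) | ((a | c) & b); }

/* Schedule expansion as at the top of .Lrounds_16_xx; w[] is the
   16-word circular buffer the assembly keeps on the stack. */
static uint32_t next_w(uint32_t w[16], int t)
{
    w[t & 15] += sigma1(w[(t - 2) & 15]) + w[(t - 7) & 15]
               + sigma0(w[(t - 15) & 15]);
    return w[t & 15];
}
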
+.align 16
+.Lrounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,0(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,4(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,8(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,12(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,16(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,20(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,24(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,28(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,32(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,36(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,40(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,44(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,48(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,52(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,56(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,60(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
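+# %rdi counts rounds; loop back until all 64 are done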
+ cmpq $64,%rdi
+ jb .Lrounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ leaq 64(%rsi),%rsi
+
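+# feed-forward: add the working variables into the hash state H[0..7]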
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
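+# the comparison is hoisted above the stores (movl leaves the flags
+# intact); the jb below consumes it to loop over remaining input blocks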
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb .Lloop
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
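+# 0xf3,0xc3 encodes "rep ret" (branch-predictor-friendly return)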
+.size sha256_block_data_order,.-sha256_block_data_order
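+# K256: the 64 SHA-256 round constants (first 32 bits of the fractional
+# parts of the cube roots of the first 64 primes)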
+.align 64
+.type K256,@object
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
diff --git a/deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s b/deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s
new file mode 100644
index 0000000000..6c2656c9a1
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/whrlpool/wp-x86_64.s
@@ -0,0 +1,859 @@
+.text
+
+
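+# whirlpool_block(ctx, inp, n): Whirlpool compression function over n
+# 64-byte blocks at inp, updating the 64-byte hash state at ctx
+# (SysV arguments: %rdi = ctx, %rsi = inp, %rdx = n)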
+.globl whirlpool_block
+.type whirlpool_block,@function
+.align 16
+whirlpool_block:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+ movq %rsp,%r11
+ subq $128+40,%rsp
+ andq $-64,%rsp
+
+ leaq 128(%rsp),%r10
+ movq %rdi,0(%r10)
+ movq %rsi,8(%r10)
+ movq %rdx,16(%r10)
+ movq %r11,32(%r10)
+.Lprologue:
+
+ movq %r10,%rbx
+ leaq .Ltable(%rip),%rbp
+
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ movq 0(%rdi),%r8
+ movq 8(%rdi),%r9
+ movq 16(%rdi),%r10
+ movq 24(%rdi),%r11
+ movq 32(%rdi),%r12
+ movq 40(%rdi),%r13
+ movq 48(%rdi),%r14
+ movq 56(%rdi),%r15
+.Louterloop:
+ movq %r8,0(%rsp)
+ movq %r9,8(%rsp)
+ movq %r10,16(%rsp)
+ movq %r11,24(%rsp)
+ movq %r12,32(%rsp)
+ movq %r13,40(%rsp)
+ movq %r14,48(%rsp)
+ movq %r15,56(%rsp)
+ xorq 0(%rsi),%r8
+ xorq 8(%rsi),%r9
+ xorq 16(%rsi),%r10
+ xorq 24(%rsi),%r11
+ xorq 32(%rsi),%r12
+ xorq 40(%rsi),%r13
+ xorq 48(%rsi),%r14
+ xorq 56(%rsi),%r15
+ movq %r8,64+0(%rsp)
+ movq %r9,64+8(%rsp)
+ movq %r10,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+ movq %r12,64+32(%rsp)
+ movq %r13,64+40(%rsp)
+ movq %r14,64+48(%rsp)
+ movq %r15,64+56(%rsp)
+ xorq %rsi,%rsi
+ movq %rsi,24(%rbx)
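+# ten rounds per block; 24(%rbx) holds the round counter %rsi, and the
+# round constant is loaded from 4096(%rbp,%rsi,8)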
+.align 16
+.Lround:
+ movq 4096(%rbp,%rsi,8),%r8
+ movl 0(%rsp),%eax
+ movl 4(%rsp),%ebx
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
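+# byte indices are doubled: each .Ltable row is 16 bytes (the 8-byte entry
+# stored twice), so offsets 0..7 yield the eight rotated views of it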
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r8
+ movq 7(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ movl 0+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ movq 6(%rbp,%rsi,8),%r10
+ movq 5(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ movq 4(%rbp,%rsi,8),%r12
+ movq 3(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 0+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ movq 2(%rbp,%rsi,8),%r14
+ movq 1(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r9
+ xorq 7(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ movl 8+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r11
+ xorq 5(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r13
+ xorq 3(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 8+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r15
+ xorq 1(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r10
+ xorq 7(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ movl 16+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r12
+ xorq 5(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r14
+ xorq 3(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 16+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r8
+ xorq 1(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r11
+ xorq 7(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ movl 24+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r13
+ xorq 5(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r15
+ xorq 3(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 24+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r9
+ xorq 1(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r12
+ xorq 7(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ movl 32+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r14
+ xorq 5(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r8
+ xorq 3(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 32+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r10
+ xorq 1(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r13
+ xorq 7(%rbp,%rdi,8),%r14
+ movb %al,%cl
+ movb %ah,%dl
+ movl 40+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r15
+ xorq 5(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r9
+ xorq 3(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 40+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r11
+ xorq 1(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r14
+ xorq 7(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ movl 48+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r8
+ xorq 5(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r10
+ xorq 3(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 48+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r12
+ xorq 1(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r15
+ xorq 7(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ movl 56+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r9
+ xorq 5(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r11
+ xorq 3(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 56+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r13
+ xorq 1(%rbp,%rdi,8),%r14
+ movq %r8,0(%rsp)
+ movq %r9,8(%rsp)
+ movq %r10,16(%rsp)
+ movq %r11,24(%rsp)
+ movq %r12,32(%rsp)
+ movq %r13,40(%rsp)
+ movq %r14,48(%rsp)
+ movq %r15,56(%rsp)
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r8
+ xorq 7(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+0+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r10
+ xorq 5(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r12
+ xorq 3(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+0+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r14
+ xorq 1(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r9
+ xorq 7(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+8+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r11
+ xorq 5(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r13
+ xorq 3(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+8+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r15
+ xorq 1(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r10
+ xorq 7(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+16+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r12
+ xorq 5(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r14
+ xorq 3(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+16+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r8
+ xorq 1(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r11
+ xorq 7(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+24+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r13
+ xorq 5(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r15
+ xorq 3(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+24+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r9
+ xorq 1(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r12
+ xorq 7(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+32+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r14
+ xorq 5(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r8
+ xorq 3(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+32+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r10
+ xorq 1(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r13
+ xorq 7(%rbp,%rdi,8),%r14
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+40+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r15
+ xorq 5(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r9
+ xorq 3(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+40+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r11
+ xorq 1(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r14
+ xorq 7(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+48+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r8
+ xorq 5(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r10
+ xorq 3(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+48+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r12
+ xorq 1(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r15
+ xorq 7(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r9
+ xorq 5(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r11
+ xorq 3(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r13
+ xorq 1(%rbp,%rdi,8),%r14
+ leaq 128(%rsp),%rbx
+ movq 24(%rbx),%rsi
+ addq $1,%rsi
+ cmpq $10,%rsi
+ je .Lroundsdone
+
+ movq %rsi,24(%rbx)
+ movq %r8,64+0(%rsp)
+ movq %r9,64+8(%rsp)
+ movq %r10,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+ movq %r12,64+32(%rsp)
+ movq %r13,64+40(%rsp)
+ movq %r14,64+48(%rsp)
+ movq %r15,64+56(%rsp)
+ jmp .Lround
+.align 16
+.Lroundsdone:
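+# Miyaguchi-Preneel feed-forward: state ^= E_state(block) ^ block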
+ movq 0(%rbx),%rdi
+ movq 8(%rbx),%rsi
+ movq 16(%rbx),%rax
+ xorq 0(%rsi),%r8
+ xorq 8(%rsi),%r9
+ xorq 16(%rsi),%r10
+ xorq 24(%rsi),%r11
+ xorq 32(%rsi),%r12
+ xorq 40(%rsi),%r13
+ xorq 48(%rsi),%r14
+ xorq 56(%rsi),%r15
+ xorq 0(%rdi),%r8
+ xorq 8(%rdi),%r9
+ xorq 16(%rdi),%r10
+ xorq 24(%rdi),%r11
+ xorq 32(%rdi),%r12
+ xorq 40(%rdi),%r13
+ xorq 48(%rdi),%r14
+ xorq 56(%rdi),%r15
+ movq %r8,0(%rdi)
+ movq %r9,8(%rdi)
+ movq %r10,16(%rdi)
+ movq %r11,24(%rdi)
+ movq %r12,32(%rdi)
+ movq %r13,40(%rdi)
+ movq %r14,48(%rdi)
+ movq %r15,56(%rdi)
+ leaq 64(%rsi),%rsi
+ subq $1,%rax
+ jz .Lalldone
+ movq %rsi,8(%rbx)
+ movq %rax,16(%rbx)
+ jmp .Louterloop
+.Lalldone:
+ movq 32(%rbx),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+.Lepilogue:
+ .byte 0xf3,0xc3
+.size whirlpool_block,.-whirlpool_block
+
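+# .Ltable: 256 rows of 16 bytes (each 8-byte entry duplicated for the
+# rotated loads above), followed by the ten 8-byte round constants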
+.align 64
+.type .Ltable,@object
+.Ltable:
+.byte 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
+.byte 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
+.byte 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
+.byte 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
+.byte 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
+.byte 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
+.byte 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
+.byte 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
+.byte 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
+.byte 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
+.byte 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
+.byte 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
+.byte 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
+.byte 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
+.byte 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
+.byte 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
+.byte 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
+.byte 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
+.byte 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
+.byte 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
+.byte 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
+.byte 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
+.byte 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
+.byte 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
+.byte 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
+.byte 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
+.byte 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
+.byte 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
+.byte 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
+.byte 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
+.byte 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
+.byte 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
+.byte 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
+.byte 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
+.byte 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
+.byte 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
+.byte 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
+.byte 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
+.byte 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
+.byte 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
+.byte 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
+.byte 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
+.byte 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
+.byte 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
+.byte 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
+.byte 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
+.byte 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
+.byte 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
+.byte 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
+.byte 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
+.byte 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
+.byte 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
+.byte 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
+.byte 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
+.byte 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
+.byte 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
+.byte 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
+.byte 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
+.byte 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
+.byte 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
+.byte 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
+.byte 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
+.byte 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
+.byte 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
+.byte 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
+.byte 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
+.byte 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
+.byte 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
+.byte 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
+.byte 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
+.byte 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
+.byte 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
+.byte 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
+.byte 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
+.byte 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
+.byte 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
+.byte 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
+.byte 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
+.byte 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
+.byte 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
+.byte 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
+.byte 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
+.byte 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
+.byte 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
+.byte 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
+.byte 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
+.byte 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
+.byte 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
+.byte 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
+.byte 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
+.byte 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
+.byte 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
+.byte 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
+.byte 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
+.byte 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
+.byte 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
+.byte 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
+.byte 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
+.byte 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
+.byte 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
+.byte 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
+.byte 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
+.byte 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
+.byte 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
+.byte 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
+.byte 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
+.byte 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
+.byte 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
+.byte 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
+.byte 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
+.byte 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
+.byte 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
+.byte 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
+.byte 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
+.byte 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
+.byte 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
+.byte 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
+.byte 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
+.byte 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
+.byte 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
+.byte 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
+.byte 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
+.byte 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
+.byte 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
+.byte 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
+.byte 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
+.byte 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
+.byte 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
+.byte 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
+.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
+.byte 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
+.byte 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
+.byte 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
+.byte 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
+.byte 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
+.byte 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
+.byte 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
+.byte 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
+.byte 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
+.byte 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
+.byte 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
+.byte 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
+.byte 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
+.byte 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
+.byte 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
+.byte 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
+.byte 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
+.byte 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
+.byte 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
+.byte 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
+.byte 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
+.byte 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
+.byte 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
+.byte 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
+.byte 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
+.byte 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
+.byte 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
+.byte 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
+.byte 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
+.byte 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
+.byte 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
+.byte 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
+.byte 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
+.byte 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
+.byte 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
+.byte 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
+.byte 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
+.byte 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
+.byte 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
+.byte 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
+.byte 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
+.byte 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
+.byte 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
+.byte 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
+.byte 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
+.byte 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
+.byte 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
+.byte 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
+.byte 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
+.byte 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
+.byte 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
+.byte 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
+.byte 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
+.byte 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
+.byte 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
+.byte 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
+.byte 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
+.byte 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
+.byte 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
+.byte 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
+.byte 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
+.byte 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
+.byte 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
+.byte 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
+.byte 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
+.byte 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
+.byte 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
+.byte 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
+.byte 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
+.byte 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
+.byte 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
+.byte 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
+.byte 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
+.byte 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
+.byte 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
+.byte 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
+.byte 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
+.byte 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
+.byte 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
+.byte 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
+.byte 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
+.byte 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
+.byte 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
+.byte 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
+.byte 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
+.byte 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
+.byte 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
+.byte 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
+.byte 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
+.byte 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
+.byte 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
+.byte 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
+.byte 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
+.byte 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
+.byte 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
+.byte 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
+.byte 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
+.byte 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
+.byte 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
+.byte 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
+.byte 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
+.byte 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
+.byte 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
+.byte 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
+.byte 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
+.byte 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
+.byte 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
+.byte 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
+.byte 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
+.byte 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
+.byte 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
+.byte 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
+.byte 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
+.byte 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
+.byte 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
+.byte 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
+.byte 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
+.byte 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
+.byte 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
+.byte 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
+.byte 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
+.byte 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
+.byte 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
+.byte 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
+.byte 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
+.byte 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
+.byte 24,35,198,232,135,184,1,79
+.byte 54,166,210,245,121,111,145,82
+.byte 96,188,155,142,163,12,123,53
+.byte 29,224,215,194,46,75,254,87
+.byte 21,119,55,229,159,240,74,218
+.byte 88,201,41,10,177,160,107,133
+.byte 189,93,16,244,203,62,5,103
+.byte 228,39,65,139,167,125,149,216
+.byte 251,238,124,102,221,23,71,158
+.byte 202,45,191,7,173,90,131,51
diff --git a/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s b/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
new file mode 100644
index 0000000000..0a565a989b
--- /dev/null
+++ b/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
@@ -0,0 +1,194 @@
+
+.section .init
+ call OPENSSL_cpuid_setup
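+# the .init section runs OPENSSL_cpuid_setup automatically at load time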
+
+.text
+
+
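+# OPENSSL_atomic_add(int *p, int a): lock-cmpxchg retry loop; returns the
+# new value, sign-extended to 64 bits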
+.globl OPENSSL_atomic_add
+.type OPENSSL_atomic_add,@function
+.align 16
+OPENSSL_atomic_add:
+ movl (%rdi),%eax
+.Lspin: leaq (%rsi,%rax,1),%r8
+.byte 0xf0
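+# 0xf0 is the LOCK prefix applying to the cmpxchgl below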
+
+ cmpxchgl %r8d,(%rdi)
+ jne .Lspin
+ movl %r8d,%eax
+.byte 0x48,0x98
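+# 0x48,0x98 encodes cdqe: sign-extend %eax into %rax for the return value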
+
+ .byte 0xf3,0xc3
+.size OPENSSL_atomic_add,.-OPENSSL_atomic_add
+
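+# OPENSSL_rdtsc: return the 64-bit time-stamp counter (RDTSC leaves it
+# split across %edx:%eax)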
+.globl OPENSSL_rdtsc
+.type OPENSSL_rdtsc,@function
+.align 16
+OPENSSL_rdtsc:
+ rdtsc
+ shlq $32,%rdx
+ orq %rdx,%rax
+ .byte 0xf3,0xc3
+.size OPENSSL_rdtsc,.-OPENSSL_rdtsc
+
+.globl OPENSSL_ia32_cpuid
+.type OPENSSL_ia32_cpuid,@function
+.align 16
+OPENSSL_ia32_cpuid:
+ movq %rbx,%r8
+
+ xorl %eax,%eax
+ cpuid
+ movl %eax,%r11d
+
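+# vendor check: 1970169159/1231384169/1818588270 are "Genu"/"ineI"/"ntel";
+# %r9d ends up zero exactly for GenuineIntel parts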
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%r9d
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%r9d
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%r9d
+ jz .Lintel
+
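+# likewise 1752462657/1769238117/1145913699 spell "Auth"/"enti"/"cAMD"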
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%r10d
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%r10d
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%r10d
+ jnz .Lintel
+
+
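+# AMD: 2147483648 is CPUID leaf 0x80000000; if leaf 0x80000008 exists,
+# read the core count from it, then clear the HTT flag (EDX bit 28,
+# mask 4026531839) unless logical CPUs outnumber cores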
+ movl $2147483648,%eax
+ cpuid
+ cmpl $2147483656,%eax
+ jb .Lintel
+
+ movl $2147483656,%eax
+ cpuid
+ movzbq %cl,%r10
+ incq %r10
+
+ movl $1,%eax
+ cpuid
+ btl $28,%edx
+ jnc .Ldone
+ shrl $16,%ebx
+ cmpb %r10b,%bl
+ ja .Ldone
+ andl $4026531839,%edx
+ jmp .Ldone
+
+.Lintel:
+ cmpl $4,%r11d
+ movl $-1,%r10d
+ jb .Lnocacheinfo
+
+ movl $4,%eax
+ movl $0,%ecx
+ cpuid
+ movl %eax,%r10d
+ shrl $14,%r10d
+ andl $4095,%r10d
+
+.Lnocacheinfo:
+ movl $1,%eax
+ cpuid
+ cmpl $0,%r9d
+ jne .Lnotintel
+ orl $1048576,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ je .Lnotintel
+ orl $1073741824,%edx
+.Lnotintel:
+ btl $28,%edx
+ jnc .Ldone
+ andl $4026531839,%edx
+ cmpl $0,%r10d
+ je .Ldone
+
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja .Ldone
+ andl $4026531839,%edx
+.Ldone:
+ shlq $32,%rcx
+ movl %edx,%eax
+ movq %r8,%rbx
+ orq %rcx,%rax
+ .byte 0xf3,0xc3
+.size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid
+
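+# OPENSSL_cleanse(void *p, size_t len): zero len bytes at p; large buffers
+# go through aligned 8-byte stores, stragglers byte by byte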
+.globl OPENSSL_cleanse
+.type OPENSSL_cleanse,@function
+.align 16
+OPENSSL_cleanse:
+ xorq %rax,%rax
+ cmpq $15,%rsi
+ jae .Lot
+ cmpq $0,%rsi
+ je .Lret
+.Little:
+ movb %al,(%rdi)
+ subq $1,%rsi
+ leaq 1(%rdi),%rdi
+ jnz .Little
+.Lret:
+ .byte 0xf3,0xc3
+.align 16
+.Lot:
+ testq $7,%rdi
+ jz .Laligned
+ movb %al,(%rdi)
+ leaq -1(%rsi),%rsi
+ leaq 1(%rdi),%rdi
+ jmp .Lot
+.Laligned:
+ movq %rax,(%rdi)
+ leaq -8(%rsi),%rsi
+ testq $-8,%rsi
+ leaq 8(%rdi),%rdi
+ jnz .Laligned
+ cmpq $0,%rsi
+ jne .Little
+ .byte 0xf3,0xc3
+.size OPENSSL_cleanse,.-OPENSSL_cleanse
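+# OPENSSL_wipe_cpu: zero the XMM registers and the call-clobbered GPRs,
+# returning a pointer just above the return address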
+.globl OPENSSL_wipe_cpu
+.type OPENSSL_wipe_cpu,@function
+.align 16
+OPENSSL_wipe_cpu:
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ xorq %rsi,%rsi
+ xorq %rdi,%rdi
+ xorq %r8,%r8
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+ leaq 8(%rsp),%rax
+ .byte 0xf3,0xc3
+.size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu
diff --git a/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s b/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
new file mode 100644
index 0000000000..d42e1ea79a
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
@@ -0,0 +1,2545 @@
+.text
+
+
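+# _x86_64_AES_encrypt: table-driven encryption rounds; the 128-bit state
+# lives in %eax/%ebx/%ecx/%edx, %r15 points at the key schedule, %r14 at
+# the Te lookup table, and 240(%r15) holds the round count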
+.p2align 4
+_x86_64_AES_encrypt:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp L$enc_loop
+.p2align 4
+L$enc_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ shrl $16,%ecx
+ movzbl %ah,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movl 12(%r15),%edx
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rdi,8),%r12d
+ xorl 1(%r14,%rbp,8),%r8d
+
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz L$enc_loop
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl 2(%r14,%rsi,8),%r10d
+ movzbl 2(%r14,%rdi,8),%r11d
+ movzbl 2(%r14,%rbp,8),%r12d
+
+ movzbl %dl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl 2(%r14,%rsi,8),%r8d
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $65280,%edi
+ andl $65280,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%ecx
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ shrl $16,%edx
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+
+ andl $65280,%esi
+ andl $65280,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%eax
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 0(%r14,%rdi,8),%edi
+ movl 0(%r14,%rbp,8),%ebp
+
+ andl $16711680,%esi
+ andl $16711680,%edi
+ andl $16711680,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movl 0(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 2(%r14,%rbp,8),%ebp
+
+ andl $16711680,%esi
+ andl $4278190080,%edi
+ andl $4278190080,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movl 16+12(%r15),%edx
+ movl 2(%r14,%rsi,8),%esi
+ movl 2(%r14,%rdi,8),%edi
+ movl 16+0(%r15),%eax
+
+ andl $4278190080,%esi
+ andl $4278190080,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
+
+
+
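+# "compact" variant: uses only the 256-byte S-box and computes MixColumns
+# on the fly; the masks 2155905152 (0x80808080), 4278124286 (0xfefefefe)
+# and 454761243 (0x1b1b1b1b) implement the GF(2^8) doubling (xtime) step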
+.p2align 4
+_x86_64_AES_encrypt_compact:
+ leaq 128(%r14),%r8
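+# these throwaway loads appear to touch the S-box so that it is resident
+# in cache before the data-dependent lookups begin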
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp L$enc_loop_compact
+.p2align 4
+L$enc_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+
+ movzbl %dl,%r8d
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl (%r14,%r8,1),%r8d
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl (%r14,%rdi,1),%r13d
+
+ movzbl %dh,%ebp
+ movzbl %ah,%esi
+ shrl $16,%ecx
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ shrl $16,%edx
+
+ movzbl %cl,%edi
+ shll $8,%r9d
+ shll $8,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %r9d,%r10d
+ xorl %r13d,%r11d
+
+ movzbl %dl,%r9d
+ shrl $16,%eax
+ shrl $16,%ebx
+ movzbl %al,%r13d
+ shll $8,%ebp
+ shll $8,%esi
+ movzbl (%r14,%r9,1),%r9d
+ movzbl (%r14,%r13,1),%r13d
+ xorl %ebp,%r12d
+ xorl %esi,%r8d
+
+ movzbl %bl,%ebp
+ movzbl %dh,%esi
+ shll $16,%edi
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ xorl %edi,%r10d
+
+ movzbl %ah,%edi
+ shrl $8,%ecx
+ shrl $8,%ebx
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rcx,1),%edx
+ movzbl (%r14,%rbx,1),%ecx
+ shll $16,%r9d
+ shll $16,%r13d
+ shll $16,%ebp
+ xorl %r9d,%r11d
+ xorl %r13d,%r12d
+ xorl %ebp,%r8d
+
+ shll $24,%esi
+ shll $24,%edi
+ shll $24,%edx
+ xorl %esi,%r10d
+ shll $24,%ecx
+ xorl %edi,%r11d
+ movl %r10d,%eax
+ movl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je L$enc_compact_done
+ movl %eax,%esi
+ movl %ebx,%edi
+ andl $2155905152,%esi
+ andl $2155905152,%edi
+ movl %esi,%r10d
+ movl %edi,%r11d
+ shrl $7,%r10d
+ leal (%rax,%rax,1),%r8d
+ shrl $7,%r11d
+ leal (%rbx,%rbx,1),%r9d
+ subl %r10d,%esi
+ subl %r11d,%edi
+ andl $4278124286,%r8d
+ andl $4278124286,%r9d
+ andl $454761243,%esi
+ andl $454761243,%edi
+ movl %eax,%r10d
+ movl %ebx,%r11d
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl %ecx,%esi
+ movl %edx,%edi
+ roll $24,%eax
+ roll $24,%ebx
+ andl $2155905152,%esi
+ andl $2155905152,%edi
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movl %esi,%r12d
+ movl %edi,%ebp
+ rorl $16,%r10d
+ rorl $16,%r11d
+ shrl $7,%r12d
+ leal (%rcx,%rcx,1),%r8d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ shrl $7,%ebp
+ leal (%rdx,%rdx,1),%r9d
+ rorl $8,%r10d
+ rorl $8,%r11d
+ subl %r12d,%esi
+ subl %ebp,%edi
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+
+ andl $4278124286,%r8d
+ andl $4278124286,%r9d
+ andl $454761243,%esi
+ andl $454761243,%edi
+ movl %ecx,%r12d
+ movl %edx,%ebp
+ xorl %esi,%r8d
+ xorl %edi,%r9d
+
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+ roll $24,%ecx
+ roll $24,%edx
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+ movl 0(%r14),%esi
+ rorl $16,%r12d
+ rorl $16,%ebp
+ movl 64(%r14),%edi
+ xorl %r12d,%ecx
+ xorl %ebp,%edx
+ movl 128(%r14),%r8d
+ rorl $8,%r12d
+ rorl $8,%ebp
+ movl 192(%r14),%r9d
+ xorl %r12d,%ecx
+ xorl %ebp,%edx
+ jmp L$enc_loop_compact
+.p2align 4
+L$enc_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+
+
+.globl _AES_encrypt
+
+.p2align 4
+_AES_encrypt:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+
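+# the frame below is 64-byte aligned at a distance that varies with the
+# key-schedule address, apparently to reduce cache aliasing between the
+# stack and the lookup tables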
+ movq %rsp,%r10
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $960,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %r10,24(%rsp)
+L$enc_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq L$AES_Te+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $768,%rbp
+ leaq (%r14,%rbp,1),%r14
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$enc_epilogue:
+ .byte 0xf3,0xc3
+
+
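+# decryption counterpart of _x86_64_AES_encrypt, indexing the Td tables
+# via %r14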
+.p2align 4
+_x86_64_AES_decrypt:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+
+ movl 240(%r15),%r13d
+ subl $1,%r13d
+ jmp L$dec_loop
+.p2align 4
+L$dec_loop:
+
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movl 0(%r14,%rsi,8),%r10d
+ movl 0(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r12d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl %dl,%ebp
+ xorl 3(%r14,%rsi,8),%r10d
+ xorl 3(%r14,%rdi,8),%r11d
+ movl 0(%r14,%rbp,8),%r8d
+
+ movzbl %bh,%esi
+ shrl $16,%eax
+ movzbl %ch,%ebp
+ xorl 3(%r14,%rsi,8),%r12d
+ shrl $16,%edx
+ xorl 3(%r14,%rbp,8),%r8d
+
+ shrl $16,%ebx
+ leaq 16(%r15),%r15
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ xorl 2(%r14,%rsi,8),%r10d
+ xorl 2(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r12d
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ movzbl %bl,%ebp
+ xorl 1(%r14,%rsi,8),%r10d
+ xorl 1(%r14,%rdi,8),%r11d
+ xorl 2(%r14,%rbp,8),%r8d
+
+ movzbl %dh,%esi
+ movl 12(%r15),%edx
+ movzbl %ah,%ebp
+ xorl 1(%r14,%rsi,8),%r12d
+ movl 0(%r15),%eax
+ xorl 1(%r14,%rbp,8),%r8d
+
+ xorl %r10d,%eax
+ movl 4(%r15),%ebx
+ movl 8(%r15),%ecx
+ xorl %r12d,%ecx
+ xorl %r11d,%ebx
+ xorl %r8d,%edx
+ subl $1,%r13d
+ jnz L$dec_loop
+ leaq 2048(%r14),%r14
+ movzbl %al,%esi
+ movzbl %bl,%edi
+ movzbl %cl,%ebp
+ movzbl (%r14,%rsi,1),%r10d
+ movzbl (%r14,%rdi,1),%r11d
+ movzbl (%r14,%rbp,1),%r12d
+
+ movzbl %dl,%esi
+ movzbl %dh,%edi
+ movzbl %ah,%ebp
+ movzbl (%r14,%rsi,1),%r8d
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $8,%edi
+ shll $8,%ebp
+
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+ shrl $16,%edx
+
+ movzbl %bh,%esi
+ movzbl %ch,%edi
+ shrl $16,%eax
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+
+ shll $8,%esi
+ shll $8,%edi
+ shrl $16,%ebx
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+ shrl $16,%ecx
+
+ movzbl %cl,%esi
+ movzbl %dl,%edi
+ movzbl %al,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $16,%edi
+ shll $16,%ebp
+
+ xorl %esi,%r10d
+ xorl %edi,%r11d
+ xorl %ebp,%r12d
+
+ movzbl %bl,%esi
+ movzbl %bh,%edi
+ movzbl %ch,%ebp
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movzbl (%r14,%rbp,1),%ebp
+
+ shll $16,%esi
+ shll $24,%edi
+ shll $24,%ebp
+
+ xorl %esi,%r8d
+ xorl %edi,%r10d
+ xorl %ebp,%r11d
+
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movl 16+12(%r15),%edx
+ movzbl (%r14,%rsi,1),%esi
+ movzbl (%r14,%rdi,1),%edi
+ movl 16+0(%r15),%eax
+
+ shll $24,%esi
+ shll $24,%edi
+
+ xorl %esi,%r12d
+ xorl %edi,%r8d
+
+ movl 16+4(%r15),%ebx
+ movl 16+8(%r15),%ecx
+ leaq -2048(%r14),%r14
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+.byte 0xf3,0xc3
+
+
+
+.p2align 4
+_x86_64_AES_decrypt_compact:
+ leaq 128(%r14),%r8
+ movl 0-128(%r8),%edi
+ movl 32-128(%r8),%ebp
+ movl 64-128(%r8),%r10d
+ movl 96-128(%r8),%r11d
+ movl 128-128(%r8),%edi
+ movl 160-128(%r8),%ebp
+ movl 192-128(%r8),%r10d
+ movl 224-128(%r8),%r11d
+ jmp L$dec_loop_compact
+
+.p2align 4
+L$dec_loop_compact:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+ leaq 16(%r15),%r15
+ movzbl %al,%r10d
+ movzbl %bl,%r11d
+ movzbl %cl,%r12d
+ movzbl (%r14,%r10,1),%r10d
+ movzbl (%r14,%r11,1),%r11d
+ movzbl (%r14,%r12,1),%r12d
+
+ movzbl %dl,%r8d
+ movzbl %dh,%esi
+ movzbl %ah,%edi
+ movzbl (%r14,%r8,1),%r8d
+ movzbl (%r14,%rsi,1),%r9d
+ movzbl (%r14,%rdi,1),%r13d
+
+ movzbl %bh,%ebp
+ movzbl %ch,%esi
+ shrl $16,%ecx
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ shrl $16,%edx
+
+ movzbl %cl,%edi
+ shll $8,%r9d
+ shll $8,%r13d
+ movzbl (%r14,%rdi,1),%edi
+ xorl %r9d,%r10d
+ xorl %r13d,%r11d
+
+ movzbl %dl,%r9d
+ shrl $16,%eax
+ shrl $16,%ebx
+ movzbl %al,%r13d
+ shll $8,%ebp
+ shll $8,%esi
+ movzbl (%r14,%r9,1),%r9d
+ movzbl (%r14,%r13,1),%r13d
+ xorl %ebp,%r12d
+ xorl %esi,%r8d
+
+ movzbl %bl,%ebp
+ movzbl %bh,%esi
+ shll $16,%edi
+ movzbl (%r14,%rbp,1),%ebp
+ movzbl (%r14,%rsi,1),%esi
+ xorl %edi,%r10d
+
+ movzbl %ch,%edi
+ shll $16,%r9d
+ shll $16,%r13d
+ movzbl (%r14,%rdi,1),%ebx
+ xorl %r9d,%r11d
+ xorl %r13d,%r12d
+
+ movzbl %dh,%edi
+ shrl $8,%eax
+ shll $16,%ebp
+ movzbl (%r14,%rdi,1),%ecx
+ movzbl (%r14,%rax,1),%edx
+ xorl %ebp,%r8d
+
+ shll $24,%esi
+ shll $24,%ebx
+ shll $24,%ecx
+ xorl %esi,%r10d
+ shll $24,%edx
+ xorl %r11d,%ebx
+ movl %r10d,%eax
+ xorl %r12d,%ecx
+ xorl %r8d,%edx
+ cmpq 16(%rsp),%r15
+ je L$dec_compact_done
+
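+# InvMixColumns, two columns per 64-bit register: the masked double-and-xor
+# sequences below are GF(2^8) doublings (xtime), repeated to build the
+# 9/11/13/14 multiples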
+ movq 256+0(%r14),%rsi
+ shlq $32,%rbx
+ shlq $32,%rdx
+ movq 256+8(%r14),%rdi
+ orq %rbx,%rax
+ orq %rdx,%rcx
+ movq 256+16(%r14),%rbp
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r8,%rbx
+ xorq %r11,%rdx
+ movq %rbx,%r8
+ movq %rdx,%r11
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r9,%rbx
+ xorq %r12,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ xorq %r13,%r12
+ shrq $32,%rbx
+ shrq $32,%rdx
+ xorq %r8,%r10
+ xorq %r11,%r13
+ roll $8,%eax
+ roll $8,%ecx
+ xorq %r9,%r10
+ xorq %r12,%r13
+
+ roll $8,%ebx
+ roll $8,%edx
+ xorl %r10d,%eax
+ xorl %r13d,%ecx
+ shrq $32,%r10
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ movq %r11,%r13
+ shrq $32,%r10
+ shrq $32,%r13
+ roll $24,%r8d
+ roll $24,%r11d
+ roll $24,%r10d
+ roll $24,%r13d
+ xorl %r8d,%eax
+ xorl %r11d,%ecx
+ movq %r9,%r8
+ movq %r12,%r11
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq 0(%r14),%rsi
+ shrq $32,%r8
+ shrq $32,%r11
+ movq 64(%r14),%rdi
+ roll $16,%r9d
+ roll $16,%r12d
+ movq 128(%r14),%rbp
+ roll $16,%r8d
+ roll $16,%r11d
+ movq 192(%r14),%r10
+ xorl %r9d,%eax
+ xorl %r12d,%ecx
+ movq 256(%r14),%r13
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ jmp L$dec_loop_compact
+.p2align 4
+L$dec_compact_done:
+ xorl 0(%r15),%eax
+ xorl 4(%r15),%ebx
+ xorl 8(%r15),%ecx
+ xorl 12(%r15),%edx
+.byte 0xf3,0xc3
+
+
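+# AES_decrypt(in, out, key): public entry point. The lea arithmetic
+# below selects one of the duplicated Td4 copies based on the stack
+# address, apparently to vary which cache lines the S-box occupies,
+# before calling the compact routine.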
+.globl _AES_decrypt
+
+.p2align 4
+_AES_decrypt:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+
+ movq %rsp,%r10
+ leaq -63(%rdx),%rcx
+ andq $-64,%rsp
+ subq %rsp,%rcx
+ negq %rcx
+ andq $960,%rcx
+ subq %rcx,%rsp
+ subq $32,%rsp
+
+ movq %rsi,16(%rsp)
+ movq %r10,24(%rsp)
+L$dec_prologue:
+
+ movq %rdx,%r15
+ movl 240(%r15),%r13d
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+
+ shll $4,%r13d
+ leaq (%r15,%r13,1),%rbp
+ movq %r15,(%rsp)
+ movq %rbp,8(%rsp)
+
+
+ leaq L$AES_Td+2048(%rip),%r14
+ leaq 768(%rsp),%rbp
+ subq %r14,%rbp
+ andq $768,%rbp
+ leaq (%r14,%rbp,1),%r14
+ shrq $3,%rbp
+ addq %rbp,%r14
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 16(%rsp),%r9
+ movq 24(%rsp),%rsi
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$dec_epilogue:
+ .byte 0xf3,0xc3
+
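+# AES_set_encrypt_key(userKey, bits, key): thin wrapper that saves the
+# callee-saved registers and delegates to the expansion worker below;
+# returns 0 on success, -1 for a NULL pointer, -2 for an unsupported
+# key size.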
+.globl _AES_set_encrypt_key
+
+.p2align 4
+_AES_set_encrypt_key:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ subq $8,%rsp
+L$enc_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+
+ movq 8(%rsp),%r15
+ movq 16(%rsp),%r14
+ movq 24(%rsp),%r13
+ movq 32(%rsp),%r12
+ movq 40(%rsp),%rbp
+ movq 48(%rsp),%rbx
+ addq $56,%rsp
+L$enc_key_epilogue:
+ .byte 0xf3,0xc3
+
+
+
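+# Key-expansion worker: dispatches on the key size to L$10rounds /
+# L$12rounds / L$14rounds and unrolls the Rijndael schedule, reading the
+# S-box and rcon constants relative to %rbp.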
+.p2align 4
+_x86_64_AES_set_encrypt_key:
+ movl %esi,%ecx
+ movq %rdi,%rsi
+ movq %rdx,%rdi
+
+ testq $-1,%rsi
+ jz L$badpointer
+ testq $-1,%rdi
+ jz L$badpointer
+
+ leaq L$AES_Te(%rip),%rbp
+ leaq 2048+128(%rbp),%rbp
+
+
+ movl 0-128(%rbp),%eax
+ movl 32-128(%rbp),%ebx
+ movl 64-128(%rbp),%r8d
+ movl 96-128(%rbp),%edx
+ movl 128-128(%rbp),%eax
+ movl 160-128(%rbp),%ebx
+ movl 192-128(%rbp),%r8d
+ movl 224-128(%rbp),%edx
+
+ cmpl $128,%ecx
+ je L$10rounds
+ cmpl $192,%ecx
+ je L$12rounds
+ cmpl $256,%ecx
+ je L$14rounds
+ movq $-2,%rax
+ jmp L$exit
+
+L$10rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rdx,8(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp L$10shortcut
+.p2align 2
+L$10loop:
+ movl 0(%rdi),%eax
+ movl 12(%rdi),%edx
+L$10shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,16(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,20(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,24(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,28(%rdi)
+ addl $1,%ecx
+ leaq 16(%rdi),%rdi
+ cmpl $10,%ecx
+ jl L$10loop
+
+ movl $10,80(%rdi)
+ xorq %rax,%rax
+ jmp L$exit
+
+L$12rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rdx,16(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp L$12shortcut
+.p2align 2
+L$12loop:
+ movl 0(%rdi),%eax
+ movl 20(%rdi),%edx
+L$12shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,24(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,28(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,32(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,36(%rdi)
+
+ cmpl $7,%ecx
+ je L$12break
+ addl $1,%ecx
+
+ xorl 16(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ leaq 24(%rdi),%rdi
+ jmp L$12loop
+L$12break:
+ movl $12,72(%rdi)
+ xorq %rax,%rax
+ jmp L$exit
+
+L$14rounds:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 16(%rsi),%rcx
+ movq 24(%rsi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,16(%rdi)
+ movq %rdx,24(%rdi)
+
+ shrq $32,%rdx
+ xorl %ecx,%ecx
+ jmp L$14shortcut
+.p2align 2
+L$14loop:
+ movl 0(%rdi),%eax
+ movl 28(%rdi),%edx
+L$14shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ xorl 1024-128(%rbp,%rcx,4),%eax
+ movl %eax,32(%rdi)
+ xorl 4(%rdi),%eax
+ movl %eax,36(%rdi)
+ xorl 8(%rdi),%eax
+ movl %eax,40(%rdi)
+ xorl 12(%rdi),%eax
+ movl %eax,44(%rdi)
+
+ cmpl $6,%ecx
+ je L$14break
+ addl $1,%ecx
+
+ movl %eax,%edx
+ movl 16(%rdi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+
+ movzbl -128(%rbp,%rsi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+
+ movl %eax,48(%rdi)
+ xorl 20(%rdi),%eax
+ movl %eax,52(%rdi)
+ xorl 24(%rdi),%eax
+ movl %eax,56(%rdi)
+ xorl 28(%rdi),%eax
+ movl %eax,60(%rdi)
+
+ leaq 32(%rdi),%rdi
+ jmp L$14loop
+L$14break:
+ movl $14,48(%rdi)
+ xorq %rax,%rax
+ jmp L$exit
+
+L$badpointer:
+ movq $-1,%rax
+L$exit:
+.byte 0xf3,0xc3
+
+
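+# AES_set_decrypt_key: builds the encryption schedule first, then swaps
+# the round keys end-for-end (L$invert) and applies InvMixColumns to the
+# inner round keys (L$permute) to form the equivalent inverse cipher.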
+.globl _AES_set_decrypt_key
+
+.p2align 4
+_AES_set_decrypt_key:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ pushq %rdx
+L$dec_key_prologue:
+
+ call _x86_64_AES_set_encrypt_key
+ movq (%rsp),%r8
+ cmpl $0,%eax
+ jne L$abort
+
+ movl 240(%r8),%r14d
+ xorq %rdi,%rdi
+ leaq (%rdi,%r14,4),%rcx
+ movq %r8,%rsi
+ leaq (%r8,%rcx,4),%rdi
+.p2align 2
+L$invert:
+ movq 0(%rsi),%rax
+ movq 8(%rsi),%rbx
+ movq 0(%rdi),%rcx
+ movq 8(%rdi),%rdx
+ movq %rax,0(%rdi)
+ movq %rbx,8(%rdi)
+ movq %rcx,0(%rsi)
+ movq %rdx,8(%rsi)
+ leaq 16(%rsi),%rsi
+ leaq -16(%rdi),%rdi
+ cmpq %rsi,%rdi
+ jne L$invert
+
+ leaq L$AES_Te+2048+1024(%rip),%rax
+
+ movq 40(%rax),%rsi
+ movq 48(%rax),%rdi
+ movq 56(%rax),%rbp
+
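+# The masks loaded above (40/48/56 bytes past the rcon words) appear to
+# be the same 0x80808080/0xfefefefe/0x1b1b1b1b constants used by the
+# compact code; L$permute reuses the packed xtime trick to run
+# InvMixColumns over two round-key words per iteration.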
+ movq %r8,%r15
+ subl $1,%r14d
+.p2align 2
+L$permute:
+ leaq 16(%r15),%r15
+ movq 0(%r15),%rax
+ movq 8(%r15),%rcx
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+ shrq $7,%r9
+ leaq (%rax,%rax,1),%r8
+ shrq $7,%r12
+ leaq (%rcx,%rcx,1),%r11
+ subq %r9,%rbx
+ subq %r12,%rdx
+ andq %rdi,%r8
+ andq %rdi,%r11
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r8,%rbx
+ xorq %r11,%rdx
+ movq %rbx,%r8
+ movq %rdx,%r11
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ leaq (%r8,%r8,1),%r9
+ shrq $7,%r13
+ leaq (%r11,%r11,1),%r12
+ subq %r10,%rbx
+ subq %r13,%rdx
+ andq %rdi,%r9
+ andq %rdi,%r12
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %r9,%rbx
+ xorq %r12,%rdx
+ movq %rbx,%r9
+ movq %rdx,%r12
+
+ andq %rsi,%rbx
+ andq %rsi,%rdx
+ movq %rbx,%r10
+ movq %rdx,%r13
+ shrq $7,%r10
+ xorq %rax,%r8
+ shrq $7,%r13
+ xorq %rcx,%r11
+ subq %r10,%rbx
+ subq %r13,%rdx
+ leaq (%r9,%r9,1),%r10
+ leaq (%r12,%r12,1),%r13
+ xorq %rax,%r9
+ xorq %rcx,%r12
+ andq %rdi,%r10
+ andq %rdi,%r13
+ andq %rbp,%rbx
+ andq %rbp,%rdx
+ xorq %rbx,%r10
+ xorq %rdx,%r13
+
+ xorq %r10,%rax
+ xorq %r13,%rcx
+ xorq %r10,%r8
+ xorq %r13,%r11
+ movq %rax,%rbx
+ movq %rcx,%rdx
+ xorq %r10,%r9
+ xorq %r13,%r12
+ shrq $32,%rbx
+ shrq $32,%rdx
+ xorq %r8,%r10
+ xorq %r11,%r13
+ roll $8,%eax
+ roll $8,%ecx
+ xorq %r9,%r10
+ xorq %r12,%r13
+
+ roll $8,%ebx
+ roll $8,%edx
+ xorl %r10d,%eax
+ xorl %r13d,%ecx
+ shrq $32,%r10
+ shrq $32,%r13
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+ movq %r8,%r10
+ movq %r11,%r13
+ shrq $32,%r10
+ shrq $32,%r13
+ roll $24,%r8d
+ roll $24,%r11d
+ roll $24,%r10d
+ roll $24,%r13d
+ xorl %r8d,%eax
+ xorl %r11d,%ecx
+ movq %r9,%r8
+ movq %r12,%r11
+ xorl %r10d,%ebx
+ xorl %r13d,%edx
+
+
+ shrq $32,%r8
+ shrq $32,%r11
+
+ roll $16,%r9d
+ roll $16,%r12d
+
+ roll $16,%r8d
+ roll $16,%r11d
+
+ xorl %r9d,%eax
+ xorl %r12d,%ecx
+
+ xorl %r8d,%ebx
+ xorl %r11d,%edx
+ movl %eax,0(%r15)
+ movl %ebx,4(%r15)
+ movl %ecx,8(%r15)
+ movl %edx,12(%r15)
+ subl $1,%r14d
+ jnz L$permute
+
+ xorq %rax,%rax
+L$abort:
+ movq 8(%rsp),%r15
+ movq 16(%rsp),%r14
+ movq 24(%rsp),%r13
+ movq 32(%rsp),%r12
+ movq 40(%rsp),%rbp
+ movq 48(%rsp),%rbx
+ addq $56,%rsp
+L$dec_key_epilogue:
+ .byte 0xf3,0xc3
+
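+# AES_cbc_encrypt(in, out, length, key, ivec, enc): CBC-mode entry.
+# Inputs of at least 512 bytes whose length is a multiple of 16 take the
+# table-driven fast path; everything else drops to the compact ("slow")
+# path further down.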
+.globl _AES_cbc_encrypt
+
+.p2align 4
+
+_AES_cbc_encrypt:
+ cmpq $0,%rdx
+ je L$cbc_epilogue
+ pushfq
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+L$cbc_prologue:
+
+ cld
+ movl %r9d,%r9d
+
+ leaq L$AES_Te(%rip),%r14
+ cmpq $0,%r9
+ jne L$cbc_picked_te
+ leaq L$AES_Td(%rip),%r14
+L$cbc_picked_te:
+
+ movl _OPENSSL_ia32cap_P(%rip),%r10d
+ cmpq $512,%rdx
+ jb L$cbc_slow_prologue
+ testq $15,%rdx
+ jnz L$cbc_slow_prologue
+
+
+
+
+ leaq -88-248(%rsp),%r15
+ andq $-64,%r15
+
+
+ movq %r14,%r10
+ leaq 2304(%r14),%r11
+ movq %r15,%r12
+ andq $4095,%r10
+ andq $4095,%r11
+ andq $4095,%r12
+
+ cmpq %r11,%r12
+ jb L$cbc_te_break_out
+ subq %r11,%r12
+ subq %r12,%r15
+ jmp L$cbc_te_ok
+L$cbc_te_break_out:
+ subq %r10,%r12
+ andq $4095,%r12
+ addq $320,%r12
+ subq %r12,%r15
+.p2align 2
+L$cbc_te_ok:
+
+ xchgq %rsp,%r15
+
+ movq %r15,16(%rsp)
+L$cbc_fast_body:
+ movq %rdi,24(%rsp)
+ movq %rsi,32(%rsp)
+ movq %rdx,40(%rsp)
+ movq %rcx,48(%rsp)
+ movq %r8,56(%rsp)
+ movl $0,80+240(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+
+ movl 240(%r15),%eax
+
+ movq %r15,%r10
+ subq %r14,%r10
+ andq $4095,%r10
+ cmpq $2304,%r10
+ jb L$cbc_do_ecopy
+ cmpq $4096-248,%r10
+ jb L$cbc_skip_ecopy
+.p2align 2
+L$cbc_do_ecopy:
+ movq %r15,%rsi
+ leaq 80(%rsp),%rdi
+ leaq 80(%rsp),%r15
+ movl $30,%ecx
+.long 0x90A548F3
+
+ movl %eax,(%rdi)
+L$cbc_skip_ecopy:
+ movq %r15,0(%rsp)
+
+ movl $18,%ecx
+.p2align 2
+L$cbc_prefetch_te:
+ movq 0(%r14),%r10
+ movq 32(%r14),%r11
+ movq 64(%r14),%r12
+ movq 96(%r14),%r13
+ leaq 128(%r14),%r14
+ subl $1,%ecx
+ jnz L$cbc_prefetch_te
+ leaq -2304(%r14),%r14
+
+ cmpq $0,%rbx
+ je L$FAST_DECRYPT
+
+
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+
+.p2align 2
+L$cbc_fast_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_encrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ movq %r10,40(%rsp)
+ jnz L$cbc_fast_enc_loop
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp L$cbc_fast_cleanup
+
+
+.p2align 4
+L$FAST_DECRYPT:
+ cmpq %r8,%r9
+ je L$cbc_fast_dec_in_place
+
+ movq %rbp,64(%rsp)
+.p2align 2
+L$cbc_fast_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 64(%rsp),%rbp
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0(%rbp),%eax
+ xorl 4(%rbp),%ebx
+ xorl 8(%rbp),%ecx
+ xorl 12(%rbp),%edx
+ movq %r8,%rbp
+
+ subq $16,%r10
+ movq %r10,40(%rsp)
+ movq %rbp,64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jnz L$cbc_fast_dec_loop
+ movq 56(%rsp),%r12
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0(%r12)
+ movq %r11,8(%r12)
+ jmp L$cbc_fast_cleanup
+
+.p2align 4
+L$cbc_fast_dec_in_place:
+ movq 0(%rbp),%r10
+ movq 8(%rbp),%r11
+ movq %r10,0+64(%rsp)
+ movq %r11,8+64(%rsp)
+.p2align 2
+L$cbc_fast_dec_in_place_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+
+ call _x86_64_AES_decrypt
+
+ movq 24(%rsp),%r8
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jz L$cbc_fast_dec_in_place_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ movq %r10,40(%rsp)
+ jmp L$cbc_fast_dec_in_place_loop
+L$cbc_fast_dec_in_place_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+.p2align 2
+L$cbc_fast_cleanup:
+ cmpl $0,80+240(%rsp)
+ leaq 80(%rsp),%rdi
+ je L$cbc_exit
+ movl $30,%ecx
+ xorq %rax,%rax
+.long 0x90AB48F3
+
+
+ jmp L$cbc_exit
+
+
+.p2align 4
+L$cbc_slow_prologue:
+
+ leaq -88(%rsp),%rbp
+ andq $-64,%rbp
+
+ leaq -88-63(%rcx),%r10
+ subq %rbp,%r10
+ negq %r10
+ andq $960,%r10
+ subq %r10,%rbp
+
+ xchgq %rsp,%rbp
+
+ movq %rbp,16(%rsp)
+L$cbc_slow_body:
+
+
+
+
+ movq %r8,56(%rsp)
+ movq %r8,%rbp
+ movq %r9,%rbx
+ movq %rsi,%r9
+ movq %rdi,%r8
+ movq %rcx,%r15
+ movq %rdx,%r10
+
+ movl 240(%r15),%eax
+ movq %r15,0(%rsp)
+ shll $4,%eax
+ leaq (%r15,%rax,1),%rax
+ movq %rax,8(%rsp)
+
+
+ leaq 2048(%r14),%r14
+ leaq 768-8(%rsp),%rax
+ subq %r14,%rax
+ andq $768,%rax
+ leaq (%r14,%rax,1),%r14
+
+ cmpq $0,%rbx
+ je L$SLOW_DECRYPT
+
+
+ testq $-16,%r10
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+ jz L$cbc_slow_enc_tail
+
+
+.p2align 2
+L$cbc_slow_enc_loop:
+ xorl 0(%r8),%eax
+ xorl 4(%r8),%ebx
+ xorl 8(%r8),%ecx
+ xorl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_encrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ subq $16,%r10
+ testq $-16,%r10
+ jnz L$cbc_slow_enc_loop
+ testq $15,%r10
+ jnz L$cbc_slow_enc_tail
+ movq 56(%rsp),%rbp
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ jmp L$cbc_exit
+
+.p2align 2
+L$cbc_slow_enc_tail:
+ movq %rax,%r11
+ movq %rcx,%r12
+ movq %r10,%rcx
+ movq %r8,%rsi
+ movq %r9,%rdi
+.long 0x9066A4F3
+
+ movq $16,%rcx
+ subq %r10,%rcx
+ xorq %rax,%rax
+.long 0x9066AAF3
+
+ movq %r9,%r8
+ movq $16,%r10
+ movq %r11,%rax
+ movq %r12,%rcx
+ jmp L$cbc_slow_enc_loop
+
+
+.p2align 4
+L$SLOW_DECRYPT:
+ shrq $3,%rax
+ addq %rax,%r14
+
+ movq 0(%rbp),%r11
+ movq 8(%rbp),%r12
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+.p2align 2
+L$cbc_slow_dec_loop:
+ movl 0(%r8),%eax
+ movl 4(%r8),%ebx
+ movl 8(%r8),%ecx
+ movl 12(%r8),%edx
+ movq 0(%rsp),%r15
+ movq %r8,24(%rsp)
+ movq %r9,32(%rsp)
+ movq %r10,40(%rsp)
+
+ call _x86_64_AES_decrypt_compact
+
+ movq 24(%rsp),%r8
+ movq 32(%rsp),%r9
+ movq 40(%rsp),%r10
+ xorl 0+64(%rsp),%eax
+ xorl 4+64(%rsp),%ebx
+ xorl 8+64(%rsp),%ecx
+ xorl 12+64(%rsp),%edx
+
+ movq 0(%r8),%r11
+ movq 8(%r8),%r12
+ subq $16,%r10
+ jc L$cbc_slow_dec_partial
+ jz L$cbc_slow_dec_done
+
+ movq %r11,0+64(%rsp)
+ movq %r12,8+64(%rsp)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ leaq 16(%r8),%r8
+ leaq 16(%r9),%r9
+ jmp L$cbc_slow_dec_loop
+L$cbc_slow_dec_done:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0(%r9)
+ movl %ebx,4(%r9)
+ movl %ecx,8(%r9)
+ movl %edx,12(%r9)
+
+ jmp L$cbc_exit
+
+.p2align 2
+L$cbc_slow_dec_partial:
+ movq 56(%rsp),%rdi
+ movq %r11,0(%rdi)
+ movq %r12,8(%rdi)
+
+ movl %eax,0+64(%rsp)
+ movl %ebx,4+64(%rsp)
+ movl %ecx,8+64(%rsp)
+ movl %edx,12+64(%rsp)
+
+ movq %r9,%rdi
+ leaq 64(%rsp),%rsi
+ leaq 16(%r10),%rcx
+.long 0x9066A4F3
+
+ jmp L$cbc_exit
+
+.p2align 4
+L$cbc_exit:
+ movq 16(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$cbc_popfq:
+ popfq
+L$cbc_epilogue:
+ .byte 0xf3,0xc3
+
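+# L$AES_Te: encryption T-table (each entry stored twice, which lets the
+# code pick up rotated words by loading at unaligned offsets), followed
+# by four copies of the Te4 S-box and the rcon/byte-mask constants used
+# during key setup.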
+.p2align 6
+L$AES_Te:
+.long 0xa56363c6,0xa56363c6
+.long 0x847c7cf8,0x847c7cf8
+.long 0x997777ee,0x997777ee
+.long 0x8d7b7bf6,0x8d7b7bf6
+.long 0x0df2f2ff,0x0df2f2ff
+.long 0xbd6b6bd6,0xbd6b6bd6
+.long 0xb16f6fde,0xb16f6fde
+.long 0x54c5c591,0x54c5c591
+.long 0x50303060,0x50303060
+.long 0x03010102,0x03010102
+.long 0xa96767ce,0xa96767ce
+.long 0x7d2b2b56,0x7d2b2b56
+.long 0x19fefee7,0x19fefee7
+.long 0x62d7d7b5,0x62d7d7b5
+.long 0xe6abab4d,0xe6abab4d
+.long 0x9a7676ec,0x9a7676ec
+.long 0x45caca8f,0x45caca8f
+.long 0x9d82821f,0x9d82821f
+.long 0x40c9c989,0x40c9c989
+.long 0x877d7dfa,0x877d7dfa
+.long 0x15fafaef,0x15fafaef
+.long 0xeb5959b2,0xeb5959b2
+.long 0xc947478e,0xc947478e
+.long 0x0bf0f0fb,0x0bf0f0fb
+.long 0xecadad41,0xecadad41
+.long 0x67d4d4b3,0x67d4d4b3
+.long 0xfda2a25f,0xfda2a25f
+.long 0xeaafaf45,0xeaafaf45
+.long 0xbf9c9c23,0xbf9c9c23
+.long 0xf7a4a453,0xf7a4a453
+.long 0x967272e4,0x967272e4
+.long 0x5bc0c09b,0x5bc0c09b
+.long 0xc2b7b775,0xc2b7b775
+.long 0x1cfdfde1,0x1cfdfde1
+.long 0xae93933d,0xae93933d
+.long 0x6a26264c,0x6a26264c
+.long 0x5a36366c,0x5a36366c
+.long 0x413f3f7e,0x413f3f7e
+.long 0x02f7f7f5,0x02f7f7f5
+.long 0x4fcccc83,0x4fcccc83
+.long 0x5c343468,0x5c343468
+.long 0xf4a5a551,0xf4a5a551
+.long 0x34e5e5d1,0x34e5e5d1
+.long 0x08f1f1f9,0x08f1f1f9
+.long 0x937171e2,0x937171e2
+.long 0x73d8d8ab,0x73d8d8ab
+.long 0x53313162,0x53313162
+.long 0x3f15152a,0x3f15152a
+.long 0x0c040408,0x0c040408
+.long 0x52c7c795,0x52c7c795
+.long 0x65232346,0x65232346
+.long 0x5ec3c39d,0x5ec3c39d
+.long 0x28181830,0x28181830
+.long 0xa1969637,0xa1969637
+.long 0x0f05050a,0x0f05050a
+.long 0xb59a9a2f,0xb59a9a2f
+.long 0x0907070e,0x0907070e
+.long 0x36121224,0x36121224
+.long 0x9b80801b,0x9b80801b
+.long 0x3de2e2df,0x3de2e2df
+.long 0x26ebebcd,0x26ebebcd
+.long 0x6927274e,0x6927274e
+.long 0xcdb2b27f,0xcdb2b27f
+.long 0x9f7575ea,0x9f7575ea
+.long 0x1b090912,0x1b090912
+.long 0x9e83831d,0x9e83831d
+.long 0x742c2c58,0x742c2c58
+.long 0x2e1a1a34,0x2e1a1a34
+.long 0x2d1b1b36,0x2d1b1b36
+.long 0xb26e6edc,0xb26e6edc
+.long 0xee5a5ab4,0xee5a5ab4
+.long 0xfba0a05b,0xfba0a05b
+.long 0xf65252a4,0xf65252a4
+.long 0x4d3b3b76,0x4d3b3b76
+.long 0x61d6d6b7,0x61d6d6b7
+.long 0xceb3b37d,0xceb3b37d
+.long 0x7b292952,0x7b292952
+.long 0x3ee3e3dd,0x3ee3e3dd
+.long 0x712f2f5e,0x712f2f5e
+.long 0x97848413,0x97848413
+.long 0xf55353a6,0xf55353a6
+.long 0x68d1d1b9,0x68d1d1b9
+.long 0x00000000,0x00000000
+.long 0x2cededc1,0x2cededc1
+.long 0x60202040,0x60202040
+.long 0x1ffcfce3,0x1ffcfce3
+.long 0xc8b1b179,0xc8b1b179
+.long 0xed5b5bb6,0xed5b5bb6
+.long 0xbe6a6ad4,0xbe6a6ad4
+.long 0x46cbcb8d,0x46cbcb8d
+.long 0xd9bebe67,0xd9bebe67
+.long 0x4b393972,0x4b393972
+.long 0xde4a4a94,0xde4a4a94
+.long 0xd44c4c98,0xd44c4c98
+.long 0xe85858b0,0xe85858b0
+.long 0x4acfcf85,0x4acfcf85
+.long 0x6bd0d0bb,0x6bd0d0bb
+.long 0x2aefefc5,0x2aefefc5
+.long 0xe5aaaa4f,0xe5aaaa4f
+.long 0x16fbfbed,0x16fbfbed
+.long 0xc5434386,0xc5434386
+.long 0xd74d4d9a,0xd74d4d9a
+.long 0x55333366,0x55333366
+.long 0x94858511,0x94858511
+.long 0xcf45458a,0xcf45458a
+.long 0x10f9f9e9,0x10f9f9e9
+.long 0x06020204,0x06020204
+.long 0x817f7ffe,0x817f7ffe
+.long 0xf05050a0,0xf05050a0
+.long 0x443c3c78,0x443c3c78
+.long 0xba9f9f25,0xba9f9f25
+.long 0xe3a8a84b,0xe3a8a84b
+.long 0xf35151a2,0xf35151a2
+.long 0xfea3a35d,0xfea3a35d
+.long 0xc0404080,0xc0404080
+.long 0x8a8f8f05,0x8a8f8f05
+.long 0xad92923f,0xad92923f
+.long 0xbc9d9d21,0xbc9d9d21
+.long 0x48383870,0x48383870
+.long 0x04f5f5f1,0x04f5f5f1
+.long 0xdfbcbc63,0xdfbcbc63
+.long 0xc1b6b677,0xc1b6b677
+.long 0x75dadaaf,0x75dadaaf
+.long 0x63212142,0x63212142
+.long 0x30101020,0x30101020
+.long 0x1affffe5,0x1affffe5
+.long 0x0ef3f3fd,0x0ef3f3fd
+.long 0x6dd2d2bf,0x6dd2d2bf
+.long 0x4ccdcd81,0x4ccdcd81
+.long 0x140c0c18,0x140c0c18
+.long 0x35131326,0x35131326
+.long 0x2fececc3,0x2fececc3
+.long 0xe15f5fbe,0xe15f5fbe
+.long 0xa2979735,0xa2979735
+.long 0xcc444488,0xcc444488
+.long 0x3917172e,0x3917172e
+.long 0x57c4c493,0x57c4c493
+.long 0xf2a7a755,0xf2a7a755
+.long 0x827e7efc,0x827e7efc
+.long 0x473d3d7a,0x473d3d7a
+.long 0xac6464c8,0xac6464c8
+.long 0xe75d5dba,0xe75d5dba
+.long 0x2b191932,0x2b191932
+.long 0x957373e6,0x957373e6
+.long 0xa06060c0,0xa06060c0
+.long 0x98818119,0x98818119
+.long 0xd14f4f9e,0xd14f4f9e
+.long 0x7fdcdca3,0x7fdcdca3
+.long 0x66222244,0x66222244
+.long 0x7e2a2a54,0x7e2a2a54
+.long 0xab90903b,0xab90903b
+.long 0x8388880b,0x8388880b
+.long 0xca46468c,0xca46468c
+.long 0x29eeeec7,0x29eeeec7
+.long 0xd3b8b86b,0xd3b8b86b
+.long 0x3c141428,0x3c141428
+.long 0x79dedea7,0x79dedea7
+.long 0xe25e5ebc,0xe25e5ebc
+.long 0x1d0b0b16,0x1d0b0b16
+.long 0x76dbdbad,0x76dbdbad
+.long 0x3be0e0db,0x3be0e0db
+.long 0x56323264,0x56323264
+.long 0x4e3a3a74,0x4e3a3a74
+.long 0x1e0a0a14,0x1e0a0a14
+.long 0xdb494992,0xdb494992
+.long 0x0a06060c,0x0a06060c
+.long 0x6c242448,0x6c242448
+.long 0xe45c5cb8,0xe45c5cb8
+.long 0x5dc2c29f,0x5dc2c29f
+.long 0x6ed3d3bd,0x6ed3d3bd
+.long 0xefacac43,0xefacac43
+.long 0xa66262c4,0xa66262c4
+.long 0xa8919139,0xa8919139
+.long 0xa4959531,0xa4959531
+.long 0x37e4e4d3,0x37e4e4d3
+.long 0x8b7979f2,0x8b7979f2
+.long 0x32e7e7d5,0x32e7e7d5
+.long 0x43c8c88b,0x43c8c88b
+.long 0x5937376e,0x5937376e
+.long 0xb76d6dda,0xb76d6dda
+.long 0x8c8d8d01,0x8c8d8d01
+.long 0x64d5d5b1,0x64d5d5b1
+.long 0xd24e4e9c,0xd24e4e9c
+.long 0xe0a9a949,0xe0a9a949
+.long 0xb46c6cd8,0xb46c6cd8
+.long 0xfa5656ac,0xfa5656ac
+.long 0x07f4f4f3,0x07f4f4f3
+.long 0x25eaeacf,0x25eaeacf
+.long 0xaf6565ca,0xaf6565ca
+.long 0x8e7a7af4,0x8e7a7af4
+.long 0xe9aeae47,0xe9aeae47
+.long 0x18080810,0x18080810
+.long 0xd5baba6f,0xd5baba6f
+.long 0x887878f0,0x887878f0
+.long 0x6f25254a,0x6f25254a
+.long 0x722e2e5c,0x722e2e5c
+.long 0x241c1c38,0x241c1c38
+.long 0xf1a6a657,0xf1a6a657
+.long 0xc7b4b473,0xc7b4b473
+.long 0x51c6c697,0x51c6c697
+.long 0x23e8e8cb,0x23e8e8cb
+.long 0x7cdddda1,0x7cdddda1
+.long 0x9c7474e8,0x9c7474e8
+.long 0x211f1f3e,0x211f1f3e
+.long 0xdd4b4b96,0xdd4b4b96
+.long 0xdcbdbd61,0xdcbdbd61
+.long 0x868b8b0d,0x868b8b0d
+.long 0x858a8a0f,0x858a8a0f
+.long 0x907070e0,0x907070e0
+.long 0x423e3e7c,0x423e3e7c
+.long 0xc4b5b571,0xc4b5b571
+.long 0xaa6666cc,0xaa6666cc
+.long 0xd8484890,0xd8484890
+.long 0x05030306,0x05030306
+.long 0x01f6f6f7,0x01f6f6f7
+.long 0x120e0e1c,0x120e0e1c
+.long 0xa36161c2,0xa36161c2
+.long 0x5f35356a,0x5f35356a
+.long 0xf95757ae,0xf95757ae
+.long 0xd0b9b969,0xd0b9b969
+.long 0x91868617,0x91868617
+.long 0x58c1c199,0x58c1c199
+.long 0x271d1d3a,0x271d1d3a
+.long 0xb99e9e27,0xb99e9e27
+.long 0x38e1e1d9,0x38e1e1d9
+.long 0x13f8f8eb,0x13f8f8eb
+.long 0xb398982b,0xb398982b
+.long 0x33111122,0x33111122
+.long 0xbb6969d2,0xbb6969d2
+.long 0x70d9d9a9,0x70d9d9a9
+.long 0x898e8e07,0x898e8e07
+.long 0xa7949433,0xa7949433
+.long 0xb69b9b2d,0xb69b9b2d
+.long 0x221e1e3c,0x221e1e3c
+.long 0x92878715,0x92878715
+.long 0x20e9e9c9,0x20e9e9c9
+.long 0x49cece87,0x49cece87
+.long 0xff5555aa,0xff5555aa
+.long 0x78282850,0x78282850
+.long 0x7adfdfa5,0x7adfdfa5
+.long 0x8f8c8c03,0x8f8c8c03
+.long 0xf8a1a159,0xf8a1a159
+.long 0x80898909,0x80898909
+.long 0x170d0d1a,0x170d0d1a
+.long 0xdabfbf65,0xdabfbf65
+.long 0x31e6e6d7,0x31e6e6d7
+.long 0xc6424284,0xc6424284
+.long 0xb86868d0,0xb86868d0
+.long 0xc3414182,0xc3414182
+.long 0xb0999929,0xb0999929
+.long 0x772d2d5a,0x772d2d5a
+.long 0x110f0f1e,0x110f0f1e
+.long 0xcbb0b07b,0xcbb0b07b
+.long 0xfc5454a8,0xfc5454a8
+.long 0xd6bbbb6d,0xd6bbbb6d
+.long 0x3a16162c,0x3a16162c
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.byte 0x63,0x7c,0x77,0x7b,0xf2,0x6b,0x6f,0xc5
+.byte 0x30,0x01,0x67,0x2b,0xfe,0xd7,0xab,0x76
+.byte 0xca,0x82,0xc9,0x7d,0xfa,0x59,0x47,0xf0
+.byte 0xad,0xd4,0xa2,0xaf,0x9c,0xa4,0x72,0xc0
+.byte 0xb7,0xfd,0x93,0x26,0x36,0x3f,0xf7,0xcc
+.byte 0x34,0xa5,0xe5,0xf1,0x71,0xd8,0x31,0x15
+.byte 0x04,0xc7,0x23,0xc3,0x18,0x96,0x05,0x9a
+.byte 0x07,0x12,0x80,0xe2,0xeb,0x27,0xb2,0x75
+.byte 0x09,0x83,0x2c,0x1a,0x1b,0x6e,0x5a,0xa0
+.byte 0x52,0x3b,0xd6,0xb3,0x29,0xe3,0x2f,0x84
+.byte 0x53,0xd1,0x00,0xed,0x20,0xfc,0xb1,0x5b
+.byte 0x6a,0xcb,0xbe,0x39,0x4a,0x4c,0x58,0xcf
+.byte 0xd0,0xef,0xaa,0xfb,0x43,0x4d,0x33,0x85
+.byte 0x45,0xf9,0x02,0x7f,0x50,0x3c,0x9f,0xa8
+.byte 0x51,0xa3,0x40,0x8f,0x92,0x9d,0x38,0xf5
+.byte 0xbc,0xb6,0xda,0x21,0x10,0xff,0xf3,0xd2
+.byte 0xcd,0x0c,0x13,0xec,0x5f,0x97,0x44,0x17
+.byte 0xc4,0xa7,0x7e,0x3d,0x64,0x5d,0x19,0x73
+.byte 0x60,0x81,0x4f,0xdc,0x22,0x2a,0x90,0x88
+.byte 0x46,0xee,0xb8,0x14,0xde,0x5e,0x0b,0xdb
+.byte 0xe0,0x32,0x3a,0x0a,0x49,0x06,0x24,0x5c
+.byte 0xc2,0xd3,0xac,0x62,0x91,0x95,0xe4,0x79
+.byte 0xe7,0xc8,0x37,0x6d,0x8d,0xd5,0x4e,0xa9
+.byte 0x6c,0x56,0xf4,0xea,0x65,0x7a,0xae,0x08
+.byte 0xba,0x78,0x25,0x2e,0x1c,0xa6,0xb4,0xc6
+.byte 0xe8,0xdd,0x74,0x1f,0x4b,0xbd,0x8b,0x8a
+.byte 0x70,0x3e,0xb5,0x66,0x48,0x03,0xf6,0x0e
+.byte 0x61,0x35,0x57,0xb9,0x86,0xc1,0x1d,0x9e
+.byte 0xe1,0xf8,0x98,0x11,0x69,0xd9,0x8e,0x94
+.byte 0x9b,0x1e,0x87,0xe9,0xce,0x55,0x28,0xdf
+.byte 0x8c,0xa1,0x89,0x0d,0xbf,0xe6,0x42,0x68
+.byte 0x41,0x99,0x2d,0x0f,0xb0,0x54,0xbb,0x16
+.long 0x00000001, 0x00000002, 0x00000004, 0x00000008
+.long 0x00000010, 0x00000020, 0x00000040, 0x00000080
+.long 0x0000001b, 0x00000036, 0x80808080, 0x80808080
+.long 0xfefefefe, 0xfefefefe, 0x1b1b1b1b, 0x1b1b1b1b
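+# L$AES_Td: decryption T-table in the same duplicated layout, then four
+# copies of the Td4 inverse S-box, each followed by the GF(2^8) mask
+# words the compact code indexes at 256(%r14).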
+.p2align 6
+L$AES_Td:
+.long 0x50a7f451,0x50a7f451
+.long 0x5365417e,0x5365417e
+.long 0xc3a4171a,0xc3a4171a
+.long 0x965e273a,0x965e273a
+.long 0xcb6bab3b,0xcb6bab3b
+.long 0xf1459d1f,0xf1459d1f
+.long 0xab58faac,0xab58faac
+.long 0x9303e34b,0x9303e34b
+.long 0x55fa3020,0x55fa3020
+.long 0xf66d76ad,0xf66d76ad
+.long 0x9176cc88,0x9176cc88
+.long 0x254c02f5,0x254c02f5
+.long 0xfcd7e54f,0xfcd7e54f
+.long 0xd7cb2ac5,0xd7cb2ac5
+.long 0x80443526,0x80443526
+.long 0x8fa362b5,0x8fa362b5
+.long 0x495ab1de,0x495ab1de
+.long 0x671bba25,0x671bba25
+.long 0x980eea45,0x980eea45
+.long 0xe1c0fe5d,0xe1c0fe5d
+.long 0x02752fc3,0x02752fc3
+.long 0x12f04c81,0x12f04c81
+.long 0xa397468d,0xa397468d
+.long 0xc6f9d36b,0xc6f9d36b
+.long 0xe75f8f03,0xe75f8f03
+.long 0x959c9215,0x959c9215
+.long 0xeb7a6dbf,0xeb7a6dbf
+.long 0xda595295,0xda595295
+.long 0x2d83bed4,0x2d83bed4
+.long 0xd3217458,0xd3217458
+.long 0x2969e049,0x2969e049
+.long 0x44c8c98e,0x44c8c98e
+.long 0x6a89c275,0x6a89c275
+.long 0x78798ef4,0x78798ef4
+.long 0x6b3e5899,0x6b3e5899
+.long 0xdd71b927,0xdd71b927
+.long 0xb64fe1be,0xb64fe1be
+.long 0x17ad88f0,0x17ad88f0
+.long 0x66ac20c9,0x66ac20c9
+.long 0xb43ace7d,0xb43ace7d
+.long 0x184adf63,0x184adf63
+.long 0x82311ae5,0x82311ae5
+.long 0x60335197,0x60335197
+.long 0x457f5362,0x457f5362
+.long 0xe07764b1,0xe07764b1
+.long 0x84ae6bbb,0x84ae6bbb
+.long 0x1ca081fe,0x1ca081fe
+.long 0x942b08f9,0x942b08f9
+.long 0x58684870,0x58684870
+.long 0x19fd458f,0x19fd458f
+.long 0x876cde94,0x876cde94
+.long 0xb7f87b52,0xb7f87b52
+.long 0x23d373ab,0x23d373ab
+.long 0xe2024b72,0xe2024b72
+.long 0x578f1fe3,0x578f1fe3
+.long 0x2aab5566,0x2aab5566
+.long 0x0728ebb2,0x0728ebb2
+.long 0x03c2b52f,0x03c2b52f
+.long 0x9a7bc586,0x9a7bc586
+.long 0xa50837d3,0xa50837d3
+.long 0xf2872830,0xf2872830
+.long 0xb2a5bf23,0xb2a5bf23
+.long 0xba6a0302,0xba6a0302
+.long 0x5c8216ed,0x5c8216ed
+.long 0x2b1ccf8a,0x2b1ccf8a
+.long 0x92b479a7,0x92b479a7
+.long 0xf0f207f3,0xf0f207f3
+.long 0xa1e2694e,0xa1e2694e
+.long 0xcdf4da65,0xcdf4da65
+.long 0xd5be0506,0xd5be0506
+.long 0x1f6234d1,0x1f6234d1
+.long 0x8afea6c4,0x8afea6c4
+.long 0x9d532e34,0x9d532e34
+.long 0xa055f3a2,0xa055f3a2
+.long 0x32e18a05,0x32e18a05
+.long 0x75ebf6a4,0x75ebf6a4
+.long 0x39ec830b,0x39ec830b
+.long 0xaaef6040,0xaaef6040
+.long 0x069f715e,0x069f715e
+.long 0x51106ebd,0x51106ebd
+.long 0xf98a213e,0xf98a213e
+.long 0x3d06dd96,0x3d06dd96
+.long 0xae053edd,0xae053edd
+.long 0x46bde64d,0x46bde64d
+.long 0xb58d5491,0xb58d5491
+.long 0x055dc471,0x055dc471
+.long 0x6fd40604,0x6fd40604
+.long 0xff155060,0xff155060
+.long 0x24fb9819,0x24fb9819
+.long 0x97e9bdd6,0x97e9bdd6
+.long 0xcc434089,0xcc434089
+.long 0x779ed967,0x779ed967
+.long 0xbd42e8b0,0xbd42e8b0
+.long 0x888b8907,0x888b8907
+.long 0x385b19e7,0x385b19e7
+.long 0xdbeec879,0xdbeec879
+.long 0x470a7ca1,0x470a7ca1
+.long 0xe90f427c,0xe90f427c
+.long 0xc91e84f8,0xc91e84f8
+.long 0x00000000,0x00000000
+.long 0x83868009,0x83868009
+.long 0x48ed2b32,0x48ed2b32
+.long 0xac70111e,0xac70111e
+.long 0x4e725a6c,0x4e725a6c
+.long 0xfbff0efd,0xfbff0efd
+.long 0x5638850f,0x5638850f
+.long 0x1ed5ae3d,0x1ed5ae3d
+.long 0x27392d36,0x27392d36
+.long 0x64d90f0a,0x64d90f0a
+.long 0x21a65c68,0x21a65c68
+.long 0xd1545b9b,0xd1545b9b
+.long 0x3a2e3624,0x3a2e3624
+.long 0xb1670a0c,0xb1670a0c
+.long 0x0fe75793,0x0fe75793
+.long 0xd296eeb4,0xd296eeb4
+.long 0x9e919b1b,0x9e919b1b
+.long 0x4fc5c080,0x4fc5c080
+.long 0xa220dc61,0xa220dc61
+.long 0x694b775a,0x694b775a
+.long 0x161a121c,0x161a121c
+.long 0x0aba93e2,0x0aba93e2
+.long 0xe52aa0c0,0xe52aa0c0
+.long 0x43e0223c,0x43e0223c
+.long 0x1d171b12,0x1d171b12
+.long 0x0b0d090e,0x0b0d090e
+.long 0xadc78bf2,0xadc78bf2
+.long 0xb9a8b62d,0xb9a8b62d
+.long 0xc8a91e14,0xc8a91e14
+.long 0x8519f157,0x8519f157
+.long 0x4c0775af,0x4c0775af
+.long 0xbbdd99ee,0xbbdd99ee
+.long 0xfd607fa3,0xfd607fa3
+.long 0x9f2601f7,0x9f2601f7
+.long 0xbcf5725c,0xbcf5725c
+.long 0xc53b6644,0xc53b6644
+.long 0x347efb5b,0x347efb5b
+.long 0x7629438b,0x7629438b
+.long 0xdcc623cb,0xdcc623cb
+.long 0x68fcedb6,0x68fcedb6
+.long 0x63f1e4b8,0x63f1e4b8
+.long 0xcadc31d7,0xcadc31d7
+.long 0x10856342,0x10856342
+.long 0x40229713,0x40229713
+.long 0x2011c684,0x2011c684
+.long 0x7d244a85,0x7d244a85
+.long 0xf83dbbd2,0xf83dbbd2
+.long 0x1132f9ae,0x1132f9ae
+.long 0x6da129c7,0x6da129c7
+.long 0x4b2f9e1d,0x4b2f9e1d
+.long 0xf330b2dc,0xf330b2dc
+.long 0xec52860d,0xec52860d
+.long 0xd0e3c177,0xd0e3c177
+.long 0x6c16b32b,0x6c16b32b
+.long 0x99b970a9,0x99b970a9
+.long 0xfa489411,0xfa489411
+.long 0x2264e947,0x2264e947
+.long 0xc48cfca8,0xc48cfca8
+.long 0x1a3ff0a0,0x1a3ff0a0
+.long 0xd82c7d56,0xd82c7d56
+.long 0xef903322,0xef903322
+.long 0xc74e4987,0xc74e4987
+.long 0xc1d138d9,0xc1d138d9
+.long 0xfea2ca8c,0xfea2ca8c
+.long 0x360bd498,0x360bd498
+.long 0xcf81f5a6,0xcf81f5a6
+.long 0x28de7aa5,0x28de7aa5
+.long 0x268eb7da,0x268eb7da
+.long 0xa4bfad3f,0xa4bfad3f
+.long 0xe49d3a2c,0xe49d3a2c
+.long 0x0d927850,0x0d927850
+.long 0x9bcc5f6a,0x9bcc5f6a
+.long 0x62467e54,0x62467e54
+.long 0xc2138df6,0xc2138df6
+.long 0xe8b8d890,0xe8b8d890
+.long 0x5ef7392e,0x5ef7392e
+.long 0xf5afc382,0xf5afc382
+.long 0xbe805d9f,0xbe805d9f
+.long 0x7c93d069,0x7c93d069
+.long 0xa92dd56f,0xa92dd56f
+.long 0xb31225cf,0xb31225cf
+.long 0x3b99acc8,0x3b99acc8
+.long 0xa77d1810,0xa77d1810
+.long 0x6e639ce8,0x6e639ce8
+.long 0x7bbb3bdb,0x7bbb3bdb
+.long 0x097826cd,0x097826cd
+.long 0xf418596e,0xf418596e
+.long 0x01b79aec,0x01b79aec
+.long 0xa89a4f83,0xa89a4f83
+.long 0x656e95e6,0x656e95e6
+.long 0x7ee6ffaa,0x7ee6ffaa
+.long 0x08cfbc21,0x08cfbc21
+.long 0xe6e815ef,0xe6e815ef
+.long 0xd99be7ba,0xd99be7ba
+.long 0xce366f4a,0xce366f4a
+.long 0xd4099fea,0xd4099fea
+.long 0xd67cb029,0xd67cb029
+.long 0xafb2a431,0xafb2a431
+.long 0x31233f2a,0x31233f2a
+.long 0x3094a5c6,0x3094a5c6
+.long 0xc066a235,0xc066a235
+.long 0x37bc4e74,0x37bc4e74
+.long 0xa6ca82fc,0xa6ca82fc
+.long 0xb0d090e0,0xb0d090e0
+.long 0x15d8a733,0x15d8a733
+.long 0x4a9804f1,0x4a9804f1
+.long 0xf7daec41,0xf7daec41
+.long 0x0e50cd7f,0x0e50cd7f
+.long 0x2ff69117,0x2ff69117
+.long 0x8dd64d76,0x8dd64d76
+.long 0x4db0ef43,0x4db0ef43
+.long 0x544daacc,0x544daacc
+.long 0xdf0496e4,0xdf0496e4
+.long 0xe3b5d19e,0xe3b5d19e
+.long 0x1b886a4c,0x1b886a4c
+.long 0xb81f2cc1,0xb81f2cc1
+.long 0x7f516546,0x7f516546
+.long 0x04ea5e9d,0x04ea5e9d
+.long 0x5d358c01,0x5d358c01
+.long 0x737487fa,0x737487fa
+.long 0x2e410bfb,0x2e410bfb
+.long 0x5a1d67b3,0x5a1d67b3
+.long 0x52d2db92,0x52d2db92
+.long 0x335610e9,0x335610e9
+.long 0x1347d66d,0x1347d66d
+.long 0x8c61d79a,0x8c61d79a
+.long 0x7a0ca137,0x7a0ca137
+.long 0x8e14f859,0x8e14f859
+.long 0x893c13eb,0x893c13eb
+.long 0xee27a9ce,0xee27a9ce
+.long 0x35c961b7,0x35c961b7
+.long 0xede51ce1,0xede51ce1
+.long 0x3cb1477a,0x3cb1477a
+.long 0x59dfd29c,0x59dfd29c
+.long 0x3f73f255,0x3f73f255
+.long 0x79ce1418,0x79ce1418
+.long 0xbf37c773,0xbf37c773
+.long 0xeacdf753,0xeacdf753
+.long 0x5baafd5f,0x5baafd5f
+.long 0x146f3ddf,0x146f3ddf
+.long 0x86db4478,0x86db4478
+.long 0x81f3afca,0x81f3afca
+.long 0x3ec468b9,0x3ec468b9
+.long 0x2c342438,0x2c342438
+.long 0x5f40a3c2,0x5f40a3c2
+.long 0x72c31d16,0x72c31d16
+.long 0x0c25e2bc,0x0c25e2bc
+.long 0x8b493c28,0x8b493c28
+.long 0x41950dff,0x41950dff
+.long 0x7101a839,0x7101a839
+.long 0xdeb30c08,0xdeb30c08
+.long 0x9ce4b4d8,0x9ce4b4d8
+.long 0x90c15664,0x90c15664
+.long 0x6184cb7b,0x6184cb7b
+.long 0x70b632d5,0x70b632d5
+.long 0x745c6c48,0x745c6c48
+.long 0x4257b8d0,0x4257b8d0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 0x52,0x09,0x6a,0xd5,0x30,0x36,0xa5,0x38
+.byte 0xbf,0x40,0xa3,0x9e,0x81,0xf3,0xd7,0xfb
+.byte 0x7c,0xe3,0x39,0x82,0x9b,0x2f,0xff,0x87
+.byte 0x34,0x8e,0x43,0x44,0xc4,0xde,0xe9,0xcb
+.byte 0x54,0x7b,0x94,0x32,0xa6,0xc2,0x23,0x3d
+.byte 0xee,0x4c,0x95,0x0b,0x42,0xfa,0xc3,0x4e
+.byte 0x08,0x2e,0xa1,0x66,0x28,0xd9,0x24,0xb2
+.byte 0x76,0x5b,0xa2,0x49,0x6d,0x8b,0xd1,0x25
+.byte 0x72,0xf8,0xf6,0x64,0x86,0x68,0x98,0x16
+.byte 0xd4,0xa4,0x5c,0xcc,0x5d,0x65,0xb6,0x92
+.byte 0x6c,0x70,0x48,0x50,0xfd,0xed,0xb9,0xda
+.byte 0x5e,0x15,0x46,0x57,0xa7,0x8d,0x9d,0x84
+.byte 0x90,0xd8,0xab,0x00,0x8c,0xbc,0xd3,0x0a
+.byte 0xf7,0xe4,0x58,0x05,0xb8,0xb3,0x45,0x06
+.byte 0xd0,0x2c,0x1e,0x8f,0xca,0x3f,0x0f,0x02
+.byte 0xc1,0xaf,0xbd,0x03,0x01,0x13,0x8a,0x6b
+.byte 0x3a,0x91,0x11,0x41,0x4f,0x67,0xdc,0xea
+.byte 0x97,0xf2,0xcf,0xce,0xf0,0xb4,0xe6,0x73
+.byte 0x96,0xac,0x74,0x22,0xe7,0xad,0x35,0x85
+.byte 0xe2,0xf9,0x37,0xe8,0x1c,0x75,0xdf,0x6e
+.byte 0x47,0xf1,0x1a,0x71,0x1d,0x29,0xc5,0x89
+.byte 0x6f,0xb7,0x62,0x0e,0xaa,0x18,0xbe,0x1b
+.byte 0xfc,0x56,0x3e,0x4b,0xc6,0xd2,0x79,0x20
+.byte 0x9a,0xdb,0xc0,0xfe,0x78,0xcd,0x5a,0xf4
+.byte 0x1f,0xdd,0xa8,0x33,0x88,0x07,0xc7,0x31
+.byte 0xb1,0x12,0x10,0x59,0x27,0x80,0xec,0x5f
+.byte 0x60,0x51,0x7f,0xa9,0x19,0xb5,0x4a,0x0d
+.byte 0x2d,0xe5,0x7a,0x9f,0x93,0xc9,0x9c,0xef
+.byte 0xa0,0xe0,0x3b,0x4d,0xae,0x2a,0xf5,0xb0
+.byte 0xc8,0xeb,0xbb,0x3c,0x83,0x53,0x99,0x61
+.byte 0x17,0x2b,0x04,0x7e,0xba,0x77,0xd6,0x26
+.byte 0xe1,0x69,0x14,0x63,0x55,0x21,0x0c,0x7d
+.long 0x80808080, 0x80808080, 0xfefefefe, 0xfefefefe
+.long 0x1b1b1b1b, 0x1b1b1b1b, 0, 0
+.byte 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 6
diff --git a/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s b/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
new file mode 100644
index 0000000000..23292a0716
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
@@ -0,0 +1,172 @@
+.text
+
+
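+# bn_mul_mont(rp, ap, bp, np, n0, num): word-serial Montgomery
+# multiplication, rp[] = ap[]*bp[] / R mod np[]. The closing
+# L$sub/L$copy pair performs the final conditional subtraction by
+# masking rather than branching on the comparison result.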
+.globl _bn_mul_mont
+
+.p2align 4
+_bn_mul_mont:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+ movl %r9d,%r9d
+ leaq 2(%r9),%r10
+ movq %rsp,%r11
+ negq %r10
+ leaq (%rsp,%r10,8),%rsp
+ andq $-1024,%rsp
+
+ movq %r11,8(%rsp,%r9,8)
+L$prologue:
+ movq %rdx,%r12
+
+ movq (%r8),%r8
+
+ xorq %r14,%r14
+ xorq %r15,%r15
+
+ movq (%r12),%rbx
+ movq (%rsi),%rax
+ mulq %rbx
+ movq %rax,%r10
+ movq %rdx,%r11
+
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx)
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq %rdx,%r13
+
+ leaq 1(%r15),%r15
+L$1st:
+ movq (%rsi,%r15,8),%rax
+ mulq %rbx
+ addq %r11,%rax
+ adcq $0,%rdx
+ movq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ movq %rdx,%r11
+
+ mulq %rbp
+ addq %r13,%rax
+ leaq 1(%r15),%r15
+ adcq $0,%rdx
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq %rax,-16(%rsp,%r15,8)
+ cmpq %r9,%r15
+ movq %rdx,%r13
+ jl L$1st
+
+ xorq %rdx,%rdx
+ addq %r11,%r13
+ adcq $0,%rdx
+ movq %r13,-8(%rsp,%r9,8)
+ movq %rdx,(%rsp,%r9,8)
+
+ leaq 1(%r14),%r14
+.p2align 2
+L$outer:
+ xorq %r15,%r15
+
+ movq (%r12,%r14,8),%rbx
+ movq (%rsi),%rax
+ mulq %rbx
+ addq (%rsp),%rax
+ adcq $0,%rdx
+ movq %rax,%r10
+ movq %rdx,%r11
+
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx,%r15,8)
+ addq %r10,%rax
+ movq 8(%rsp),%r10
+ adcq $0,%rdx
+ movq %rdx,%r13
+
+ leaq 1(%r15),%r15
+.p2align 2
+L$inner:
+ movq (%rsi,%r15,8),%rax
+ mulq %rbx
+ addq %r11,%rax
+ adcq $0,%rdx
+ addq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ adcq $0,%rdx
+ movq %rdx,%r11
+
+ mulq %rbp
+ addq %r13,%rax
+ leaq 1(%r15),%r15
+ adcq $0,%rdx
+ addq %r10,%rax
+ adcq $0,%rdx
+ movq (%rsp,%r15,8),%r10
+ cmpq %r9,%r15
+ movq %rax,-16(%rsp,%r15,8)
+ movq %rdx,%r13
+ jl L$inner
+
+ xorq %rdx,%rdx
+ addq %r11,%r13
+ adcq $0,%rdx
+ addq %r10,%r13
+ adcq $0,%rdx
+ movq %r13,-8(%rsp,%r9,8)
+ movq %rdx,(%rsp,%r9,8)
+
+ leaq 1(%r14),%r14
+ cmpq %r9,%r14
+ jl L$outer
+
+ leaq (%rsp),%rsi
+ leaq -1(%r9),%r15
+
+ movq (%rsi),%rax
+ xorq %r14,%r14
+ jmp L$sub
+.p2align 4
+L$sub: sbbq (%rcx,%r14,8),%rax
+ movq %rax,(%rdi,%r14,8)
+ decq %r15
+ movq 8(%rsi,%r14,8),%rax
+ leaq 1(%r14),%r14
+ jge L$sub
+
+ sbbq $0,%rax
+ andq %rax,%rsi
+ notq %rax
+ movq %rdi,%rcx
+ andq %rax,%rcx
+ leaq -1(%r9),%r15
+ orq %rcx,%rsi
+.p2align 4
+L$copy:
+ movq (%rsi,%r15,8),%rax
+ movq %rax,(%rdi,%r15,8)
+ movq %r14,(%rsp,%r15,8)
+ decq %r15
+ jge L$copy
+
+ movq 8(%rsp,%r9,8),%rsi
+ movq $1,%rax
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 4
diff --git a/deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s b/deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s
new file mode 100644
index 0000000000..dfc8d592e8
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/camellia/cmll-x86_64.s
@@ -0,0 +1,1844 @@
+.text
+
+
+
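+# Camellia_EncryptBlock(keyBitLength, plaintext, keyTable, ciphertext):
+# the subl/adcl pair turns the key length into a grand-round count
+# (3 for 128-bit keys, 4 for 192/256 via the borrow) and falls through
+# into the _Rounds entry point.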
+.globl _Camellia_EncryptBlock
+
+.p2align 4
+_Camellia_EncryptBlock:
+ movl $128,%eax
+ subl %edi,%eax
+ movl $3,%edi
+ adcl $0,%edi
+ jmp L$enc_rounds
+
+
+.globl _Camellia_EncryptBlock_Rounds
+
+.p2align 4
+L$enc_rounds:
+_Camellia_EncryptBlock_Rounds:
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+L$enc_prologue:
+
+
+ movq %rcx,%r13
+ movq %rdx,%r14
+
+ shll $6,%edi
+ leaq L$Camellia_SBOX(%rip),%rbp
+ leaq (%r14,%rdi,1),%r15
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ bswapl %r8d
+ movl 12(%rsi),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_encrypt
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+L$enc_epilogue:
+ .byte 0xf3,0xc3
+
+
+
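+# Inner encryption: state in %r8d-%r11d, key schedule in %r14, S-box
+# base in %rbp. Each L$eloop pass is six Feistel rounds; the and/or/rol
+# block before looping back is the FL/FL^-1 layer applied every six
+# rounds.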
+.p2align 4
+_x86_64_Camellia_encrypt:
+ xorl 0(%r14),%r9d
+ xorl 4(%r14),%r8d
+ xorl 8(%r14),%r11d
+ xorl 12(%r14),%r10d
+.p2align 4
+L$eloop:
+ movl 16(%r14),%ebx
+ movl 20(%r14),%eax
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 24(%r14),%ebx
+ movl 28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 32(%r14),%ebx
+ movl 36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 40(%r14),%ebx
+ movl 44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 48(%r14),%ebx
+ movl 52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 56(%r14),%ebx
+ movl 60(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 64(%r14),%ebx
+ movl 68(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ leaq 64(%r14),%r14
+ cmpq %r15,%r14
+ movl 8(%r14),%edx
+ movl 12(%r14),%ecx
+ je L$edone
+
+ andl %r8d,%eax
+ orl %r11d,%edx
+ roll $1,%eax
+ xorl %edx,%r10d
+ xorl %eax,%r9d
+ andl %r10d,%ecx
+ orl %r9d,%ebx
+ roll $1,%ecx
+ xorl %ebx,%r8d
+ xorl %ecx,%r11d
+ jmp L$eloop
+
+.p2align 4
+L$edone:
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ xorl %r8d,%ecx
+ xorl %r9d,%edx
+
+ movl %eax,%r8d
+ movl %ebx,%r9d
+ movl %ecx,%r10d
+ movl %edx,%r11d
+
+.byte 0xf3,0xc3
+
+
+
+
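+# Camellia_DecryptBlock mirrors the encrypt entry: same round-count
+# derivation, but the inner routine below walks the key schedule in
+# reverse.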
+.globl _Camellia_DecryptBlock
+
+.p2align 4
+_Camellia_DecryptBlock:
+ movl $128,%eax
+ subl %edi,%eax
+ movl $3,%edi
+ adcl $0,%edi
+ jmp L$dec_rounds
+
+
+.globl _Camellia_DecryptBlock_Rounds
+
+.p2align 4
+L$dec_rounds:
+_Camellia_DecryptBlock_Rounds:
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+L$dec_prologue:
+
+
+ movq %rcx,%r13
+ movq %rdx,%r15
+
+ shll $6,%edi
+ leaq L$Camellia_SBOX(%rip),%rbp
+ leaq (%r15,%rdi,1),%r14
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ bswapl %r8d
+ movl 12(%rsi),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_decrypt
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+L$dec_epilogue:
+ .byte 0xf3,0xc3
+
+
+
+.p2align 4
+_x86_64_Camellia_decrypt:
+ xorl 0(%r14),%r9d
+ xorl 4(%r14),%r8d
+ xorl 8(%r14),%r11d
+ xorl 12(%r14),%r10d
+.p2align 4
+L$dloop:
+ movl -8(%r14),%ebx
+ movl -4(%r14),%eax
+
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -16(%r14),%ebx
+ movl -12(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -24(%r14),%ebx
+ movl -20(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -32(%r14),%ebx
+ movl -28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -40(%r14),%ebx
+ movl -36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -48(%r14),%ebx
+ movl -44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl -56(%r14),%ebx
+ movl -52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ leaq -64(%r14),%r14
+ cmpq %r15,%r14
+ movl 0(%r14),%edx
+ movl 4(%r14),%ecx
+ je L$ddone
+
+ andl %r8d,%eax
+ orl %r11d,%edx
+ roll $1,%eax
+ xorl %edx,%r10d
+ xorl %eax,%r9d
+ andl %r10d,%ecx
+ orl %r9d,%ebx
+ roll $1,%ecx
+ xorl %ebx,%r8d
+ xorl %ecx,%r11d
+
+ jmp L$dloop
+
+.p2align 4
+L$ddone:
+ xorl %r10d,%ecx
+ xorl %r11d,%edx
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+
+ movl %ecx,%r8d
+ movl %edx,%r9d
+ movl %eax,%r10d
+ movl %ebx,%r11d
+
+.byte 0xf3,0xc3
+
+
+.globl _Camellia_Ekeygen
+
+.p2align 4
+_Camellia_Ekeygen:
+ pushq %rbx
+ pushq %rbp
+ pushq %r13
+ pushq %r14
+ pushq %r15
+L$key_prologue:
+
+ movq %rdi,%r15
+ movq %rdx,%r13
+
+ movl 0(%rsi),%r8d
+ movl 4(%rsi),%r9d
+ movl 8(%rsi),%r10d
+ movl 12(%rsi),%r11d
+
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+ movl %r9d,0(%r13)
+ movl %r8d,4(%r13)
+ movl %r11d,8(%r13)
+ movl %r10d,12(%r13)
+ cmpq $128,%r15
+ je L$1st128
+
+ movl 16(%rsi),%r8d
+ movl 20(%rsi),%r9d
+ cmpq $192,%r15
+ je L$1st192
+ movl 24(%rsi),%r10d
+ movl 28(%rsi),%r11d
+ jmp L$1st256
+L$1st192:
+ movl %r8d,%r10d
+ movl %r9d,%r11d
+ notl %r10d
+ notl %r11d
+L$1st256:
+ bswapl %r8d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+ movl %r9d,32(%r13)
+ movl %r8d,36(%r13)
+ movl %r11d,40(%r13)
+ movl %r10d,44(%r13)
+ xorl 0(%r13),%r9d
+ xorl 4(%r13),%r8d
+ xorl 8(%r13),%r11d
+ xorl 12(%r13),%r10d
+
+L$1st128:
+ leaq L$Camellia_SIGMA(%rip),%r14
+ leaq L$Camellia_SBOX(%rip),%rbp
+
+ movl 0(%r14),%ebx
+ movl 4(%r14),%eax
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 8(%r14),%ebx
+ movl 12(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 16(%r14),%ebx
+ movl 20(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ xorl 0(%r13),%r9d
+ xorl 4(%r13),%r8d
+ xorl 8(%r13),%r11d
+ xorl 12(%r13),%r10d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 24(%r14),%ebx
+ movl 28(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 32(%r14),%ebx
+ movl 36(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ cmpq $128,%r15
+ jne L$2nd256
+
+ leaq 128(%r13),%r13
+ shlq $32,%r8
+ shlq $32,%r10
+ orq %r9,%r8
+ orq %r11,%r10
+ movq -128(%r13),%rax
+ movq -120(%r13),%rbx
+ movq %r8,-112(%r13)
+ movq %r10,-104(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rax,-96(%r13)
+ movq %rbx,-88(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-80(%r13)
+ movq %r10,-72(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-64(%r13)
+ movq %r10,-56(%r13)
+ movq %rax,%r11
+ shlq $30,%rax
+ movq %rbx,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%rax
+ shlq $30,%rbx
+ orq %r11,%rbx
+ movq %rax,-48(%r13)
+ movq %rbx,-40(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-32(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rbx,-24(%r13)
+ movq %r8,%r11
+ shlq $15,%r8
+ movq %r10,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r8
+ shlq $15,%r10
+ orq %r11,%r10
+ movq %r8,-16(%r13)
+ movq %r10,-8(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,0(%r13)
+ movq %rbx,8(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,16(%r13)
+ movq %rbx,24(%r13)
+ movq %r8,%r11
+ shlq $34,%r8
+ movq %r10,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%r8
+ shlq $34,%r10
+ orq %r11,%r10
+ movq %r8,32(%r13)
+ movq %r10,40(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,48(%r13)
+ movq %rbx,56(%r13)
+ movq %r8,%r11
+ shlq $17,%r8
+ movq %r10,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%r8
+ shlq $17,%r10
+ orq %r11,%r10
+ movq %r8,64(%r13)
+ movq %r10,72(%r13)
+ movl $3,%eax
+ jmp L$done
+.p2align 4
+L$2nd256:
+ movl %r9d,48(%r13)
+ movl %r8d,52(%r13)
+ movl %r11d,56(%r13)
+ movl %r10d,60(%r13)
+ xorl 32(%r13),%r9d
+ xorl 36(%r13),%r8d
+ xorl 40(%r13),%r11d
+ xorl 44(%r13),%r10d
+ xorl %r8d,%eax
+ xorl %r9d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 40(%r14),%ebx
+ movl 44(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r10d
+ xorl %ecx,%r11d
+ xorl %edx,%r11d
+ xorl %r10d,%eax
+ xorl %r11d,%ebx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ movl 2052(%rbp,%rsi,8),%edx
+ movl 0(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ shrl $16,%eax
+ movzbl %bh,%edi
+ xorl 4(%rbp,%rsi,8),%edx
+ shrl $16,%ebx
+ xorl 4(%rbp,%rdi,8),%ecx
+ movzbl %ah,%esi
+ movzbl %bl,%edi
+ xorl 0(%rbp,%rsi,8),%edx
+ xorl 2052(%rbp,%rdi,8),%ecx
+ movzbl %al,%esi
+ movzbl %bh,%edi
+ xorl 2048(%rbp,%rsi,8),%edx
+ xorl 2048(%rbp,%rdi,8),%ecx
+ movl 48(%r14),%ebx
+ movl 52(%r14),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl %ecx,%r8d
+ xorl %ecx,%r9d
+ xorl %edx,%r9d
+ movq 0(%r13),%rax
+ movq 8(%r13),%rbx
+ movq 32(%r13),%rcx
+ movq 40(%r13),%rdx
+ movq 48(%r13),%r14
+ movq 56(%r13),%r15
+ leaq 128(%r13),%r13
+ shlq $32,%r8
+ shlq $32,%r10
+ orq %r9,%r8
+ orq %r11,%r10
+ movq %r8,-112(%r13)
+ movq %r10,-104(%r13)
+ movq %rcx,%r11
+ shlq $15,%rcx
+ movq %rdx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rcx
+ shlq $15,%rdx
+ orq %r11,%rdx
+ movq %rcx,-96(%r13)
+ movq %rdx,-88(%r13)
+ movq %r14,%r11
+ shlq $15,%r14
+ movq %r15,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%r14
+ shlq $15,%r15
+ orq %r11,%r15
+ movq %r14,-80(%r13)
+ movq %r15,-72(%r13)
+ movq %rcx,%r11
+ shlq $15,%rcx
+ movq %rdx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rcx
+ shlq $15,%rdx
+ orq %r11,%rdx
+ movq %rcx,-64(%r13)
+ movq %rdx,-56(%r13)
+ movq %r8,%r11
+ shlq $30,%r8
+ movq %r10,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r8
+ shlq $30,%r10
+ orq %r11,%r10
+ movq %r8,-48(%r13)
+ movq %r10,-40(%r13)
+ movq %rax,%r11
+ shlq $45,%rax
+ movq %rbx,%r9
+ shrq $19,%r9
+ shrq $19,%r11
+ orq %r9,%rax
+ shlq $45,%rbx
+ orq %r11,%rbx
+ movq %rax,-32(%r13)
+ movq %rbx,-24(%r13)
+ movq %r14,%r11
+ shlq $30,%r14
+ movq %r15,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r14
+ shlq $30,%r15
+ orq %r11,%r15
+ movq %r14,-16(%r13)
+ movq %r15,-8(%r13)
+ movq %rax,%r11
+ shlq $15,%rax
+ movq %rbx,%r9
+ shrq $49,%r9
+ shrq $49,%r11
+ orq %r9,%rax
+ shlq $15,%rbx
+ orq %r11,%rbx
+ movq %rax,0(%r13)
+ movq %rbx,8(%r13)
+ movq %rcx,%r11
+ shlq $30,%rcx
+ movq %rdx,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%rcx
+ shlq $30,%rdx
+ orq %r11,%rdx
+ movq %rcx,16(%r13)
+ movq %rdx,24(%r13)
+ movq %r8,%r11
+ shlq $30,%r8
+ movq %r10,%r9
+ shrq $34,%r9
+ shrq $34,%r11
+ orq %r9,%r8
+ shlq $30,%r10
+ orq %r11,%r10
+ movq %r8,32(%r13)
+ movq %r10,40(%r13)
+ movq %rax,%r11
+ shlq $17,%rax
+ movq %rbx,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%rax
+ shlq $17,%rbx
+ orq %r11,%rbx
+ movq %rax,48(%r13)
+ movq %rbx,56(%r13)
+ movq %r14,%r11
+ shlq $32,%r14
+ movq %r15,%r9
+ shrq $32,%r9
+ shrq $32,%r11
+ orq %r9,%r14
+ shlq $32,%r15
+ orq %r11,%r15
+ movq %r14,64(%r13)
+ movq %r15,72(%r13)
+ movq %rcx,%r11
+ shlq $34,%rcx
+ movq %rdx,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%rcx
+ shlq $34,%rdx
+ orq %r11,%rdx
+ movq %rcx,80(%r13)
+ movq %rdx,88(%r13)
+ movq %r14,%r11
+ shlq $17,%r14
+ movq %r15,%r9
+ shrq $47,%r9
+ shrq $47,%r11
+ orq %r9,%r14
+ shlq $17,%r15
+ orq %r11,%r15
+ movq %r14,96(%r13)
+ movq %r15,104(%r13)
+ movq %rax,%r11
+ shlq $34,%rax
+ movq %rbx,%r9
+ shrq $30,%r9
+ shrq $30,%r11
+ orq %r9,%rax
+ shlq $34,%rbx
+ orq %r11,%rbx
+ movq %rax,112(%r13)
+ movq %rbx,120(%r13)
+ movq %r8,%r11
+ shlq $51,%r8
+ movq %r10,%r9
+ shrq $13,%r9
+ shrq $13,%r11
+ orq %r9,%r8
+ shlq $51,%r10
+ orq %r11,%r10
+ movq %r8,128(%r13)
+ movq %r10,136(%r13)
+ movl $4,%eax
+L$done:
+ movq 0(%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r13
+ movq 24(%rsp),%rbp
+ movq 32(%rsp),%rbx
+ leaq 40(%rsp),%rsp
+L$key_epilogue:
+ .byte 0xf3,0xc3
+
+.p2align 6
+L$Camellia_SIGMA:
+.long 0x3bcc908b, 0xa09e667f, 0x4caa73b2, 0xb67ae858
+.long 0xe94f82be, 0xc6ef372f, 0xf1d36f1c, 0x54ff53a5
+.long 0xde682d1d, 0x10e527fa, 0xb3e6c1fd, 0xb05688c2
+.long 0, 0, 0, 0
+L$Camellia_SBOX:
+.long 0x70707000,0x70700070
+.long 0x82828200,0x2c2c002c
+.long 0x2c2c2c00,0xb3b300b3
+.long 0xececec00,0xc0c000c0
+.long 0xb3b3b300,0xe4e400e4
+.long 0x27272700,0x57570057
+.long 0xc0c0c000,0xeaea00ea
+.long 0xe5e5e500,0xaeae00ae
+.long 0xe4e4e400,0x23230023
+.long 0x85858500,0x6b6b006b
+.long 0x57575700,0x45450045
+.long 0x35353500,0xa5a500a5
+.long 0xeaeaea00,0xeded00ed
+.long 0x0c0c0c00,0x4f4f004f
+.long 0xaeaeae00,0x1d1d001d
+.long 0x41414100,0x92920092
+.long 0x23232300,0x86860086
+.long 0xefefef00,0xafaf00af
+.long 0x6b6b6b00,0x7c7c007c
+.long 0x93939300,0x1f1f001f
+.long 0x45454500,0x3e3e003e
+.long 0x19191900,0xdcdc00dc
+.long 0xa5a5a500,0x5e5e005e
+.long 0x21212100,0x0b0b000b
+.long 0xededed00,0xa6a600a6
+.long 0x0e0e0e00,0x39390039
+.long 0x4f4f4f00,0xd5d500d5
+.long 0x4e4e4e00,0x5d5d005d
+.long 0x1d1d1d00,0xd9d900d9
+.long 0x65656500,0x5a5a005a
+.long 0x92929200,0x51510051
+.long 0xbdbdbd00,0x6c6c006c
+.long 0x86868600,0x8b8b008b
+.long 0xb8b8b800,0x9a9a009a
+.long 0xafafaf00,0xfbfb00fb
+.long 0x8f8f8f00,0xb0b000b0
+.long 0x7c7c7c00,0x74740074
+.long 0xebebeb00,0x2b2b002b
+.long 0x1f1f1f00,0xf0f000f0
+.long 0xcecece00,0x84840084
+.long 0x3e3e3e00,0xdfdf00df
+.long 0x30303000,0xcbcb00cb
+.long 0xdcdcdc00,0x34340034
+.long 0x5f5f5f00,0x76760076
+.long 0x5e5e5e00,0x6d6d006d
+.long 0xc5c5c500,0xa9a900a9
+.long 0x0b0b0b00,0xd1d100d1
+.long 0x1a1a1a00,0x04040004
+.long 0xa6a6a600,0x14140014
+.long 0xe1e1e100,0x3a3a003a
+.long 0x39393900,0xdede00de
+.long 0xcacaca00,0x11110011
+.long 0xd5d5d500,0x32320032
+.long 0x47474700,0x9c9c009c
+.long 0x5d5d5d00,0x53530053
+.long 0x3d3d3d00,0xf2f200f2
+.long 0xd9d9d900,0xfefe00fe
+.long 0x01010100,0xcfcf00cf
+.long 0x5a5a5a00,0xc3c300c3
+.long 0xd6d6d600,0x7a7a007a
+.long 0x51515100,0x24240024
+.long 0x56565600,0xe8e800e8
+.long 0x6c6c6c00,0x60600060
+.long 0x4d4d4d00,0x69690069
+.long 0x8b8b8b00,0xaaaa00aa
+.long 0x0d0d0d00,0xa0a000a0
+.long 0x9a9a9a00,0xa1a100a1
+.long 0x66666600,0x62620062
+.long 0xfbfbfb00,0x54540054
+.long 0xcccccc00,0x1e1e001e
+.long 0xb0b0b000,0xe0e000e0
+.long 0x2d2d2d00,0x64640064
+.long 0x74747400,0x10100010
+.long 0x12121200,0x00000000
+.long 0x2b2b2b00,0xa3a300a3
+.long 0x20202000,0x75750075
+.long 0xf0f0f000,0x8a8a008a
+.long 0xb1b1b100,0xe6e600e6
+.long 0x84848400,0x09090009
+.long 0x99999900,0xdddd00dd
+.long 0xdfdfdf00,0x87870087
+.long 0x4c4c4c00,0x83830083
+.long 0xcbcbcb00,0xcdcd00cd
+.long 0xc2c2c200,0x90900090
+.long 0x34343400,0x73730073
+.long 0x7e7e7e00,0xf6f600f6
+.long 0x76767600,0x9d9d009d
+.long 0x05050500,0xbfbf00bf
+.long 0x6d6d6d00,0x52520052
+.long 0xb7b7b700,0xd8d800d8
+.long 0xa9a9a900,0xc8c800c8
+.long 0x31313100,0xc6c600c6
+.long 0xd1d1d100,0x81810081
+.long 0x17171700,0x6f6f006f
+.long 0x04040400,0x13130013
+.long 0xd7d7d700,0x63630063
+.long 0x14141400,0xe9e900e9
+.long 0x58585800,0xa7a700a7
+.long 0x3a3a3a00,0x9f9f009f
+.long 0x61616100,0xbcbc00bc
+.long 0xdedede00,0x29290029
+.long 0x1b1b1b00,0xf9f900f9
+.long 0x11111100,0x2f2f002f
+.long 0x1c1c1c00,0xb4b400b4
+.long 0x32323200,0x78780078
+.long 0x0f0f0f00,0x06060006
+.long 0x9c9c9c00,0xe7e700e7
+.long 0x16161600,0x71710071
+.long 0x53535300,0xd4d400d4
+.long 0x18181800,0xabab00ab
+.long 0xf2f2f200,0x88880088
+.long 0x22222200,0x8d8d008d
+.long 0xfefefe00,0x72720072
+.long 0x44444400,0xb9b900b9
+.long 0xcfcfcf00,0xf8f800f8
+.long 0xb2b2b200,0xacac00ac
+.long 0xc3c3c300,0x36360036
+.long 0xb5b5b500,0x2a2a002a
+.long 0x7a7a7a00,0x3c3c003c
+.long 0x91919100,0xf1f100f1
+.long 0x24242400,0x40400040
+.long 0x08080800,0xd3d300d3
+.long 0xe8e8e800,0xbbbb00bb
+.long 0xa8a8a800,0x43430043
+.long 0x60606000,0x15150015
+.long 0xfcfcfc00,0xadad00ad
+.long 0x69696900,0x77770077
+.long 0x50505000,0x80800080
+.long 0xaaaaaa00,0x82820082
+.long 0xd0d0d000,0xecec00ec
+.long 0xa0a0a000,0x27270027
+.long 0x7d7d7d00,0xe5e500e5
+.long 0xa1a1a100,0x85850085
+.long 0x89898900,0x35350035
+.long 0x62626200,0x0c0c000c
+.long 0x97979700,0x41410041
+.long 0x54545400,0xefef00ef
+.long 0x5b5b5b00,0x93930093
+.long 0x1e1e1e00,0x19190019
+.long 0x95959500,0x21210021
+.long 0xe0e0e000,0x0e0e000e
+.long 0xffffff00,0x4e4e004e
+.long 0x64646400,0x65650065
+.long 0xd2d2d200,0xbdbd00bd
+.long 0x10101000,0xb8b800b8
+.long 0xc4c4c400,0x8f8f008f
+.long 0x00000000,0xebeb00eb
+.long 0x48484800,0xcece00ce
+.long 0xa3a3a300,0x30300030
+.long 0xf7f7f700,0x5f5f005f
+.long 0x75757500,0xc5c500c5
+.long 0xdbdbdb00,0x1a1a001a
+.long 0x8a8a8a00,0xe1e100e1
+.long 0x03030300,0xcaca00ca
+.long 0xe6e6e600,0x47470047
+.long 0xdadada00,0x3d3d003d
+.long 0x09090900,0x01010001
+.long 0x3f3f3f00,0xd6d600d6
+.long 0xdddddd00,0x56560056
+.long 0x94949400,0x4d4d004d
+.long 0x87878700,0x0d0d000d
+.long 0x5c5c5c00,0x66660066
+.long 0x83838300,0xcccc00cc
+.long 0x02020200,0x2d2d002d
+.long 0xcdcdcd00,0x12120012
+.long 0x4a4a4a00,0x20200020
+.long 0x90909000,0xb1b100b1
+.long 0x33333300,0x99990099
+.long 0x73737300,0x4c4c004c
+.long 0x67676700,0xc2c200c2
+.long 0xf6f6f600,0x7e7e007e
+.long 0xf3f3f300,0x05050005
+.long 0x9d9d9d00,0xb7b700b7
+.long 0x7f7f7f00,0x31310031
+.long 0xbfbfbf00,0x17170017
+.long 0xe2e2e200,0xd7d700d7
+.long 0x52525200,0x58580058
+.long 0x9b9b9b00,0x61610061
+.long 0xd8d8d800,0x1b1b001b
+.long 0x26262600,0x1c1c001c
+.long 0xc8c8c800,0x0f0f000f
+.long 0x37373700,0x16160016
+.long 0xc6c6c600,0x18180018
+.long 0x3b3b3b00,0x22220022
+.long 0x81818100,0x44440044
+.long 0x96969600,0xb2b200b2
+.long 0x6f6f6f00,0xb5b500b5
+.long 0x4b4b4b00,0x91910091
+.long 0x13131300,0x08080008
+.long 0xbebebe00,0xa8a800a8
+.long 0x63636300,0xfcfc00fc
+.long 0x2e2e2e00,0x50500050
+.long 0xe9e9e900,0xd0d000d0
+.long 0x79797900,0x7d7d007d
+.long 0xa7a7a700,0x89890089
+.long 0x8c8c8c00,0x97970097
+.long 0x9f9f9f00,0x5b5b005b
+.long 0x6e6e6e00,0x95950095
+.long 0xbcbcbc00,0xffff00ff
+.long 0x8e8e8e00,0xd2d200d2
+.long 0x29292900,0xc4c400c4
+.long 0xf5f5f500,0x48480048
+.long 0xf9f9f900,0xf7f700f7
+.long 0xb6b6b600,0xdbdb00db
+.long 0x2f2f2f00,0x03030003
+.long 0xfdfdfd00,0xdada00da
+.long 0xb4b4b400,0x3f3f003f
+.long 0x59595900,0x94940094
+.long 0x78787800,0x5c5c005c
+.long 0x98989800,0x02020002
+.long 0x06060600,0x4a4a004a
+.long 0x6a6a6a00,0x33330033
+.long 0xe7e7e700,0x67670067
+.long 0x46464600,0xf3f300f3
+.long 0x71717100,0x7f7f007f
+.long 0xbababa00,0xe2e200e2
+.long 0xd4d4d400,0x9b9b009b
+.long 0x25252500,0x26260026
+.long 0xababab00,0x37370037
+.long 0x42424200,0x3b3b003b
+.long 0x88888800,0x96960096
+.long 0xa2a2a200,0x4b4b004b
+.long 0x8d8d8d00,0xbebe00be
+.long 0xfafafa00,0x2e2e002e
+.long 0x72727200,0x79790079
+.long 0x07070700,0x8c8c008c
+.long 0xb9b9b900,0x6e6e006e
+.long 0x55555500,0x8e8e008e
+.long 0xf8f8f800,0xf5f500f5
+.long 0xeeeeee00,0xb6b600b6
+.long 0xacacac00,0xfdfd00fd
+.long 0x0a0a0a00,0x59590059
+.long 0x36363600,0x98980098
+.long 0x49494900,0x6a6a006a
+.long 0x2a2a2a00,0x46460046
+.long 0x68686800,0xbaba00ba
+.long 0x3c3c3c00,0x25250025
+.long 0x38383800,0x42420042
+.long 0xf1f1f100,0xa2a200a2
+.long 0xa4a4a400,0xfafa00fa
+.long 0x40404000,0x07070007
+.long 0x28282800,0x55550055
+.long 0xd3d3d300,0xeeee00ee
+.long 0x7b7b7b00,0x0a0a000a
+.long 0xbbbbbb00,0x49490049
+.long 0xc9c9c900,0x68680068
+.long 0x43434300,0x38380038
+.long 0xc1c1c100,0xa4a400a4
+.long 0x15151500,0x28280028
+.long 0xe3e3e300,0x7b7b007b
+.long 0xadadad00,0xc9c900c9
+.long 0xf4f4f400,0xc1c100c1
+.long 0x77777700,0xe3e300e3
+.long 0xc7c7c700,0xf4f400f4
+.long 0x80808000,0xc7c700c7
+.long 0x9e9e9e00,0x9e9e009e
+.long 0x00e0e0e0,0x38003838
+.long 0x00050505,0x41004141
+.long 0x00585858,0x16001616
+.long 0x00d9d9d9,0x76007676
+.long 0x00676767,0xd900d9d9
+.long 0x004e4e4e,0x93009393
+.long 0x00818181,0x60006060
+.long 0x00cbcbcb,0xf200f2f2
+.long 0x00c9c9c9,0x72007272
+.long 0x000b0b0b,0xc200c2c2
+.long 0x00aeaeae,0xab00abab
+.long 0x006a6a6a,0x9a009a9a
+.long 0x00d5d5d5,0x75007575
+.long 0x00181818,0x06000606
+.long 0x005d5d5d,0x57005757
+.long 0x00828282,0xa000a0a0
+.long 0x00464646,0x91009191
+.long 0x00dfdfdf,0xf700f7f7
+.long 0x00d6d6d6,0xb500b5b5
+.long 0x00272727,0xc900c9c9
+.long 0x008a8a8a,0xa200a2a2
+.long 0x00323232,0x8c008c8c
+.long 0x004b4b4b,0xd200d2d2
+.long 0x00424242,0x90009090
+.long 0x00dbdbdb,0xf600f6f6
+.long 0x001c1c1c,0x07000707
+.long 0x009e9e9e,0xa700a7a7
+.long 0x009c9c9c,0x27002727
+.long 0x003a3a3a,0x8e008e8e
+.long 0x00cacaca,0xb200b2b2
+.long 0x00252525,0x49004949
+.long 0x007b7b7b,0xde00dede
+.long 0x000d0d0d,0x43004343
+.long 0x00717171,0x5c005c5c
+.long 0x005f5f5f,0xd700d7d7
+.long 0x001f1f1f,0xc700c7c7
+.long 0x00f8f8f8,0x3e003e3e
+.long 0x00d7d7d7,0xf500f5f5
+.long 0x003e3e3e,0x8f008f8f
+.long 0x009d9d9d,0x67006767
+.long 0x007c7c7c,0x1f001f1f
+.long 0x00606060,0x18001818
+.long 0x00b9b9b9,0x6e006e6e
+.long 0x00bebebe,0xaf00afaf
+.long 0x00bcbcbc,0x2f002f2f
+.long 0x008b8b8b,0xe200e2e2
+.long 0x00161616,0x85008585
+.long 0x00343434,0x0d000d0d
+.long 0x004d4d4d,0x53005353
+.long 0x00c3c3c3,0xf000f0f0
+.long 0x00727272,0x9c009c9c
+.long 0x00959595,0x65006565
+.long 0x00ababab,0xea00eaea
+.long 0x008e8e8e,0xa300a3a3
+.long 0x00bababa,0xae00aeae
+.long 0x007a7a7a,0x9e009e9e
+.long 0x00b3b3b3,0xec00ecec
+.long 0x00020202,0x80008080
+.long 0x00b4b4b4,0x2d002d2d
+.long 0x00adadad,0x6b006b6b
+.long 0x00a2a2a2,0xa800a8a8
+.long 0x00acacac,0x2b002b2b
+.long 0x00d8d8d8,0x36003636
+.long 0x009a9a9a,0xa600a6a6
+.long 0x00171717,0xc500c5c5
+.long 0x001a1a1a,0x86008686
+.long 0x00353535,0x4d004d4d
+.long 0x00cccccc,0x33003333
+.long 0x00f7f7f7,0xfd00fdfd
+.long 0x00999999,0x66006666
+.long 0x00616161,0x58005858
+.long 0x005a5a5a,0x96009696
+.long 0x00e8e8e8,0x3a003a3a
+.long 0x00242424,0x09000909
+.long 0x00565656,0x95009595
+.long 0x00404040,0x10001010
+.long 0x00e1e1e1,0x78007878
+.long 0x00636363,0xd800d8d8
+.long 0x00090909,0x42004242
+.long 0x00333333,0xcc00cccc
+.long 0x00bfbfbf,0xef00efef
+.long 0x00989898,0x26002626
+.long 0x00979797,0xe500e5e5
+.long 0x00858585,0x61006161
+.long 0x00686868,0x1a001a1a
+.long 0x00fcfcfc,0x3f003f3f
+.long 0x00ececec,0x3b003b3b
+.long 0x000a0a0a,0x82008282
+.long 0x00dadada,0xb600b6b6
+.long 0x006f6f6f,0xdb00dbdb
+.long 0x00535353,0xd400d4d4
+.long 0x00626262,0x98009898
+.long 0x00a3a3a3,0xe800e8e8
+.long 0x002e2e2e,0x8b008b8b
+.long 0x00080808,0x02000202
+.long 0x00afafaf,0xeb00ebeb
+.long 0x00282828,0x0a000a0a
+.long 0x00b0b0b0,0x2c002c2c
+.long 0x00747474,0x1d001d1d
+.long 0x00c2c2c2,0xb000b0b0
+.long 0x00bdbdbd,0x6f006f6f
+.long 0x00363636,0x8d008d8d
+.long 0x00222222,0x88008888
+.long 0x00383838,0x0e000e0e
+.long 0x00646464,0x19001919
+.long 0x001e1e1e,0x87008787
+.long 0x00393939,0x4e004e4e
+.long 0x002c2c2c,0x0b000b0b
+.long 0x00a6a6a6,0xa900a9a9
+.long 0x00303030,0x0c000c0c
+.long 0x00e5e5e5,0x79007979
+.long 0x00444444,0x11001111
+.long 0x00fdfdfd,0x7f007f7f
+.long 0x00888888,0x22002222
+.long 0x009f9f9f,0xe700e7e7
+.long 0x00656565,0x59005959
+.long 0x00878787,0xe100e1e1
+.long 0x006b6b6b,0xda00dada
+.long 0x00f4f4f4,0x3d003d3d
+.long 0x00232323,0xc800c8c8
+.long 0x00484848,0x12001212
+.long 0x00101010,0x04000404
+.long 0x00d1d1d1,0x74007474
+.long 0x00515151,0x54005454
+.long 0x00c0c0c0,0x30003030
+.long 0x00f9f9f9,0x7e007e7e
+.long 0x00d2d2d2,0xb400b4b4
+.long 0x00a0a0a0,0x28002828
+.long 0x00555555,0x55005555
+.long 0x00a1a1a1,0x68006868
+.long 0x00414141,0x50005050
+.long 0x00fafafa,0xbe00bebe
+.long 0x00434343,0xd000d0d0
+.long 0x00131313,0xc400c4c4
+.long 0x00c4c4c4,0x31003131
+.long 0x002f2f2f,0xcb00cbcb
+.long 0x00a8a8a8,0x2a002a2a
+.long 0x00b6b6b6,0xad00adad
+.long 0x003c3c3c,0x0f000f0f
+.long 0x002b2b2b,0xca00caca
+.long 0x00c1c1c1,0x70007070
+.long 0x00ffffff,0xff00ffff
+.long 0x00c8c8c8,0x32003232
+.long 0x00a5a5a5,0x69006969
+.long 0x00202020,0x08000808
+.long 0x00898989,0x62006262
+.long 0x00000000,0x00000000
+.long 0x00909090,0x24002424
+.long 0x00474747,0xd100d1d1
+.long 0x00efefef,0xfb00fbfb
+.long 0x00eaeaea,0xba00baba
+.long 0x00b7b7b7,0xed00eded
+.long 0x00151515,0x45004545
+.long 0x00060606,0x81008181
+.long 0x00cdcdcd,0x73007373
+.long 0x00b5b5b5,0x6d006d6d
+.long 0x00121212,0x84008484
+.long 0x007e7e7e,0x9f009f9f
+.long 0x00bbbbbb,0xee00eeee
+.long 0x00292929,0x4a004a4a
+.long 0x000f0f0f,0xc300c3c3
+.long 0x00b8b8b8,0x2e002e2e
+.long 0x00070707,0xc100c1c1
+.long 0x00040404,0x01000101
+.long 0x009b9b9b,0xe600e6e6
+.long 0x00949494,0x25002525
+.long 0x00212121,0x48004848
+.long 0x00666666,0x99009999
+.long 0x00e6e6e6,0xb900b9b9
+.long 0x00cecece,0xb300b3b3
+.long 0x00ededed,0x7b007b7b
+.long 0x00e7e7e7,0xf900f9f9
+.long 0x003b3b3b,0xce00cece
+.long 0x00fefefe,0xbf00bfbf
+.long 0x007f7f7f,0xdf00dfdf
+.long 0x00c5c5c5,0x71007171
+.long 0x00a4a4a4,0x29002929
+.long 0x00373737,0xcd00cdcd
+.long 0x00b1b1b1,0x6c006c6c
+.long 0x004c4c4c,0x13001313
+.long 0x00919191,0x64006464
+.long 0x006e6e6e,0x9b009b9b
+.long 0x008d8d8d,0x63006363
+.long 0x00767676,0x9d009d9d
+.long 0x00030303,0xc000c0c0
+.long 0x002d2d2d,0x4b004b4b
+.long 0x00dedede,0xb700b7b7
+.long 0x00969696,0xa500a5a5
+.long 0x00262626,0x89008989
+.long 0x007d7d7d,0x5f005f5f
+.long 0x00c6c6c6,0xb100b1b1
+.long 0x005c5c5c,0x17001717
+.long 0x00d3d3d3,0xf400f4f4
+.long 0x00f2f2f2,0xbc00bcbc
+.long 0x004f4f4f,0xd300d3d3
+.long 0x00191919,0x46004646
+.long 0x003f3f3f,0xcf00cfcf
+.long 0x00dcdcdc,0x37003737
+.long 0x00797979,0x5e005e5e
+.long 0x001d1d1d,0x47004747
+.long 0x00525252,0x94009494
+.long 0x00ebebeb,0xfa00fafa
+.long 0x00f3f3f3,0xfc00fcfc
+.long 0x006d6d6d,0x5b005b5b
+.long 0x005e5e5e,0x97009797
+.long 0x00fbfbfb,0xfe00fefe
+.long 0x00696969,0x5a005a5a
+.long 0x00b2b2b2,0xac00acac
+.long 0x00f0f0f0,0x3c003c3c
+.long 0x00313131,0x4c004c4c
+.long 0x000c0c0c,0x03000303
+.long 0x00d4d4d4,0x35003535
+.long 0x00cfcfcf,0xf300f3f3
+.long 0x008c8c8c,0x23002323
+.long 0x00e2e2e2,0xb800b8b8
+.long 0x00757575,0x5d005d5d
+.long 0x00a9a9a9,0x6a006a6a
+.long 0x004a4a4a,0x92009292
+.long 0x00575757,0xd500d5d5
+.long 0x00848484,0x21002121
+.long 0x00111111,0x44004444
+.long 0x00454545,0x51005151
+.long 0x001b1b1b,0xc600c6c6
+.long 0x00f5f5f5,0x7d007d7d
+.long 0x00e4e4e4,0x39003939
+.long 0x000e0e0e,0x83008383
+.long 0x00737373,0xdc00dcdc
+.long 0x00aaaaaa,0xaa00aaaa
+.long 0x00f1f1f1,0x7c007c7c
+.long 0x00dddddd,0x77007777
+.long 0x00595959,0x56005656
+.long 0x00141414,0x05000505
+.long 0x006c6c6c,0x1b001b1b
+.long 0x00929292,0xa400a4a4
+.long 0x00545454,0x15001515
+.long 0x00d0d0d0,0x34003434
+.long 0x00787878,0x1e001e1e
+.long 0x00707070,0x1c001c1c
+.long 0x00e3e3e3,0xf800f8f8
+.long 0x00494949,0x52005252
+.long 0x00808080,0x20002020
+.long 0x00505050,0x14001414
+.long 0x00a7a7a7,0xe900e9e9
+.long 0x00f6f6f6,0xbd00bdbd
+.long 0x00777777,0xdd00dddd
+.long 0x00939393,0xe400e4e4
+.long 0x00868686,0xa100a1a1
+.long 0x00838383,0xe000e0e0
+.long 0x002a2a2a,0x8a008a8a
+.long 0x00c7c7c7,0xf100f1f1
+.long 0x005b5b5b,0xd600d6d6
+.long 0x00e9e9e9,0x7a007a7a
+.long 0x00eeeeee,0xbb00bbbb
+.long 0x008f8f8f,0xe300e3e3
+.long 0x00010101,0x40004040
+.long 0x003d3d3d,0x4f004f4f
+.globl _Camellia_cbc_encrypt
+
+.p2align 4
+_Camellia_cbc_encrypt:
+ cmpq $0,%rdx
+ je L$cbc_abort
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+L$cbc_prologue:
+
+ movq %rsp,%rbp
+ subq $64,%rsp
+ andq $-64,%rsp
+
+
+
+ leaq -64-63(%rcx),%r10
+ subq %rsp,%r10
+ negq %r10
+ andq $960,%r10
+ subq %r10,%rsp
+
+
+ movq %rdi,%r12
+ movq %rsi,%r13
+ movq %r8,%rbx
+ movq %rcx,%r14
+ movl 272(%rcx),%r15d
+
+ movq %r8,40(%rsp)
+ movq %rbp,48(%rsp)
+
+L$cbc_body:
+ leaq L$Camellia_SBOX(%rip),%rbp
+
+ movl $32,%ecx
+.p2align 2
+L$cbc_prefetch_sbox:
+ movq 0(%rbp),%rax
+ movq 32(%rbp),%rsi
+ movq 64(%rbp),%rdi
+ movq 96(%rbp),%r11
+ leaq 128(%rbp),%rbp
+ loop L$cbc_prefetch_sbox
+ subq $4096,%rbp
+ shlq $6,%r15
+ movq %rdx,%rcx
+ leaq (%r14,%r15,1),%r15
+
+ cmpl $0,%r9d
+ je L$CBC_DECRYPT
+
+ andq $-16,%rdx
+ andq $15,%rcx
+ leaq (%r12,%rdx,1),%rdx
+ movq %r14,0(%rsp)
+ movq %rdx,8(%rsp)
+ movq %rcx,16(%rsp)
+
+ cmpq %r12,%rdx
+ movl 0(%rbx),%r8d
+ movl 4(%rbx),%r9d
+ movl 8(%rbx),%r10d
+ movl 12(%rbx),%r11d
+ je L$cbc_enc_tail
+ jmp L$cbc_eloop
+
+.p2align 4
+L$cbc_eloop:
+ xorl 0(%r12),%r8d
+ xorl 4(%r12),%r9d
+ xorl 8(%r12),%r10d
+ bswapl %r8d
+ xorl 12(%r12),%r11d
+ bswapl %r9d
+ bswapl %r10d
+ bswapl %r11d
+
+ call _x86_64_Camellia_encrypt
+
+ movq 0(%rsp),%r14
+ bswapl %r8d
+ movq 8(%rsp),%rdx
+ bswapl %r9d
+ movq 16(%rsp),%rcx
+ bswapl %r10d
+ movl %r8d,0(%r13)
+ bswapl %r11d
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ leaq 16(%r12),%r12
+ movl %r11d,12(%r13)
+ cmpq %rdx,%r12
+ leaq 16(%r13),%r13
+ jne L$cbc_eloop
+
+ cmpq $0,%rcx
+ jne L$cbc_enc_tail
+
+ movq 40(%rsp),%r13
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+ jmp L$cbc_done
+
+.p2align 4
+L$cbc_enc_tail:
+ xorq %rax,%rax
+ movq %rax,0+24(%rsp)
+ movq %rax,8+24(%rsp)
+ movq %rax,16(%rsp)
+
+L$cbc_enc_pushf:
+ pushfq
+ cld
+ movq %r12,%rsi
+ leaq 8+24(%rsp),%rdi
+.long 0x9066A4F3
+
+ popfq
+L$cbc_enc_popf:
+
+ leaq 24(%rsp),%r12
+ leaq 16+24(%rsp),%rax
+ movq %rax,8(%rsp)
+ jmp L$cbc_eloop
+
+
+.p2align 4
+L$CBC_DECRYPT:
+ xchgq %r14,%r15
+ addq $15,%rdx
+ andq $15,%rcx
+ andq $-16,%rdx
+ movq %r14,0(%rsp)
+ leaq (%r12,%rdx,1),%rdx
+ movq %rdx,8(%rsp)
+ movq %rcx,16(%rsp)
+
+ movq (%rbx),%rax
+ movq 8(%rbx),%rbx
+ jmp L$cbc_dloop
+.p2align 4
+L$cbc_dloop:
+ movl 0(%r12),%r8d
+ movl 4(%r12),%r9d
+ movl 8(%r12),%r10d
+ bswapl %r8d
+ movl 12(%r12),%r11d
+ bswapl %r9d
+ movq %rax,0+24(%rsp)
+ bswapl %r10d
+ movq %rbx,8+24(%rsp)
+ bswapl %r11d
+
+ call _x86_64_Camellia_decrypt
+
+ movq 0(%rsp),%r14
+ movq 8(%rsp),%rdx
+ movq 16(%rsp),%rcx
+
+ bswapl %r8d
+ movq (%r12),%rax
+ bswapl %r9d
+ movq 8(%r12),%rbx
+ bswapl %r10d
+ xorl 0+24(%rsp),%r8d
+ bswapl %r11d
+ xorl 4+24(%rsp),%r9d
+ xorl 8+24(%rsp),%r10d
+ leaq 16(%r12),%r12
+ xorl 12+24(%rsp),%r11d
+ cmpq %rdx,%r12
+ je L$cbc_ddone
+
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ leaq 16(%r13),%r13
+ jmp L$cbc_dloop
+
+.p2align 4
+L$cbc_ddone:
+ movq 40(%rsp),%rdx
+ cmpq $0,%rcx
+ jne L$cbc_dec_tail
+
+ movl %r8d,0(%r13)
+ movl %r9d,4(%r13)
+ movl %r10d,8(%r13)
+ movl %r11d,12(%r13)
+
+ movq %rax,(%rdx)
+ movq %rbx,8(%rdx)
+ jmp L$cbc_done
+.p2align 4
+L$cbc_dec_tail:
+ movl %r8d,0+24(%rsp)
+ movl %r9d,4+24(%rsp)
+ movl %r10d,8+24(%rsp)
+ movl %r11d,12+24(%rsp)
+
+L$cbc_dec_pushf:
+ pushfq
+ cld
+ leaq 8+24(%rsp),%rsi
+ leaq (%r13),%rdi
+.long 0x9066A4F3
+
+ popfq
+L$cbc_dec_popf:
+
+ movq %rax,(%rdx)
+ movq %rbx,8(%rdx)
+ jmp L$cbc_done
+
+.p2align 4
+L$cbc_done:
+ movq 48(%rsp),%rcx
+ movq 0(%rcx),%r15
+ movq 8(%rcx),%r14
+ movq 16(%rcx),%r13
+ movq 24(%rcx),%r12
+ movq 32(%rcx),%rbp
+ movq 40(%rcx),%rbx
+ leaq 48(%rcx),%rsp
+L$cbc_abort:
+ .byte 0xf3,0xc3
+
+
+.byte 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54,95,54,52,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s b/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
new file mode 100644
index 0000000000..96f6ea16ce
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
@@ -0,0 +1,671 @@
+.text
+
+.p2align 4
+
+.globl _md5_block_asm_data_order
+
+_md5_block_asm_data_order:
+ pushq %rbp
+ pushq %rbx
+ pushq %r12
+ pushq %r14
+ pushq %r15
+L$prologue:
+
+
+
+
+ movq %rdi,%rbp
+ shlq $6,%rdx
+ leaq (%rsi,%rdx,1),%rdi
+ movl 0(%rbp),%eax
+ movl 4(%rbp),%ebx
+ movl 8(%rbp),%ecx
+ movl 12(%rbp),%edx
+
+
+
+
+
+
+
+ cmpq %rdi,%rsi
+ je L$end
+
+
+
+L$loop:
+ movl %eax,%r8d
+ movl %ebx,%r9d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+ movl 0(%rsi),%r10d
+ movl %edx,%r11d
+ xorl %ecx,%r11d
+ leal -680876936(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 4(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -389564586(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 8(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal 606105819(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 12(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -1044525330(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 16(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal -176418897(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 20(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal 1200080426(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 24(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -1473231341(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 28(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -45705983(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 32(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal 1770035416(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 36(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -1958414417(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 40(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -42063(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 44(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal -1990404162(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 48(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ xorl %ecx,%r11d
+ leal 1804603682(%rax,%r10,1),%eax
+ andl %ebx,%r11d
+ xorl %edx,%r11d
+ movl 52(%rsi),%r10d
+ addl %r11d,%eax
+ roll $7,%eax
+ movl %ecx,%r11d
+ addl %ebx,%eax
+ xorl %ebx,%r11d
+ leal -40341101(%rdx,%r10,1),%edx
+ andl %eax,%r11d
+ xorl %ecx,%r11d
+ movl 56(%rsi),%r10d
+ addl %r11d,%edx
+ roll $12,%edx
+ movl %ebx,%r11d
+ addl %eax,%edx
+ xorl %eax,%r11d
+ leal -1502002290(%rcx,%r10,1),%ecx
+ andl %edx,%r11d
+ xorl %ebx,%r11d
+ movl 60(%rsi),%r10d
+ addl %r11d,%ecx
+ roll $17,%ecx
+ movl %eax,%r11d
+ addl %edx,%ecx
+ xorl %edx,%r11d
+ leal 1236535329(%rbx,%r10,1),%ebx
+ andl %ecx,%r11d
+ xorl %eax,%r11d
+ movl 0(%rsi),%r10d
+ addl %r11d,%ebx
+ roll $22,%ebx
+ movl %edx,%r11d
+ addl %ecx,%ebx
+ movl 4(%rsi),%r10d
+ movl %edx,%r11d
+ movl %edx,%r12d
+ notl %r11d
+ leal -165796510(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 24(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -1069501632(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 44(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal 643717713(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 0(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -373897302(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 20(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal -701558691(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 40(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal 38016083(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 60(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal -660478335(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 16(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -405537848(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 36(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal 568446438(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 56(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -1019803690(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 12(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal -187363961(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 32(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal 1163531501(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 52(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ notl %r11d
+ leal -1444681467(%rax,%r10,1),%eax
+ andl %ebx,%r12d
+ andl %ecx,%r11d
+ movl 8(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ecx,%r11d
+ addl %r12d,%eax
+ movl %ecx,%r12d
+ roll $5,%eax
+ addl %ebx,%eax
+ notl %r11d
+ leal -51403784(%rdx,%r10,1),%edx
+ andl %eax,%r12d
+ andl %ebx,%r11d
+ movl 28(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %ebx,%r11d
+ addl %r12d,%edx
+ movl %ebx,%r12d
+ roll $9,%edx
+ addl %eax,%edx
+ notl %r11d
+ leal 1735328473(%rcx,%r10,1),%ecx
+ andl %edx,%r12d
+ andl %eax,%r11d
+ movl 48(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %eax,%r11d
+ addl %r12d,%ecx
+ movl %eax,%r12d
+ roll $14,%ecx
+ addl %edx,%ecx
+ notl %r11d
+ leal -1926607734(%rbx,%r10,1),%ebx
+ andl %ecx,%r12d
+ andl %edx,%r11d
+ movl 0(%rsi),%r10d
+ orl %r11d,%r12d
+ movl %edx,%r11d
+ addl %r12d,%ebx
+ movl %edx,%r12d
+ roll $20,%ebx
+ addl %ecx,%ebx
+ movl 20(%rsi),%r10d
+ movl %ecx,%r11d
+ leal -378558(%rax,%r10,1),%eax
+ movl 32(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -2022574463(%rdx,%r10,1),%edx
+ movl 44(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal 1839030562(%rcx,%r10,1),%ecx
+ movl 56(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -35309556(%rbx,%r10,1),%ebx
+ movl 4(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal -1530992060(%rax,%r10,1),%eax
+ movl 16(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal 1272893353(%rdx,%r10,1),%edx
+ movl 28(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal -155497632(%rcx,%r10,1),%ecx
+ movl 40(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -1094730640(%rbx,%r10,1),%ebx
+ movl 52(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal 681279174(%rax,%r10,1),%eax
+ movl 0(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -358537222(%rdx,%r10,1),%edx
+ movl 12(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal -722521979(%rcx,%r10,1),%ecx
+ movl 24(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal 76029189(%rbx,%r10,1),%ebx
+ movl 36(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ leal -640364487(%rax,%r10,1),%eax
+ movl 48(%rsi),%r10d
+ xorl %edx,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%eax
+ roll $4,%eax
+ movl %ebx,%r11d
+ addl %ebx,%eax
+ leal -421815835(%rdx,%r10,1),%edx
+ movl 60(%rsi),%r10d
+ xorl %ecx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%edx
+ roll $11,%edx
+ movl %eax,%r11d
+ addl %eax,%edx
+ leal 530742520(%rcx,%r10,1),%ecx
+ movl 8(%rsi),%r10d
+ xorl %ebx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ecx
+ roll $16,%ecx
+ movl %edx,%r11d
+ addl %edx,%ecx
+ leal -995338651(%rbx,%r10,1),%ebx
+ movl 0(%rsi),%r10d
+ xorl %eax,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%ebx
+ roll $23,%ebx
+ movl %ecx,%r11d
+ addl %ecx,%ebx
+ movl 0(%rsi),%r10d
+ movl $4294967295,%r11d
+ xorl %edx,%r11d
+ leal -198630844(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 28(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal 1126891415(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 56(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1416354905(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 20(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -57434055(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 48(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal 1700485571(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 12(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -1894986606(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 40(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1051523(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 4(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -2054922799(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 32(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal 1873313359(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 60(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -30611744(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 24(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal -1560198380(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 52(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal 1309151649(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 16(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+ leal -145523070(%rax,%r10,1),%eax
+ orl %ebx,%r11d
+ xorl %ecx,%r11d
+ addl %r11d,%eax
+ movl 44(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $6,%eax
+ xorl %ecx,%r11d
+ addl %ebx,%eax
+ leal -1120210379(%rdx,%r10,1),%edx
+ orl %eax,%r11d
+ xorl %ebx,%r11d
+ addl %r11d,%edx
+ movl 8(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $10,%edx
+ xorl %ebx,%r11d
+ addl %eax,%edx
+ leal 718787259(%rcx,%r10,1),%ecx
+ orl %edx,%r11d
+ xorl %eax,%r11d
+ addl %r11d,%ecx
+ movl 36(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $15,%ecx
+ xorl %eax,%r11d
+ addl %edx,%ecx
+ leal -343485551(%rbx,%r10,1),%ebx
+ orl %ecx,%r11d
+ xorl %edx,%r11d
+ addl %r11d,%ebx
+ movl 0(%rsi),%r10d
+ movl $4294967295,%r11d
+ roll $21,%ebx
+ xorl %edx,%r11d
+ addl %ecx,%ebx
+
+ addl %r8d,%eax
+ addl %r9d,%ebx
+ addl %r14d,%ecx
+ addl %r15d,%edx
+
+
+ addq $64,%rsi
+ cmpq %rdi,%rsi
+ jb L$loop
+
+
+
+L$end:
+ movl %eax,0(%rbp)
+ movl %ebx,4(%rbp)
+ movl %ecx,8(%rbp)
+ movl %edx,12(%rbp)
+
+ movq (%rsp),%r15
+ movq 8(%rsp),%r14
+ movq 16(%rsp),%r12
+ movq 24(%rsp),%rbx
+ movq 32(%rsp),%rbp
+ addq $40,%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
diff --git a/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s b/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
new file mode 100644
index 0000000000..41183cebec
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
@@ -0,0 +1,430 @@
+.text
+
+
+.globl _RC4
+
+.p2align 4
+_RC4: orq %rsi,%rsi
+ jne L$entry
+ .byte 0xf3,0xc3
+L$entry:
+ pushq %rbx
+ pushq %r12
+ pushq %r13
+L$prologue:
+
+ addq $8,%rdi
+ movl -8(%rdi),%r8d
+ movl -4(%rdi),%r12d
+ cmpl $-1,256(%rdi)
+ je L$RC4_CHAR
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ testq $-8,%rsi
+ jz L$loop1
+ jmp L$loop8
+.p2align 4
+L$loop8:
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ rorq $8,%rax
+ subq $8,%rsi
+
+ xorq (%rdx),%rax
+ addq $8,%rdx
+ movq %rax,(%rcx)
+ addq $8,%rcx
+
+ testq $-8,%rsi
+ jnz L$loop8
+ cmpq $0,%rsi
+ jne L$loop1
+ jmp L$exit
+
+.p2align 4
+L$loop1:
+ addb %r9b,%r12b
+ movl (%rdi,%r12,4),%r13d
+ movl %r9d,(%rdi,%r12,4)
+ movl %r13d,(%rdi,%r8,4)
+ addb %r13b,%r9b
+ incb %r8b
+ movl (%rdi,%r9,4),%r13d
+ movl (%rdi,%r8,4),%r9d
+ xorb (%rdx),%r13b
+ incq %rdx
+ movb %r13b,(%rcx)
+ incq %rcx
+ decq %rsi
+ jnz L$loop1
+ jmp L$exit
+
+.p2align 4
+L$RC4_CHAR:
+ addb $1,%r8b
+ movzbl (%rdi,%r8,1),%r9d
+ testq $-8,%rsi
+ jz L$cloop1
+ cmpl $0,260(%rdi)
+ jnz L$cloop1
+ jmp L$cloop8
+.p2align 4
+L$cloop8:
+ movl (%rdx),%eax
+ movl 4(%rdx),%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne L$cmov0
+
+ movq %r9,%r11
+L$cmov0:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne L$cmov1
+
+ movq %r11,%r9
+L$cmov1:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne L$cmov2
+
+ movq %r9,%r11
+L$cmov2:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne L$cmov3
+
+ movq %r11,%r9
+L$cmov3:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne L$cmov4
+
+ movq %r9,%r11
+L$cmov4:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne L$cmov5
+
+ movq %r11,%r9
+L$cmov5:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
+ jne L$cmov6
+
+ movq %r9,%r11
+L$cmov6:
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
+ jne L$cmov7
+
+ movq %r11,%r9
+L$cmov7:
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ leaq -8(%rsi),%rsi
+ movl %eax,(%rcx)
+ leaq 8(%rdx),%rdx
+ movl %ebx,4(%rcx)
+ leaq 8(%rcx),%rcx
+
+ testq $-8,%rsi
+ jnz L$cloop8
+ cmpq $0,%rsi
+ jne L$cloop1
+ jmp L$exit
+.p2align 4
+L$cloop1:
+ addb %r9b,%r12b
+ movzbl (%rdi,%r12,1),%r13d
+ movb %r9b,(%rdi,%r12,1)
+ movb %r13b,(%rdi,%r8,1)
+ addb %r9b,%r13b
+ addb $1,%r8b
+ movzbl %r13b,%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r13,1),%r13d
+ movzbl (%rdi,%r8,1),%r9d
+ xorb (%rdx),%r13b
+ leaq 1(%rdx),%rdx
+ movb %r13b,(%rcx)
+ leaq 1(%rcx),%rcx
+ subq $1,%rsi
+ jnz L$cloop1
+ jmp L$exit
+
+.p2align 4
+L$exit:
+ subb $1,%r8b
+ movl %r8d,-8(%rdi)
+ movl %r12d,-4(%rdi)
+
+ movq (%rsp),%r13
+ movq 8(%rsp),%r12
+ movq 16(%rsp),%rbx
+ addq $24,%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+
+.globl _RC4_set_key
+
+.p2align 4
+_RC4_set_key:
+ leaq 8(%rdi),%rdi
+ leaq (%rdx,%rsi,1),%rdx
+ negq %rsi
+ movq %rsi,%rcx
+ xorl %eax,%eax
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+
+ movl _OPENSSL_ia32cap_P(%rip),%r8d
+ btl $20,%r8d
+ jnc L$w1stloop
+ btl $30,%r8d
+ setc %r9b
+ movl %r9d,260(%rdi)
+ jmp L$c1stloop
+
+.p2align 4
+L$w1stloop:
+ movl %eax,(%rdi,%rax,4)
+ addb $1,%al
+ jnc L$w1stloop
+
+ xorq %r9,%r9
+ xorq %r8,%r8
+.p2align 4
+L$w2ndloop:
+ movl (%rdi,%r9,4),%r10d
+ addb (%rdx,%rsi,1),%r8b
+ addb %r10b,%r8b
+ addq $1,%rsi
+ movl (%rdi,%r8,4),%r11d
+ cmovzq %rcx,%rsi
+ movl %r10d,(%rdi,%r8,4)
+ movl %r11d,(%rdi,%r9,4)
+ addb $1,%r9b
+ jnc L$w2ndloop
+ jmp L$exit_key
+
+.p2align 4
+L$c1stloop:
+ movb %al,(%rdi,%rax,1)
+ addb $1,%al
+ jnc L$c1stloop
+
+ xorq %r9,%r9
+ xorq %r8,%r8
+.p2align 4
+L$c2ndloop:
+ movb (%rdi,%r9,1),%r10b
+ addb (%rdx,%rsi,1),%r8b
+ addb %r10b,%r8b
+ addq $1,%rsi
+ movb (%rdi,%r8,1),%r11b
+ jnz L$cnowrap
+ movq %rcx,%rsi
+L$cnowrap:
+ movb %r10b,(%rdi,%r8,1)
+ movb %r11b,(%rdi,%r9,1)
+ addb $1,%r9b
+ jnc L$c2ndloop
+ movl $-1,256(%rdi)
+
+.p2align 4
+L$exit_key:
+ xorl %eax,%eax
+ movl %eax,-8(%rdi)
+ movl %eax,-4(%rdi)
+ .byte 0xf3,0xc3
+
+
+.globl _RC4_options
+
+.p2align 4
+_RC4_options:
+ leaq L$opts(%rip),%rax
+ movl _OPENSSL_ia32cap_P(%rip),%edx
+ btl $20,%edx
+ jnc L$done
+ addq $12,%rax
+ btl $30,%edx
+ jnc L$done
+ addq $13,%rax
+L$done:
+ .byte 0xf3,0xc3
+.p2align 6
+L$opts:
+.byte 114,99,52,40,56,120,44,105,110,116,41,0
+.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 6
+
diff --git a/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s b/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
new file mode 100644
index 0000000000..f9dc2568e5
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
@@ -0,0 +1,1283 @@
+.text
+
+.globl _sha1_block_data_order
+
+.p2align 4
+_sha1_block_data_order:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ movq %rsp,%r11
+ movq %rdi,%r8
+ subq $72,%rsp
+ movq %rsi,%r9
+ andq $-64,%rsp
+ movq %rdx,%r10
+ movq %r11,64(%rsp)
+L$prologue:
+
+ movl 0(%r8),%edx
+ movl 4(%r8),%esi
+ movl 8(%r8),%edi
+ movl 12(%r8),%ebp
+ movl 16(%r8),%r11d
+.p2align 2
+L$loop:
+ movl 0(%r9),%eax
+ bswapl %eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 4(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,4(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 8(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,8(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 12(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,12(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 16(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,16(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl %r11d,%ebx
+ movl 20(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,20(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 24(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
+ andl %edi,%ebx
+ movl %eax,24(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 28(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,28(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 32(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,32(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 36(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,36(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 40(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,40(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl %r11d,%ebx
+ movl 44(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,44(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 48(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
+ andl %edi,%ebx
+ movl %eax,48(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 52(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,52(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 56(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,56(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 60(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,60(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
+ roll $30,%r12d
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ andl %r11d,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ xorl %edx,%ebx
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ andl %ebp,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1518500249(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ andl %edi,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ andl %esi,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ andl %edx,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 0(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 52(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 4(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 36(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 56(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 8(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 40(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 60(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 12(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 44(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 0(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 16(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 48(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 4(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 20(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 52(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 8(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 24(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 56(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 12(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 28(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 16(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 32(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 20(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 36(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 44(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 4(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 24(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,36(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 40(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 48(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 8(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 28(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,40(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 44(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 52(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 12(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 32(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,44(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 48(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 56(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 16(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 36(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,48(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 52(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 60(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 20(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 40(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,52(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 56(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 0(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 24(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 44(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,56(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 60(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 4(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 28(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 48(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,60(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 0(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 8(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 32(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 52(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,0(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 4(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 12(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 36(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 56(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,4(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 8(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 16(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 40(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 60(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,8(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 12(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 20(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 44(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 0(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,12(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 16(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 24(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 48(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 4(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,16(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 20(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 28(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 52(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 8(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,20(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 24(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 32(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 56(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 12(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,24(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 28(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 36(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 60(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 16(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,28(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 32(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 40(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 0(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 20(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,32(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 36(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 44(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 4(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 24(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,36(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 40(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 48(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 8(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 28(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,40(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 44(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 52(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 12(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 32(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,44(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 48(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 56(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 16(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 36(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,48(%rsp)
+ addl %ebx,%edx
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 52(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 40(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 56(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 44(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 60(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 4(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 48(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ leal -899497514(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ roll $30,%edx
+ addl %ebx,%r11d
+ addl 0(%r8),%r11d
+ addl 4(%r8),%r12d
+ addl 8(%r8),%edx
+ addl 12(%r8),%esi
+ addl 16(%r8),%edi
+ movl %r11d,0(%r8)
+ movl %r12d,4(%r8)
+ movl %edx,8(%r8)
+ movl %esi,12(%r8)
+ movl %edi,16(%r8)
+
+ xchgl %r11d,%edx
+ xchgl %r12d,%esi
+ xchgl %r11d,%edi
+ xchgl %r12d,%ebp
+
+ leaq 64(%r9),%r9
+ subq $1,%r10
+ jnz L$loop
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r12
+ movq 8(%rsi),%rbp
+ movq 16(%rsi),%rbx
+ leaq 24(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.p2align 4
diff --git a/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s b/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
new file mode 100644
index 0000000000..73c4990304
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
@@ -0,0 +1,1971 @@
+.text
+
+
+.globl _sha256_block_data_order
+
+.p2align 4
+_sha256_block_data_order:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp,%r11
+ shlq $4,%rdx
+ subq $64+32,%rsp
+ leaq (%rsi,%rdx,4),%rdx
+ andq $-64,%rsp
+ movq %rdi,64+0(%rsp)
+ movq %rsi,64+8(%rsp)
+ movq %rdx,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+L$prologue:
+
+ leaq K256(%rip),%rbp
+
+ movl 0(%rdi),%eax
+ movl 4(%rdi),%ebx
+ movl 8(%rdi),%ecx
+ movl 12(%rdi),%edx
+ movl 16(%rdi),%r8d
+ movl 20(%rdi),%r9d
+ movl 24(%rdi),%r10d
+ movl 28(%rdi),%r11d
+ jmp L$loop
+
+.p2align 4
+L$loop:
+ xorq %rdi,%rdi
+ movl 0(%rsi),%r12d
+ bswapl %r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,0(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 4(%rsi),%r12d
+ bswapl %r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,4(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 8(%rsi),%r12d
+ bswapl %r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,8(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 12(%rsi),%r12d
+ bswapl %r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,12(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 16(%rsi),%r12d
+ bswapl %r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,16(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 20(%rsi),%r12d
+ bswapl %r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,20(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 24(%rsi),%r12d
+ bswapl %r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,24(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 28(%rsi),%r12d
+ bswapl %r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,28(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ movl 32(%rsi),%r12d
+ bswapl %r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,32(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 36(%rsi),%r12d
+ bswapl %r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,36(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 40(%rsi),%r12d
+ bswapl %r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,40(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 44(%rsi),%r12d
+ bswapl %r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,44(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 48(%rsi),%r12d
+ bswapl %r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,48(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 52(%rsi),%r12d
+ bswapl %r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,52(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 56(%rsi),%r12d
+ bswapl %r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,56(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 60(%rsi),%r12d
+ bswapl %r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,60(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ jmp L$rounds_16_xx
+.p2align 4
+L$rounds_16_xx:
+ movl 4(%rsp),%r13d
+ movl 56(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 36(%rsp),%r12d
+
+ addl 0(%rsp),%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,0(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 8(%rsp),%r13d
+ movl 60(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 40(%rsp),%r12d
+
+ addl 4(%rsp),%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,4(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 12(%rsp),%r13d
+ movl 0(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 44(%rsp),%r12d
+
+ addl 8(%rsp),%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,8(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 16(%rsp),%r13d
+ movl 4(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 48(%rsp),%r12d
+
+ addl 12(%rsp),%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,12(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 20(%rsp),%r13d
+ movl 8(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 52(%rsp),%r12d
+
+ addl 16(%rsp),%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,16(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 24(%rsp),%r13d
+ movl 12(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 56(%rsp),%r12d
+
+ addl 20(%rsp),%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,20(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 28(%rsp),%r13d
+ movl 16(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 60(%rsp),%r12d
+
+ addl 24(%rsp),%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,24(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 32(%rsp),%r13d
+ movl 20(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 0(%rsp),%r12d
+
+ addl 28(%rsp),%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,28(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ movl 36(%rsp),%r13d
+ movl 24(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 4(%rsp),%r12d
+
+ addl 32(%rsp),%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+ movl %r9d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r10d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r8d,%r15d
+ movl %r12d,32(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
+ addl %r12d,%edx
+
+ andl %ebx,%r14d
+ addl %r12d,%r11d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r11d
+ movl 40(%rsp),%r13d
+ movl 28(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 8(%rsp),%r12d
+
+ addl 36(%rsp),%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+ movl %r8d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r9d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %edx,%r15d
+ movl %r12d,36(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
+ addl %r12d,%ecx
+
+ andl %eax,%r14d
+ addl %r12d,%r10d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r10d
+ movl 44(%rsp),%r13d
+ movl 32(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 12(%rsp),%r12d
+
+ addl 40(%rsp),%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+ movl %edx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r8d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ecx,%r15d
+ movl %r12d,40(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+
+ xorl %r13d,%r9d
+ andl %eax,%r15d
+ addl %r12d,%ebx
+
+ andl %r11d,%r14d
+ addl %r12d,%r9d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r9d
+ movl 48(%rsp),%r13d
+ movl 36(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 16(%rsp),%r12d
+
+ addl 44(%rsp),%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+ movl %ecx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %edx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %ebx,%r15d
+ movl %r12d,44(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
+ addl %r12d,%eax
+
+ andl %r10d,%r14d
+ addl %r12d,%r8d
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%r8d
+ movl 52(%rsp),%r13d
+ movl 40(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 20(%rsp),%r12d
+
+ addl 48(%rsp),%r12d
+ movl %eax,%r13d
+ movl %eax,%r14d
+ movl %ebx,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ecx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %eax,%r15d
+ movl %r12d,48(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
+
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+
+ xorl %r13d,%edx
+ andl %r10d,%r15d
+ addl %r12d,%r11d
+
+ andl %r9d,%r14d
+ addl %r12d,%edx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%edx
+ movl 56(%rsp),%r13d
+ movl 44(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 24(%rsp),%r12d
+
+ addl 52(%rsp),%r12d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
+ movl %eax,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %ebx,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r11d,%r15d
+ movl %r12d,52(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %edx,%r13d
+ movl %edx,%r14d
+
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
+ addl %r12d,%r10d
+
+ andl %r8d,%r14d
+ addl %r12d,%ecx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ecx
+ movl 60(%rsp),%r13d
+ movl 48(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 28(%rsp),%r12d
+
+ addl 56(%rsp),%r12d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
+ movl %r11d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %eax,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r10d,%r15d
+ movl %r12d,56(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
+
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
+ addl %r12d,%r9d
+
+ andl %edx,%r14d
+ addl %r12d,%ebx
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%ebx
+ movl 0(%rsp),%r13d
+ movl 52(%rsp),%r12d
+
+ movl %r13d,%r15d
+
+ shrl $3,%r13d
+ rorl $7,%r15d
+
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
+
+ addl %r13d,%r12d
+
+ addl 32(%rsp),%r12d
+
+ addl 60(%rsp),%r12d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
+ movl %r10d,%r15d
+
+ rorl $6,%r13d
+ rorl $11,%r14d
+ xorl %r11d,%r15d
+
+ xorl %r14d,%r13d
+ rorl $14,%r14d
+ andl %r9d,%r15d
+ movl %r12d,60(%rsp)
+
+ xorl %r14d,%r13d
+ xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
+
+ addl %r15d,%r12d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
+
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
+
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+
+ xorl %r13d,%eax
+ andl %edx,%r15d
+ addl %r12d,%r8d
+
+ andl %ecx,%r14d
+ addl %r12d,%eax
+
+ orl %r15d,%r14d
+ leaq 1(%rdi),%rdi
+
+ addl %r14d,%eax
+ cmpq $64,%rdi
+ jb L$rounds_16_xx
+
+ movq 64+0(%rsp),%rdi
+ leaq 64(%rsi),%rsi
+
+ addl 0(%rdi),%eax
+ addl 4(%rdi),%ebx
+ addl 8(%rdi),%ecx
+ addl 12(%rdi),%edx
+ addl 16(%rdi),%r8d
+ addl 20(%rdi),%r9d
+ addl 24(%rdi),%r10d
+ addl 28(%rdi),%r11d
+
+ cmpq 64+16(%rsp),%rsi
+
+ movl %eax,0(%rdi)
+ movl %ebx,4(%rdi)
+ movl %ecx,8(%rdi)
+ movl %edx,12(%rdi)
+ movl %r8d,16(%rdi)
+ movl %r9d,20(%rdi)
+ movl %r10d,24(%rdi)
+ movl %r11d,28(%rdi)
+ jb L$loop
+
+ movq 64+24(%rsp),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+.p2align 6
+
+K256:
+.long 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5
+.long 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5
+.long 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3
+.long 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174
+.long 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc
+.long 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da
+.long 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7
+.long 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967
+.long 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13
+.long 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85
+.long 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3
+.long 0xd192e819,0xd6990624,0xf40e3585,0x106aa070
+.long 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5
+.long 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3
+.long 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208
+.long 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
diff --git a/deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s b/deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s
new file mode 100644
index 0000000000..5e87e554ed
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/whrlpool/wp-x86_64.s
@@ -0,0 +1,859 @@
+.text
+
+
+.globl _whirlpool_block
+
+.p2align 4
+_whirlpool_block:
+ pushq %rbx
+ pushq %rbp
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+
+ movq %rsp,%r11
+ subq $128+40,%rsp
+ andq $-64,%rsp
+
+ leaq 128(%rsp),%r10
+ movq %rdi,0(%r10)
+ movq %rsi,8(%r10)
+ movq %rdx,16(%r10)
+ movq %r11,32(%r10)
+L$prologue:
+
+ movq %r10,%rbx
+ leaq L$table(%rip),%rbp
+
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ movq 0(%rdi),%r8
+ movq 8(%rdi),%r9
+ movq 16(%rdi),%r10
+ movq 24(%rdi),%r11
+ movq 32(%rdi),%r12
+ movq 40(%rdi),%r13
+ movq 48(%rdi),%r14
+ movq 56(%rdi),%r15
+L$outerloop:
+ movq %r8,0(%rsp)
+ movq %r9,8(%rsp)
+ movq %r10,16(%rsp)
+ movq %r11,24(%rsp)
+ movq %r12,32(%rsp)
+ movq %r13,40(%rsp)
+ movq %r14,48(%rsp)
+ movq %r15,56(%rsp)
+ xorq 0(%rsi),%r8
+ xorq 8(%rsi),%r9
+ xorq 16(%rsi),%r10
+ xorq 24(%rsi),%r11
+ xorq 32(%rsi),%r12
+ xorq 40(%rsi),%r13
+ xorq 48(%rsi),%r14
+ xorq 56(%rsi),%r15
+ movq %r8,64+0(%rsp)
+ movq %r9,64+8(%rsp)
+ movq %r10,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+ movq %r12,64+32(%rsp)
+ movq %r13,64+40(%rsp)
+ movq %r14,64+48(%rsp)
+ movq %r15,64+56(%rsp)
+ xorq %rsi,%rsi
+ movq %rsi,24(%rbx)
+.p2align 4
+L$round:
+ movq 4096(%rbp,%rsi,8),%r8
+ movl 0(%rsp),%eax
+ movl 4(%rsp),%ebx
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r8
+ movq 7(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ movl 0+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ movq 6(%rbp,%rsi,8),%r10
+ movq 5(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ movq 4(%rbp,%rsi,8),%r12
+ movq 3(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 0+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ movq 2(%rbp,%rsi,8),%r14
+ movq 1(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r9
+ xorq 7(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ movl 8+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r11
+ xorq 5(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r13
+ xorq 3(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 8+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r15
+ xorq 1(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r10
+ xorq 7(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ movl 16+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r12
+ xorq 5(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r14
+ xorq 3(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 16+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r8
+ xorq 1(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r11
+ xorq 7(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ movl 24+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r13
+ xorq 5(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r15
+ xorq 3(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 24+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r9
+ xorq 1(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r12
+ xorq 7(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ movl 32+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r14
+ xorq 5(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r8
+ xorq 3(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 32+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r10
+ xorq 1(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r13
+ xorq 7(%rbp,%rdi,8),%r14
+ movb %al,%cl
+ movb %ah,%dl
+ movl 40+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r15
+ xorq 5(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r9
+ xorq 3(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 40+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r11
+ xorq 1(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r14
+ xorq 7(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ movl 48+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r8
+ xorq 5(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r10
+ xorq 3(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 48+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r12
+ xorq 1(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r15
+ xorq 7(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ movl 56+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r9
+ xorq 5(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r11
+ xorq 3(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 56+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r13
+ xorq 1(%rbp,%rdi,8),%r14
+ movq %r8,0(%rsp)
+ movq %r9,8(%rsp)
+ movq %r10,16(%rsp)
+ movq %r11,24(%rsp)
+ movq %r12,32(%rsp)
+ movq %r13,40(%rsp)
+ movq %r14,48(%rsp)
+ movq %r15,56(%rsp)
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r8
+ xorq 7(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+0+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r10
+ xorq 5(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r12
+ xorq 3(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+0+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r14
+ xorq 1(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r9
+ xorq 7(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+8+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r11
+ xorq 5(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r13
+ xorq 3(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+8+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r15
+ xorq 1(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r10
+ xorq 7(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+16+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r12
+ xorq 5(%rbp,%rdi,8),%r13
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r14
+ xorq 3(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+16+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r8
+ xorq 1(%rbp,%rdi,8),%r9
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r11
+ xorq 7(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+24+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r13
+ xorq 5(%rbp,%rdi,8),%r14
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r15
+ xorq 3(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+24+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r9
+ xorq 1(%rbp,%rdi,8),%r10
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r12
+ xorq 7(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+32+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r14
+ xorq 5(%rbp,%rdi,8),%r15
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r8
+ xorq 3(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+32+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r10
+ xorq 1(%rbp,%rdi,8),%r11
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r13
+ xorq 7(%rbp,%rdi,8),%r14
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+40+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r15
+ xorq 5(%rbp,%rdi,8),%r8
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r9
+ xorq 3(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+40+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r11
+ xorq 1(%rbp,%rdi,8),%r12
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r14
+ xorq 7(%rbp,%rdi,8),%r15
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64+48+8(%rsp),%eax
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r8
+ xorq 5(%rbp,%rdi,8),%r9
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r10
+ xorq 3(%rbp,%rdi,8),%r11
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 64+48+8+4(%rsp),%ebx
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r12
+ xorq 1(%rbp,%rdi,8),%r13
+ movb %al,%cl
+ movb %ah,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%eax
+ xorq 0(%rbp,%rsi,8),%r15
+ xorq 7(%rbp,%rdi,8),%r8
+ movb %al,%cl
+ movb %ah,%dl
+
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 6(%rbp,%rsi,8),%r9
+ xorq 5(%rbp,%rdi,8),%r10
+ movb %bl,%cl
+ movb %bh,%dl
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ shrl $16,%ebx
+ xorq 4(%rbp,%rsi,8),%r11
+ xorq 3(%rbp,%rdi,8),%r12
+ movb %bl,%cl
+ movb %bh,%dl
+
+ leaq (%rcx,%rcx,1),%rsi
+ leaq (%rdx,%rdx,1),%rdi
+ xorq 2(%rbp,%rsi,8),%r13
+ xorq 1(%rbp,%rdi,8),%r14
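+ # one full round of the key schedule and state is now complete; the
+ # scratch frame at 128(%rsp) appears to hold the per-block bookkeeping,
+ # with the round counter at offset 24 (Whirlpool runs 10 rounds)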
+ leaq 128(%rsp),%rbx
+ movq 24(%rbx),%rsi
+ addq $1,%rsi
+ cmpq $10,%rsi
+ je L$roundsdone
+
+ movq %rsi,24(%rbx)
+ movq %r8,64+0(%rsp)
+ movq %r9,64+8(%rsp)
+ movq %r10,64+16(%rsp)
+ movq %r11,64+24(%rsp)
+ movq %r12,64+32(%rsp)
+ movq %r13,64+40(%rsp)
+ movq %r14,64+48(%rsp)
+ movq %r15,64+56(%rsp)
+ jmp L$round
+.p2align 4
+L$roundsdone:
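+ # Miyaguchi-Preneel feed-forward: the new hash is E_K(M) xor M xor H,
+ # i.e. the round output is xored with both the message block and the
+ # previous hash value before being stored back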
+ movq 0(%rbx),%rdi
+ movq 8(%rbx),%rsi
+ movq 16(%rbx),%rax
+ xorq 0(%rsi),%r8
+ xorq 8(%rsi),%r9
+ xorq 16(%rsi),%r10
+ xorq 24(%rsi),%r11
+ xorq 32(%rsi),%r12
+ xorq 40(%rsi),%r13
+ xorq 48(%rsi),%r14
+ xorq 56(%rsi),%r15
+ xorq 0(%rdi),%r8
+ xorq 8(%rdi),%r9
+ xorq 16(%rdi),%r10
+ xorq 24(%rdi),%r11
+ xorq 32(%rdi),%r12
+ xorq 40(%rdi),%r13
+ xorq 48(%rdi),%r14
+ xorq 56(%rdi),%r15
+ movq %r8,0(%rdi)
+ movq %r9,8(%rdi)
+ movq %r10,16(%rdi)
+ movq %r11,24(%rdi)
+ movq %r12,32(%rdi)
+ movq %r13,40(%rdi)
+ movq %r14,48(%rdi)
+ movq %r15,56(%rdi)
+ leaq 64(%rsi),%rsi
+ subq $1,%rax
+ jz L$alldone
+ movq %rsi,8(%rbx)
+ movq %rax,16(%rbx)
+ jmp L$outerloop
+L$alldone:
+ movq 32(%rbx),%rsi
+ movq (%rsi),%r15
+ movq 8(%rsi),%r14
+ movq 16(%rsi),%r13
+ movq 24(%rsi),%r12
+ movq 32(%rsi),%rbp
+ movq 40(%rsi),%rbx
+ leaq 48(%rsi),%rsp
+L$epilogue:
+ .byte 0xf3,0xc3
+
+
+.p2align 6
+
+L$table:
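+# Each of the 256 rows below stores an 8-byte circulant-matrix entry twice,
+# so the unaligned 8-byte loads at offsets 0..7 in the round code pick up
+# all eight byte rotations of the same table word.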
+.byte 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
+.byte 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
+.byte 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
+.byte 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
+.byte 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
+.byte 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
+.byte 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
+.byte 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
+.byte 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
+.byte 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
+.byte 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
+.byte 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
+.byte 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
+.byte 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
+.byte 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
+.byte 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
+.byte 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
+.byte 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
+.byte 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
+.byte 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
+.byte 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
+.byte 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
+.byte 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
+.byte 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
+.byte 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
+.byte 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
+.byte 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
+.byte 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
+.byte 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
+.byte 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
+.byte 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
+.byte 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
+.byte 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
+.byte 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
+.byte 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
+.byte 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
+.byte 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
+.byte 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
+.byte 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
+.byte 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
+.byte 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
+.byte 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
+.byte 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
+.byte 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
+.byte 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
+.byte 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
+.byte 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
+.byte 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
+.byte 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
+.byte 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
+.byte 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
+.byte 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
+.byte 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
+.byte 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
+.byte 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
+.byte 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
+.byte 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
+.byte 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
+.byte 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
+.byte 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
+.byte 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
+.byte 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
+.byte 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
+.byte 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
+.byte 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
+.byte 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
+.byte 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
+.byte 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
+.byte 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
+.byte 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
+.byte 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
+.byte 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
+.byte 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
+.byte 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
+.byte 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
+.byte 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
+.byte 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
+.byte 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
+.byte 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
+.byte 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
+.byte 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
+.byte 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
+.byte 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
+.byte 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
+.byte 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
+.byte 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
+.byte 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
+.byte 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
+.byte 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
+.byte 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
+.byte 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
+.byte 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
+.byte 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
+.byte 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
+.byte 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
+.byte 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
+.byte 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
+.byte 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
+.byte 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
+.byte 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
+.byte 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
+.byte 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
+.byte 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
+.byte 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
+.byte 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
+.byte 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
+.byte 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
+.byte 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
+.byte 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
+.byte 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
+.byte 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
+.byte 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
+.byte 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
+.byte 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
+.byte 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
+.byte 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
+.byte 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
+.byte 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
+.byte 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
+.byte 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
+.byte 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
+.byte 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
+.byte 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
+.byte 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
+.byte 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
+.byte 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
+.byte 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
+.byte 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
+.byte 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
+.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
+.byte 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
+.byte 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
+.byte 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
+.byte 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
+.byte 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
+.byte 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
+.byte 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
+.byte 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
+.byte 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
+.byte 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
+.byte 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
+.byte 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
+.byte 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
+.byte 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
+.byte 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
+.byte 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
+.byte 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
+.byte 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
+.byte 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
+.byte 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
+.byte 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
+.byte 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
+.byte 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
+.byte 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
+.byte 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
+.byte 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
+.byte 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
+.byte 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
+.byte 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
+.byte 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
+.byte 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
+.byte 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
+.byte 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
+.byte 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
+.byte 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
+.byte 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
+.byte 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
+.byte 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
+.byte 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
+.byte 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
+.byte 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
+.byte 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
+.byte 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
+.byte 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
+.byte 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
+.byte 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
+.byte 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
+.byte 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
+.byte 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
+.byte 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
+.byte 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
+.byte 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
+.byte 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
+.byte 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
+.byte 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
+.byte 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
+.byte 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
+.byte 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
+.byte 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
+.byte 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
+.byte 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
+.byte 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
+.byte 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
+.byte 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
+.byte 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
+.byte 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
+.byte 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
+.byte 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
+.byte 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
+.byte 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
+.byte 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
+.byte 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
+.byte 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
+.byte 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
+.byte 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
+.byte 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
+.byte 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
+.byte 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
+.byte 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
+.byte 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
+.byte 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
+.byte 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
+.byte 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
+.byte 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
+.byte 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
+.byte 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
+.byte 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
+.byte 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
+.byte 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
+.byte 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
+.byte 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
+.byte 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
+.byte 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
+.byte 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
+.byte 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
+.byte 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
+.byte 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
+.byte 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
+.byte 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
+.byte 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
+.byte 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
+.byte 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
+.byte 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
+.byte 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
+.byte 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
+.byte 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
+.byte 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
+.byte 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
+.byte 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
+.byte 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
+.byte 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
+.byte 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
+.byte 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
+.byte 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
+.byte 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
+.byte 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
+.byte 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
+.byte 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
+.byte 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
+.byte 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
+.byte 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
+.byte 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
+.byte 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
+.byte 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
+.byte 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
+.byte 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
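+# The ten 8-byte round constants rc[1..10] follow the main table.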
+.byte 24,35,198,232,135,184,1,79
+.byte 54,166,210,245,121,111,145,82
+.byte 96,188,155,142,163,12,123,53
+.byte 29,224,215,194,46,75,254,87
+.byte 21,119,55,229,159,240,74,218
+.byte 88,201,41,10,177,160,107,133
+.byte 189,93,16,244,203,62,5,103
+.byte 228,39,65,139,167,125,149,216
+.byte 251,238,124,102,221,23,71,158
+.byte 202,45,191,7,173,90,131,51
diff --git a/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s b/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
new file mode 100644
index 0000000000..a1670e38e8
--- /dev/null
+++ b/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
@@ -0,0 +1,195 @@
+
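+# Mach-O module-initializer section: _OPENSSL_cpuid_setup runs at load time.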
+.mod_init_func
+ .p2align 3
+ .quad _OPENSSL_cpuid_setup
+
+.text
+
+
+.globl _OPENSSL_atomic_add
+
+.p2align 4
+_OPENSSL_atomic_add:
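+ # atomic add via lock/cmpxchg; the raw .byte sequences below encode the
+ # LOCK prefix (0xf0), cdqe (0x48,0x98) and a two-byte "rep ret" (0xf3,0xc3)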
+ movl (%rdi),%eax
+L$spin: leaq (%rsi,%rax,1),%r8
+.byte 0xf0
+
+ cmpxchgl %r8d,(%rdi)
+ jne L$spin
+ movl %r8d,%eax
+.byte 0x48,0x98
+
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_rdtsc
+
+.p2align 4
+_OPENSSL_rdtsc:
+ rdtsc
+ shlq $32,%rdx
+ orq %rdx,%rax
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_ia32_cpuid
+
+.p2align 4
+_OPENSSL_ia32_cpuid:
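+ # the decimal immediates below are the CPUID vendor strings read as
+ # little-endian dwords: "Genu"/"ineI"/"ntel" for Intel and
+ # "Auth"/"enti"/"cAMD" for AMD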
+ movq %rbx,%r8
+
+ xorl %eax,%eax
+ cpuid
+ movl %eax,%r11d
+
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%r9d
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%r9d
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%r9d
+ jz L$intel
+
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%r10d
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%r10d
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%r10d
+ jnz L$intel
+
+
+ movl $2147483648,%eax
+ cpuid
+ cmpl $2147483656,%eax
+ jb L$intel
+
+ movl $2147483656,%eax
+ cpuid
+ movzbq %cl,%r10
+ incq %r10
+
+ movl $1,%eax
+ cpuid
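+ # bit 28 of EDX is the HTT flag; masking with 4026531839 (0xefffffff)
+ # clears it when the logical-processor count does not exceed the
+ # reported core count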
+ btl $28,%edx
+ jnc L$done
+ shrl $16,%ebx
+ cmpb %r10b,%bl
+ ja L$done
+ andl $4026531839,%edx
+ jmp L$done
+
+L$intel:
+ cmpl $4,%r11d
+ movl $-1,%r10d
+ jb L$nocacheinfo
+
+ movl $4,%eax
+ movl $0,%ecx
+ cpuid
+ movl %eax,%r10d
+ shrl $14,%r10d
+ andl $4095,%r10d
+
+L$nocacheinfo:
+ movl $1,%eax
+ cpuid
+ cmpl $0,%r9d
+ jne L$notintel
+ orl $1048576,%edx
+ andb $15,%ah
+ cmpb $15,%ah
+ je L$notintel
+ orl $1073741824,%edx
+L$notintel:
+ btl $28,%edx
+ jnc L$done
+ andl $4026531839,%edx
+ cmpl $0,%r10d
+ je L$done
+
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja L$done
+ andl $4026531839,%edx
+L$done:
+ shlq $32,%rcx
+ movl %edx,%eax
+ movq %r8,%rbx
+ orq %rcx,%rax
+ .byte 0xf3,0xc3
+
+
+.globl _OPENSSL_cleanse
+
+.p2align 4
+_OPENSSL_cleanse:
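+ # secure memset-to-zero: byte loop for buffers shorter than 15 bytes,
+ # aligned 8-byte stores otherwise; kept in assembly so the stores
+ # cannot be optimized away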
+ xorq %rax,%rax
+ cmpq $15,%rsi
+ jae L$ot
+ cmpq $0,%rsi
+ je L$ret
+L$ittle:
+ movb %al,(%rdi)
+ subq $1,%rsi
+ leaq 1(%rdi),%rdi
+ jnz L$ittle
+L$ret:
+ .byte 0xf3,0xc3
+.p2align 4
+L$ot:
+ testq $7,%rdi
+ jz L$aligned
+ movb %al,(%rdi)
+ leaq -1(%rsi),%rsi
+ leaq 1(%rdi),%rdi
+ jmp L$ot
+L$aligned:
+ movq %rax,(%rdi)
+ leaq -8(%rsi),%rsi
+ testq $-8,%rsi
+ leaq 8(%rdi),%rdi
+ jnz L$aligned
+ cmpq $0,%rsi
+ jne L$ittle
+ .byte 0xf3,0xc3
+
+.globl _OPENSSL_wipe_cpu
+
+.p2align 4
+_OPENSSL_wipe_cpu:
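+ # scrub the volatile register file: xmm0-xmm15 and the caller-saved
+ # general-purpose registers are zeroed, and rax returns the current
+ # stack pointer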
+ pxor %xmm0,%xmm0
+ pxor %xmm1,%xmm1
+ pxor %xmm2,%xmm2
+ pxor %xmm3,%xmm3
+ pxor %xmm4,%xmm4
+ pxor %xmm5,%xmm5
+ pxor %xmm6,%xmm6
+ pxor %xmm7,%xmm7
+ pxor %xmm8,%xmm8
+ pxor %xmm9,%xmm9
+ pxor %xmm10,%xmm10
+ pxor %xmm11,%xmm11
+ pxor %xmm12,%xmm12
+ pxor %xmm13,%xmm13
+ pxor %xmm14,%xmm14
+ pxor %xmm15,%xmm15
+ xorq %rcx,%rcx
+ xorq %rdx,%rdx
+ xorq %rsi,%rsi
+ xorq %rdi,%rdi
+ xorq %r8,%r8
+ xorq %r9,%r9
+ xorq %r10,%r10
+ xorq %r11,%r11
+ leaq 8(%rsp),%rax
+ .byte 0xf3,0xc3
+
diff --git a/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm b/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
new file mode 100644
index 0000000000..2c590b94f4
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
@@ -0,0 +1,2885 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+ALIGN 16
+_x86_64_AES_encrypt PROC PRIVATE
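+; Table-driven AES: each round gathers byte-indexed dwords from the Te
+; table (based at r14) into r10d/r11d/r12d/r8d, then folds in the next
+; round key from r15.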
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+
+ mov r13d,DWORD PTR[240+r15]
+ sub r13d,1
+ jmp $L$enc_loop
+ALIGN 16
+$L$enc_loop::
+
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ mov r10d,DWORD PTR[rsi*8+r14]
+ mov r11d,DWORD PTR[rdi*8+r14]
+ mov r12d,DWORD PTR[rbp*8+r14]
+
+ movzx esi,bh
+ movzx edi,ch
+ movzx ebp,dl
+ xor r10d,DWORD PTR[3+rsi*8+r14]
+ xor r11d,DWORD PTR[3+rdi*8+r14]
+ mov r8d,DWORD PTR[rbp*8+r14]
+
+ movzx esi,dh
+ shr ecx,16
+ movzx ebp,ah
+ xor r12d,DWORD PTR[3+rsi*8+r14]
+ shr edx,16
+ xor r8d,DWORD PTR[3+rbp*8+r14]
+
+ shr ebx,16
+ lea r15,QWORD PTR[16+r15]
+ shr eax,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ xor r10d,DWORD PTR[2+rsi*8+r14]
+ xor r11d,DWORD PTR[2+rdi*8+r14]
+ xor r12d,DWORD PTR[2+rbp*8+r14]
+
+ movzx esi,dh
+ movzx edi,ah
+ movzx ebp,bl
+ xor r10d,DWORD PTR[1+rsi*8+r14]
+ xor r11d,DWORD PTR[1+rdi*8+r14]
+ xor r8d,DWORD PTR[2+rbp*8+r14]
+
+ mov edx,DWORD PTR[12+r15]
+ movzx edi,bh
+ movzx ebp,ch
+ mov eax,DWORD PTR[r15]
+ xor r12d,DWORD PTR[1+rdi*8+r14]
+ xor r8d,DWORD PTR[1+rbp*8+r14]
+
+ mov ebx,DWORD PTR[4+r15]
+ mov ecx,DWORD PTR[8+r15]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+ sub r13d,1
+ jnz $L$enc_loop
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ movzx r10d,BYTE PTR[2+rsi*8+r14]
+ movzx r11d,BYTE PTR[2+rdi*8+r14]
+ movzx r12d,BYTE PTR[2+rbp*8+r14]
+
+ movzx esi,dl
+ movzx edi,bh
+ movzx ebp,ch
+ movzx r8d,BYTE PTR[2+rsi*8+r14]
+ mov edi,DWORD PTR[rdi*8+r14]
+ mov ebp,DWORD PTR[rbp*8+r14]
+
+ and edi,00000ff00h
+ and ebp,00000ff00h
+
+ xor r10d,edi
+ xor r11d,ebp
+ shr ecx,16
+
+ movzx esi,dh
+ movzx edi,ah
+ shr edx,16
+ mov esi,DWORD PTR[rsi*8+r14]
+ mov edi,DWORD PTR[rdi*8+r14]
+
+ and esi,00000ff00h
+ and edi,00000ff00h
+ shr ebx,16
+ xor r12d,esi
+ xor r8d,edi
+ shr eax,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ mov esi,DWORD PTR[rsi*8+r14]
+ mov edi,DWORD PTR[rdi*8+r14]
+ mov ebp,DWORD PTR[rbp*8+r14]
+
+ and esi,000ff0000h
+ and edi,000ff0000h
+ and ebp,000ff0000h
+
+ xor r10d,esi
+ xor r11d,edi
+ xor r12d,ebp
+
+ movzx esi,bl
+ movzx edi,dh
+ movzx ebp,ah
+ mov esi,DWORD PTR[rsi*8+r14]
+ mov edi,DWORD PTR[2+rdi*8+r14]
+ mov ebp,DWORD PTR[2+rbp*8+r14]
+
+ and esi,000ff0000h
+ and edi,0ff000000h
+ and ebp,0ff000000h
+
+ xor r8d,esi
+ xor r10d,edi
+ xor r11d,ebp
+
+ movzx esi,bh
+ movzx edi,ch
+ mov edx,DWORD PTR[((16+12))+r15]
+ mov esi,DWORD PTR[2+rsi*8+r14]
+ mov edi,DWORD PTR[2+rdi*8+r14]
+ mov eax,DWORD PTR[((16+0))+r15]
+
+ and esi,0ff000000h
+ and edi,0ff000000h
+
+ xor r12d,esi
+ xor r8d,edi
+
+ mov ebx,DWORD PTR[((16+4))+r15]
+ mov ecx,DWORD PTR[((16+8))+r15]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+DB 0f3h,0c3h
+
+_x86_64_AES_encrypt ENDP
+
+ALIGN 16
+_x86_64_AES_encrypt_compact PROC PRIVATE
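+; Compact variant: works from the 256-byte S-box instead of the 2KB Te
+; table to shrink the cache footprint; the discarded loads below walk the
+; S-box to pull it into cache and blunt cache-timing attacks.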
+ lea r8,QWORD PTR[128+r14]
+ mov edi,DWORD PTR[((0-128))+r8]
+ mov ebp,DWORD PTR[((32-128))+r8]
+ mov r10d,DWORD PTR[((64-128))+r8]
+ mov r11d,DWORD PTR[((96-128))+r8]
+ mov edi,DWORD PTR[((128-128))+r8]
+ mov ebp,DWORD PTR[((160-128))+r8]
+ mov r10d,DWORD PTR[((192-128))+r8]
+ mov r11d,DWORD PTR[((224-128))+r8]
+ jmp $L$enc_loop_compact
+ALIGN 16
+$L$enc_loop_compact::
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+ lea r15,QWORD PTR[16+r15]
+ movzx r10d,al
+ movzx r11d,bl
+ movzx r12d,cl
+ movzx r10d,BYTE PTR[r10*1+r14]
+ movzx r11d,BYTE PTR[r11*1+r14]
+ movzx r12d,BYTE PTR[r12*1+r14]
+
+ movzx r8d,dl
+ movzx esi,bh
+ movzx edi,ch
+ movzx r8d,BYTE PTR[r8*1+r14]
+ movzx r9d,BYTE PTR[rsi*1+r14]
+ movzx r13d,BYTE PTR[rdi*1+r14]
+
+ movzx ebp,dh
+ movzx esi,ah
+ shr ecx,16
+ movzx ebp,BYTE PTR[rbp*1+r14]
+ movzx esi,BYTE PTR[rsi*1+r14]
+ shr edx,16
+
+ movzx edi,cl
+ shl r9d,8
+ shl r13d,8
+ movzx edi,BYTE PTR[rdi*1+r14]
+ xor r10d,r9d
+ xor r11d,r13d
+
+ movzx r9d,dl
+ shr eax,16
+ shr ebx,16
+ movzx r13d,al
+ shl ebp,8
+ shl esi,8
+ movzx r9d,BYTE PTR[r9*1+r14]
+ movzx r13d,BYTE PTR[r13*1+r14]
+ xor r12d,ebp
+ xor r8d,esi
+
+ movzx ebp,bl
+ movzx esi,dh
+ shl edi,16
+ movzx ebp,BYTE PTR[rbp*1+r14]
+ movzx esi,BYTE PTR[rsi*1+r14]
+ xor r10d,edi
+
+ movzx edi,ah
+ shr ecx,8
+ shr ebx,8
+ movzx edi,BYTE PTR[rdi*1+r14]
+ movzx edx,BYTE PTR[rcx*1+r14]
+ movzx ecx,BYTE PTR[rbx*1+r14]
+ shl r9d,16
+ shl r13d,16
+ shl ebp,16
+ xor r11d,r9d
+ xor r12d,r13d
+ xor r8d,ebp
+
+ shl esi,24
+ shl edi,24
+ shl edx,24
+ xor r10d,esi
+ shl ecx,24
+ xor r11d,edi
+ mov eax,r10d
+ mov ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+ cmp r15,QWORD PTR[16+rsp]
+ je $L$enc_compact_done
+ mov esi,eax
+ mov edi,ebx
+ and esi,080808080h
+ and edi,080808080h
+ mov r10d,esi
+ mov r11d,edi
+ shr r10d,7
+ lea r8d,DWORD PTR[rax*1+rax]
+ shr r11d,7
+ lea r9d,DWORD PTR[rbx*1+rbx]
+ sub esi,r10d
+ sub edi,r11d
+ and r8d,0fefefefeh
+ and r9d,0fefefefeh
+ and esi,01b1b1b1bh
+ and edi,01b1b1b1bh
+ mov r10d,eax
+ mov r11d,ebx
+ xor r8d,esi
+ xor r9d,edi
+
+ xor eax,r8d
+ xor ebx,r9d
+ mov esi,ecx
+ mov edi,edx
+ rol eax,24
+ rol ebx,24
+ and esi,080808080h
+ and edi,080808080h
+ xor eax,r8d
+ xor ebx,r9d
+ mov r12d,esi
+ mov ebp,edi
+ ror r10d,16
+ ror r11d,16
+ shr r12d,7
+ lea r8d,DWORD PTR[rcx*1+rcx]
+ xor eax,r10d
+ xor ebx,r11d
+ shr ebp,7
+ lea r9d,DWORD PTR[rdx*1+rdx]
+ ror r10d,8
+ ror r11d,8
+ sub esi,r12d
+ sub edi,ebp
+ xor eax,r10d
+ xor ebx,r11d
+
+ and r8d,0fefefefeh
+ and r9d,0fefefefeh
+ and esi,01b1b1b1bh
+ and edi,01b1b1b1bh
+ mov r12d,ecx
+ mov ebp,edx
+ xor r8d,esi
+ xor r9d,edi
+
+ xor ecx,r8d
+ xor edx,r9d
+ rol ecx,24
+ rol edx,24
+ xor ecx,r8d
+ xor edx,r9d
+ mov esi,DWORD PTR[r14]
+ ror r12d,16
+ ror ebp,16
+ mov edi,DWORD PTR[64+r14]
+ xor ecx,r12d
+ xor edx,ebp
+ mov r8d,DWORD PTR[128+r14]
+ ror r12d,8
+ ror ebp,8
+ mov r9d,DWORD PTR[192+r14]
+ xor ecx,r12d
+ xor edx,ebp
+ jmp $L$enc_loop_compact
+ALIGN 16
+$L$enc_compact_done::
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+DB 0f3h,0c3h
+
+_x86_64_AES_encrypt_compact ENDP
+PUBLIC AES_encrypt
+
+ALIGN 16
+AES_encrypt PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_encrypt::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+
+
+ mov r10,rsp
+ lea rcx,QWORD PTR[((-63))+rdx]
+ and rsp,-64
+ sub rcx,rsp
+ neg rcx
+ and rcx,03c0h
+ sub rsp,rcx
+ sub rsp,32
+
+ mov QWORD PTR[16+rsp],rsi
+ mov QWORD PTR[24+rsp],r10
+$L$enc_prologue::
+
+ mov r15,rdx
+ mov r13d,DWORD PTR[240+r15]
+
+ mov eax,DWORD PTR[rdi]
+ mov ebx,DWORD PTR[4+rdi]
+ mov ecx,DWORD PTR[8+rdi]
+ mov edx,DWORD PTR[12+rdi]
+
+ shl r13d,4
+ lea rbp,QWORD PTR[r13*1+r15]
+ mov QWORD PTR[rsp],r15
+ mov QWORD PTR[8+rsp],rbp
+
+
+ lea r14,QWORD PTR[(($L$AES_Te+2048))]
+ lea rbp,QWORD PTR[768+rsp]
+ sub rbp,r14
+ and rbp,0300h
+ lea r14,QWORD PTR[rbp*1+r14]
+
+ call _x86_64_AES_encrypt_compact
+
+ mov r9,QWORD PTR[16+rsp]
+ mov rsi,QWORD PTR[24+rsp]
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$enc_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_AES_encrypt::
+AES_encrypt ENDP
+
+ALIGN 16
+_x86_64_AES_decrypt PROC PRIVATE
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+
+ mov r13d,DWORD PTR[240+r15]
+ sub r13d,1
+ jmp $L$dec_loop
+ALIGN 16
+$L$dec_loop::
+
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ mov r10d,DWORD PTR[rsi*8+r14]
+ mov r11d,DWORD PTR[rdi*8+r14]
+ mov r12d,DWORD PTR[rbp*8+r14]
+
+ movzx esi,dh
+ movzx edi,ah
+ movzx ebp,dl
+ xor r10d,DWORD PTR[3+rsi*8+r14]
+ xor r11d,DWORD PTR[3+rdi*8+r14]
+ mov r8d,DWORD PTR[rbp*8+r14]
+
+ movzx esi,bh
+ shr eax,16
+ movzx ebp,ch
+ xor r12d,DWORD PTR[3+rsi*8+r14]
+ shr edx,16
+ xor r8d,DWORD PTR[3+rbp*8+r14]
+
+ shr ebx,16
+ lea r15,QWORD PTR[16+r15]
+ shr ecx,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ xor r10d,DWORD PTR[2+rsi*8+r14]
+ xor r11d,DWORD PTR[2+rdi*8+r14]
+ xor r12d,DWORD PTR[2+rbp*8+r14]
+
+ movzx esi,bh
+ movzx edi,ch
+ movzx ebp,bl
+ xor r10d,DWORD PTR[1+rsi*8+r14]
+ xor r11d,DWORD PTR[1+rdi*8+r14]
+ xor r8d,DWORD PTR[2+rbp*8+r14]
+
+ movzx esi,dh
+ mov edx,DWORD PTR[12+r15]
+ movzx ebp,ah
+ xor r12d,DWORD PTR[1+rsi*8+r14]
+ mov eax,DWORD PTR[r15]
+ xor r8d,DWORD PTR[1+rbp*8+r14]
+
+ xor eax,r10d
+ mov ebx,DWORD PTR[4+r15]
+ mov ecx,DWORD PTR[8+r15]
+ xor ecx,r12d
+ xor ebx,r11d
+ xor edx,r8d
+ sub r13d,1
+ jnz $L$dec_loop
+ lea r14,QWORD PTR[2048+r14]
+ movzx esi,al
+ movzx edi,bl
+ movzx ebp,cl
+ movzx r10d,BYTE PTR[rsi*1+r14]
+ movzx r11d,BYTE PTR[rdi*1+r14]
+ movzx r12d,BYTE PTR[rbp*1+r14]
+
+ movzx esi,dl
+ movzx edi,dh
+ movzx ebp,ah
+ movzx r8d,BYTE PTR[rsi*1+r14]
+ movzx edi,BYTE PTR[rdi*1+r14]
+ movzx ebp,BYTE PTR[rbp*1+r14]
+
+ shl edi,8
+ shl ebp,8
+
+ xor r10d,edi
+ xor r11d,ebp
+ shr edx,16
+
+ movzx esi,bh
+ movzx edi,ch
+ shr eax,16
+ movzx esi,BYTE PTR[rsi*1+r14]
+ movzx edi,BYTE PTR[rdi*1+r14]
+
+ shl esi,8
+ shl edi,8
+ shr ebx,16
+ xor r12d,esi
+ xor r8d,edi
+ shr ecx,16
+
+ movzx esi,cl
+ movzx edi,dl
+ movzx ebp,al
+ movzx esi,BYTE PTR[rsi*1+r14]
+ movzx edi,BYTE PTR[rdi*1+r14]
+ movzx ebp,BYTE PTR[rbp*1+r14]
+
+ shl esi,16
+ shl edi,16
+ shl ebp,16
+
+ xor r10d,esi
+ xor r11d,edi
+ xor r12d,ebp
+
+ movzx esi,bl
+ movzx edi,bh
+ movzx ebp,ch
+ movzx esi,BYTE PTR[rsi*1+r14]
+ movzx edi,BYTE PTR[rdi*1+r14]
+ movzx ebp,BYTE PTR[rbp*1+r14]
+
+ shl esi,16
+ shl edi,24
+ shl ebp,24
+
+ xor r8d,esi
+ xor r10d,edi
+ xor r11d,ebp
+
+ movzx esi,dh
+ movzx edi,ah
+ mov edx,DWORD PTR[((16+12))+r15]
+ movzx esi,BYTE PTR[rsi*1+r14]
+ movzx edi,BYTE PTR[rdi*1+r14]
+ mov eax,DWORD PTR[((16+0))+r15]
+
+ shl esi,24
+ shl edi,24
+
+ xor r12d,esi
+ xor r8d,edi
+
+ mov ebx,DWORD PTR[((16+4))+r15]
+ mov ecx,DWORD PTR[((16+8))+r15]
+ lea r14,QWORD PTR[((-2048))+r14]
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r12d
+ xor edx,r8d
+DB 0f3h,0c3h
+
+_x86_64_AES_decrypt ENDP
+
+ALIGN 16
+_x86_64_AES_decrypt_compact PROC PRIVATE
+ lea r8,QWORD PTR[128+r14]
+ mov edi,DWORD PTR[((0-128))+r8]
+ mov ebp,DWORD PTR[((32-128))+r8]
+ mov r10d,DWORD PTR[((64-128))+r8]
+ mov r11d,DWORD PTR[((96-128))+r8]
+ mov edi,DWORD PTR[((128-128))+r8]
+ mov ebp,DWORD PTR[((160-128))+r8]
+ mov r10d,DWORD PTR[((192-128))+r8]
+ mov r11d,DWORD PTR[((224-128))+r8]
+ jmp $L$dec_loop_compact
+
+ALIGN 16
+$L$dec_loop_compact::
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+ lea r15,QWORD PTR[16+r15]
+ movzx r10d,al
+ movzx r11d,bl
+ movzx r12d,cl
+ movzx r10d,BYTE PTR[r10*1+r14]
+ movzx r11d,BYTE PTR[r11*1+r14]
+ movzx r12d,BYTE PTR[r12*1+r14]
+
+ movzx r8d,dl
+ movzx esi,dh
+ movzx edi,ah
+ movzx r8d,BYTE PTR[r8*1+r14]
+ movzx r9d,BYTE PTR[rsi*1+r14]
+ movzx r13d,BYTE PTR[rdi*1+r14]
+
+ movzx ebp,bh
+ movzx esi,ch
+ shr ecx,16
+ movzx ebp,BYTE PTR[rbp*1+r14]
+ movzx esi,BYTE PTR[rsi*1+r14]
+ shr edx,16
+
+ movzx edi,cl
+ shl r9d,8
+ shl r13d,8
+ movzx edi,BYTE PTR[rdi*1+r14]
+ xor r10d,r9d
+ xor r11d,r13d
+
+ movzx r9d,dl
+ shr eax,16
+ shr ebx,16
+ movzx r13d,al
+ shl ebp,8
+ shl esi,8
+ movzx r9d,BYTE PTR[r9*1+r14]
+ movzx r13d,BYTE PTR[r13*1+r14]
+ xor r12d,ebp
+ xor r8d,esi
+
+ movzx ebp,bl
+ movzx esi,bh
+ shl edi,16
+ movzx ebp,BYTE PTR[rbp*1+r14]
+ movzx esi,BYTE PTR[rsi*1+r14]
+ xor r10d,edi
+
+ movzx edi,ch
+ shl r9d,16
+ shl r13d,16
+ movzx ebx,BYTE PTR[rdi*1+r14]
+ xor r11d,r9d
+ xor r12d,r13d
+
+ movzx edi,dh
+ shr eax,8
+ shl ebp,16
+ movzx ecx,BYTE PTR[rdi*1+r14]
+ movzx edx,BYTE PTR[rax*1+r14]
+ xor r8d,ebp
+
+ shl esi,24
+ shl ebx,24
+ shl ecx,24
+ xor r10d,esi
+ shl edx,24
+ xor ebx,r11d
+ mov eax,r10d
+ xor ecx,r12d
+ xor edx,r8d
+ cmp r15,QWORD PTR[16+rsp]
+ je $L$dec_compact_done
+
+ mov rsi,QWORD PTR[((256+0))+r14]
+ shl rbx,32
+ shl rdx,32
+ mov rdi,QWORD PTR[((256+8))+r14]
+ or rax,rbx
+ or rcx,rdx
+ mov rbp,QWORD PTR[((256+16))+r14]
+ mov rbx,rax
+ mov rdx,rcx
+ and rbx,rsi
+ and rdx,rsi
+ mov r9,rbx
+ mov r12,rdx
+ shr r9,7
+ lea r8,QWORD PTR[rax*1+rax]
+ shr r12,7
+ lea r11,QWORD PTR[rcx*1+rcx]
+ sub rbx,r9
+ sub rdx,r12
+ and r8,rdi
+ and r11,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor rbx,r8
+ xor rdx,r11
+ mov r8,rbx
+ mov r11,rdx
+
+ and rbx,rsi
+ and rdx,rsi
+ mov r10,rbx
+ mov r13,rdx
+ shr r10,7
+ lea r9,QWORD PTR[r8*1+r8]
+ shr r13,7
+ lea r12,QWORD PTR[r11*1+r11]
+ sub rbx,r10
+ sub rdx,r13
+ and r9,rdi
+ and r12,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor rbx,r9
+ xor rdx,r12
+ mov r9,rbx
+ mov r12,rdx
+
+ and rbx,rsi
+ and rdx,rsi
+ mov r10,rbx
+ mov r13,rdx
+ shr r10,7
+ xor r8,rax
+ shr r13,7
+ xor r11,rcx
+ sub rbx,r10
+ sub rdx,r13
+ lea r10,QWORD PTR[r9*1+r9]
+ lea r13,QWORD PTR[r12*1+r12]
+ xor r9,rax
+ xor r12,rcx
+ and r10,rdi
+ and r13,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r10,rbx
+ xor r13,rdx
+
+ xor rax,r10
+ xor rcx,r13
+ xor r8,r10
+ xor r11,r13
+ mov rbx,rax
+ mov rdx,rcx
+ xor r9,r10
+ xor r12,r13
+ shr rbx,32
+ shr rdx,32
+ xor r10,r8
+ xor r13,r11
+ rol eax,8
+ rol ecx,8
+ xor r10,r9
+ xor r13,r12
+
+ rol ebx,8
+ rol edx,8
+ xor eax,r10d
+ xor ecx,r13d
+ shr r10,32
+ shr r13,32
+ xor ebx,r10d
+ xor edx,r13d
+
+ mov r10,r8
+ mov r13,r11
+ shr r10,32
+ shr r13,32
+ rol r8d,24
+ rol r11d,24
+ rol r10d,24
+ rol r13d,24
+ xor eax,r8d
+ xor ecx,r11d
+ mov r8,r9
+ mov r11,r12
+ xor ebx,r10d
+ xor edx,r13d
+
+ mov rsi,QWORD PTR[r14]
+ shr r8,32
+ shr r11,32
+ mov rdi,QWORD PTR[64+r14]
+ rol r9d,16
+ rol r12d,16
+ mov rbp,QWORD PTR[128+r14]
+ rol r8d,16
+ rol r11d,16
+ mov r10,QWORD PTR[192+r14]
+ xor eax,r9d
+ xor ecx,r12d
+ mov r13,QWORD PTR[256+r14]
+ xor ebx,r8d
+ xor edx,r11d
+ jmp $L$dec_loop_compact
+ALIGN 16
+$L$dec_compact_done::
+ xor eax,DWORD PTR[r15]
+ xor ebx,DWORD PTR[4+r15]
+ xor ecx,DWORD PTR[8+r15]
+ xor edx,DWORD PTR[12+r15]
+DB 0f3h,0c3h
+
+_x86_64_AES_decrypt_compact ENDP
+PUBLIC AES_decrypt
+
+ALIGN 16
+AES_decrypt PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_decrypt::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+
+
+ mov r10,rsp
+ lea rcx,QWORD PTR[((-63))+rdx]
+ and rsp,-64
+ sub rcx,rsp
+ neg rcx
+ and rcx,03c0h
+ sub rsp,rcx
+ sub rsp,32
+
+ mov QWORD PTR[16+rsp],rsi
+ mov QWORD PTR[24+rsp],r10
+$L$dec_prologue::
+
+ mov r15,rdx
+ mov r13d,DWORD PTR[240+r15]
+
+ mov eax,DWORD PTR[rdi]
+ mov ebx,DWORD PTR[4+rdi]
+ mov ecx,DWORD PTR[8+rdi]
+ mov edx,DWORD PTR[12+rdi]
+
+ shl r13d,4
+ lea rbp,QWORD PTR[r13*1+r15]
+ mov QWORD PTR[rsp],r15
+ mov QWORD PTR[8+rsp],rbp
+
+
+ lea r14,QWORD PTR[(($L$AES_Td+2048))]
+ lea rbp,QWORD PTR[768+rsp]
+ sub rbp,r14
+ and rbp,0300h
+ lea r14,QWORD PTR[rbp*1+r14]
+ shr rbp,3
+ add r14,rbp
+
+ call _x86_64_AES_decrypt_compact
+
+ mov r9,QWORD PTR[16+rsp]
+ mov rsi,QWORD PTR[24+rsp]
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$dec_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_AES_decrypt::
+AES_decrypt ENDP
+PUBLIC AES_set_encrypt_key
+
+ALIGN 16
+AES_set_encrypt_key PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_set_encrypt_key::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ sub rsp,8
+$L$enc_key_prologue::
+
+ call _x86_64_AES_set_encrypt_key
+
+ mov r15,QWORD PTR[8+rsp]
+ mov r14,QWORD PTR[16+rsp]
+ mov r13,QWORD PTR[24+rsp]
+ mov r12,QWORD PTR[32+rsp]
+ mov rbp,QWORD PTR[40+rsp]
+ mov rbx,QWORD PTR[48+rsp]
+ add rsp,56
+$L$enc_key_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_AES_set_encrypt_key::
+AES_set_encrypt_key ENDP
+
+
+ALIGN 16
+_x86_64_AES_set_encrypt_key PROC PRIVATE
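+; Key expansion: dispatches on 128/192/256-bit keys to 10/12/14 rounds;
+; returns 0 on success, -1 for a NULL pointer and -2 for a bad key length.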
+ mov ecx,esi
+ mov rsi,rdi
+ mov rdi,rdx
+
+ test rsi,-1
+ jz $L$badpointer
+ test rdi,-1
+ jz $L$badpointer
+
+ lea rbp,QWORD PTR[$L$AES_Te]
+ lea rbp,QWORD PTR[((2048+128))+rbp]
+
+
+ mov eax,DWORD PTR[((0-128))+rbp]
+ mov ebx,DWORD PTR[((32-128))+rbp]
+ mov r8d,DWORD PTR[((64-128))+rbp]
+ mov edx,DWORD PTR[((96-128))+rbp]
+ mov eax,DWORD PTR[((128-128))+rbp]
+ mov ebx,DWORD PTR[((160-128))+rbp]
+ mov r8d,DWORD PTR[((192-128))+rbp]
+ mov edx,DWORD PTR[((224-128))+rbp]
+
+ cmp ecx,128
+ je $L$10rounds
+ cmp ecx,192
+ je $L$12rounds
+ cmp ecx,256
+ je $L$14rounds
+ mov rax,-2
+ jmp $L$exit
+
+$L$10rounds::
+ mov rax,QWORD PTR[rsi]
+ mov rdx,QWORD PTR[8+rsi]
+ mov QWORD PTR[rdi],rax
+ mov QWORD PTR[8+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp $L$10shortcut
+ALIGN 4
+$L$10loop::
+ mov eax,DWORD PTR[rdi]
+ mov edx,DWORD PTR[12+rdi]
+$L$10shortcut::
+ movzx esi,dl
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD PTR[((1024-128))+rcx*4+rbp]
+ mov DWORD PTR[16+rdi],eax
+ xor eax,DWORD PTR[4+rdi]
+ mov DWORD PTR[20+rdi],eax
+ xor eax,DWORD PTR[8+rdi]
+ mov DWORD PTR[24+rdi],eax
+ xor eax,DWORD PTR[12+rdi]
+ mov DWORD PTR[28+rdi],eax
+ add ecx,1
+ lea rdi,QWORD PTR[16+rdi]
+ cmp ecx,10
+ jl $L$10loop
+
+ mov DWORD PTR[80+rdi],10
+ xor rax,rax
+ jmp $L$exit
+
+$L$12rounds::
+ mov rax,QWORD PTR[rsi]
+ mov rbx,QWORD PTR[8+rsi]
+ mov rdx,QWORD PTR[16+rsi]
+ mov QWORD PTR[rdi],rax
+ mov QWORD PTR[8+rdi],rbx
+ mov QWORD PTR[16+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp $L$12shortcut
+ALIGN 4
+$L$12loop::
+ mov eax,DWORD PTR[rdi]
+ mov edx,DWORD PTR[20+rdi]
+$L$12shortcut::
+ movzx esi,dl
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD PTR[((1024-128))+rcx*4+rbp]
+ mov DWORD PTR[24+rdi],eax
+ xor eax,DWORD PTR[4+rdi]
+ mov DWORD PTR[28+rdi],eax
+ xor eax,DWORD PTR[8+rdi]
+ mov DWORD PTR[32+rdi],eax
+ xor eax,DWORD PTR[12+rdi]
+ mov DWORD PTR[36+rdi],eax
+
+ cmp ecx,7
+ je $L$12break
+ add ecx,1
+
+ xor eax,DWORD PTR[16+rdi]
+ mov DWORD PTR[40+rdi],eax
+ xor eax,DWORD PTR[20+rdi]
+ mov DWORD PTR[44+rdi],eax
+
+ lea rdi,QWORD PTR[24+rdi]
+ jmp $L$12loop
+$L$12break::
+ mov DWORD PTR[72+rdi],12
+ xor rax,rax
+ jmp $L$exit
+
+$L$14rounds::
+ mov rax,QWORD PTR[rsi]
+ mov rbx,QWORD PTR[8+rsi]
+ mov rcx,QWORD PTR[16+rsi]
+ mov rdx,QWORD PTR[24+rsi]
+ mov QWORD PTR[rdi],rax
+ mov QWORD PTR[8+rdi],rbx
+ mov QWORD PTR[16+rdi],rcx
+ mov QWORD PTR[24+rdi],rdx
+
+ shr rdx,32
+ xor ecx,ecx
+ jmp $L$14shortcut
+ALIGN 4
+$L$14loop::
+ mov eax,DWORD PTR[rdi]
+ mov edx,DWORD PTR[28+rdi]
+$L$14shortcut::
+ movzx esi,dl
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shl ebx,16
+ xor eax,ebx
+
+ xor eax,DWORD PTR[((1024-128))+rcx*4+rbp]
+ mov DWORD PTR[32+rdi],eax
+ xor eax,DWORD PTR[4+rdi]
+ mov DWORD PTR[36+rdi],eax
+ xor eax,DWORD PTR[8+rdi]
+ mov DWORD PTR[40+rdi],eax
+ xor eax,DWORD PTR[12+rdi]
+ mov DWORD PTR[44+rdi],eax
+
+ cmp ecx,6
+ je $L$14break
+ add ecx,1
+
+ mov edx,eax
+ mov eax,DWORD PTR[16+rdi]
+ movzx esi,dl
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shr edx,16
+ shl ebx,8
+ movzx esi,dl
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ movzx esi,dh
+ shl ebx,16
+ xor eax,ebx
+
+ movzx ebx,BYTE PTR[((-128))+rsi*1+rbp]
+ shl ebx,24
+ xor eax,ebx
+
+ mov DWORD PTR[48+rdi],eax
+ xor eax,DWORD PTR[20+rdi]
+ mov DWORD PTR[52+rdi],eax
+ xor eax,DWORD PTR[24+rdi]
+ mov DWORD PTR[56+rdi],eax
+ xor eax,DWORD PTR[28+rdi]
+ mov DWORD PTR[60+rdi],eax
+
+ lea rdi,QWORD PTR[32+rdi]
+ jmp $L$14loop
+$L$14break::
+ mov DWORD PTR[48+rdi],14
+ xor rax,rax
+ jmp $L$exit
+
+$L$badpointer::
+ mov rax,-1
+$L$exit::
+DB 0f3h,0c3h
+
+_x86_64_AES_set_encrypt_key ENDP
+PUBLIC AES_set_decrypt_key
+
+ALIGN 16
+AES_set_decrypt_key PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_set_decrypt_key::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ push rdx
+$L$dec_key_prologue::
+
+ call _x86_64_AES_set_encrypt_key
+ mov r8,QWORD PTR[rsp]
+ cmp eax,0
+ jne $L$abort
+
+ mov r14d,DWORD PTR[240+r8]
+ xor rdi,rdi
+ lea rcx,QWORD PTR[r14*4+rdi]
+ mov rsi,r8
+ lea rdi,QWORD PTR[rcx*4+r8]
+ALIGN 4
+$L$invert::
+ mov rax,QWORD PTR[rsi]
+ mov rbx,QWORD PTR[8+rsi]
+ mov rcx,QWORD PTR[rdi]
+ mov rdx,QWORD PTR[8+rdi]
+ mov QWORD PTR[rdi],rax
+ mov QWORD PTR[8+rdi],rbx
+ mov QWORD PTR[rsi],rcx
+ mov QWORD PTR[8+rsi],rdx
+ lea rsi,QWORD PTR[16+rsi]
+ lea rdi,QWORD PTR[((-16))+rdi]
+ cmp rdi,rsi
+ jne $L$invert
+
+ lea rax,QWORD PTR[(($L$AES_Te+2048+1024))]
+
+ mov rsi,QWORD PTR[40+rax]
+ mov rdi,QWORD PTR[48+rax]
+ mov rbp,QWORD PTR[56+rax]
+
+ mov r15,r8
+ sub r14d,1
+ALIGN 4
+$L$permute::
+ lea r15,QWORD PTR[16+r15]
+ mov rax,QWORD PTR[r15]
+ mov rcx,QWORD PTR[8+r15]
+ mov rbx,rax
+ mov rdx,rcx
+ and rbx,rsi
+ and rdx,rsi
+ mov r9,rbx
+ mov r12,rdx
+ shr r9,7
+ lea r8,QWORD PTR[rax*1+rax]
+ shr r12,7
+ lea r11,QWORD PTR[rcx*1+rcx]
+ sub rbx,r9
+ sub rdx,r12
+ and r8,rdi
+ and r11,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor rbx,r8
+ xor rdx,r11
+ mov r8,rbx
+ mov r11,rdx
+
+ and rbx,rsi
+ and rdx,rsi
+ mov r10,rbx
+ mov r13,rdx
+ shr r10,7
+ lea r9,QWORD PTR[r8*1+r8]
+ shr r13,7
+ lea r12,QWORD PTR[r11*1+r11]
+ sub rbx,r10
+ sub rdx,r13
+ and r9,rdi
+ and r12,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor rbx,r9
+ xor rdx,r12
+ mov r9,rbx
+ mov r12,rdx
+
+ and rbx,rsi
+ and rdx,rsi
+ mov r10,rbx
+ mov r13,rdx
+ shr r10,7
+ xor r8,rax
+ shr r13,7
+ xor r11,rcx
+ sub rbx,r10
+ sub rdx,r13
+ lea r10,QWORD PTR[r9*1+r9]
+ lea r13,QWORD PTR[r12*1+r12]
+ xor r9,rax
+ xor r12,rcx
+ and r10,rdi
+ and r13,rdi
+ and rbx,rbp
+ and rdx,rbp
+ xor r10,rbx
+ xor r13,rdx
+
+ xor rax,r10
+ xor rcx,r13
+ xor r8,r10
+ xor r11,r13
+ mov rbx,rax
+ mov rdx,rcx
+ xor r9,r10
+ xor r12,r13
+ shr rbx,32
+ shr rdx,32
+ xor r10,r8
+ xor r13,r11
+ rol eax,8
+ rol ecx,8
+ xor r10,r9
+ xor r13,r12
+
+ rol ebx,8
+ rol edx,8
+ xor eax,r10d
+ xor ecx,r13d
+ shr r10,32
+ shr r13,32
+ xor ebx,r10d
+ xor edx,r13d
+
+ mov r10,r8
+ mov r13,r11
+ shr r10,32
+ shr r13,32
+ rol r8d,24
+ rol r11d,24
+ rol r10d,24
+ rol r13d,24
+ xor eax,r8d
+ xor ecx,r11d
+ mov r8,r9
+ mov r11,r12
+ xor ebx,r10d
+ xor edx,r13d
+
+
+ shr r8,32
+ shr r11,32
+
+ rol r9d,16
+ rol r12d,16
+
+ rol r8d,16
+ rol r11d,16
+
+ xor eax,r9d
+ xor ecx,r12d
+
+ xor ebx,r8d
+ xor edx,r11d
+ mov DWORD PTR[r15],eax
+ mov DWORD PTR[4+r15],ebx
+ mov DWORD PTR[8+r15],ecx
+ mov DWORD PTR[12+r15],edx
+ sub r14d,1
+ jnz $L$permute
+
+ xor rax,rax
+$L$abort::
+ mov r15,QWORD PTR[8+rsp]
+ mov r14,QWORD PTR[16+rsp]
+ mov r13,QWORD PTR[24+rsp]
+ mov r12,QWORD PTR[32+rsp]
+ mov rbp,QWORD PTR[40+rsp]
+ mov rbx,QWORD PTR[48+rsp]
+ add rsp,56
+$L$dec_key_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_AES_set_decrypt_key::
+AES_set_decrypt_key ENDP
+PUBLIC AES_cbc_encrypt
+
+ALIGN 16
+EXTERN OPENSSL_ia32cap_P:NEAR
+AES_cbc_encrypt PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_AES_cbc_encrypt::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD PTR[40+rsp]
+ mov r9,QWORD PTR[48+rsp]
+
+
+ cmp rdx,0
+ je $L$cbc_epilogue
+ pushfq
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+$L$cbc_prologue::
+
+ cld
+ mov r9d,r9d
+
+ lea r14,QWORD PTR[$L$AES_Te]
+ cmp r9,0
+ jne $L$cbc_picked_te
+ lea r14,QWORD PTR[$L$AES_Td]
+$L$cbc_picked_te::
+
+ mov r10d,DWORD PTR[OPENSSL_ia32cap_P]
+ cmp rdx,512
+ jb $L$cbc_slow_prologue
+ test rdx,15
+ jnz $L$cbc_slow_prologue
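+; Fast path: inputs of at least 512 bytes that are a multiple of 16 use
+; the full T-table routines; everything else takes the compact (slow) path.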
+
+
+
+
+ lea r15,QWORD PTR[((-88-248))+rsp]
+ and r15,-64
+
+
+ mov r10,r14
+ lea r11,QWORD PTR[2304+r14]
+ mov r12,r15
+ and r10,0FFFh
+ and r11,0FFFh
+ and r12,0FFFh
+
+ cmp r12,r11
+ jb $L$cbc_te_break_out
+ sub r12,r11
+ sub r15,r12
+ jmp $L$cbc_te_ok
+$L$cbc_te_break_out::
+ sub r12,r10
+ and r12,0FFFh
+ add r12,320
+ sub r15,r12
+ALIGN 4
+$L$cbc_te_ok::
+
+ xchg r15,rsp
+
+ mov QWORD PTR[16+rsp],r15
+$L$cbc_fast_body::
+ mov QWORD PTR[24+rsp],rdi
+ mov QWORD PTR[32+rsp],rsi
+ mov QWORD PTR[40+rsp],rdx
+ mov QWORD PTR[48+rsp],rcx
+ mov QWORD PTR[56+rsp],r8
+ mov DWORD PTR[((80+240))+rsp],0
+ mov rbp,r8
+ mov rbx,r9
+ mov r9,rsi
+ mov r8,rdi
+ mov r15,rcx
+
+ mov eax,DWORD PTR[240+r15]
+
+ mov r10,r15
+ sub r10,r14
+ and r10,0fffh
+ cmp r10,2304
+ jb $L$cbc_do_ecopy
+ cmp r10,4096-248
+ jb $L$cbc_skip_ecopy
+ALIGN 4
+$L$cbc_do_ecopy::
+ mov rsi,r15
+ lea rdi,QWORD PTR[80+rsp]
+ lea r15,QWORD PTR[80+rsp]
+ mov ecx,240/8
+ DD 090A548F3h
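+; the dword above encodes rep movsq + nop (0f3h,048h,0a5h,090h): it copies
+; the 240-byte key schedule onto the stack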
+
+ mov DWORD PTR[rdi],eax
+$L$cbc_skip_ecopy::
+ mov QWORD PTR[rsp],r15
+
+ mov ecx,18
+ALIGN 4
+$L$cbc_prefetch_te::
+ mov r10,QWORD PTR[r14]
+ mov r11,QWORD PTR[32+r14]
+ mov r12,QWORD PTR[64+r14]
+ mov r13,QWORD PTR[96+r14]
+ lea r14,QWORD PTR[128+r14]
+ sub ecx,1
+ jnz $L$cbc_prefetch_te
+ lea r14,QWORD PTR[((-2304))+r14]
+
+ cmp rbx,0
+ je $L$FAST_DECRYPT
+
+
+ mov eax,DWORD PTR[rbp]
+ mov ebx,DWORD PTR[4+rbp]
+ mov ecx,DWORD PTR[8+rbp]
+ mov edx,DWORD PTR[12+rbp]
+
+ALIGN 4
+$L$cbc_fast_enc_loop::
+ xor eax,DWORD PTR[r8]
+ xor ebx,DWORD PTR[4+r8]
+ xor ecx,DWORD PTR[8+r8]
+ xor edx,DWORD PTR[12+r8]
+ mov r15,QWORD PTR[rsp]
+ mov QWORD PTR[24+rsp],r8
+
+ call _x86_64_AES_encrypt
+
+ mov r8,QWORD PTR[24+rsp]
+ mov r10,QWORD PTR[40+rsp]
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ lea r8,QWORD PTR[16+r8]
+ lea r9,QWORD PTR[16+r9]
+ sub r10,16
+ test r10,-16
+ mov QWORD PTR[40+rsp],r10
+ jnz $L$cbc_fast_enc_loop
+ mov rbp,QWORD PTR[56+rsp]
+ mov DWORD PTR[rbp],eax
+ mov DWORD PTR[4+rbp],ebx
+ mov DWORD PTR[8+rbp],ecx
+ mov DWORD PTR[12+rbp],edx
+
+ jmp $L$cbc_fast_cleanup
+
+
+ALIGN 16
+$L$FAST_DECRYPT::
+ cmp r9,r8
+ je $L$cbc_fast_dec_in_place
+
+ mov QWORD PTR[64+rsp],rbp
+ALIGN 4
+$L$cbc_fast_dec_loop::
+ mov eax,DWORD PTR[r8]
+ mov ebx,DWORD PTR[4+r8]
+ mov ecx,DWORD PTR[8+r8]
+ mov edx,DWORD PTR[12+r8]
+ mov r15,QWORD PTR[rsp]
+ mov QWORD PTR[24+rsp],r8
+
+ call _x86_64_AES_decrypt
+
+ mov rbp,QWORD PTR[64+rsp]
+ mov r8,QWORD PTR[24+rsp]
+ mov r10,QWORD PTR[40+rsp]
+ xor eax,DWORD PTR[rbp]
+ xor ebx,DWORD PTR[4+rbp]
+ xor ecx,DWORD PTR[8+rbp]
+ xor edx,DWORD PTR[12+rbp]
+ mov rbp,r8
+
+ sub r10,16
+ mov QWORD PTR[40+rsp],r10
+ mov QWORD PTR[64+rsp],rbp
+
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ lea r8,QWORD PTR[16+r8]
+ lea r9,QWORD PTR[16+r9]
+ jnz $L$cbc_fast_dec_loop
+ mov r12,QWORD PTR[56+rsp]
+ mov r10,QWORD PTR[rbp]
+ mov r11,QWORD PTR[8+rbp]
+ mov QWORD PTR[r12],r10
+ mov QWORD PTR[8+r12],r11
+ jmp $L$cbc_fast_cleanup
+
+ALIGN 16
+$L$cbc_fast_dec_in_place::
+ mov r10,QWORD PTR[rbp]
+ mov r11,QWORD PTR[8+rbp]
+ mov QWORD PTR[((0+64))+rsp],r10
+ mov QWORD PTR[((8+64))+rsp],r11
+ALIGN 4
+$L$cbc_fast_dec_in_place_loop::
+ mov eax,DWORD PTR[r8]
+ mov ebx,DWORD PTR[4+r8]
+ mov ecx,DWORD PTR[8+r8]
+ mov edx,DWORD PTR[12+r8]
+ mov r15,QWORD PTR[rsp]
+ mov QWORD PTR[24+rsp],r8
+
+ call _x86_64_AES_decrypt
+
+ mov r8,QWORD PTR[24+rsp]
+ mov r10,QWORD PTR[40+rsp]
+ xor eax,DWORD PTR[((0+64))+rsp]
+ xor ebx,DWORD PTR[((4+64))+rsp]
+ xor ecx,DWORD PTR[((8+64))+rsp]
+ xor edx,DWORD PTR[((12+64))+rsp]
+
+ mov r11,QWORD PTR[r8]
+ mov r12,QWORD PTR[8+r8]
+ sub r10,16
+ jz $L$cbc_fast_dec_in_place_done
+
+ mov QWORD PTR[((0+64))+rsp],r11
+ mov QWORD PTR[((8+64))+rsp],r12
+
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ lea r8,QWORD PTR[16+r8]
+ lea r9,QWORD PTR[16+r9]
+ mov QWORD PTR[40+rsp],r10
+ jmp $L$cbc_fast_dec_in_place_loop
+$L$cbc_fast_dec_in_place_done::
+ mov rdi,QWORD PTR[56+rsp]
+ mov QWORD PTR[rdi],r11
+ mov QWORD PTR[8+rdi],r12
+
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ALIGN 4
+$L$cbc_fast_cleanup::
+ cmp DWORD PTR[((80+240))+rsp],0
+ lea rdi,QWORD PTR[80+rsp]
+ je $L$cbc_exit
+ mov ecx,240/8
+ xor rax,rax
+ DD 090AB48F3h
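+; the dword above encodes rep stosq + nop: it wipes the on-stack copy of
+; the key schedule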
+
+
+ jmp $L$cbc_exit
+
+
+ALIGN 16
+$L$cbc_slow_prologue::
+
+ lea rbp,QWORD PTR[((-88))+rsp]
+ and rbp,-64
+
+ lea r10,QWORD PTR[((-88-63))+rcx]
+ sub r10,rbp
+ neg r10
+ and r10,03c0h
+ sub rbp,r10
+
+ xchg rbp,rsp
+
+ mov QWORD PTR[16+rsp],rbp
+$L$cbc_slow_body::
+
+
+
+
+ mov QWORD PTR[56+rsp],r8
+ mov rbp,r8
+ mov rbx,r9
+ mov r9,rsi
+ mov r8,rdi
+ mov r15,rcx
+ mov r10,rdx
+
+ mov eax,DWORD PTR[240+r15]
+ mov QWORD PTR[rsp],r15
+ shl eax,4
+ lea rax,QWORD PTR[rax*1+r15]
+ mov QWORD PTR[8+rsp],rax
+
+
+ lea r14,QWORD PTR[2048+r14]
+ lea rax,QWORD PTR[((768-8))+rsp]
+ sub rax,r14
+ and rax,0300h
+ lea r14,QWORD PTR[rax*1+r14]
+
+ cmp rbx,0
+ je $L$SLOW_DECRYPT
+
+
+ test r10,-16
+ mov eax,DWORD PTR[rbp]
+ mov ebx,DWORD PTR[4+rbp]
+ mov ecx,DWORD PTR[8+rbp]
+ mov edx,DWORD PTR[12+rbp]
+ jz $L$cbc_slow_enc_tail
+
+
+ALIGN 4
+$L$cbc_slow_enc_loop::
+ xor eax,DWORD PTR[r8]
+ xor ebx,DWORD PTR[4+r8]
+ xor ecx,DWORD PTR[8+r8]
+ xor edx,DWORD PTR[12+r8]
+ mov r15,QWORD PTR[rsp]
+ mov QWORD PTR[24+rsp],r8
+ mov QWORD PTR[32+rsp],r9
+ mov QWORD PTR[40+rsp],r10
+
+ call _x86_64_AES_encrypt_compact
+
+ mov r8,QWORD PTR[24+rsp]
+ mov r9,QWORD PTR[32+rsp]
+ mov r10,QWORD PTR[40+rsp]
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ lea r8,QWORD PTR[16+r8]
+ lea r9,QWORD PTR[16+r9]
+ sub r10,16
+ test r10,-16
+ jnz $L$cbc_slow_enc_loop
+ test r10,15
+ jnz $L$cbc_slow_enc_tail
+ mov rbp,QWORD PTR[56+rsp]
+ mov DWORD PTR[rbp],eax
+ mov DWORD PTR[4+rbp],ebx
+ mov DWORD PTR[8+rbp],ecx
+ mov DWORD PTR[12+rbp],edx
+
+ jmp $L$cbc_exit
+
+ALIGN 4
+$L$cbc_slow_enc_tail::
+ mov r11,rax
+ mov r12,rcx
+ mov rcx,r10
+ mov rsi,r8
+ mov rdi,r9
+ DD 09066A4F3h
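+; the dword above encodes rep movsb + nop, copying the r10-byte tail; the
+; rep stosb encoded below then zero-pads the block to 16 bytes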
+
+ mov rcx,16
+ sub rcx,r10
+ xor rax,rax
+ DD 09066AAF3h
+
+ mov r8,r9
+ mov r10,16
+ mov rax,r11
+ mov rcx,r12
+ jmp $L$cbc_slow_enc_loop
+
+
+ALIGN 16
+$L$SLOW_DECRYPT::
+ shr rax,3
+ add r14,rax
+
+ mov r11,QWORD PTR[rbp]
+ mov r12,QWORD PTR[8+rbp]
+ mov QWORD PTR[((0+64))+rsp],r11
+ mov QWORD PTR[((8+64))+rsp],r12
+
+ALIGN 4
+$L$cbc_slow_dec_loop::
+ mov eax,DWORD PTR[r8]
+ mov ebx,DWORD PTR[4+r8]
+ mov ecx,DWORD PTR[8+r8]
+ mov edx,DWORD PTR[12+r8]
+ mov r15,QWORD PTR[rsp]
+ mov QWORD PTR[24+rsp],r8
+ mov QWORD PTR[32+rsp],r9
+ mov QWORD PTR[40+rsp],r10
+
+ call _x86_64_AES_decrypt_compact
+
+ mov r8,QWORD PTR[24+rsp]
+ mov r9,QWORD PTR[32+rsp]
+ mov r10,QWORD PTR[40+rsp]
+ xor eax,DWORD PTR[((0+64))+rsp]
+ xor ebx,DWORD PTR[((4+64))+rsp]
+ xor ecx,DWORD PTR[((8+64))+rsp]
+ xor edx,DWORD PTR[((12+64))+rsp]
+
+ mov r11,QWORD PTR[r8]
+ mov r12,QWORD PTR[8+r8]
+ sub r10,16
+ jc $L$cbc_slow_dec_partial
+ jz $L$cbc_slow_dec_done
+
+ mov QWORD PTR[((0+64))+rsp],r11
+ mov QWORD PTR[((8+64))+rsp],r12
+
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ lea r8,QWORD PTR[16+r8]
+ lea r9,QWORD PTR[16+r9]
+ jmp $L$cbc_slow_dec_loop
+$L$cbc_slow_dec_done::
+ mov rdi,QWORD PTR[56+rsp]
+ mov QWORD PTR[rdi],r11
+ mov QWORD PTR[8+rdi],r12
+
+ mov DWORD PTR[r9],eax
+ mov DWORD PTR[4+r9],ebx
+ mov DWORD PTR[8+r9],ecx
+ mov DWORD PTR[12+r9],edx
+
+ jmp $L$cbc_exit
+
+ALIGN 4
+$L$cbc_slow_dec_partial::
+ mov rdi,QWORD PTR[56+rsp]
+ mov QWORD PTR[rdi],r11
+ mov QWORD PTR[8+rdi],r12
+
+ mov DWORD PTR[((0+64))+rsp],eax
+ mov DWORD PTR[((4+64))+rsp],ebx
+ mov DWORD PTR[((8+64))+rsp],ecx
+ mov DWORD PTR[((12+64))+rsp],edx
+
+ mov rdi,r9
+ lea rsi,QWORD PTR[64+rsp]
+ lea rcx,QWORD PTR[16+r10]
+ DD 09066A4F3h
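+; the dword above encodes rep movsb + nop: it appears to copy the partial
+; final plaintext block out to the caller's buffer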
+
+ jmp $L$cbc_exit
+
+ALIGN 16
+$L$cbc_exit::
+ mov rsi,QWORD PTR[16+rsp]
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$cbc_popfq::
+ popfq
+$L$cbc_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_AES_cbc_encrypt::
+AES_cbc_encrypt ENDP
+ALIGN 64
+$L$AES_Te::
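+; Each T-table word is stored twice (8 bytes per slot) so that dword loads
+; at byte offsets 0..3 in the round code yield its four byte rotations.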
+ DD 0a56363c6h,0a56363c6h
+ DD 0847c7cf8h,0847c7cf8h
+ DD 0997777eeh,0997777eeh
+ DD 08d7b7bf6h,08d7b7bf6h
+ DD 00df2f2ffh,00df2f2ffh
+ DD 0bd6b6bd6h,0bd6b6bd6h
+ DD 0b16f6fdeh,0b16f6fdeh
+ DD 054c5c591h,054c5c591h
+ DD 050303060h,050303060h
+ DD 003010102h,003010102h
+ DD 0a96767ceh,0a96767ceh
+ DD 07d2b2b56h,07d2b2b56h
+ DD 019fefee7h,019fefee7h
+ DD 062d7d7b5h,062d7d7b5h
+ DD 0e6abab4dh,0e6abab4dh
+ DD 09a7676ech,09a7676ech
+ DD 045caca8fh,045caca8fh
+ DD 09d82821fh,09d82821fh
+ DD 040c9c989h,040c9c989h
+ DD 0877d7dfah,0877d7dfah
+ DD 015fafaefh,015fafaefh
+ DD 0eb5959b2h,0eb5959b2h
+ DD 0c947478eh,0c947478eh
+ DD 00bf0f0fbh,00bf0f0fbh
+ DD 0ecadad41h,0ecadad41h
+ DD 067d4d4b3h,067d4d4b3h
+ DD 0fda2a25fh,0fda2a25fh
+ DD 0eaafaf45h,0eaafaf45h
+ DD 0bf9c9c23h,0bf9c9c23h
+ DD 0f7a4a453h,0f7a4a453h
+ DD 0967272e4h,0967272e4h
+ DD 05bc0c09bh,05bc0c09bh
+ DD 0c2b7b775h,0c2b7b775h
+ DD 01cfdfde1h,01cfdfde1h
+ DD 0ae93933dh,0ae93933dh
+ DD 06a26264ch,06a26264ch
+ DD 05a36366ch,05a36366ch
+ DD 0413f3f7eh,0413f3f7eh
+ DD 002f7f7f5h,002f7f7f5h
+ DD 04fcccc83h,04fcccc83h
+ DD 05c343468h,05c343468h
+ DD 0f4a5a551h,0f4a5a551h
+ DD 034e5e5d1h,034e5e5d1h
+ DD 008f1f1f9h,008f1f1f9h
+ DD 0937171e2h,0937171e2h
+ DD 073d8d8abh,073d8d8abh
+ DD 053313162h,053313162h
+ DD 03f15152ah,03f15152ah
+ DD 00c040408h,00c040408h
+ DD 052c7c795h,052c7c795h
+ DD 065232346h,065232346h
+ DD 05ec3c39dh,05ec3c39dh
+ DD 028181830h,028181830h
+ DD 0a1969637h,0a1969637h
+ DD 00f05050ah,00f05050ah
+ DD 0b59a9a2fh,0b59a9a2fh
+ DD 00907070eh,00907070eh
+ DD 036121224h,036121224h
+ DD 09b80801bh,09b80801bh
+ DD 03de2e2dfh,03de2e2dfh
+ DD 026ebebcdh,026ebebcdh
+ DD 06927274eh,06927274eh
+ DD 0cdb2b27fh,0cdb2b27fh
+ DD 09f7575eah,09f7575eah
+ DD 01b090912h,01b090912h
+ DD 09e83831dh,09e83831dh
+ DD 0742c2c58h,0742c2c58h
+ DD 02e1a1a34h,02e1a1a34h
+ DD 02d1b1b36h,02d1b1b36h
+ DD 0b26e6edch,0b26e6edch
+ DD 0ee5a5ab4h,0ee5a5ab4h
+ DD 0fba0a05bh,0fba0a05bh
+ DD 0f65252a4h,0f65252a4h
+ DD 04d3b3b76h,04d3b3b76h
+ DD 061d6d6b7h,061d6d6b7h
+ DD 0ceb3b37dh,0ceb3b37dh
+ DD 07b292952h,07b292952h
+ DD 03ee3e3ddh,03ee3e3ddh
+ DD 0712f2f5eh,0712f2f5eh
+ DD 097848413h,097848413h
+ DD 0f55353a6h,0f55353a6h
+ DD 068d1d1b9h,068d1d1b9h
+ DD 000000000h,000000000h
+ DD 02cededc1h,02cededc1h
+ DD 060202040h,060202040h
+ DD 01ffcfce3h,01ffcfce3h
+ DD 0c8b1b179h,0c8b1b179h
+ DD 0ed5b5bb6h,0ed5b5bb6h
+ DD 0be6a6ad4h,0be6a6ad4h
+ DD 046cbcb8dh,046cbcb8dh
+ DD 0d9bebe67h,0d9bebe67h
+ DD 04b393972h,04b393972h
+ DD 0de4a4a94h,0de4a4a94h
+ DD 0d44c4c98h,0d44c4c98h
+ DD 0e85858b0h,0e85858b0h
+ DD 04acfcf85h,04acfcf85h
+ DD 06bd0d0bbh,06bd0d0bbh
+ DD 02aefefc5h,02aefefc5h
+ DD 0e5aaaa4fh,0e5aaaa4fh
+ DD 016fbfbedh,016fbfbedh
+ DD 0c5434386h,0c5434386h
+ DD 0d74d4d9ah,0d74d4d9ah
+ DD 055333366h,055333366h
+ DD 094858511h,094858511h
+ DD 0cf45458ah,0cf45458ah
+ DD 010f9f9e9h,010f9f9e9h
+ DD 006020204h,006020204h
+ DD 0817f7ffeh,0817f7ffeh
+ DD 0f05050a0h,0f05050a0h
+ DD 0443c3c78h,0443c3c78h
+ DD 0ba9f9f25h,0ba9f9f25h
+ DD 0e3a8a84bh,0e3a8a84bh
+ DD 0f35151a2h,0f35151a2h
+ DD 0fea3a35dh,0fea3a35dh
+ DD 0c0404080h,0c0404080h
+ DD 08a8f8f05h,08a8f8f05h
+ DD 0ad92923fh,0ad92923fh
+ DD 0bc9d9d21h,0bc9d9d21h
+ DD 048383870h,048383870h
+ DD 004f5f5f1h,004f5f5f1h
+ DD 0dfbcbc63h,0dfbcbc63h
+ DD 0c1b6b677h,0c1b6b677h
+ DD 075dadaafh,075dadaafh
+ DD 063212142h,063212142h
+ DD 030101020h,030101020h
+ DD 01affffe5h,01affffe5h
+ DD 00ef3f3fdh,00ef3f3fdh
+ DD 06dd2d2bfh,06dd2d2bfh
+ DD 04ccdcd81h,04ccdcd81h
+ DD 0140c0c18h,0140c0c18h
+ DD 035131326h,035131326h
+ DD 02fececc3h,02fececc3h
+ DD 0e15f5fbeh,0e15f5fbeh
+ DD 0a2979735h,0a2979735h
+ DD 0cc444488h,0cc444488h
+ DD 03917172eh,03917172eh
+ DD 057c4c493h,057c4c493h
+ DD 0f2a7a755h,0f2a7a755h
+ DD 0827e7efch,0827e7efch
+ DD 0473d3d7ah,0473d3d7ah
+ DD 0ac6464c8h,0ac6464c8h
+ DD 0e75d5dbah,0e75d5dbah
+ DD 02b191932h,02b191932h
+ DD 0957373e6h,0957373e6h
+ DD 0a06060c0h,0a06060c0h
+ DD 098818119h,098818119h
+ DD 0d14f4f9eh,0d14f4f9eh
+ DD 07fdcdca3h,07fdcdca3h
+ DD 066222244h,066222244h
+ DD 07e2a2a54h,07e2a2a54h
+ DD 0ab90903bh,0ab90903bh
+ DD 08388880bh,08388880bh
+ DD 0ca46468ch,0ca46468ch
+ DD 029eeeec7h,029eeeec7h
+ DD 0d3b8b86bh,0d3b8b86bh
+ DD 03c141428h,03c141428h
+ DD 079dedea7h,079dedea7h
+ DD 0e25e5ebch,0e25e5ebch
+ DD 01d0b0b16h,01d0b0b16h
+ DD 076dbdbadh,076dbdbadh
+ DD 03be0e0dbh,03be0e0dbh
+ DD 056323264h,056323264h
+ DD 04e3a3a74h,04e3a3a74h
+ DD 01e0a0a14h,01e0a0a14h
+ DD 0db494992h,0db494992h
+ DD 00a06060ch,00a06060ch
+ DD 06c242448h,06c242448h
+ DD 0e45c5cb8h,0e45c5cb8h
+ DD 05dc2c29fh,05dc2c29fh
+ DD 06ed3d3bdh,06ed3d3bdh
+ DD 0efacac43h,0efacac43h
+ DD 0a66262c4h,0a66262c4h
+ DD 0a8919139h,0a8919139h
+ DD 0a4959531h,0a4959531h
+ DD 037e4e4d3h,037e4e4d3h
+ DD 08b7979f2h,08b7979f2h
+ DD 032e7e7d5h,032e7e7d5h
+ DD 043c8c88bh,043c8c88bh
+ DD 05937376eh,05937376eh
+ DD 0b76d6ddah,0b76d6ddah
+ DD 08c8d8d01h,08c8d8d01h
+ DD 064d5d5b1h,064d5d5b1h
+ DD 0d24e4e9ch,0d24e4e9ch
+ DD 0e0a9a949h,0e0a9a949h
+ DD 0b46c6cd8h,0b46c6cd8h
+ DD 0fa5656ach,0fa5656ach
+ DD 007f4f4f3h,007f4f4f3h
+ DD 025eaeacfh,025eaeacfh
+ DD 0af6565cah,0af6565cah
+ DD 08e7a7af4h,08e7a7af4h
+ DD 0e9aeae47h,0e9aeae47h
+ DD 018080810h,018080810h
+ DD 0d5baba6fh,0d5baba6fh
+ DD 0887878f0h,0887878f0h
+ DD 06f25254ah,06f25254ah
+ DD 0722e2e5ch,0722e2e5ch
+ DD 0241c1c38h,0241c1c38h
+ DD 0f1a6a657h,0f1a6a657h
+ DD 0c7b4b473h,0c7b4b473h
+ DD 051c6c697h,051c6c697h
+ DD 023e8e8cbh,023e8e8cbh
+ DD 07cdddda1h,07cdddda1h
+ DD 09c7474e8h,09c7474e8h
+ DD 0211f1f3eh,0211f1f3eh
+ DD 0dd4b4b96h,0dd4b4b96h
+ DD 0dcbdbd61h,0dcbdbd61h
+ DD 0868b8b0dh,0868b8b0dh
+ DD 0858a8a0fh,0858a8a0fh
+ DD 0907070e0h,0907070e0h
+ DD 0423e3e7ch,0423e3e7ch
+ DD 0c4b5b571h,0c4b5b571h
+ DD 0aa6666cch,0aa6666cch
+ DD 0d8484890h,0d8484890h
+ DD 005030306h,005030306h
+ DD 001f6f6f7h,001f6f6f7h
+ DD 0120e0e1ch,0120e0e1ch
+ DD 0a36161c2h,0a36161c2h
+ DD 05f35356ah,05f35356ah
+ DD 0f95757aeh,0f95757aeh
+ DD 0d0b9b969h,0d0b9b969h
+ DD 091868617h,091868617h
+ DD 058c1c199h,058c1c199h
+ DD 0271d1d3ah,0271d1d3ah
+ DD 0b99e9e27h,0b99e9e27h
+ DD 038e1e1d9h,038e1e1d9h
+ DD 013f8f8ebh,013f8f8ebh
+ DD 0b398982bh,0b398982bh
+ DD 033111122h,033111122h
+ DD 0bb6969d2h,0bb6969d2h
+ DD 070d9d9a9h,070d9d9a9h
+ DD 0898e8e07h,0898e8e07h
+ DD 0a7949433h,0a7949433h
+ DD 0b69b9b2dh,0b69b9b2dh
+ DD 0221e1e3ch,0221e1e3ch
+ DD 092878715h,092878715h
+ DD 020e9e9c9h,020e9e9c9h
+ DD 049cece87h,049cece87h
+ DD 0ff5555aah,0ff5555aah
+ DD 078282850h,078282850h
+ DD 07adfdfa5h,07adfdfa5h
+ DD 08f8c8c03h,08f8c8c03h
+ DD 0f8a1a159h,0f8a1a159h
+ DD 080898909h,080898909h
+ DD 0170d0d1ah,0170d0d1ah
+ DD 0dabfbf65h,0dabfbf65h
+ DD 031e6e6d7h,031e6e6d7h
+ DD 0c6424284h,0c6424284h
+ DD 0b86868d0h,0b86868d0h
+ DD 0c3414182h,0c3414182h
+ DD 0b0999929h,0b0999929h
+ DD 0772d2d5ah,0772d2d5ah
+ DD 0110f0f1eh,0110f0f1eh
+ DD 0cbb0b07bh,0cbb0b07bh
+ DD 0fc5454a8h,0fc5454a8h
+ DD 0d6bbbb6dh,0d6bbbb6dh
+ DD 03a16162ch,03a16162ch
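+	; Te4: the plain 256-byte AES S-box, stored four times; the CBC code picks
+	; whichever copy cannot alias its stack frame (see the 0300h mask above)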
+DB 063h,07ch,077h,07bh,0f2h,06bh,06fh,0c5h
+DB 030h,001h,067h,02bh,0feh,0d7h,0abh,076h
+DB 0cah,082h,0c9h,07dh,0fah,059h,047h,0f0h
+DB 0adh,0d4h,0a2h,0afh,09ch,0a4h,072h,0c0h
+DB 0b7h,0fdh,093h,026h,036h,03fh,0f7h,0cch
+DB 034h,0a5h,0e5h,0f1h,071h,0d8h,031h,015h
+DB 004h,0c7h,023h,0c3h,018h,096h,005h,09ah
+DB 007h,012h,080h,0e2h,0ebh,027h,0b2h,075h
+DB 009h,083h,02ch,01ah,01bh,06eh,05ah,0a0h
+DB 052h,03bh,0d6h,0b3h,029h,0e3h,02fh,084h
+DB 053h,0d1h,000h,0edh,020h,0fch,0b1h,05bh
+DB 06ah,0cbh,0beh,039h,04ah,04ch,058h,0cfh
+DB 0d0h,0efh,0aah,0fbh,043h,04dh,033h,085h
+DB 045h,0f9h,002h,07fh,050h,03ch,09fh,0a8h
+DB 051h,0a3h,040h,08fh,092h,09dh,038h,0f5h
+DB 0bch,0b6h,0dah,021h,010h,0ffh,0f3h,0d2h
+DB 0cdh,00ch,013h,0ech,05fh,097h,044h,017h
+DB 0c4h,0a7h,07eh,03dh,064h,05dh,019h,073h
+DB 060h,081h,04fh,0dch,022h,02ah,090h,088h
+DB 046h,0eeh,0b8h,014h,0deh,05eh,00bh,0dbh
+DB 0e0h,032h,03ah,00ah,049h,006h,024h,05ch
+DB 0c2h,0d3h,0ach,062h,091h,095h,0e4h,079h
+DB 0e7h,0c8h,037h,06dh,08dh,0d5h,04eh,0a9h
+DB 06ch,056h,0f4h,0eah,065h,07ah,0aeh,008h
+DB 0bah,078h,025h,02eh,01ch,0a6h,0b4h,0c6h
+DB 0e8h,0ddh,074h,01fh,04bh,0bdh,08bh,08ah
+DB 070h,03eh,0b5h,066h,048h,003h,0f6h,00eh
+DB 061h,035h,057h,0b9h,086h,0c1h,01dh,09eh
+DB 0e1h,0f8h,098h,011h,069h,0d9h,08eh,094h
+DB 09bh,01eh,087h,0e9h,0ceh,055h,028h,0dfh
+DB 08ch,0a1h,089h,00dh,0bfh,0e6h,042h,068h
+DB 041h,099h,02dh,00fh,0b0h,054h,0bbh,016h
+DB 063h,07ch,077h,07bh,0f2h,06bh,06fh,0c5h
+DB 030h,001h,067h,02bh,0feh,0d7h,0abh,076h
+DB 0cah,082h,0c9h,07dh,0fah,059h,047h,0f0h
+DB 0adh,0d4h,0a2h,0afh,09ch,0a4h,072h,0c0h
+DB 0b7h,0fdh,093h,026h,036h,03fh,0f7h,0cch
+DB 034h,0a5h,0e5h,0f1h,071h,0d8h,031h,015h
+DB 004h,0c7h,023h,0c3h,018h,096h,005h,09ah
+DB 007h,012h,080h,0e2h,0ebh,027h,0b2h,075h
+DB 009h,083h,02ch,01ah,01bh,06eh,05ah,0a0h
+DB 052h,03bh,0d6h,0b3h,029h,0e3h,02fh,084h
+DB 053h,0d1h,000h,0edh,020h,0fch,0b1h,05bh
+DB 06ah,0cbh,0beh,039h,04ah,04ch,058h,0cfh
+DB 0d0h,0efh,0aah,0fbh,043h,04dh,033h,085h
+DB 045h,0f9h,002h,07fh,050h,03ch,09fh,0a8h
+DB 051h,0a3h,040h,08fh,092h,09dh,038h,0f5h
+DB 0bch,0b6h,0dah,021h,010h,0ffh,0f3h,0d2h
+DB 0cdh,00ch,013h,0ech,05fh,097h,044h,017h
+DB 0c4h,0a7h,07eh,03dh,064h,05dh,019h,073h
+DB 060h,081h,04fh,0dch,022h,02ah,090h,088h
+DB 046h,0eeh,0b8h,014h,0deh,05eh,00bh,0dbh
+DB 0e0h,032h,03ah,00ah,049h,006h,024h,05ch
+DB 0c2h,0d3h,0ach,062h,091h,095h,0e4h,079h
+DB 0e7h,0c8h,037h,06dh,08dh,0d5h,04eh,0a9h
+DB 06ch,056h,0f4h,0eah,065h,07ah,0aeh,008h
+DB 0bah,078h,025h,02eh,01ch,0a6h,0b4h,0c6h
+DB 0e8h,0ddh,074h,01fh,04bh,0bdh,08bh,08ah
+DB 070h,03eh,0b5h,066h,048h,003h,0f6h,00eh
+DB 061h,035h,057h,0b9h,086h,0c1h,01dh,09eh
+DB 0e1h,0f8h,098h,011h,069h,0d9h,08eh,094h
+DB 09bh,01eh,087h,0e9h,0ceh,055h,028h,0dfh
+DB 08ch,0a1h,089h,00dh,0bfh,0e6h,042h,068h
+DB 041h,099h,02dh,00fh,0b0h,054h,0bbh,016h
+DB 063h,07ch,077h,07bh,0f2h,06bh,06fh,0c5h
+DB 030h,001h,067h,02bh,0feh,0d7h,0abh,076h
+DB 0cah,082h,0c9h,07dh,0fah,059h,047h,0f0h
+DB 0adh,0d4h,0a2h,0afh,09ch,0a4h,072h,0c0h
+DB 0b7h,0fdh,093h,026h,036h,03fh,0f7h,0cch
+DB 034h,0a5h,0e5h,0f1h,071h,0d8h,031h,015h
+DB 004h,0c7h,023h,0c3h,018h,096h,005h,09ah
+DB 007h,012h,080h,0e2h,0ebh,027h,0b2h,075h
+DB 009h,083h,02ch,01ah,01bh,06eh,05ah,0a0h
+DB 052h,03bh,0d6h,0b3h,029h,0e3h,02fh,084h
+DB 053h,0d1h,000h,0edh,020h,0fch,0b1h,05bh
+DB 06ah,0cbh,0beh,039h,04ah,04ch,058h,0cfh
+DB 0d0h,0efh,0aah,0fbh,043h,04dh,033h,085h
+DB 045h,0f9h,002h,07fh,050h,03ch,09fh,0a8h
+DB 051h,0a3h,040h,08fh,092h,09dh,038h,0f5h
+DB 0bch,0b6h,0dah,021h,010h,0ffh,0f3h,0d2h
+DB 0cdh,00ch,013h,0ech,05fh,097h,044h,017h
+DB 0c4h,0a7h,07eh,03dh,064h,05dh,019h,073h
+DB 060h,081h,04fh,0dch,022h,02ah,090h,088h
+DB 046h,0eeh,0b8h,014h,0deh,05eh,00bh,0dbh
+DB 0e0h,032h,03ah,00ah,049h,006h,024h,05ch
+DB 0c2h,0d3h,0ach,062h,091h,095h,0e4h,079h
+DB 0e7h,0c8h,037h,06dh,08dh,0d5h,04eh,0a9h
+DB 06ch,056h,0f4h,0eah,065h,07ah,0aeh,008h
+DB 0bah,078h,025h,02eh,01ch,0a6h,0b4h,0c6h
+DB 0e8h,0ddh,074h,01fh,04bh,0bdh,08bh,08ah
+DB 070h,03eh,0b5h,066h,048h,003h,0f6h,00eh
+DB 061h,035h,057h,0b9h,086h,0c1h,01dh,09eh
+DB 0e1h,0f8h,098h,011h,069h,0d9h,08eh,094h
+DB 09bh,01eh,087h,0e9h,0ceh,055h,028h,0dfh
+DB 08ch,0a1h,089h,00dh,0bfh,0e6h,042h,068h
+DB 041h,099h,02dh,00fh,0b0h,054h,0bbh,016h
+DB 063h,07ch,077h,07bh,0f2h,06bh,06fh,0c5h
+DB 030h,001h,067h,02bh,0feh,0d7h,0abh,076h
+DB 0cah,082h,0c9h,07dh,0fah,059h,047h,0f0h
+DB 0adh,0d4h,0a2h,0afh,09ch,0a4h,072h,0c0h
+DB 0b7h,0fdh,093h,026h,036h,03fh,0f7h,0cch
+DB 034h,0a5h,0e5h,0f1h,071h,0d8h,031h,015h
+DB 004h,0c7h,023h,0c3h,018h,096h,005h,09ah
+DB 007h,012h,080h,0e2h,0ebh,027h,0b2h,075h
+DB 009h,083h,02ch,01ah,01bh,06eh,05ah,0a0h
+DB 052h,03bh,0d6h,0b3h,029h,0e3h,02fh,084h
+DB 053h,0d1h,000h,0edh,020h,0fch,0b1h,05bh
+DB 06ah,0cbh,0beh,039h,04ah,04ch,058h,0cfh
+DB 0d0h,0efh,0aah,0fbh,043h,04dh,033h,085h
+DB 045h,0f9h,002h,07fh,050h,03ch,09fh,0a8h
+DB 051h,0a3h,040h,08fh,092h,09dh,038h,0f5h
+DB 0bch,0b6h,0dah,021h,010h,0ffh,0f3h,0d2h
+DB 0cdh,00ch,013h,0ech,05fh,097h,044h,017h
+DB 0c4h,0a7h,07eh,03dh,064h,05dh,019h,073h
+DB 060h,081h,04fh,0dch,022h,02ah,090h,088h
+DB 046h,0eeh,0b8h,014h,0deh,05eh,00bh,0dbh
+DB 0e0h,032h,03ah,00ah,049h,006h,024h,05ch
+DB 0c2h,0d3h,0ach,062h,091h,095h,0e4h,079h
+DB 0e7h,0c8h,037h,06dh,08dh,0d5h,04eh,0a9h
+DB 06ch,056h,0f4h,0eah,065h,07ah,0aeh,008h
+DB 0bah,078h,025h,02eh,01ch,0a6h,0b4h,0c6h
+DB 0e8h,0ddh,074h,01fh,04bh,0bdh,08bh,08ah
+DB 070h,03eh,0b5h,066h,048h,003h,0f6h,00eh
+DB 061h,035h,057h,0b9h,086h,0c1h,01dh,09eh
+DB 0e1h,0f8h,098h,011h,069h,0d9h,08eh,094h
+DB 09bh,01eh,087h,0e9h,0ceh,055h,028h,0dfh
+DB 08ch,0a1h,089h,00dh,0bfh,0e6h,042h,068h
+DB 041h,099h,02dh,00fh,0b0h,054h,0bbh,016h
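+	; AES key-schedule round constants (rcon) and the 80h/0FEh/1Bh masks
+	; used for xtime-style doubling in GF(2^8)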
+ DD 000000001h,000000002h,000000004h,000000008h
+ DD 000000010h,000000020h,000000040h,000000080h
+ DD 00000001bh,000000036h,080808080h,080808080h
+ DD 0fefefefeh,0fefefefeh,01b1b1b1bh,01b1b1b1bh
+ALIGN 64
+$L$AES_Td::
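+	; AES decryption table Td: inverse S-box pre-multiplied through
+	; InvMixColumns, with each dword duplicated as in Te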
+ DD 050a7f451h,050a7f451h
+ DD 05365417eh,05365417eh
+ DD 0c3a4171ah,0c3a4171ah
+ DD 0965e273ah,0965e273ah
+ DD 0cb6bab3bh,0cb6bab3bh
+ DD 0f1459d1fh,0f1459d1fh
+ DD 0ab58faach,0ab58faach
+ DD 09303e34bh,09303e34bh
+ DD 055fa3020h,055fa3020h
+ DD 0f66d76adh,0f66d76adh
+ DD 09176cc88h,09176cc88h
+ DD 0254c02f5h,0254c02f5h
+ DD 0fcd7e54fh,0fcd7e54fh
+ DD 0d7cb2ac5h,0d7cb2ac5h
+ DD 080443526h,080443526h
+ DD 08fa362b5h,08fa362b5h
+ DD 0495ab1deh,0495ab1deh
+ DD 0671bba25h,0671bba25h
+ DD 0980eea45h,0980eea45h
+ DD 0e1c0fe5dh,0e1c0fe5dh
+ DD 002752fc3h,002752fc3h
+ DD 012f04c81h,012f04c81h
+ DD 0a397468dh,0a397468dh
+ DD 0c6f9d36bh,0c6f9d36bh
+ DD 0e75f8f03h,0e75f8f03h
+ DD 0959c9215h,0959c9215h
+ DD 0eb7a6dbfh,0eb7a6dbfh
+ DD 0da595295h,0da595295h
+ DD 02d83bed4h,02d83bed4h
+ DD 0d3217458h,0d3217458h
+ DD 02969e049h,02969e049h
+ DD 044c8c98eh,044c8c98eh
+ DD 06a89c275h,06a89c275h
+ DD 078798ef4h,078798ef4h
+ DD 06b3e5899h,06b3e5899h
+ DD 0dd71b927h,0dd71b927h
+ DD 0b64fe1beh,0b64fe1beh
+ DD 017ad88f0h,017ad88f0h
+ DD 066ac20c9h,066ac20c9h
+ DD 0b43ace7dh,0b43ace7dh
+ DD 0184adf63h,0184adf63h
+ DD 082311ae5h,082311ae5h
+ DD 060335197h,060335197h
+ DD 0457f5362h,0457f5362h
+ DD 0e07764b1h,0e07764b1h
+ DD 084ae6bbbh,084ae6bbbh
+ DD 01ca081feh,01ca081feh
+ DD 0942b08f9h,0942b08f9h
+ DD 058684870h,058684870h
+ DD 019fd458fh,019fd458fh
+ DD 0876cde94h,0876cde94h
+ DD 0b7f87b52h,0b7f87b52h
+ DD 023d373abh,023d373abh
+ DD 0e2024b72h,0e2024b72h
+ DD 0578f1fe3h,0578f1fe3h
+ DD 02aab5566h,02aab5566h
+ DD 00728ebb2h,00728ebb2h
+ DD 003c2b52fh,003c2b52fh
+ DD 09a7bc586h,09a7bc586h
+ DD 0a50837d3h,0a50837d3h
+ DD 0f2872830h,0f2872830h
+ DD 0b2a5bf23h,0b2a5bf23h
+ DD 0ba6a0302h,0ba6a0302h
+ DD 05c8216edh,05c8216edh
+ DD 02b1ccf8ah,02b1ccf8ah
+ DD 092b479a7h,092b479a7h
+ DD 0f0f207f3h,0f0f207f3h
+ DD 0a1e2694eh,0a1e2694eh
+ DD 0cdf4da65h,0cdf4da65h
+ DD 0d5be0506h,0d5be0506h
+ DD 01f6234d1h,01f6234d1h
+ DD 08afea6c4h,08afea6c4h
+ DD 09d532e34h,09d532e34h
+ DD 0a055f3a2h,0a055f3a2h
+ DD 032e18a05h,032e18a05h
+ DD 075ebf6a4h,075ebf6a4h
+ DD 039ec830bh,039ec830bh
+ DD 0aaef6040h,0aaef6040h
+ DD 0069f715eh,0069f715eh
+ DD 051106ebdh,051106ebdh
+ DD 0f98a213eh,0f98a213eh
+ DD 03d06dd96h,03d06dd96h
+ DD 0ae053eddh,0ae053eddh
+ DD 046bde64dh,046bde64dh
+ DD 0b58d5491h,0b58d5491h
+ DD 0055dc471h,0055dc471h
+ DD 06fd40604h,06fd40604h
+ DD 0ff155060h,0ff155060h
+ DD 024fb9819h,024fb9819h
+ DD 097e9bdd6h,097e9bdd6h
+ DD 0cc434089h,0cc434089h
+ DD 0779ed967h,0779ed967h
+ DD 0bd42e8b0h,0bd42e8b0h
+ DD 0888b8907h,0888b8907h
+ DD 0385b19e7h,0385b19e7h
+ DD 0dbeec879h,0dbeec879h
+ DD 0470a7ca1h,0470a7ca1h
+ DD 0e90f427ch,0e90f427ch
+ DD 0c91e84f8h,0c91e84f8h
+ DD 000000000h,000000000h
+ DD 083868009h,083868009h
+ DD 048ed2b32h,048ed2b32h
+ DD 0ac70111eh,0ac70111eh
+ DD 04e725a6ch,04e725a6ch
+ DD 0fbff0efdh,0fbff0efdh
+ DD 05638850fh,05638850fh
+ DD 01ed5ae3dh,01ed5ae3dh
+ DD 027392d36h,027392d36h
+ DD 064d90f0ah,064d90f0ah
+ DD 021a65c68h,021a65c68h
+ DD 0d1545b9bh,0d1545b9bh
+ DD 03a2e3624h,03a2e3624h
+ DD 0b1670a0ch,0b1670a0ch
+ DD 00fe75793h,00fe75793h
+ DD 0d296eeb4h,0d296eeb4h
+ DD 09e919b1bh,09e919b1bh
+ DD 04fc5c080h,04fc5c080h
+ DD 0a220dc61h,0a220dc61h
+ DD 0694b775ah,0694b775ah
+ DD 0161a121ch,0161a121ch
+ DD 00aba93e2h,00aba93e2h
+ DD 0e52aa0c0h,0e52aa0c0h
+ DD 043e0223ch,043e0223ch
+ DD 01d171b12h,01d171b12h
+ DD 00b0d090eh,00b0d090eh
+ DD 0adc78bf2h,0adc78bf2h
+ DD 0b9a8b62dh,0b9a8b62dh
+ DD 0c8a91e14h,0c8a91e14h
+ DD 08519f157h,08519f157h
+ DD 04c0775afh,04c0775afh
+ DD 0bbdd99eeh,0bbdd99eeh
+ DD 0fd607fa3h,0fd607fa3h
+ DD 09f2601f7h,09f2601f7h
+ DD 0bcf5725ch,0bcf5725ch
+ DD 0c53b6644h,0c53b6644h
+ DD 0347efb5bh,0347efb5bh
+ DD 07629438bh,07629438bh
+ DD 0dcc623cbh,0dcc623cbh
+ DD 068fcedb6h,068fcedb6h
+ DD 063f1e4b8h,063f1e4b8h
+ DD 0cadc31d7h,0cadc31d7h
+ DD 010856342h,010856342h
+ DD 040229713h,040229713h
+ DD 02011c684h,02011c684h
+ DD 07d244a85h,07d244a85h
+ DD 0f83dbbd2h,0f83dbbd2h
+ DD 01132f9aeh,01132f9aeh
+ DD 06da129c7h,06da129c7h
+ DD 04b2f9e1dh,04b2f9e1dh
+ DD 0f330b2dch,0f330b2dch
+ DD 0ec52860dh,0ec52860dh
+ DD 0d0e3c177h,0d0e3c177h
+ DD 06c16b32bh,06c16b32bh
+ DD 099b970a9h,099b970a9h
+ DD 0fa489411h,0fa489411h
+ DD 02264e947h,02264e947h
+ DD 0c48cfca8h,0c48cfca8h
+ DD 01a3ff0a0h,01a3ff0a0h
+ DD 0d82c7d56h,0d82c7d56h
+ DD 0ef903322h,0ef903322h
+ DD 0c74e4987h,0c74e4987h
+ DD 0c1d138d9h,0c1d138d9h
+ DD 0fea2ca8ch,0fea2ca8ch
+ DD 0360bd498h,0360bd498h
+ DD 0cf81f5a6h,0cf81f5a6h
+ DD 028de7aa5h,028de7aa5h
+ DD 0268eb7dah,0268eb7dah
+ DD 0a4bfad3fh,0a4bfad3fh
+ DD 0e49d3a2ch,0e49d3a2ch
+ DD 00d927850h,00d927850h
+ DD 09bcc5f6ah,09bcc5f6ah
+ DD 062467e54h,062467e54h
+ DD 0c2138df6h,0c2138df6h
+ DD 0e8b8d890h,0e8b8d890h
+ DD 05ef7392eh,05ef7392eh
+ DD 0f5afc382h,0f5afc382h
+ DD 0be805d9fh,0be805d9fh
+ DD 07c93d069h,07c93d069h
+ DD 0a92dd56fh,0a92dd56fh
+ DD 0b31225cfh,0b31225cfh
+ DD 03b99acc8h,03b99acc8h
+ DD 0a77d1810h,0a77d1810h
+ DD 06e639ce8h,06e639ce8h
+ DD 07bbb3bdbh,07bbb3bdbh
+ DD 0097826cdh,0097826cdh
+ DD 0f418596eh,0f418596eh
+ DD 001b79aech,001b79aech
+ DD 0a89a4f83h,0a89a4f83h
+ DD 0656e95e6h,0656e95e6h
+ DD 07ee6ffaah,07ee6ffaah
+ DD 008cfbc21h,008cfbc21h
+ DD 0e6e815efh,0e6e815efh
+ DD 0d99be7bah,0d99be7bah
+ DD 0ce366f4ah,0ce366f4ah
+ DD 0d4099feah,0d4099feah
+ DD 0d67cb029h,0d67cb029h
+ DD 0afb2a431h,0afb2a431h
+ DD 031233f2ah,031233f2ah
+ DD 03094a5c6h,03094a5c6h
+ DD 0c066a235h,0c066a235h
+ DD 037bc4e74h,037bc4e74h
+ DD 0a6ca82fch,0a6ca82fch
+ DD 0b0d090e0h,0b0d090e0h
+ DD 015d8a733h,015d8a733h
+ DD 04a9804f1h,04a9804f1h
+ DD 0f7daec41h,0f7daec41h
+ DD 00e50cd7fh,00e50cd7fh
+ DD 02ff69117h,02ff69117h
+ DD 08dd64d76h,08dd64d76h
+ DD 04db0ef43h,04db0ef43h
+ DD 0544daacch,0544daacch
+ DD 0df0496e4h,0df0496e4h
+ DD 0e3b5d19eh,0e3b5d19eh
+ DD 01b886a4ch,01b886a4ch
+ DD 0b81f2cc1h,0b81f2cc1h
+ DD 07f516546h,07f516546h
+ DD 004ea5e9dh,004ea5e9dh
+ DD 05d358c01h,05d358c01h
+ DD 0737487fah,0737487fah
+ DD 02e410bfbh,02e410bfbh
+ DD 05a1d67b3h,05a1d67b3h
+ DD 052d2db92h,052d2db92h
+ DD 0335610e9h,0335610e9h
+ DD 01347d66dh,01347d66dh
+ DD 08c61d79ah,08c61d79ah
+ DD 07a0ca137h,07a0ca137h
+ DD 08e14f859h,08e14f859h
+ DD 0893c13ebh,0893c13ebh
+ DD 0ee27a9ceh,0ee27a9ceh
+ DD 035c961b7h,035c961b7h
+ DD 0ede51ce1h,0ede51ce1h
+ DD 03cb1477ah,03cb1477ah
+ DD 059dfd29ch,059dfd29ch
+ DD 03f73f255h,03f73f255h
+ DD 079ce1418h,079ce1418h
+ DD 0bf37c773h,0bf37c773h
+ DD 0eacdf753h,0eacdf753h
+ DD 05baafd5fh,05baafd5fh
+ DD 0146f3ddfh,0146f3ddfh
+ DD 086db4478h,086db4478h
+ DD 081f3afcah,081f3afcah
+ DD 03ec468b9h,03ec468b9h
+ DD 02c342438h,02c342438h
+ DD 05f40a3c2h,05f40a3c2h
+ DD 072c31d16h,072c31d16h
+ DD 00c25e2bch,00c25e2bch
+ DD 08b493c28h,08b493c28h
+ DD 041950dffh,041950dffh
+ DD 07101a839h,07101a839h
+ DD 0deb30c08h,0deb30c08h
+ DD 09ce4b4d8h,09ce4b4d8h
+ DD 090c15664h,090c15664h
+ DD 06184cb7bh,06184cb7bh
+ DD 070b632d5h,070b632d5h
+ DD 0745c6c48h,0745c6c48h
+ DD 04257b8d0h,04257b8d0h
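+	; Td4: the 256-byte inverse S-box interleaved with the xtime masks,
+	; four copies as with Te4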
+DB 052h,009h,06ah,0d5h,030h,036h,0a5h,038h
+DB 0bfh,040h,0a3h,09eh,081h,0f3h,0d7h,0fbh
+DB 07ch,0e3h,039h,082h,09bh,02fh,0ffh,087h
+DB 034h,08eh,043h,044h,0c4h,0deh,0e9h,0cbh
+DB 054h,07bh,094h,032h,0a6h,0c2h,023h,03dh
+DB 0eeh,04ch,095h,00bh,042h,0fah,0c3h,04eh
+DB 008h,02eh,0a1h,066h,028h,0d9h,024h,0b2h
+DB 076h,05bh,0a2h,049h,06dh,08bh,0d1h,025h
+DB 072h,0f8h,0f6h,064h,086h,068h,098h,016h
+DB 0d4h,0a4h,05ch,0cch,05dh,065h,0b6h,092h
+DB 06ch,070h,048h,050h,0fdh,0edh,0b9h,0dah
+DB 05eh,015h,046h,057h,0a7h,08dh,09dh,084h
+DB 090h,0d8h,0abh,000h,08ch,0bch,0d3h,00ah
+DB 0f7h,0e4h,058h,005h,0b8h,0b3h,045h,006h
+DB 0d0h,02ch,01eh,08fh,0cah,03fh,00fh,002h
+DB 0c1h,0afh,0bdh,003h,001h,013h,08ah,06bh
+DB 03ah,091h,011h,041h,04fh,067h,0dch,0eah
+DB 097h,0f2h,0cfh,0ceh,0f0h,0b4h,0e6h,073h
+DB 096h,0ach,074h,022h,0e7h,0adh,035h,085h
+DB 0e2h,0f9h,037h,0e8h,01ch,075h,0dfh,06eh
+DB 047h,0f1h,01ah,071h,01dh,029h,0c5h,089h
+DB 06fh,0b7h,062h,00eh,0aah,018h,0beh,01bh
+DB 0fch,056h,03eh,04bh,0c6h,0d2h,079h,020h
+DB 09ah,0dbh,0c0h,0feh,078h,0cdh,05ah,0f4h
+DB 01fh,0ddh,0a8h,033h,088h,007h,0c7h,031h
+DB 0b1h,012h,010h,059h,027h,080h,0ech,05fh
+DB 060h,051h,07fh,0a9h,019h,0b5h,04ah,00dh
+DB 02dh,0e5h,07ah,09fh,093h,0c9h,09ch,0efh
+DB 0a0h,0e0h,03bh,04dh,0aeh,02ah,0f5h,0b0h
+DB 0c8h,0ebh,0bbh,03ch,083h,053h,099h,061h
+DB 017h,02bh,004h,07eh,0bah,077h,0d6h,026h
+DB 0e1h,069h,014h,063h,055h,021h,00ch,07dh
+ DD 080808080h,080808080h,0fefefefeh,0fefefefeh
+ DD 01b1b1b1bh,01b1b1b1bh,0,0
+DB 052h,009h,06ah,0d5h,030h,036h,0a5h,038h
+DB 0bfh,040h,0a3h,09eh,081h,0f3h,0d7h,0fbh
+DB 07ch,0e3h,039h,082h,09bh,02fh,0ffh,087h
+DB 034h,08eh,043h,044h,0c4h,0deh,0e9h,0cbh
+DB 054h,07bh,094h,032h,0a6h,0c2h,023h,03dh
+DB 0eeh,04ch,095h,00bh,042h,0fah,0c3h,04eh
+DB 008h,02eh,0a1h,066h,028h,0d9h,024h,0b2h
+DB 076h,05bh,0a2h,049h,06dh,08bh,0d1h,025h
+DB 072h,0f8h,0f6h,064h,086h,068h,098h,016h
+DB 0d4h,0a4h,05ch,0cch,05dh,065h,0b6h,092h
+DB 06ch,070h,048h,050h,0fdh,0edh,0b9h,0dah
+DB 05eh,015h,046h,057h,0a7h,08dh,09dh,084h
+DB 090h,0d8h,0abh,000h,08ch,0bch,0d3h,00ah
+DB 0f7h,0e4h,058h,005h,0b8h,0b3h,045h,006h
+DB 0d0h,02ch,01eh,08fh,0cah,03fh,00fh,002h
+DB 0c1h,0afh,0bdh,003h,001h,013h,08ah,06bh
+DB 03ah,091h,011h,041h,04fh,067h,0dch,0eah
+DB 097h,0f2h,0cfh,0ceh,0f0h,0b4h,0e6h,073h
+DB 096h,0ach,074h,022h,0e7h,0adh,035h,085h
+DB 0e2h,0f9h,037h,0e8h,01ch,075h,0dfh,06eh
+DB 047h,0f1h,01ah,071h,01dh,029h,0c5h,089h
+DB 06fh,0b7h,062h,00eh,0aah,018h,0beh,01bh
+DB 0fch,056h,03eh,04bh,0c6h,0d2h,079h,020h
+DB 09ah,0dbh,0c0h,0feh,078h,0cdh,05ah,0f4h
+DB 01fh,0ddh,0a8h,033h,088h,007h,0c7h,031h
+DB 0b1h,012h,010h,059h,027h,080h,0ech,05fh
+DB 060h,051h,07fh,0a9h,019h,0b5h,04ah,00dh
+DB 02dh,0e5h,07ah,09fh,093h,0c9h,09ch,0efh
+DB 0a0h,0e0h,03bh,04dh,0aeh,02ah,0f5h,0b0h
+DB 0c8h,0ebh,0bbh,03ch,083h,053h,099h,061h
+DB 017h,02bh,004h,07eh,0bah,077h,0d6h,026h
+DB 0e1h,069h,014h,063h,055h,021h,00ch,07dh
+ DD 080808080h,080808080h,0fefefefeh,0fefefefeh
+ DD 01b1b1b1bh,01b1b1b1bh,0,0
+DB 052h,009h,06ah,0d5h,030h,036h,0a5h,038h
+DB 0bfh,040h,0a3h,09eh,081h,0f3h,0d7h,0fbh
+DB 07ch,0e3h,039h,082h,09bh,02fh,0ffh,087h
+DB 034h,08eh,043h,044h,0c4h,0deh,0e9h,0cbh
+DB 054h,07bh,094h,032h,0a6h,0c2h,023h,03dh
+DB 0eeh,04ch,095h,00bh,042h,0fah,0c3h,04eh
+DB 008h,02eh,0a1h,066h,028h,0d9h,024h,0b2h
+DB 076h,05bh,0a2h,049h,06dh,08bh,0d1h,025h
+DB 072h,0f8h,0f6h,064h,086h,068h,098h,016h
+DB 0d4h,0a4h,05ch,0cch,05dh,065h,0b6h,092h
+DB 06ch,070h,048h,050h,0fdh,0edh,0b9h,0dah
+DB 05eh,015h,046h,057h,0a7h,08dh,09dh,084h
+DB 090h,0d8h,0abh,000h,08ch,0bch,0d3h,00ah
+DB 0f7h,0e4h,058h,005h,0b8h,0b3h,045h,006h
+DB 0d0h,02ch,01eh,08fh,0cah,03fh,00fh,002h
+DB 0c1h,0afh,0bdh,003h,001h,013h,08ah,06bh
+DB 03ah,091h,011h,041h,04fh,067h,0dch,0eah
+DB 097h,0f2h,0cfh,0ceh,0f0h,0b4h,0e6h,073h
+DB 096h,0ach,074h,022h,0e7h,0adh,035h,085h
+DB 0e2h,0f9h,037h,0e8h,01ch,075h,0dfh,06eh
+DB 047h,0f1h,01ah,071h,01dh,029h,0c5h,089h
+DB 06fh,0b7h,062h,00eh,0aah,018h,0beh,01bh
+DB 0fch,056h,03eh,04bh,0c6h,0d2h,079h,020h
+DB 09ah,0dbh,0c0h,0feh,078h,0cdh,05ah,0f4h
+DB 01fh,0ddh,0a8h,033h,088h,007h,0c7h,031h
+DB 0b1h,012h,010h,059h,027h,080h,0ech,05fh
+DB 060h,051h,07fh,0a9h,019h,0b5h,04ah,00dh
+DB 02dh,0e5h,07ah,09fh,093h,0c9h,09ch,0efh
+DB 0a0h,0e0h,03bh,04dh,0aeh,02ah,0f5h,0b0h
+DB 0c8h,0ebh,0bbh,03ch,083h,053h,099h,061h
+DB 017h,02bh,004h,07eh,0bah,077h,0d6h,026h
+DB 0e1h,069h,014h,063h,055h,021h,00ch,07dh
+ DD 080808080h,080808080h,0fefefefeh,0fefefefeh
+ DD 01b1b1b1bh,01b1b1b1bh,0,0
+DB 052h,009h,06ah,0d5h,030h,036h,0a5h,038h
+DB 0bfh,040h,0a3h,09eh,081h,0f3h,0d7h,0fbh
+DB 07ch,0e3h,039h,082h,09bh,02fh,0ffh,087h
+DB 034h,08eh,043h,044h,0c4h,0deh,0e9h,0cbh
+DB 054h,07bh,094h,032h,0a6h,0c2h,023h,03dh
+DB 0eeh,04ch,095h,00bh,042h,0fah,0c3h,04eh
+DB 008h,02eh,0a1h,066h,028h,0d9h,024h,0b2h
+DB 076h,05bh,0a2h,049h,06dh,08bh,0d1h,025h
+DB 072h,0f8h,0f6h,064h,086h,068h,098h,016h
+DB 0d4h,0a4h,05ch,0cch,05dh,065h,0b6h,092h
+DB 06ch,070h,048h,050h,0fdh,0edh,0b9h,0dah
+DB 05eh,015h,046h,057h,0a7h,08dh,09dh,084h
+DB 090h,0d8h,0abh,000h,08ch,0bch,0d3h,00ah
+DB 0f7h,0e4h,058h,005h,0b8h,0b3h,045h,006h
+DB 0d0h,02ch,01eh,08fh,0cah,03fh,00fh,002h
+DB 0c1h,0afh,0bdh,003h,001h,013h,08ah,06bh
+DB 03ah,091h,011h,041h,04fh,067h,0dch,0eah
+DB 097h,0f2h,0cfh,0ceh,0f0h,0b4h,0e6h,073h
+DB 096h,0ach,074h,022h,0e7h,0adh,035h,085h
+DB 0e2h,0f9h,037h,0e8h,01ch,075h,0dfh,06eh
+DB 047h,0f1h,01ah,071h,01dh,029h,0c5h,089h
+DB 06fh,0b7h,062h,00eh,0aah,018h,0beh,01bh
+DB 0fch,056h,03eh,04bh,0c6h,0d2h,079h,020h
+DB 09ah,0dbh,0c0h,0feh,078h,0cdh,05ah,0f4h
+DB 01fh,0ddh,0a8h,033h,088h,007h,0c7h,031h
+DB 0b1h,012h,010h,059h,027h,080h,0ech,05fh
+DB 060h,051h,07fh,0a9h,019h,0b5h,04ah,00dh
+DB 02dh,0e5h,07ah,09fh,093h,0c9h,09ch,0efh
+DB 0a0h,0e0h,03bh,04dh,0aeh,02ah,0f5h,0b0h
+DB 0c8h,0ebh,0bbh,03ch,083h,053h,099h,061h
+DB 017h,02bh,004h,07eh,0bah,077h,0d6h,026h
+DB 0e1h,069h,014h,063h,055h,021h,00ch,07dh
+ DD 080808080h,080808080h,0fefefefeh,0fefefefeh
+ DD 01b1b1b1bh,01b1b1b1bh,0,0
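+	; the DB run below spells "AES for x86_64, CRYPTOGAMS by <appro@openssl.org>"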
+DB 65,69,83,32,102,111,114,32,120,56,54,95,54,52,44,32
+DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+DB 62,0
+ALIGN 64
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+block_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ mov rsi,QWORD PTR[8+r9]
+ mov r11,QWORD PTR[56+r9]
+
+ mov r10d,DWORD PTR[r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jb $L$in_block_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ mov r10d,DWORD PTR[4+r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jae $L$in_block_prologue
+
+ mov rax,QWORD PTR[24+rax]
+ lea rax,QWORD PTR[48+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_block_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ jmp $L$common_seh_exit
+block_se_handler ENDP
+
+
+ALIGN 16
+key_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ mov rsi,QWORD PTR[8+r9]
+ mov r11,QWORD PTR[56+r9]
+
+ mov r10d,DWORD PTR[r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jb $L$in_key_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ mov r10d,DWORD PTR[4+r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jae $L$in_key_prologue
+
+ lea rax,QWORD PTR[56+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_key_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ jmp $L$common_seh_exit
+key_se_handler ENDP
+
+
+ALIGN 16
+cbc_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$cbc_prologue]
+ cmp rbx,r10
+ jb $L$in_cbc_prologue
+
+ lea r10,QWORD PTR[$L$cbc_fast_body]
+ cmp rbx,r10
+ jb $L$in_cbc_frame_setup
+
+ lea r10,QWORD PTR[$L$cbc_slow_prologue]
+ cmp rbx,r10
+ jb $L$in_cbc_body
+
+ lea r10,QWORD PTR[$L$cbc_slow_body]
+ cmp rbx,r10
+ jb $L$in_cbc_frame_setup
+
+$L$in_cbc_body::
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$cbc_epilogue]
+ cmp rbx,r10
+ jae $L$in_cbc_prologue
+
+ lea rax,QWORD PTR[8+rax]
+
+ lea r10,QWORD PTR[$L$cbc_popfq]
+ cmp rbx,r10
+ jae $L$in_cbc_prologue
+
+ mov rax,QWORD PTR[8+rax]
+ lea rax,QWORD PTR[56+rax]
+
+$L$in_cbc_frame_setup::
+ mov rbx,QWORD PTR[((-16))+rax]
+ mov rbp,QWORD PTR[((-24))+rax]
+ mov r12,QWORD PTR[((-32))+rax]
+ mov r13,QWORD PTR[((-40))+rax]
+ mov r14,QWORD PTR[((-48))+rax]
+ mov r15,QWORD PTR[((-56))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_cbc_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+$L$common_seh_exit::
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
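+	; the DD below encodes "cld; rep movsq": copy the 154-qword (1232-byte) CONTEXT record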
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+cbc_se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_AES_encrypt
+ DD imagerel $L$SEH_end_AES_encrypt
+ DD imagerel $L$SEH_info_AES_encrypt
+
+ DD imagerel $L$SEH_begin_AES_decrypt
+ DD imagerel $L$SEH_end_AES_decrypt
+ DD imagerel $L$SEH_info_AES_decrypt
+
+ DD imagerel $L$SEH_begin_AES_set_encrypt_key
+ DD imagerel $L$SEH_end_AES_set_encrypt_key
+ DD imagerel $L$SEH_info_AES_set_encrypt_key
+
+ DD imagerel $L$SEH_begin_AES_set_decrypt_key
+ DD imagerel $L$SEH_end_AES_set_decrypt_key
+ DD imagerel $L$SEH_info_AES_set_decrypt_key
+
+ DD imagerel $L$SEH_begin_AES_cbc_encrypt
+ DD imagerel $L$SEH_end_AES_cbc_encrypt
+ DD imagerel $L$SEH_info_AES_cbc_encrypt
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_AES_encrypt::
+DB 9,0,0,0
+ DD imagerel block_se_handler
+ DD imagerel $L$enc_prologue,imagerel $L$enc_epilogue
+
+$L$SEH_info_AES_decrypt::
+DB 9,0,0,0
+ DD imagerel block_se_handler
+ DD imagerel $L$dec_prologue,imagerel $L$dec_epilogue
+
+$L$SEH_info_AES_set_encrypt_key::
+DB 9,0,0,0
+ DD imagerel key_se_handler
+ DD imagerel $L$enc_key_prologue,imagerel $L$enc_key_epilogue
+
+$L$SEH_info_AES_set_decrypt_key::
+DB 9,0,0,0
+ DD imagerel key_se_handler
+ DD imagerel $L$dec_key_prologue,imagerel $L$dec_key_epilogue
+
+$L$SEH_info_AES_cbc_encrypt::
+DB 9,0,0,0
+ DD imagerel cbc_se_handler
+
+.xdata ENDS
+END
diff --git a/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm b/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
new file mode 100644
index 0000000000..9e54d88953
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
@@ -0,0 +1,293 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+PUBLIC bn_mul_mont
+
+ALIGN 16
+bn_mul_mont PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_bn_mul_mont::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD PTR[40+rsp]
+ mov r9,QWORD PTR[48+rsp]
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+
+ mov r9d,r9d
+ lea r10,QWORD PTR[2+r9]
+ mov r11,rsp
+ neg r10
+ lea rsp,QWORD PTR[r10*8+rsp]
+ and rsp,-1024
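+	; the scratch area is now 1KB-aligned, presumably to minimize TLB and cache-set aliasing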
+
+ mov QWORD PTR[8+r9*8+rsp],r11
+$L$prologue::
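+	; word-serial Montgomery multiplication: tp[] on the stack accumulates
+	; a*b[i], and each outer pass folds in m*N with m = tp[0]*n0 mod 2^64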
+ mov r12,rdx
+
+ mov r8,QWORD PTR[r8]
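+	; r8 = n0 = -N^(-1) mod 2^64, dereferenced from the fifth argument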
+
+ xor r14,r14
+ xor r15,r15
+
+ mov rbx,QWORD PTR[r12]
+ mov rax,QWORD PTR[rsi]
+ mul rbx
+ mov r10,rax
+ mov r11,rdx
+
+ imul rax,r8
+ mov rbp,rax
+
+ mul QWORD PTR[rcx]
+ add rax,r10
+ adc rdx,0
+ mov r13,rdx
+
+ lea r15,QWORD PTR[1+r15]
+$L$1st::
+ mov rax,QWORD PTR[r15*8+rsi]
+ mul rbx
+ add rax,r11
+ adc rdx,0
+ mov r10,rax
+ mov rax,QWORD PTR[r15*8+rcx]
+ mov r11,rdx
+
+ mul rbp
+ add rax,r13
+ lea r15,QWORD PTR[1+r15]
+ adc rdx,0
+ add rax,r10
+ adc rdx,0
+ mov QWORD PTR[((-16))+r15*8+rsp],rax
+ cmp r15,r9
+ mov r13,rdx
+ jl $L$1st
+
+ xor rdx,rdx
+ add r13,r11
+ adc rdx,0
+ mov QWORD PTR[((-8))+r9*8+rsp],r13
+ mov QWORD PTR[r9*8+rsp],rdx
+
+ lea r14,QWORD PTR[1+r14]
+ALIGN 4
+$L$outer::
+ xor r15,r15
+
+ mov rbx,QWORD PTR[r14*8+r12]
+ mov rax,QWORD PTR[rsi]
+ mul rbx
+ add rax,QWORD PTR[rsp]
+ adc rdx,0
+ mov r10,rax
+ mov r11,rdx
+
+ imul rax,r8
+ mov rbp,rax
+
+ mul QWORD PTR[r15*8+rcx]
+ add rax,r10
+ mov r10,QWORD PTR[8+rsp]
+ adc rdx,0
+ mov r13,rdx
+
+ lea r15,QWORD PTR[1+r15]
+ALIGN 4
+$L$inner::
+ mov rax,QWORD PTR[r15*8+rsi]
+ mul rbx
+ add rax,r11
+ adc rdx,0
+ add r10,rax
+ mov rax,QWORD PTR[r15*8+rcx]
+ adc rdx,0
+ mov r11,rdx
+
+ mul rbp
+ add rax,r13
+ lea r15,QWORD PTR[1+r15]
+ adc rdx,0
+ add rax,r10
+ adc rdx,0
+ mov r10,QWORD PTR[r15*8+rsp]
+ cmp r15,r9
+ mov QWORD PTR[((-16))+r15*8+rsp],rax
+ mov r13,rdx
+ jl $L$inner
+
+ xor rdx,rdx
+ add r13,r11
+ adc rdx,0
+ add r13,r10
+ adc rdx,0
+ mov QWORD PTR[((-8))+r9*8+rsp],r13
+ mov QWORD PTR[r9*8+rsp],rdx
+
+ lea r14,QWORD PTR[1+r14]
+ cmp r14,r9
+ jl $L$outer
+
+ lea rsi,QWORD PTR[rsp]
+ lea r15,QWORD PTR[((-1))+r9]
+
+ mov rax,QWORD PTR[rsi]
+ xor r14,r14
+ jmp $L$sub
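+	; final reduction: compute tp - N into rp, then use the borrow to select
+	; tp or tp - N branchlessly and copy the result out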
+ALIGN 16
+$L$sub:: sbb rax,QWORD PTR[r14*8+rcx]
+ mov QWORD PTR[r14*8+rdi],rax
+ dec r15
+ mov rax,QWORD PTR[8+r14*8+rsi]
+ lea r14,QWORD PTR[1+r14]
+ jge $L$sub
+
+ sbb rax,0
+ and rsi,rax
+ not rax
+ mov rcx,rdi
+ and rcx,rax
+ lea r15,QWORD PTR[((-1))+r9]
+ or rsi,rcx
+ALIGN 16
+$L$copy::
+ mov rax,QWORD PTR[r15*8+rsi]
+ mov QWORD PTR[r15*8+rdi],rax
+ mov QWORD PTR[r15*8+rsp],r14
+ dec r15
+ jge $L$copy
+
+ mov rsi,QWORD PTR[8+r9*8+rsp]
+ mov rax,1
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_bn_mul_mont::
+bn_mul_mont ENDP
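+	; the DB run below spells "Montgomery Multiplication for x86_64, CRYPTOGAMS by <appro@openssl.org>"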
+DB 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+DB 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+DB 54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83
+DB 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+DB 115,108,46,111,114,103,62,0
+ALIGN 16
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ mov r10,QWORD PTR[192+r8]
+ mov rax,QWORD PTR[8+r10*8+rax]
+ lea rax,QWORD PTR[48+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
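+	; "cld; rep movsq" again: copy the CONTEXT record for the unwinder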
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_bn_mul_mont
+ DD imagerel $L$SEH_end_bn_mul_mont
+ DD imagerel $L$SEH_info_bn_mul_mont
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_bn_mul_mont::
+DB 9,0,0,0
+ DD imagerel se_handler
+
+.xdata ENDS
+END
diff --git a/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm b/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
new file mode 100644
index 0000000000..a5913da92e
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
@@ -0,0 +1,2108 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+
+PUBLIC Camellia_EncryptBlock
+
+ALIGN 16
+Camellia_EncryptBlock PROC PUBLIC
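+	; translate key bits to grand rounds: 128 -> 3, 192/256 -> 4
+	; (the sub borrows iff keyBitLength > 128, and adc picks the carry up)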
+ mov eax,128
+ sub eax,ecx
+ mov ecx,3
+ adc ecx,0
+ jmp $L$enc_rounds
+Camellia_EncryptBlock ENDP
+
+PUBLIC Camellia_EncryptBlock_Rounds
+
+ALIGN 16
+$L$enc_rounds::
+Camellia_EncryptBlock_Rounds PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_Camellia_EncryptBlock_Rounds::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+
+
+ push rbx
+ push rbp
+ push r13
+ push r14
+ push r15
+$L$enc_prologue::
+
+
+ mov r13,rcx
+ mov r14,rdx
+
+ shl edi,6
+ lea rbp,QWORD PTR[$L$Camellia_SBOX]
+ lea r15,QWORD PTR[rdi*1+r14]
+
+ mov r8d,DWORD PTR[rsi]
+ mov r9d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[8+rsi]
+ bswap r8d
+ mov r11d,DWORD PTR[12+rsi]
+ bswap r9d
+ bswap r10d
+ bswap r11d
+
+ call _x86_64_Camellia_encrypt
+
+ bswap r8d
+ bswap r9d
+ bswap r10d
+ mov DWORD PTR[r13],r8d
+ bswap r11d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ mov DWORD PTR[12+r13],r11d
+
+ mov r15,QWORD PTR[rsp]
+ mov r14,QWORD PTR[8+rsp]
+ mov r13,QWORD PTR[16+rsp]
+ mov rbp,QWORD PTR[24+rsp]
+ mov rbx,QWORD PTR[32+rsp]
+ lea rsp,QWORD PTR[40+rsp]
+$L$enc_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_Camellia_EncryptBlock_Rounds::
+Camellia_EncryptBlock_Rounds ENDP
+
+
+ALIGN 16
+_x86_64_Camellia_encrypt PROC PRIVATE
+ xor r9d,DWORD PTR[r14]
+ xor r8d,DWORD PTR[4+r14]
+ xor r11d,DWORD PTR[8+r14]
+ xor r10d,DWORD PTR[12+r14]
+ALIGN 16
+$L$eloop::
+ mov ebx,DWORD PTR[16+r14]
+ mov eax,DWORD PTR[20+r14]
+
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[24+r14]
+ mov eax,DWORD PTR[28+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[32+r14]
+ mov eax,DWORD PTR[36+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[40+r14]
+ mov eax,DWORD PTR[44+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[48+r14]
+ mov eax,DWORD PTR[52+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[56+r14]
+ mov eax,DWORD PTR[60+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[64+r14]
+ mov eax,DWORD PTR[68+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ lea r14,QWORD PTR[((16*4))+r14]
+ cmp r14,r15
+ mov edx,DWORD PTR[8+r14]
+ mov ecx,DWORD PTR[12+r14]
+ je $L$edone
+
+ and eax,r8d
+ or edx,r11d
+ rol eax,1
+ xor r10d,edx
+ xor r9d,eax
+ and ecx,r10d
+ or ebx,r9d
+ rol ecx,1
+ xor r8d,ebx
+ xor r11d,ecx
+ jmp $L$eloop
+
+ALIGN 16
+$L$edone::
+ xor eax,r10d
+ xor ebx,r11d
+ xor ecx,r8d
+ xor edx,r9d
+
+ mov r8d,eax
+ mov r9d,ebx
+ mov r10d,ecx
+ mov r11d,edx
+
+DB 0f3h,0c3h
+
+_x86_64_Camellia_encrypt ENDP
+
+
+PUBLIC Camellia_DecryptBlock
+
+ALIGN 16
+Camellia_DecryptBlock PROC PUBLIC
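+	; same key-bits-to-grand-rounds mapping as Camellia_EncryptBlock above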
+ mov eax,128
+ sub eax,ecx
+ mov ecx,3
+ adc ecx,0
+ jmp $L$dec_rounds
+Camellia_DecryptBlock ENDP
+
+PUBLIC Camellia_DecryptBlock_Rounds
+
+ALIGN 16
+$L$dec_rounds::
+Camellia_DecryptBlock_Rounds PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_Camellia_DecryptBlock_Rounds::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+
+
+ push rbx
+ push rbp
+ push r13
+ push r14
+ push r15
+$L$dec_prologue::
+
+
+ mov r13,rcx
+ mov r15,rdx
+
+ shl edi,6
+ lea rbp,QWORD PTR[$L$Camellia_SBOX]
+ lea r14,QWORD PTR[rdi*1+r15]
+
+ mov r8d,DWORD PTR[rsi]
+ mov r9d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[8+rsi]
+ bswap r8d
+ mov r11d,DWORD PTR[12+rsi]
+ bswap r9d
+ bswap r10d
+ bswap r11d
+
+ call _x86_64_Camellia_decrypt
+
+ bswap r8d
+ bswap r9d
+ bswap r10d
+ mov DWORD PTR[r13],r8d
+ bswap r11d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ mov DWORD PTR[12+r13],r11d
+
+ mov r15,QWORD PTR[rsp]
+ mov r14,QWORD PTR[8+rsp]
+ mov r13,QWORD PTR[16+rsp]
+ mov rbp,QWORD PTR[24+rsp]
+ mov rbx,QWORD PTR[32+rsp]
+ lea rsp,QWORD PTR[40+rsp]
+$L$dec_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_Camellia_DecryptBlock_Rounds::
+Camellia_DecryptBlock_Rounds ENDP
+
+
+ALIGN 16
+_x86_64_Camellia_decrypt PROC PRIVATE
+ xor r9d,DWORD PTR[r14]
+ xor r8d,DWORD PTR[4+r14]
+ xor r11d,DWORD PTR[8+r14]
+ xor r10d,DWORD PTR[12+r14]
+ALIGN 16
+$L$dloop::
+ mov ebx,DWORD PTR[((-8))+r14]
+ mov eax,DWORD PTR[((-4))+r14]
+
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-16))+r14]
+ mov eax,DWORD PTR[((-12))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-24))+r14]
+ mov eax,DWORD PTR[((-20))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-32))+r14]
+ mov eax,DWORD PTR[((-28))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-40))+r14]
+ mov eax,DWORD PTR[((-36))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-48))+r14]
+ mov eax,DWORD PTR[((-44))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[((-56))+r14]
+ mov eax,DWORD PTR[((-52))+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ lea r14,QWORD PTR[((-16*4))+r14]
+ cmp r14,r15
+ mov edx,DWORD PTR[r14]
+ mov ecx,DWORD PTR[4+r14]
+ je $L$ddone
+
+ and eax,r8d
+ or edx,r11d
+ rol eax,1
+ xor r10d,edx
+ xor r9d,eax
+ and ecx,r10d
+ or ebx,r9d
+ rol ecx,1
+ xor r8d,ebx
+ xor r11d,ecx
+
+ jmp $L$dloop
+
+ALIGN 16
+$L$ddone::
+ xor ecx,r10d
+ xor edx,r11d
+ xor eax,r8d
+ xor ebx,r9d
+
+ mov r8d,ecx
+ mov r9d,edx
+ mov r10d,eax
+ mov r11d,ebx
+
+DB 0f3h,0c3h
+
+_x86_64_Camellia_decrypt ENDP
+PUBLIC Camellia_Ekeygen
+
+ALIGN 16
+Camellia_Ekeygen PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_Camellia_Ekeygen::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r13
+ push r14
+ push r15
+$L$key_prologue::
+
+ mov r15,rdi
+ mov r13,rdx
+
+ mov r8d,DWORD PTR[rsi]
+ mov r9d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[8+rsi]
+ mov r11d,DWORD PTR[12+rsi]
+
+ bswap r8d
+ bswap r9d
+ bswap r10d
+ bswap r11d
+ mov DWORD PTR[r13],r9d
+ mov DWORD PTR[4+r13],r8d
+ mov DWORD PTR[8+r13],r11d
+ mov DWORD PTR[12+r13],r10d
+ cmp r15,128
+ je $L$1st128
+
+ mov r8d,DWORD PTR[16+rsi]
+ mov r9d,DWORD PTR[20+rsi]
+ cmp r15,192
+ je $L$1st192
+ mov r10d,DWORD PTR[24+rsi]
+ mov r11d,DWORD PTR[28+rsi]
+ jmp $L$1st256
+$L$1st192::
+ mov r10d,r8d
+ mov r11d,r9d
+ not r10d
+ not r11d
+$L$1st256::
+ bswap r8d
+ bswap r9d
+ bswap r10d
+ bswap r11d
+ mov DWORD PTR[32+r13],r9d
+ mov DWORD PTR[36+r13],r8d
+ mov DWORD PTR[40+r13],r11d
+ mov DWORD PTR[44+r13],r10d
+ xor r9d,DWORD PTR[r13]
+ xor r8d,DWORD PTR[4+r13]
+ xor r11d,DWORD PTR[8+r13]
+ xor r10d,DWORD PTR[12+r13]
+
+$L$1st128::
+ lea r14,QWORD PTR[$L$Camellia_SIGMA]
+ lea rbp,QWORD PTR[$L$Camellia_SBOX]
+
+ mov ebx,DWORD PTR[r14]
+ mov eax,DWORD PTR[4+r14]
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[8+r14]
+ mov eax,DWORD PTR[12+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[16+r14]
+ mov eax,DWORD PTR[20+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ xor r9d,DWORD PTR[r13]
+ xor r8d,DWORD PTR[4+r13]
+ xor r11d,DWORD PTR[8+r13]
+ xor r10d,DWORD PTR[12+r13]
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[24+r14]
+ mov eax,DWORD PTR[28+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[32+r14]
+ mov eax,DWORD PTR[36+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ cmp r15,128
+ jne $L$2nd256
+
+ lea r13,QWORD PTR[128+r13]
+ shl r8,32
+ shl r10,32
+ or r8,r9
+ or r10,r11
+ mov rax,QWORD PTR[((-128))+r13]
+ mov rbx,QWORD PTR[((-120))+r13]
+ mov QWORD PTR[((-112))+r13],r8
+ mov QWORD PTR[((-104))+r13],r10
+ mov r11,rax
+ shl rax,15
+ mov r9,rbx
+ shr r9,49
+ shr r11,49
+ or rax,r9
+ shl rbx,15
+ or rbx,r11
+ mov QWORD PTR[((-96))+r13],rax
+ mov QWORD PTR[((-88))+r13],rbx
+ mov r11,r8
+ shl r8,15
+ mov r9,r10
+ shr r9,49
+ shr r11,49
+ or r8,r9
+ shl r10,15
+ or r10,r11
+ mov QWORD PTR[((-80))+r13],r8
+ mov QWORD PTR[((-72))+r13],r10
+ mov r11,r8
+ shl r8,15
+ mov r9,r10
+ shr r9,49
+ shr r11,49
+ or r8,r9
+ shl r10,15
+ or r10,r11
+ mov QWORD PTR[((-64))+r13],r8
+ mov QWORD PTR[((-56))+r13],r10
+ mov r11,rax
+ shl rax,30
+ mov r9,rbx
+ shr r9,34
+ shr r11,34
+ or rax,r9
+ shl rbx,30
+ or rbx,r11
+ mov QWORD PTR[((-48))+r13],rax
+ mov QWORD PTR[((-40))+r13],rbx
+ mov r11,r8
+ shl r8,15
+ mov r9,r10
+ shr r9,49
+ shr r11,49
+ or r8,r9
+ shl r10,15
+ or r10,r11
+ mov QWORD PTR[((-32))+r13],r8
+ mov r11,rax
+ shl rax,15
+ mov r9,rbx
+ shr r9,49
+ shr r11,49
+ or rax,r9
+ shl rbx,15
+ or rbx,r11
+ mov QWORD PTR[((-24))+r13],rbx
+ mov r11,r8
+ shl r8,15
+ mov r9,r10
+ shr r9,49
+ shr r11,49
+ or r8,r9
+ shl r10,15
+ or r10,r11
+ mov QWORD PTR[((-16))+r13],r8
+ mov QWORD PTR[((-8))+r13],r10
+ mov r11,rax
+ shl rax,17
+ mov r9,rbx
+ shr r9,47
+ shr r11,47
+ or rax,r9
+ shl rbx,17
+ or rbx,r11
+ mov QWORD PTR[r13],rax
+ mov QWORD PTR[8+r13],rbx
+ mov r11,rax
+ shl rax,17
+ mov r9,rbx
+ shr r9,47
+ shr r11,47
+ or rax,r9
+ shl rbx,17
+ or rbx,r11
+ mov QWORD PTR[16+r13],rax
+ mov QWORD PTR[24+r13],rbx
+ mov r11,r8
+ shl r8,34
+ mov r9,r10
+ shr r9,30
+ shr r11,30
+ or r8,r9
+ shl r10,34
+ or r10,r11
+ mov QWORD PTR[32+r13],r8
+ mov QWORD PTR[40+r13],r10
+ mov r11,rax
+ shl rax,17
+ mov r9,rbx
+ shr r9,47
+ shr r11,47
+ or rax,r9
+ shl rbx,17
+ or rbx,r11
+ mov QWORD PTR[48+r13],rax
+ mov QWORD PTR[56+r13],rbx
+ mov r11,r8
+ shl r8,17
+ mov r9,r10
+ shr r9,47
+ shr r11,47
+ or r8,r9
+ shl r10,17
+ or r10,r11
+ mov QWORD PTR[64+r13],r8
+ mov QWORD PTR[72+r13],r10
+ mov eax,3
+ jmp $L$done
+ALIGN 16
+$L$2nd256::
+ mov DWORD PTR[48+r13],r9d
+ mov DWORD PTR[52+r13],r8d
+ mov DWORD PTR[56+r13],r11d
+ mov DWORD PTR[60+r13],r10d
+ xor r9d,DWORD PTR[32+r13]
+ xor r8d,DWORD PTR[36+r13]
+ xor r11d,DWORD PTR[40+r13]
+ xor r10d,DWORD PTR[44+r13]
+ xor eax,r8d
+ xor ebx,r9d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[40+r14]
+ mov eax,DWORD PTR[44+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r10d,ecx
+ xor r11d,ecx
+ xor r11d,edx
+ xor eax,r10d
+ xor ebx,r11d
+ movzx esi,ah
+ movzx edi,bl
+ mov edx,DWORD PTR[2052+rsi*8+rbp]
+ mov ecx,DWORD PTR[rdi*8+rbp]
+ movzx esi,al
+ shr eax,16
+ movzx edi,bh
+ xor edx,DWORD PTR[4+rsi*8+rbp]
+ shr ebx,16
+ xor ecx,DWORD PTR[4+rdi*8+rbp]
+ movzx esi,ah
+ movzx edi,bl
+ xor edx,DWORD PTR[rsi*8+rbp]
+ xor ecx,DWORD PTR[2052+rdi*8+rbp]
+ movzx esi,al
+ movzx edi,bh
+ xor edx,DWORD PTR[2048+rsi*8+rbp]
+ xor ecx,DWORD PTR[2048+rdi*8+rbp]
+ mov ebx,DWORD PTR[48+r14]
+ mov eax,DWORD PTR[52+r14]
+ xor ecx,edx
+ ror edx,8
+ xor r8d,ecx
+ xor r9d,ecx
+ xor r9d,edx
+ mov rax,QWORD PTR[r13]
+ mov rbx,QWORD PTR[8+r13]
+ mov rcx,QWORD PTR[32+r13]
+ mov rdx,QWORD PTR[40+r13]
+ mov r14,QWORD PTR[48+r13]
+ mov r15,QWORD PTR[56+r13]
+ lea r13,QWORD PTR[128+r13]
+ shl r8,32
+ shl r10,32
+ or r8,r9
+ or r10,r11
+ mov QWORD PTR[((-112))+r13],r8
+ mov QWORD PTR[((-104))+r13],r10
+ mov r11,rcx
+ shl rcx,15
+ mov r9,rdx
+ shr r9,49
+ shr r11,49
+ or rcx,r9
+ shl rdx,15
+ or rdx,r11
+ mov QWORD PTR[((-96))+r13],rcx
+ mov QWORD PTR[((-88))+r13],rdx
+ mov r11,r14
+ shl r14,15
+ mov r9,r15
+ shr r9,49
+ shr r11,49
+ or r14,r9
+ shl r15,15
+ or r15,r11
+ mov QWORD PTR[((-80))+r13],r14
+ mov QWORD PTR[((-72))+r13],r15
+ mov r11,rcx
+ shl rcx,15
+ mov r9,rdx
+ shr r9,49
+ shr r11,49
+ or rcx,r9
+ shl rdx,15
+ or rdx,r11
+ mov QWORD PTR[((-64))+r13],rcx
+ mov QWORD PTR[((-56))+r13],rdx
+ mov r11,r8
+ shl r8,30
+ mov r9,r10
+ shr r9,34
+ shr r11,34
+ or r8,r9
+ shl r10,30
+ or r10,r11
+ mov QWORD PTR[((-48))+r13],r8
+ mov QWORD PTR[((-40))+r13],r10
+ mov r11,rax
+ shl rax,45
+ mov r9,rbx
+ shr r9,19
+ shr r11,19
+ or rax,r9
+ shl rbx,45
+ or rbx,r11
+ mov QWORD PTR[((-32))+r13],rax
+ mov QWORD PTR[((-24))+r13],rbx
+ mov r11,r14
+ shl r14,30
+ mov r9,r15
+ shr r9,34
+ shr r11,34
+ or r14,r9
+ shl r15,30
+ or r15,r11
+ mov QWORD PTR[((-16))+r13],r14
+ mov QWORD PTR[((-8))+r13],r15
+ mov r11,rax
+ shl rax,15
+ mov r9,rbx
+ shr r9,49
+ shr r11,49
+ or rax,r9
+ shl rbx,15
+ or rbx,r11
+ mov QWORD PTR[r13],rax
+ mov QWORD PTR[8+r13],rbx
+ mov r11,rcx
+ shl rcx,30
+ mov r9,rdx
+ shr r9,34
+ shr r11,34
+ or rcx,r9
+ shl rdx,30
+ or rdx,r11
+ mov QWORD PTR[16+r13],rcx
+ mov QWORD PTR[24+r13],rdx
+ mov r11,r8
+ shl r8,30
+ mov r9,r10
+ shr r9,34
+ shr r11,34
+ or r8,r9
+ shl r10,30
+ or r10,r11
+ mov QWORD PTR[32+r13],r8
+ mov QWORD PTR[40+r13],r10
+ mov r11,rax
+ shl rax,17
+ mov r9,rbx
+ shr r9,47
+ shr r11,47
+ or rax,r9
+ shl rbx,17
+ or rbx,r11
+ mov QWORD PTR[48+r13],rax
+ mov QWORD PTR[56+r13],rbx
+ mov r11,r14
+ shl r14,32
+ mov r9,r15
+ shr r9,32
+ shr r11,32
+ or r14,r9
+ shl r15,32
+ or r15,r11
+ mov QWORD PTR[64+r13],r14
+ mov QWORD PTR[72+r13],r15
+ mov r11,rcx
+ shl rcx,34
+ mov r9,rdx
+ shr r9,30
+ shr r11,30
+ or rcx,r9
+ shl rdx,34
+ or rdx,r11
+ mov QWORD PTR[80+r13],rcx
+ mov QWORD PTR[88+r13],rdx
+ mov r11,r14
+ shl r14,17
+ mov r9,r15
+ shr r9,47
+ shr r11,47
+ or r14,r9
+ shl r15,17
+ or r15,r11
+ mov QWORD PTR[96+r13],r14
+ mov QWORD PTR[104+r13],r15
+ mov r11,rax
+ shl rax,34
+ mov r9,rbx
+ shr r9,30
+ shr r11,30
+ or rax,r9
+ shl rbx,34
+ or rbx,r11
+ mov QWORD PTR[112+r13],rax
+ mov QWORD PTR[120+r13],rbx
+ mov r11,r8
+ shl r8,51
+ mov r9,r10
+ shr r9,13
+ shr r11,13
+ or r8,r9
+ shl r10,51
+ or r10,r11
+ mov QWORD PTR[128+r13],r8
+ mov QWORD PTR[136+r13],r10
+ mov eax,4
+$L$done::
+ mov r15,QWORD PTR[rsp]
+ mov r14,QWORD PTR[8+rsp]
+ mov r13,QWORD PTR[16+rsp]
+ mov rbp,QWORD PTR[24+rsp]
+ mov rbx,QWORD PTR[32+rsp]
+ lea rsp,QWORD PTR[40+rsp]
+$L$key_epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_Camellia_Ekeygen::
+Camellia_Ekeygen ENDP
+ALIGN 64
+$L$Camellia_SIGMA::
+ DD 03bcc908bh,0a09e667fh,04caa73b2h,0b67ae858h
+ DD 0e94f82beh,0c6ef372fh,0f1d36f1ch,054ff53a5h
+ DD 0de682d1dh,010e527fah,0b3e6c1fdh,0b05688c2h
+ DD 0,0,0,0
+$L$Camellia_SBOX::
+ DD 070707000h,070700070h
+ DD 082828200h,02c2c002ch
+ DD 02c2c2c00h,0b3b300b3h
+ DD 0ececec00h,0c0c000c0h
+ DD 0b3b3b300h,0e4e400e4h
+ DD 027272700h,057570057h
+ DD 0c0c0c000h,0eaea00eah
+ DD 0e5e5e500h,0aeae00aeh
+ DD 0e4e4e400h,023230023h
+ DD 085858500h,06b6b006bh
+ DD 057575700h,045450045h
+ DD 035353500h,0a5a500a5h
+ DD 0eaeaea00h,0eded00edh
+ DD 00c0c0c00h,04f4f004fh
+ DD 0aeaeae00h,01d1d001dh
+ DD 041414100h,092920092h
+ DD 023232300h,086860086h
+ DD 0efefef00h,0afaf00afh
+ DD 06b6b6b00h,07c7c007ch
+ DD 093939300h,01f1f001fh
+ DD 045454500h,03e3e003eh
+ DD 019191900h,0dcdc00dch
+ DD 0a5a5a500h,05e5e005eh
+ DD 021212100h,00b0b000bh
+ DD 0ededed00h,0a6a600a6h
+ DD 00e0e0e00h,039390039h
+ DD 04f4f4f00h,0d5d500d5h
+ DD 04e4e4e00h,05d5d005dh
+ DD 01d1d1d00h,0d9d900d9h
+ DD 065656500h,05a5a005ah
+ DD 092929200h,051510051h
+ DD 0bdbdbd00h,06c6c006ch
+ DD 086868600h,08b8b008bh
+ DD 0b8b8b800h,09a9a009ah
+ DD 0afafaf00h,0fbfb00fbh
+ DD 08f8f8f00h,0b0b000b0h
+ DD 07c7c7c00h,074740074h
+ DD 0ebebeb00h,02b2b002bh
+ DD 01f1f1f00h,0f0f000f0h
+ DD 0cecece00h,084840084h
+ DD 03e3e3e00h,0dfdf00dfh
+ DD 030303000h,0cbcb00cbh
+ DD 0dcdcdc00h,034340034h
+ DD 05f5f5f00h,076760076h
+ DD 05e5e5e00h,06d6d006dh
+ DD 0c5c5c500h,0a9a900a9h
+ DD 00b0b0b00h,0d1d100d1h
+ DD 01a1a1a00h,004040004h
+ DD 0a6a6a600h,014140014h
+ DD 0e1e1e100h,03a3a003ah
+ DD 039393900h,0dede00deh
+ DD 0cacaca00h,011110011h
+ DD 0d5d5d500h,032320032h
+ DD 047474700h,09c9c009ch
+ DD 05d5d5d00h,053530053h
+ DD 03d3d3d00h,0f2f200f2h
+ DD 0d9d9d900h,0fefe00feh
+ DD 001010100h,0cfcf00cfh
+ DD 05a5a5a00h,0c3c300c3h
+ DD 0d6d6d600h,07a7a007ah
+ DD 051515100h,024240024h
+ DD 056565600h,0e8e800e8h
+ DD 06c6c6c00h,060600060h
+ DD 04d4d4d00h,069690069h
+ DD 08b8b8b00h,0aaaa00aah
+ DD 00d0d0d00h,0a0a000a0h
+ DD 09a9a9a00h,0a1a100a1h
+ DD 066666600h,062620062h
+ DD 0fbfbfb00h,054540054h
+ DD 0cccccc00h,01e1e001eh
+ DD 0b0b0b000h,0e0e000e0h
+ DD 02d2d2d00h,064640064h
+ DD 074747400h,010100010h
+ DD 012121200h,000000000h
+ DD 02b2b2b00h,0a3a300a3h
+ DD 020202000h,075750075h
+ DD 0f0f0f000h,08a8a008ah
+ DD 0b1b1b100h,0e6e600e6h
+ DD 084848400h,009090009h
+ DD 099999900h,0dddd00ddh
+ DD 0dfdfdf00h,087870087h
+ DD 04c4c4c00h,083830083h
+ DD 0cbcbcb00h,0cdcd00cdh
+ DD 0c2c2c200h,090900090h
+ DD 034343400h,073730073h
+ DD 07e7e7e00h,0f6f600f6h
+ DD 076767600h,09d9d009dh
+ DD 005050500h,0bfbf00bfh
+ DD 06d6d6d00h,052520052h
+ DD 0b7b7b700h,0d8d800d8h
+ DD 0a9a9a900h,0c8c800c8h
+ DD 031313100h,0c6c600c6h
+ DD 0d1d1d100h,081810081h
+ DD 017171700h,06f6f006fh
+ DD 004040400h,013130013h
+ DD 0d7d7d700h,063630063h
+ DD 014141400h,0e9e900e9h
+ DD 058585800h,0a7a700a7h
+ DD 03a3a3a00h,09f9f009fh
+ DD 061616100h,0bcbc00bch
+ DD 0dedede00h,029290029h
+ DD 01b1b1b00h,0f9f900f9h
+ DD 011111100h,02f2f002fh
+ DD 01c1c1c00h,0b4b400b4h
+ DD 032323200h,078780078h
+ DD 00f0f0f00h,006060006h
+ DD 09c9c9c00h,0e7e700e7h
+ DD 016161600h,071710071h
+ DD 053535300h,0d4d400d4h
+ DD 018181800h,0abab00abh
+ DD 0f2f2f200h,088880088h
+ DD 022222200h,08d8d008dh
+ DD 0fefefe00h,072720072h
+ DD 044444400h,0b9b900b9h
+ DD 0cfcfcf00h,0f8f800f8h
+ DD 0b2b2b200h,0acac00ach
+ DD 0c3c3c300h,036360036h
+ DD 0b5b5b500h,02a2a002ah
+ DD 07a7a7a00h,03c3c003ch
+ DD 091919100h,0f1f100f1h
+ DD 024242400h,040400040h
+ DD 008080800h,0d3d300d3h
+ DD 0e8e8e800h,0bbbb00bbh
+ DD 0a8a8a800h,043430043h
+ DD 060606000h,015150015h
+ DD 0fcfcfc00h,0adad00adh
+ DD 069696900h,077770077h
+ DD 050505000h,080800080h
+ DD 0aaaaaa00h,082820082h
+ DD 0d0d0d000h,0ecec00ech
+ DD 0a0a0a000h,027270027h
+ DD 07d7d7d00h,0e5e500e5h
+ DD 0a1a1a100h,085850085h
+ DD 089898900h,035350035h
+ DD 062626200h,00c0c000ch
+ DD 097979700h,041410041h
+ DD 054545400h,0efef00efh
+ DD 05b5b5b00h,093930093h
+ DD 01e1e1e00h,019190019h
+ DD 095959500h,021210021h
+ DD 0e0e0e000h,00e0e000eh
+ DD 0ffffff00h,04e4e004eh
+ DD 064646400h,065650065h
+ DD 0d2d2d200h,0bdbd00bdh
+ DD 010101000h,0b8b800b8h
+ DD 0c4c4c400h,08f8f008fh
+ DD 000000000h,0ebeb00ebh
+ DD 048484800h,0cece00ceh
+ DD 0a3a3a300h,030300030h
+ DD 0f7f7f700h,05f5f005fh
+ DD 075757500h,0c5c500c5h
+ DD 0dbdbdb00h,01a1a001ah
+ DD 08a8a8a00h,0e1e100e1h
+ DD 003030300h,0caca00cah
+ DD 0e6e6e600h,047470047h
+ DD 0dadada00h,03d3d003dh
+ DD 009090900h,001010001h
+ DD 03f3f3f00h,0d6d600d6h
+ DD 0dddddd00h,056560056h
+ DD 094949400h,04d4d004dh
+ DD 087878700h,00d0d000dh
+ DD 05c5c5c00h,066660066h
+ DD 083838300h,0cccc00cch
+ DD 002020200h,02d2d002dh
+ DD 0cdcdcd00h,012120012h
+ DD 04a4a4a00h,020200020h
+ DD 090909000h,0b1b100b1h
+ DD 033333300h,099990099h
+ DD 073737300h,04c4c004ch
+ DD 067676700h,0c2c200c2h
+ DD 0f6f6f600h,07e7e007eh
+ DD 0f3f3f300h,005050005h
+ DD 09d9d9d00h,0b7b700b7h
+ DD 07f7f7f00h,031310031h
+ DD 0bfbfbf00h,017170017h
+ DD 0e2e2e200h,0d7d700d7h
+ DD 052525200h,058580058h
+ DD 09b9b9b00h,061610061h
+ DD 0d8d8d800h,01b1b001bh
+ DD 026262600h,01c1c001ch
+ DD 0c8c8c800h,00f0f000fh
+ DD 037373700h,016160016h
+ DD 0c6c6c600h,018180018h
+ DD 03b3b3b00h,022220022h
+ DD 081818100h,044440044h
+ DD 096969600h,0b2b200b2h
+ DD 06f6f6f00h,0b5b500b5h
+ DD 04b4b4b00h,091910091h
+ DD 013131300h,008080008h
+ DD 0bebebe00h,0a8a800a8h
+ DD 063636300h,0fcfc00fch
+ DD 02e2e2e00h,050500050h
+ DD 0e9e9e900h,0d0d000d0h
+ DD 079797900h,07d7d007dh
+ DD 0a7a7a700h,089890089h
+ DD 08c8c8c00h,097970097h
+ DD 09f9f9f00h,05b5b005bh
+ DD 06e6e6e00h,095950095h
+ DD 0bcbcbc00h,0ffff00ffh
+ DD 08e8e8e00h,0d2d200d2h
+ DD 029292900h,0c4c400c4h
+ DD 0f5f5f500h,048480048h
+ DD 0f9f9f900h,0f7f700f7h
+ DD 0b6b6b600h,0dbdb00dbh
+ DD 02f2f2f00h,003030003h
+ DD 0fdfdfd00h,0dada00dah
+ DD 0b4b4b400h,03f3f003fh
+ DD 059595900h,094940094h
+ DD 078787800h,05c5c005ch
+ DD 098989800h,002020002h
+ DD 006060600h,04a4a004ah
+ DD 06a6a6a00h,033330033h
+ DD 0e7e7e700h,067670067h
+ DD 046464600h,0f3f300f3h
+ DD 071717100h,07f7f007fh
+ DD 0bababa00h,0e2e200e2h
+ DD 0d4d4d400h,09b9b009bh
+ DD 025252500h,026260026h
+ DD 0ababab00h,037370037h
+ DD 042424200h,03b3b003bh
+ DD 088888800h,096960096h
+ DD 0a2a2a200h,04b4b004bh
+ DD 08d8d8d00h,0bebe00beh
+ DD 0fafafa00h,02e2e002eh
+ DD 072727200h,079790079h
+ DD 007070700h,08c8c008ch
+ DD 0b9b9b900h,06e6e006eh
+ DD 055555500h,08e8e008eh
+ DD 0f8f8f800h,0f5f500f5h
+ DD 0eeeeee00h,0b6b600b6h
+ DD 0acacac00h,0fdfd00fdh
+ DD 00a0a0a00h,059590059h
+ DD 036363600h,098980098h
+ DD 049494900h,06a6a006ah
+ DD 02a2a2a00h,046460046h
+ DD 068686800h,0baba00bah
+ DD 03c3c3c00h,025250025h
+ DD 038383800h,042420042h
+ DD 0f1f1f100h,0a2a200a2h
+ DD 0a4a4a400h,0fafa00fah
+ DD 040404000h,007070007h
+ DD 028282800h,055550055h
+ DD 0d3d3d300h,0eeee00eeh
+ DD 07b7b7b00h,00a0a000ah
+ DD 0bbbbbb00h,049490049h
+ DD 0c9c9c900h,068680068h
+ DD 043434300h,038380038h
+ DD 0c1c1c100h,0a4a400a4h
+ DD 015151500h,028280028h
+ DD 0e3e3e300h,07b7b007bh
+ DD 0adadad00h,0c9c900c9h
+ DD 0f4f4f400h,0c1c100c1h
+ DD 077777700h,0e3e300e3h
+ DD 0c7c7c700h,0f4f400f4h
+ DD 080808000h,0c7c700c7h
+ DD 09e9e9e00h,09e9e009eh
+ DD 000e0e0e0h,038003838h
+ DD 000050505h,041004141h
+ DD 000585858h,016001616h
+ DD 000d9d9d9h,076007676h
+ DD 000676767h,0d900d9d9h
+ DD 0004e4e4eh,093009393h
+ DD 000818181h,060006060h
+ DD 000cbcbcbh,0f200f2f2h
+ DD 000c9c9c9h,072007272h
+ DD 0000b0b0bh,0c200c2c2h
+ DD 000aeaeaeh,0ab00ababh
+ DD 0006a6a6ah,09a009a9ah
+ DD 000d5d5d5h,075007575h
+ DD 000181818h,006000606h
+ DD 0005d5d5dh,057005757h
+ DD 000828282h,0a000a0a0h
+ DD 000464646h,091009191h
+ DD 000dfdfdfh,0f700f7f7h
+ DD 000d6d6d6h,0b500b5b5h
+ DD 000272727h,0c900c9c9h
+ DD 0008a8a8ah,0a200a2a2h
+ DD 000323232h,08c008c8ch
+ DD 0004b4b4bh,0d200d2d2h
+ DD 000424242h,090009090h
+ DD 000dbdbdbh,0f600f6f6h
+ DD 0001c1c1ch,007000707h
+ DD 0009e9e9eh,0a700a7a7h
+ DD 0009c9c9ch,027002727h
+ DD 0003a3a3ah,08e008e8eh
+ DD 000cacacah,0b200b2b2h
+ DD 000252525h,049004949h
+ DD 0007b7b7bh,0de00dedeh
+ DD 0000d0d0dh,043004343h
+ DD 000717171h,05c005c5ch
+ DD 0005f5f5fh,0d700d7d7h
+ DD 0001f1f1fh,0c700c7c7h
+ DD 000f8f8f8h,03e003e3eh
+ DD 000d7d7d7h,0f500f5f5h
+ DD 0003e3e3eh,08f008f8fh
+ DD 0009d9d9dh,067006767h
+ DD 0007c7c7ch,01f001f1fh
+ DD 000606060h,018001818h
+ DD 000b9b9b9h,06e006e6eh
+ DD 000bebebeh,0af00afafh
+ DD 000bcbcbch,02f002f2fh
+ DD 0008b8b8bh,0e200e2e2h
+ DD 000161616h,085008585h
+ DD 000343434h,00d000d0dh
+ DD 0004d4d4dh,053005353h
+ DD 000c3c3c3h,0f000f0f0h
+ DD 000727272h,09c009c9ch
+ DD 000959595h,065006565h
+ DD 000abababh,0ea00eaeah
+ DD 0008e8e8eh,0a300a3a3h
+ DD 000bababah,0ae00aeaeh
+ DD 0007a7a7ah,09e009e9eh
+ DD 000b3b3b3h,0ec00ecech
+ DD 000020202h,080008080h
+ DD 000b4b4b4h,02d002d2dh
+ DD 000adadadh,06b006b6bh
+ DD 000a2a2a2h,0a800a8a8h
+ DD 000acacach,02b002b2bh
+ DD 000d8d8d8h,036003636h
+ DD 0009a9a9ah,0a600a6a6h
+ DD 000171717h,0c500c5c5h
+ DD 0001a1a1ah,086008686h
+ DD 000353535h,04d004d4dh
+ DD 000cccccch,033003333h
+ DD 000f7f7f7h,0fd00fdfdh
+ DD 000999999h,066006666h
+ DD 000616161h,058005858h
+ DD 0005a5a5ah,096009696h
+ DD 000e8e8e8h,03a003a3ah
+ DD 000242424h,009000909h
+ DD 000565656h,095009595h
+ DD 000404040h,010001010h
+ DD 000e1e1e1h,078007878h
+ DD 000636363h,0d800d8d8h
+ DD 000090909h,042004242h
+ DD 000333333h,0cc00cccch
+ DD 000bfbfbfh,0ef00efefh
+ DD 000989898h,026002626h
+ DD 000979797h,0e500e5e5h
+ DD 000858585h,061006161h
+ DD 000686868h,01a001a1ah
+ DD 000fcfcfch,03f003f3fh
+ DD 000ececech,03b003b3bh
+ DD 0000a0a0ah,082008282h
+ DD 000dadadah,0b600b6b6h
+ DD 0006f6f6fh,0db00dbdbh
+ DD 000535353h,0d400d4d4h
+ DD 000626262h,098009898h
+ DD 000a3a3a3h,0e800e8e8h
+ DD 0002e2e2eh,08b008b8bh
+ DD 000080808h,002000202h
+ DD 000afafafh,0eb00ebebh
+ DD 000282828h,00a000a0ah
+ DD 000b0b0b0h,02c002c2ch
+ DD 000747474h,01d001d1dh
+ DD 000c2c2c2h,0b000b0b0h
+ DD 000bdbdbdh,06f006f6fh
+ DD 000363636h,08d008d8dh
+ DD 000222222h,088008888h
+ DD 000383838h,00e000e0eh
+ DD 000646464h,019001919h
+ DD 0001e1e1eh,087008787h
+ DD 000393939h,04e004e4eh
+ DD 0002c2c2ch,00b000b0bh
+ DD 000a6a6a6h,0a900a9a9h
+ DD 000303030h,00c000c0ch
+ DD 000e5e5e5h,079007979h
+ DD 000444444h,011001111h
+ DD 000fdfdfdh,07f007f7fh
+ DD 000888888h,022002222h
+ DD 0009f9f9fh,0e700e7e7h
+ DD 000656565h,059005959h
+ DD 000878787h,0e100e1e1h
+ DD 0006b6b6bh,0da00dadah
+ DD 000f4f4f4h,03d003d3dh
+ DD 000232323h,0c800c8c8h
+ DD 000484848h,012001212h
+ DD 000101010h,004000404h
+ DD 000d1d1d1h,074007474h
+ DD 000515151h,054005454h
+ DD 000c0c0c0h,030003030h
+ DD 000f9f9f9h,07e007e7eh
+ DD 000d2d2d2h,0b400b4b4h
+ DD 000a0a0a0h,028002828h
+ DD 000555555h,055005555h
+ DD 000a1a1a1h,068006868h
+ DD 000414141h,050005050h
+ DD 000fafafah,0be00bebeh
+ DD 000434343h,0d000d0d0h
+ DD 000131313h,0c400c4c4h
+ DD 000c4c4c4h,031003131h
+ DD 0002f2f2fh,0cb00cbcbh
+ DD 000a8a8a8h,02a002a2ah
+ DD 000b6b6b6h,0ad00adadh
+ DD 0003c3c3ch,00f000f0fh
+ DD 0002b2b2bh,0ca00cacah
+ DD 000c1c1c1h,070007070h
+ DD 000ffffffh,0ff00ffffh
+ DD 000c8c8c8h,032003232h
+ DD 000a5a5a5h,069006969h
+ DD 000202020h,008000808h
+ DD 000898989h,062006262h
+ DD 000000000h,000000000h
+ DD 000909090h,024002424h
+ DD 000474747h,0d100d1d1h
+ DD 000efefefh,0fb00fbfbh
+ DD 000eaeaeah,0ba00babah
+ DD 000b7b7b7h,0ed00ededh
+ DD 000151515h,045004545h
+ DD 000060606h,081008181h
+ DD 000cdcdcdh,073007373h
+ DD 000b5b5b5h,06d006d6dh
+ DD 000121212h,084008484h
+ DD 0007e7e7eh,09f009f9fh
+ DD 000bbbbbbh,0ee00eeeeh
+ DD 000292929h,04a004a4ah
+ DD 0000f0f0fh,0c300c3c3h
+ DD 000b8b8b8h,02e002e2eh
+ DD 000070707h,0c100c1c1h
+ DD 000040404h,001000101h
+ DD 0009b9b9bh,0e600e6e6h
+ DD 000949494h,025002525h
+ DD 000212121h,048004848h
+ DD 000666666h,099009999h
+ DD 000e6e6e6h,0b900b9b9h
+ DD 000cececeh,0b300b3b3h
+ DD 000edededh,07b007b7bh
+ DD 000e7e7e7h,0f900f9f9h
+ DD 0003b3b3bh,0ce00ceceh
+ DD 000fefefeh,0bf00bfbfh
+ DD 0007f7f7fh,0df00dfdfh
+ DD 000c5c5c5h,071007171h
+ DD 000a4a4a4h,029002929h
+ DD 000373737h,0cd00cdcdh
+ DD 000b1b1b1h,06c006c6ch
+ DD 0004c4c4ch,013001313h
+ DD 000919191h,064006464h
+ DD 0006e6e6eh,09b009b9bh
+ DD 0008d8d8dh,063006363h
+ DD 000767676h,09d009d9dh
+ DD 000030303h,0c000c0c0h
+ DD 0002d2d2dh,04b004b4bh
+ DD 000dededeh,0b700b7b7h
+ DD 000969696h,0a500a5a5h
+ DD 000262626h,089008989h
+ DD 0007d7d7dh,05f005f5fh
+ DD 000c6c6c6h,0b100b1b1h
+ DD 0005c5c5ch,017001717h
+ DD 000d3d3d3h,0f400f4f4h
+ DD 000f2f2f2h,0bc00bcbch
+ DD 0004f4f4fh,0d300d3d3h
+ DD 000191919h,046004646h
+ DD 0003f3f3fh,0cf00cfcfh
+ DD 000dcdcdch,037003737h
+ DD 000797979h,05e005e5eh
+ DD 0001d1d1dh,047004747h
+ DD 000525252h,094009494h
+ DD 000ebebebh,0fa00fafah
+ DD 000f3f3f3h,0fc00fcfch
+ DD 0006d6d6dh,05b005b5bh
+ DD 0005e5e5eh,097009797h
+ DD 000fbfbfbh,0fe00fefeh
+ DD 000696969h,05a005a5ah
+ DD 000b2b2b2h,0ac00acach
+ DD 000f0f0f0h,03c003c3ch
+ DD 000313131h,04c004c4ch
+ DD 0000c0c0ch,003000303h
+ DD 000d4d4d4h,035003535h
+ DD 000cfcfcfh,0f300f3f3h
+ DD 0008c8c8ch,023002323h
+ DD 000e2e2e2h,0b800b8b8h
+ DD 000757575h,05d005d5dh
+ DD 000a9a9a9h,06a006a6ah
+ DD 0004a4a4ah,092009292h
+ DD 000575757h,0d500d5d5h
+ DD 000848484h,021002121h
+ DD 000111111h,044004444h
+ DD 000454545h,051005151h
+ DD 0001b1b1bh,0c600c6c6h
+ DD 000f5f5f5h,07d007d7dh
+ DD 000e4e4e4h,039003939h
+ DD 0000e0e0eh,083008383h
+ DD 000737373h,0dc00dcdch
+ DD 000aaaaaah,0aa00aaaah
+ DD 000f1f1f1h,07c007c7ch
+ DD 000ddddddh,077007777h
+ DD 000595959h,056005656h
+ DD 000141414h,005000505h
+ DD 0006c6c6ch,01b001b1bh
+ DD 000929292h,0a400a4a4h
+ DD 000545454h,015001515h
+ DD 000d0d0d0h,034003434h
+ DD 000787878h,01e001e1eh
+ DD 000707070h,01c001c1ch
+ DD 000e3e3e3h,0f800f8f8h
+ DD 000494949h,052005252h
+ DD 000808080h,020002020h
+ DD 000505050h,014001414h
+ DD 000a7a7a7h,0e900e9e9h
+ DD 000f6f6f6h,0bd00bdbdh
+ DD 000777777h,0dd00ddddh
+ DD 000939393h,0e400e4e4h
+ DD 000868686h,0a100a1a1h
+ DD 000838383h,0e000e0e0h
+ DD 0002a2a2ah,08a008a8ah
+ DD 000c7c7c7h,0f100f1f1h
+ DD 0005b5b5bh,0d600d6d6h
+ DD 000e9e9e9h,07a007a7ah
+ DD 000eeeeeeh,0bb00bbbbh
+ DD 0008f8f8fh,0e300e3e3h
+ DD 000010101h,040004040h
+ DD 0003d3d3dh,04f004f4fh
+PUBLIC Camellia_cbc_encrypt
+
+ALIGN 16
+Camellia_cbc_encrypt PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_Camellia_cbc_encrypt::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+ mov r8,QWORD PTR[40+rsp]
+ mov r9,QWORD PTR[48+rsp]
+
+
+ cmp rdx,0
+ je $L$cbc_abort
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+$L$cbc_prologue::
+
+ mov rbp,rsp
+ sub rsp,64
+ and rsp,-64
+
+
+
+ lea r10,QWORD PTR[((-64-63))+rcx]
+ sub r10,rsp
+ neg r10
+ and r10,03C0h
+ sub rsp,r10
+
+
+ mov r12,rdi
+ mov r13,rsi
+ mov rbx,r8
+ mov r14,rcx
+ mov r15d,DWORD PTR[272+rcx]
+
+ mov QWORD PTR[40+rsp],r8
+ mov QWORD PTR[48+rsp],rbp
+
+$L$cbc_body::
+ lea rbp,QWORD PTR[$L$Camellia_SBOX]
+
+ mov ecx,32
+ALIGN 4
+$L$cbc_prefetch_sbox::
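+	;read through all 4096 bytes of $L$Camellia_SBOX once, presumably to
+	;pull the table into cache before the data-dependent loads that follow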
+ mov rax,QWORD PTR[rbp]
+ mov rsi,QWORD PTR[32+rbp]
+ mov rdi,QWORD PTR[64+rbp]
+ mov r11,QWORD PTR[96+rbp]
+ lea rbp,QWORD PTR[128+rbp]
+ loop $L$cbc_prefetch_sbox
+ sub rbp,4096
+ shl r15,6
+ mov rcx,rdx
+ lea r15,QWORD PTR[r15*1+r14]
+
+ cmp r9d,0
+ je $L$CBC_DECRYPT
+
+ and rdx,-16
+ and rcx,15
+ lea rdx,QWORD PTR[rdx*1+r12]
+ mov QWORD PTR[rsp],r14
+ mov QWORD PTR[8+rsp],rdx
+ mov QWORD PTR[16+rsp],rcx
+
+ cmp rdx,r12
+ mov r8d,DWORD PTR[rbx]
+ mov r9d,DWORD PTR[4+rbx]
+ mov r10d,DWORD PTR[8+rbx]
+ mov r11d,DWORD PTR[12+rbx]
+ je $L$cbc_enc_tail
+ jmp $L$cbc_eloop
+
+ALIGN 16
+$L$cbc_eloop::
+ xor r8d,DWORD PTR[r12]
+ xor r9d,DWORD PTR[4+r12]
+ xor r10d,DWORD PTR[8+r12]
+ bswap r8d
+ xor r11d,DWORD PTR[12+r12]
+ bswap r9d
+ bswap r10d
+ bswap r11d
+
+ call _x86_64_Camellia_encrypt
+
+ mov r14,QWORD PTR[rsp]
+ bswap r8d
+ mov rdx,QWORD PTR[8+rsp]
+ bswap r9d
+ mov rcx,QWORD PTR[16+rsp]
+ bswap r10d
+ mov DWORD PTR[r13],r8d
+ bswap r11d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ lea r12,QWORD PTR[16+r12]
+ mov DWORD PTR[12+r13],r11d
+ cmp r12,rdx
+ lea r13,QWORD PTR[16+r13]
+ jne $L$cbc_eloop
+
+ cmp rcx,0
+ jne $L$cbc_enc_tail
+
+ mov r13,QWORD PTR[40+rsp]
+ mov DWORD PTR[r13],r8d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ mov DWORD PTR[12+r13],r11d
+ jmp $L$cbc_done
+
+ALIGN 16
+$L$cbc_enc_tail::
+ xor rax,rax
+ mov QWORD PTR[((0+24))+rsp],rax
+ mov QWORD PTR[((8+24))+rsp],rax
+ mov QWORD PTR[16+rsp],rax
+
+$L$cbc_enc_pushf::
+ pushfq
+ cld
+ mov rsi,r12
+ lea rdi,QWORD PTR[((8+24))+rsp]
+ DD 09066A4F3h
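+	;the DD above encodes rep movsb (0F3h,0A4h) plus a 2-byte nop; it
+	;copies the rcx remaining tail bytes into the zeroed stack buffer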
+
+ popfq
+$L$cbc_enc_popf::
+
+ lea r12,QWORD PTR[24+rsp]
+ lea rax,QWORD PTR[((16+24))+rsp]
+ mov QWORD PTR[8+rsp],rax
+ jmp $L$cbc_eloop
+
+
+ALIGN 16
+$L$CBC_DECRYPT::
+ xchg r15,r14
+ add rdx,15
+ and rcx,15
+ and rdx,-16
+ mov QWORD PTR[rsp],r14
+ lea rdx,QWORD PTR[rdx*1+r12]
+ mov QWORD PTR[8+rsp],rdx
+ mov QWORD PTR[16+rsp],rcx
+
+ mov rax,QWORD PTR[rbx]
+ mov rbx,QWORD PTR[8+rbx]
+ jmp $L$cbc_dloop
+ALIGN 16
+$L$cbc_dloop::
+ mov r8d,DWORD PTR[r12]
+ mov r9d,DWORD PTR[4+r12]
+ mov r10d,DWORD PTR[8+r12]
+ bswap r8d
+ mov r11d,DWORD PTR[12+r12]
+ bswap r9d
+ mov QWORD PTR[((0+24))+rsp],rax
+ bswap r10d
+ mov QWORD PTR[((8+24))+rsp],rbx
+ bswap r11d
+
+ call _x86_64_Camellia_decrypt
+
+ mov r14,QWORD PTR[rsp]
+ mov rdx,QWORD PTR[8+rsp]
+ mov rcx,QWORD PTR[16+rsp]
+
+ bswap r8d
+ mov rax,QWORD PTR[r12]
+ bswap r9d
+ mov rbx,QWORD PTR[8+r12]
+ bswap r10d
+ xor r8d,DWORD PTR[((0+24))+rsp]
+ bswap r11d
+ xor r9d,DWORD PTR[((4+24))+rsp]
+ xor r10d,DWORD PTR[((8+24))+rsp]
+ lea r12,QWORD PTR[16+r12]
+ xor r11d,DWORD PTR[((12+24))+rsp]
+ cmp r12,rdx
+ je $L$cbc_ddone
+
+ mov DWORD PTR[r13],r8d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ mov DWORD PTR[12+r13],r11d
+
+ lea r13,QWORD PTR[16+r13]
+ jmp $L$cbc_dloop
+
+ALIGN 16
+$L$cbc_ddone::
+ mov rdx,QWORD PTR[40+rsp]
+ cmp rcx,0
+ jne $L$cbc_dec_tail
+
+ mov DWORD PTR[r13],r8d
+ mov DWORD PTR[4+r13],r9d
+ mov DWORD PTR[8+r13],r10d
+ mov DWORD PTR[12+r13],r11d
+
+ mov QWORD PTR[rdx],rax
+ mov QWORD PTR[8+rdx],rbx
+ jmp $L$cbc_done
+ALIGN 16
+$L$cbc_dec_tail::
+ mov DWORD PTR[((0+24))+rsp],r8d
+ mov DWORD PTR[((4+24))+rsp],r9d
+ mov DWORD PTR[((8+24))+rsp],r10d
+ mov DWORD PTR[((12+24))+rsp],r11d
+
+$L$cbc_dec_pushf::
+ pushfq
+ cld
+ lea rsi,QWORD PTR[((8+24))+rsp]
+ lea rdi,QWORD PTR[r13]
+ DD 09066A4F3h
+
+ popfq
+$L$cbc_dec_popf::
+
+ mov QWORD PTR[rdx],rax
+ mov QWORD PTR[8+rdx],rbx
+ jmp $L$cbc_done
+
+ALIGN 16
+$L$cbc_done::
+ mov rcx,QWORD PTR[48+rsp]
+ mov r15,QWORD PTR[rcx]
+ mov r14,QWORD PTR[8+rcx]
+ mov r13,QWORD PTR[16+rcx]
+ mov r12,QWORD PTR[24+rcx]
+ mov rbp,QWORD PTR[32+rcx]
+ mov rbx,QWORD PTR[40+rcx]
+ lea rsp,QWORD PTR[48+rcx]
+$L$cbc_abort::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_Camellia_cbc_encrypt::
+Camellia_cbc_encrypt ENDP
+
+DB 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+DB 95,54,52,32,98,121,32,60,97,112,112,114,111,64,111,112
+DB 101,110,115,115,108,46,111,114,103,62,0
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+common_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ lea rsp,QWORD PTR[((-64))+rsp]
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ mov rsi,QWORD PTR[8+r9]
+ mov r11,QWORD PTR[56+r9]
+
+ mov r10d,DWORD PTR[r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ mov r10d,DWORD PTR[4+r11]
+ lea r10,QWORD PTR[r10*1+rsi]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ lea rax,QWORD PTR[40+rax]
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r13,QWORD PTR[((-24))+rax]
+ mov r14,QWORD PTR[((-32))+rax]
+ mov r15,QWORD PTR[((-40))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ jmp $L$common_seh_exit
+common_se_handler ENDP
+
+
+ALIGN 16
+cbc_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ lea rsp,QWORD PTR[((-64))+rsp]
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$cbc_prologue]
+ cmp rbx,r10
+ jb $L$in_cbc_prologue
+
+ lea r10,QWORD PTR[$L$cbc_body]
+ cmp rbx,r10
+ jb $L$in_cbc_frame_setup
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$cbc_abort]
+ cmp rbx,r10
+ jae $L$in_cbc_prologue
+
+
+ lea r10,QWORD PTR[$L$cbc_enc_pushf]
+ cmp rbx,r10
+ jbe $L$in_cbc_no_flag
+ lea rax,QWORD PTR[8+rax]
+ lea r10,QWORD PTR[$L$cbc_enc_popf]
+ cmp rbx,r10
+ jb $L$in_cbc_no_flag
+ lea rax,QWORD PTR[((-8))+rax]
+ lea r10,QWORD PTR[$L$cbc_dec_pushf]
+ cmp rbx,r10
+ jbe $L$in_cbc_no_flag
+ lea rax,QWORD PTR[8+rax]
+ lea r10,QWORD PTR[$L$cbc_dec_popf]
+ cmp rbx,r10
+ jb $L$in_cbc_no_flag
+ lea rax,QWORD PTR[((-8))+rax]
+
+$L$in_cbc_no_flag::
+ mov rax,QWORD PTR[48+rax]
+ lea rax,QWORD PTR[48+rax]
+
+$L$in_cbc_frame_setup::
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_cbc_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ALIGN 4
+$L$common_seh_exit::
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
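+	;the DD above encodes cld; rep movsq (0FCh,0F3h,048h,0A5h), copying
+	;this CONTEXT (154 qwords) over the dispatcher's ContextRecord in rdi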
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ lea rsp,QWORD PTR[64+rsp]
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+cbc_se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_Camellia_EncryptBlock_Rounds
+ DD imagerel $L$SEH_end_Camellia_EncryptBlock_Rounds
+ DD imagerel $L$SEH_info_Camellia_EncryptBlock_Rounds
+
+ DD imagerel $L$SEH_begin_Camellia_DecryptBlock_Rounds
+ DD imagerel $L$SEH_end_Camellia_DecryptBlock_Rounds
+ DD imagerel $L$SEH_info_Camellia_DecryptBlock_Rounds
+
+ DD imagerel $L$SEH_begin_Camellia_Ekeygen
+ DD imagerel $L$SEH_end_Camellia_Ekeygen
+ DD imagerel $L$SEH_info_Camellia_Ekeygen
+
+ DD imagerel $L$SEH_begin_Camellia_cbc_encrypt
+ DD imagerel $L$SEH_end_Camellia_cbc_encrypt
+ DD imagerel $L$SEH_info_Camellia_cbc_encrypt
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_Camellia_EncryptBlock_Rounds::
+DB 9,0,0,0
+ DD imagerel common_se_handler
+ DD imagerel $L$enc_prologue,imagerel $L$enc_epilogue
+
+$L$SEH_info_Camellia_DecryptBlock_Rounds::
+DB 9,0,0,0
+ DD imagerel common_se_handler
+ DD imagerel $L$dec_prologue,imagerel $L$dec_epilogue
+
+$L$SEH_info_Camellia_Ekeygen::
+DB 9,0,0,0
+ DD imagerel common_se_handler
+ DD imagerel $L$key_prologue,imagerel $L$key_epilogue
+
+$L$SEH_info_Camellia_cbc_encrypt::
+DB 9,0,0,0
+ DD imagerel cbc_se_handler
+
+.xdata ENDS
+END
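For orientation: Camellia_cbc_encrypt above is machine-generated, but the CBC
chaining its main loops unroll is the standard construction. A minimal C sketch
follows -- camellia_encrypt_block is a hypothetical stand-in for the 16-byte
block primitive implemented in the assembly, not an OpenSSL API:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Hypothetical stand-in for the block primitive implemented above. */
    void camellia_encrypt_block(const uint8_t in[16], uint8_t out[16],
                                const void *key);

    /* CBC encryption of len bytes (a multiple of 16): each plaintext block
     * is XORed with the previous ciphertext block (the IV for the first
     * block) before going through the cipher; ciphertext feeds forward. */
    void cbc_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                     const void *key, uint8_t iv[16])
    {
        uint8_t chain[16];
        memcpy(chain, iv, 16);
        for (size_t off = 0; off + 16 <= len; off += 16) {
            uint8_t blk[16];
            for (int i = 0; i < 16; i++)
                blk[i] = in[off + i] ^ chain[i];   /* chain previous block */
            camellia_encrypt_block(blk, out + off, key);
            memcpy(chain, out + off, 16);          /* ciphertext feeds forward */
        }
        memcpy(iv, chain, 16);   /* hand the updated IV back to the caller */
    }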
diff --git a/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm b/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
new file mode 100644
index 0000000000..34305c687c
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
@@ -0,0 +1,781 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+ALIGN 16
+
+PUBLIC md5_block_asm_data_order
+
+md5_block_asm_data_order PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_md5_block_asm_data_order::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbp
+ push rbx
+ push r12
+ push r14
+ push r15
+$L$prologue::
+
+
+
+
+ mov rbp,rdi
+ shl rdx,6
+ lea rdi,QWORD PTR[rdx*1+rsi]
+ mov eax,DWORD PTR[((0*4))+rbp]
+ mov ebx,DWORD PTR[((1*4))+rbp]
+ mov ecx,DWORD PTR[((2*4))+rbp]
+ mov edx,DWORD PTR[((3*4))+rbp]
+
+
+
+
+
+
+
+ cmp rsi,rdi
+ je $L$end
+
+
+
+$L$loop::
+ mov r8d,eax
+ mov r9d,ebx
+ mov r14d,ecx
+ mov r15d,edx
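+	;r8d/r9d/r14d/r15d preserve the incoming A..D for the feed-forward
+	;adds after the 64 rounds, at the end of $L$loop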
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ mov r11d,edx
+ xor r11d,ecx
+ lea eax,DWORD PTR[0d76aa478h+r10*1+rax]
+ and r11d,ebx
+ xor r11d,edx
+ mov r10d,DWORD PTR[((1*4))+rsi]
+ add eax,r11d
+ rol eax,7
+ mov r11d,ecx
+ add eax,ebx
+ xor r11d,ebx
+ lea edx,DWORD PTR[0e8c7b756h+r10*1+rdx]
+ and r11d,eax
+ xor r11d,ecx
+ mov r10d,DWORD PTR[((2*4))+rsi]
+ add edx,r11d
+ rol edx,12
+ mov r11d,ebx
+ add edx,eax
+ xor r11d,eax
+ lea ecx,DWORD PTR[0242070dbh+r10*1+rcx]
+ and r11d,edx
+ xor r11d,ebx
+ mov r10d,DWORD PTR[((3*4))+rsi]
+ add ecx,r11d
+ rol ecx,17
+ mov r11d,eax
+ add ecx,edx
+ xor r11d,edx
+ lea ebx,DWORD PTR[0c1bdceeeh+r10*1+rbx]
+ and r11d,ecx
+ xor r11d,eax
+ mov r10d,DWORD PTR[((4*4))+rsi]
+ add ebx,r11d
+ rol ebx,22
+ mov r11d,edx
+ add ebx,ecx
+ xor r11d,ecx
+ lea eax,DWORD PTR[0f57c0fafh+r10*1+rax]
+ and r11d,ebx
+ xor r11d,edx
+ mov r10d,DWORD PTR[((5*4))+rsi]
+ add eax,r11d
+ rol eax,7
+ mov r11d,ecx
+ add eax,ebx
+ xor r11d,ebx
+ lea edx,DWORD PTR[04787c62ah+r10*1+rdx]
+ and r11d,eax
+ xor r11d,ecx
+ mov r10d,DWORD PTR[((6*4))+rsi]
+ add edx,r11d
+ rol edx,12
+ mov r11d,ebx
+ add edx,eax
+ xor r11d,eax
+ lea ecx,DWORD PTR[0a8304613h+r10*1+rcx]
+ and r11d,edx
+ xor r11d,ebx
+ mov r10d,DWORD PTR[((7*4))+rsi]
+ add ecx,r11d
+ rol ecx,17
+ mov r11d,eax
+ add ecx,edx
+ xor r11d,edx
+ lea ebx,DWORD PTR[0fd469501h+r10*1+rbx]
+ and r11d,ecx
+ xor r11d,eax
+ mov r10d,DWORD PTR[((8*4))+rsi]
+ add ebx,r11d
+ rol ebx,22
+ mov r11d,edx
+ add ebx,ecx
+ xor r11d,ecx
+ lea eax,DWORD PTR[0698098d8h+r10*1+rax]
+ and r11d,ebx
+ xor r11d,edx
+ mov r10d,DWORD PTR[((9*4))+rsi]
+ add eax,r11d
+ rol eax,7
+ mov r11d,ecx
+ add eax,ebx
+ xor r11d,ebx
+ lea edx,DWORD PTR[08b44f7afh+r10*1+rdx]
+ and r11d,eax
+ xor r11d,ecx
+ mov r10d,DWORD PTR[((10*4))+rsi]
+ add edx,r11d
+ rol edx,12
+ mov r11d,ebx
+ add edx,eax
+ xor r11d,eax
+ lea ecx,DWORD PTR[0ffff5bb1h+r10*1+rcx]
+ and r11d,edx
+ xor r11d,ebx
+ mov r10d,DWORD PTR[((11*4))+rsi]
+ add ecx,r11d
+ rol ecx,17
+ mov r11d,eax
+ add ecx,edx
+ xor r11d,edx
+ lea ebx,DWORD PTR[0895cd7beh+r10*1+rbx]
+ and r11d,ecx
+ xor r11d,eax
+ mov r10d,DWORD PTR[((12*4))+rsi]
+ add ebx,r11d
+ rol ebx,22
+ mov r11d,edx
+ add ebx,ecx
+ xor r11d,ecx
+ lea eax,DWORD PTR[06b901122h+r10*1+rax]
+ and r11d,ebx
+ xor r11d,edx
+ mov r10d,DWORD PTR[((13*4))+rsi]
+ add eax,r11d
+ rol eax,7
+ mov r11d,ecx
+ add eax,ebx
+ xor r11d,ebx
+ lea edx,DWORD PTR[0fd987193h+r10*1+rdx]
+ and r11d,eax
+ xor r11d,ecx
+ mov r10d,DWORD PTR[((14*4))+rsi]
+ add edx,r11d
+ rol edx,12
+ mov r11d,ebx
+ add edx,eax
+ xor r11d,eax
+ lea ecx,DWORD PTR[0a679438eh+r10*1+rcx]
+ and r11d,edx
+ xor r11d,ebx
+ mov r10d,DWORD PTR[((15*4))+rsi]
+ add ecx,r11d
+ rol ecx,17
+ mov r11d,eax
+ add ecx,edx
+ xor r11d,edx
+ lea ebx,DWORD PTR[049b40821h+r10*1+rbx]
+ and r11d,ecx
+ xor r11d,eax
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ add ebx,r11d
+ rol ebx,22
+ mov r11d,edx
+ add ebx,ecx
+ mov r10d,DWORD PTR[((1*4))+rsi]
+ mov r11d,edx
+ mov r12d,edx
+ not r11d
+ lea eax,DWORD PTR[0f61e2562h+r10*1+rax]
+ and r12d,ebx
+ and r11d,ecx
+ mov r10d,DWORD PTR[((6*4))+rsi]
+ or r12d,r11d
+ mov r11d,ecx
+ add eax,r12d
+ mov r12d,ecx
+ rol eax,5
+ add eax,ebx
+ not r11d
+ lea edx,DWORD PTR[0c040b340h+r10*1+rdx]
+ and r12d,eax
+ and r11d,ebx
+ mov r10d,DWORD PTR[((11*4))+rsi]
+ or r12d,r11d
+ mov r11d,ebx
+ add edx,r12d
+ mov r12d,ebx
+ rol edx,9
+ add edx,eax
+ not r11d
+ lea ecx,DWORD PTR[0265e5a51h+r10*1+rcx]
+ and r12d,edx
+ and r11d,eax
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ or r12d,r11d
+ mov r11d,eax
+ add ecx,r12d
+ mov r12d,eax
+ rol ecx,14
+ add ecx,edx
+ not r11d
+ lea ebx,DWORD PTR[0e9b6c7aah+r10*1+rbx]
+ and r12d,ecx
+ and r11d,edx
+ mov r10d,DWORD PTR[((5*4))+rsi]
+ or r12d,r11d
+ mov r11d,edx
+ add ebx,r12d
+ mov r12d,edx
+ rol ebx,20
+ add ebx,ecx
+ not r11d
+ lea eax,DWORD PTR[0d62f105dh+r10*1+rax]
+ and r12d,ebx
+ and r11d,ecx
+ mov r10d,DWORD PTR[((10*4))+rsi]
+ or r12d,r11d
+ mov r11d,ecx
+ add eax,r12d
+ mov r12d,ecx
+ rol eax,5
+ add eax,ebx
+ not r11d
+ lea edx,DWORD PTR[02441453h+r10*1+rdx]
+ and r12d,eax
+ and r11d,ebx
+ mov r10d,DWORD PTR[((15*4))+rsi]
+ or r12d,r11d
+ mov r11d,ebx
+ add edx,r12d
+ mov r12d,ebx
+ rol edx,9
+ add edx,eax
+ not r11d
+ lea ecx,DWORD PTR[0d8a1e681h+r10*1+rcx]
+ and r12d,edx
+ and r11d,eax
+ mov r10d,DWORD PTR[((4*4))+rsi]
+ or r12d,r11d
+ mov r11d,eax
+ add ecx,r12d
+ mov r12d,eax
+ rol ecx,14
+ add ecx,edx
+ not r11d
+ lea ebx,DWORD PTR[0e7d3fbc8h+r10*1+rbx]
+ and r12d,ecx
+ and r11d,edx
+ mov r10d,DWORD PTR[((9*4))+rsi]
+ or r12d,r11d
+ mov r11d,edx
+ add ebx,r12d
+ mov r12d,edx
+ rol ebx,20
+ add ebx,ecx
+ not r11d
+ lea eax,DWORD PTR[021e1cde6h+r10*1+rax]
+ and r12d,ebx
+ and r11d,ecx
+ mov r10d,DWORD PTR[((14*4))+rsi]
+ or r12d,r11d
+ mov r11d,ecx
+ add eax,r12d
+ mov r12d,ecx
+ rol eax,5
+ add eax,ebx
+ not r11d
+ lea edx,DWORD PTR[0c33707d6h+r10*1+rdx]
+ and r12d,eax
+ and r11d,ebx
+ mov r10d,DWORD PTR[((3*4))+rsi]
+ or r12d,r11d
+ mov r11d,ebx
+ add edx,r12d
+ mov r12d,ebx
+ rol edx,9
+ add edx,eax
+ not r11d
+ lea ecx,DWORD PTR[0f4d50d87h+r10*1+rcx]
+ and r12d,edx
+ and r11d,eax
+ mov r10d,DWORD PTR[((8*4))+rsi]
+ or r12d,r11d
+ mov r11d,eax
+ add ecx,r12d
+ mov r12d,eax
+ rol ecx,14
+ add ecx,edx
+ not r11d
+ lea ebx,DWORD PTR[0455a14edh+r10*1+rbx]
+ and r12d,ecx
+ and r11d,edx
+ mov r10d,DWORD PTR[((13*4))+rsi]
+ or r12d,r11d
+ mov r11d,edx
+ add ebx,r12d
+ mov r12d,edx
+ rol ebx,20
+ add ebx,ecx
+ not r11d
+ lea eax,DWORD PTR[0a9e3e905h+r10*1+rax]
+ and r12d,ebx
+ and r11d,ecx
+ mov r10d,DWORD PTR[((2*4))+rsi]
+ or r12d,r11d
+ mov r11d,ecx
+ add eax,r12d
+ mov r12d,ecx
+ rol eax,5
+ add eax,ebx
+ not r11d
+ lea edx,DWORD PTR[0fcefa3f8h+r10*1+rdx]
+ and r12d,eax
+ and r11d,ebx
+ mov r10d,DWORD PTR[((7*4))+rsi]
+ or r12d,r11d
+ mov r11d,ebx
+ add edx,r12d
+ mov r12d,ebx
+ rol edx,9
+ add edx,eax
+ not r11d
+ lea ecx,DWORD PTR[0676f02d9h+r10*1+rcx]
+ and r12d,edx
+ and r11d,eax
+ mov r10d,DWORD PTR[((12*4))+rsi]
+ or r12d,r11d
+ mov r11d,eax
+ add ecx,r12d
+ mov r12d,eax
+ rol ecx,14
+ add ecx,edx
+ not r11d
+ lea ebx,DWORD PTR[08d2a4c8ah+r10*1+rbx]
+ and r12d,ecx
+ and r11d,edx
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ or r12d,r11d
+ mov r11d,edx
+ add ebx,r12d
+ mov r12d,edx
+ rol ebx,20
+ add ebx,ecx
+ mov r10d,DWORD PTR[((5*4))+rsi]
+ mov r11d,ecx
+ lea eax,DWORD PTR[0fffa3942h+r10*1+rax]
+ mov r10d,DWORD PTR[((8*4))+rsi]
+ xor r11d,edx
+ xor r11d,ebx
+ add eax,r11d
+ rol eax,4
+ mov r11d,ebx
+ add eax,ebx
+ lea edx,DWORD PTR[08771f681h+r10*1+rdx]
+ mov r10d,DWORD PTR[((11*4))+rsi]
+ xor r11d,ecx
+ xor r11d,eax
+ add edx,r11d
+ rol edx,11
+ mov r11d,eax
+ add edx,eax
+ lea ecx,DWORD PTR[06d9d6122h+r10*1+rcx]
+ mov r10d,DWORD PTR[((14*4))+rsi]
+ xor r11d,ebx
+ xor r11d,edx
+ add ecx,r11d
+ rol ecx,16
+ mov r11d,edx
+ add ecx,edx
+ lea ebx,DWORD PTR[0fde5380ch+r10*1+rbx]
+ mov r10d,DWORD PTR[((1*4))+rsi]
+ xor r11d,eax
+ xor r11d,ecx
+ add ebx,r11d
+ rol ebx,23
+ mov r11d,ecx
+ add ebx,ecx
+ lea eax,DWORD PTR[0a4beea44h+r10*1+rax]
+ mov r10d,DWORD PTR[((4*4))+rsi]
+ xor r11d,edx
+ xor r11d,ebx
+ add eax,r11d
+ rol eax,4
+ mov r11d,ebx
+ add eax,ebx
+ lea edx,DWORD PTR[04bdecfa9h+r10*1+rdx]
+ mov r10d,DWORD PTR[((7*4))+rsi]
+ xor r11d,ecx
+ xor r11d,eax
+ add edx,r11d
+ rol edx,11
+ mov r11d,eax
+ add edx,eax
+ lea ecx,DWORD PTR[0f6bb4b60h+r10*1+rcx]
+ mov r10d,DWORD PTR[((10*4))+rsi]
+ xor r11d,ebx
+ xor r11d,edx
+ add ecx,r11d
+ rol ecx,16
+ mov r11d,edx
+ add ecx,edx
+ lea ebx,DWORD PTR[0bebfbc70h+r10*1+rbx]
+ mov r10d,DWORD PTR[((13*4))+rsi]
+ xor r11d,eax
+ xor r11d,ecx
+ add ebx,r11d
+ rol ebx,23
+ mov r11d,ecx
+ add ebx,ecx
+ lea eax,DWORD PTR[0289b7ec6h+r10*1+rax]
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ xor r11d,edx
+ xor r11d,ebx
+ add eax,r11d
+ rol eax,4
+ mov r11d,ebx
+ add eax,ebx
+ lea edx,DWORD PTR[0eaa127fah+r10*1+rdx]
+ mov r10d,DWORD PTR[((3*4))+rsi]
+ xor r11d,ecx
+ xor r11d,eax
+ add edx,r11d
+ rol edx,11
+ mov r11d,eax
+ add edx,eax
+ lea ecx,DWORD PTR[0d4ef3085h+r10*1+rcx]
+ mov r10d,DWORD PTR[((6*4))+rsi]
+ xor r11d,ebx
+ xor r11d,edx
+ add ecx,r11d
+ rol ecx,16
+ mov r11d,edx
+ add ecx,edx
+ lea ebx,DWORD PTR[04881d05h+r10*1+rbx]
+ mov r10d,DWORD PTR[((9*4))+rsi]
+ xor r11d,eax
+ xor r11d,ecx
+ add ebx,r11d
+ rol ebx,23
+ mov r11d,ecx
+ add ebx,ecx
+ lea eax,DWORD PTR[0d9d4d039h+r10*1+rax]
+ mov r10d,DWORD PTR[((12*4))+rsi]
+ xor r11d,edx
+ xor r11d,ebx
+ add eax,r11d
+ rol eax,4
+ mov r11d,ebx
+ add eax,ebx
+ lea edx,DWORD PTR[0e6db99e5h+r10*1+rdx]
+ mov r10d,DWORD PTR[((15*4))+rsi]
+ xor r11d,ecx
+ xor r11d,eax
+ add edx,r11d
+ rol edx,11
+ mov r11d,eax
+ add edx,eax
+ lea ecx,DWORD PTR[01fa27cf8h+r10*1+rcx]
+ mov r10d,DWORD PTR[((2*4))+rsi]
+ xor r11d,ebx
+ xor r11d,edx
+ add ecx,r11d
+ rol ecx,16
+ mov r11d,edx
+ add ecx,edx
+ lea ebx,DWORD PTR[0c4ac5665h+r10*1+rbx]
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ xor r11d,eax
+ xor r11d,ecx
+ add ebx,r11d
+ rol ebx,23
+ mov r11d,ecx
+ add ebx,ecx
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ mov r11d,0ffffffffh
+ xor r11d,edx
+ lea eax,DWORD PTR[0f4292244h+r10*1+rax]
+ or r11d,ebx
+ xor r11d,ecx
+ add eax,r11d
+ mov r10d,DWORD PTR[((7*4))+rsi]
+ mov r11d,0ffffffffh
+ rol eax,6
+ xor r11d,ecx
+ add eax,ebx
+ lea edx,DWORD PTR[0432aff97h+r10*1+rdx]
+ or r11d,eax
+ xor r11d,ebx
+ add edx,r11d
+ mov r10d,DWORD PTR[((14*4))+rsi]
+ mov r11d,0ffffffffh
+ rol edx,10
+ xor r11d,ebx
+ add edx,eax
+ lea ecx,DWORD PTR[0ab9423a7h+r10*1+rcx]
+ or r11d,edx
+ xor r11d,eax
+ add ecx,r11d
+ mov r10d,DWORD PTR[((5*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ecx,15
+ xor r11d,eax
+ add ecx,edx
+ lea ebx,DWORD PTR[0fc93a039h+r10*1+rbx]
+ or r11d,ecx
+ xor r11d,edx
+ add ebx,r11d
+ mov r10d,DWORD PTR[((12*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ebx,21
+ xor r11d,edx
+ add ebx,ecx
+ lea eax,DWORD PTR[0655b59c3h+r10*1+rax]
+ or r11d,ebx
+ xor r11d,ecx
+ add eax,r11d
+ mov r10d,DWORD PTR[((3*4))+rsi]
+ mov r11d,0ffffffffh
+ rol eax,6
+ xor r11d,ecx
+ add eax,ebx
+ lea edx,DWORD PTR[08f0ccc92h+r10*1+rdx]
+ or r11d,eax
+ xor r11d,ebx
+ add edx,r11d
+ mov r10d,DWORD PTR[((10*4))+rsi]
+ mov r11d,0ffffffffh
+ rol edx,10
+ xor r11d,ebx
+ add edx,eax
+ lea ecx,DWORD PTR[0ffeff47dh+r10*1+rcx]
+ or r11d,edx
+ xor r11d,eax
+ add ecx,r11d
+ mov r10d,DWORD PTR[((1*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ecx,15
+ xor r11d,eax
+ add ecx,edx
+ lea ebx,DWORD PTR[085845dd1h+r10*1+rbx]
+ or r11d,ecx
+ xor r11d,edx
+ add ebx,r11d
+ mov r10d,DWORD PTR[((8*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ebx,21
+ xor r11d,edx
+ add ebx,ecx
+ lea eax,DWORD PTR[06fa87e4fh+r10*1+rax]
+ or r11d,ebx
+ xor r11d,ecx
+ add eax,r11d
+ mov r10d,DWORD PTR[((15*4))+rsi]
+ mov r11d,0ffffffffh
+ rol eax,6
+ xor r11d,ecx
+ add eax,ebx
+ lea edx,DWORD PTR[0fe2ce6e0h+r10*1+rdx]
+ or r11d,eax
+ xor r11d,ebx
+ add edx,r11d
+ mov r10d,DWORD PTR[((6*4))+rsi]
+ mov r11d,0ffffffffh
+ rol edx,10
+ xor r11d,ebx
+ add edx,eax
+ lea ecx,DWORD PTR[0a3014314h+r10*1+rcx]
+ or r11d,edx
+ xor r11d,eax
+ add ecx,r11d
+ mov r10d,DWORD PTR[((13*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ecx,15
+ xor r11d,eax
+ add ecx,edx
+ lea ebx,DWORD PTR[04e0811a1h+r10*1+rbx]
+ or r11d,ecx
+ xor r11d,edx
+ add ebx,r11d
+ mov r10d,DWORD PTR[((4*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ebx,21
+ xor r11d,edx
+ add ebx,ecx
+ lea eax,DWORD PTR[0f7537e82h+r10*1+rax]
+ or r11d,ebx
+ xor r11d,ecx
+ add eax,r11d
+ mov r10d,DWORD PTR[((11*4))+rsi]
+ mov r11d,0ffffffffh
+ rol eax,6
+ xor r11d,ecx
+ add eax,ebx
+ lea edx,DWORD PTR[0bd3af235h+r10*1+rdx]
+ or r11d,eax
+ xor r11d,ebx
+ add edx,r11d
+ mov r10d,DWORD PTR[((2*4))+rsi]
+ mov r11d,0ffffffffh
+ rol edx,10
+ xor r11d,ebx
+ add edx,eax
+ lea ecx,DWORD PTR[02ad7d2bbh+r10*1+rcx]
+ or r11d,edx
+ xor r11d,eax
+ add ecx,r11d
+ mov r10d,DWORD PTR[((9*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ecx,15
+ xor r11d,eax
+ add ecx,edx
+ lea ebx,DWORD PTR[0eb86d391h+r10*1+rbx]
+ or r11d,ecx
+ xor r11d,edx
+ add ebx,r11d
+ mov r10d,DWORD PTR[((0*4))+rsi]
+ mov r11d,0ffffffffh
+ rol ebx,21
+ xor r11d,edx
+ add ebx,ecx
+
+ add eax,r8d
+ add ebx,r9d
+ add ecx,r14d
+ add edx,r15d
+
+
+ add rsi,64
+ cmp rsi,rdi
+ jb $L$loop
+
+
+
+$L$end::
+ mov DWORD PTR[((0*4))+rbp],eax
+ mov DWORD PTR[((1*4))+rbp],ebx
+ mov DWORD PTR[((2*4))+rbp],ecx
+ mov DWORD PTR[((3*4))+rbp],edx
+
+ mov r15,QWORD PTR[rsp]
+ mov r14,QWORD PTR[8+rsp]
+ mov r12,QWORD PTR[16+rsp]
+ mov rbx,QWORD PTR[24+rsp]
+ mov rbp,QWORD PTR[32+rsp]
+ add rsp,40
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_md5_block_asm_data_order::
+md5_block_asm_data_order ENDP
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ lea rax,QWORD PTR[40+rax]
+
+ mov rbp,QWORD PTR[((-8))+rax]
+ mov rbx,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r14,QWORD PTR[((-32))+rax]
+ mov r15,QWORD PTR[((-40))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_md5_block_asm_data_order
+ DD imagerel $L$SEH_end_md5_block_asm_data_order
+ DD imagerel $L$SEH_info_md5_block_asm_data_order
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_md5_block_asm_data_order::
+DB 9,0,0,0
+ DD imagerel se_handler
+
+.xdata ENDS
+END
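For orientation: each mov/xor/and/lea/rol cluster above is one MD5 round. The
first sixteen compute F(b,c,d) = (b & c) | (~b & d) via the equivalent
((c ^ d) & b) ^ d, saving one operation. A C sketch of that round under the
usual MD5 definitions (md5_f_round is illustrative, not from this file):

    #include <stdint.h>

    static uint32_t rol32(uint32_t x, int n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* One F-round: a = b + rol(a + F(b,c,d) + m + k, s), with F computed
     * as ((c ^ d) & b) ^ d -- the same xor/and/xor the assembly emits. */
    static uint32_t md5_f_round(uint32_t a, uint32_t b, uint32_t c, uint32_t d,
                                uint32_t m, uint32_t k, int s)
    {
        a += (((c ^ d) & b) ^ d) + m + k;
        return b + rol32(a, s);
    }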
diff --git a/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm b/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
new file mode 100644
index 0000000000..f508fa6679
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
@@ -0,0 +1,586 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+PUBLIC RC4
+
+ALIGN 16
+RC4 PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_RC4::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+
+ or rsi,rsi
+ jne $L$entry
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$entry::
+ push rbx
+ push r12
+ push r13
+$L$prologue::
+
+ add rdi,8
+ mov r8d,DWORD PTR[((-8))+rdi]
+ mov r12d,DWORD PTR[((-4))+rdi]
+ cmp DWORD PTR[256+rdi],-1
+ je $L$RC4_CHAR
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ test rsi,-8
+ jz $L$loop1
+ jmp $L$loop8
+ALIGN 16
+$L$loop8::
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
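+	;when j (r12) equals the incremented i (r10), the store above has just
+	;replaced the value preloaded into r11d; cmove patches it with r9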
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ ror rax,8
+ sub rsi,8
+
+ xor rax,QWORD PTR[rdx]
+ add rdx,8
+ mov QWORD PTR[rcx],rax
+ add rcx,8
+
+ test rsi,-8
+ jnz $L$loop8
+ cmp rsi,0
+ jne $L$loop1
+ jmp $L$exit
+
+ALIGN 16
+$L$loop1::
+ add r12b,r9b
+ mov r13d,DWORD PTR[r12*4+rdi]
+ mov DWORD PTR[r12*4+rdi],r9d
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r9b,r13b
+ inc r8b
+ mov r13d,DWORD PTR[r9*4+rdi]
+ mov r9d,DWORD PTR[r8*4+rdi]
+ xor r13b,BYTE PTR[rdx]
+ inc rdx
+ mov BYTE PTR[rcx],r13b
+ inc rcx
+ dec rsi
+ jnz $L$loop1
+ jmp $L$exit
+
+ALIGN 16
+$L$RC4_CHAR::
+ add r8b,1
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ test rsi,-8
+ jz $L$cloop1
+ cmp DWORD PTR[260+rdi],0
+ jnz $L$cloop1
+ jmp $L$cloop8
+ALIGN 16
+$L$cloop8::
+ mov eax,DWORD PTR[rdx]
+ mov ebx,DWORD PTR[4+rdx]
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
+ jne $L$cmov0
+
+ mov r11,r9
+$L$cmov0::
+ add r13b,r9b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
+ jne $L$cmov1
+
+ mov r9,r11
+$L$cmov1::
+ add r13b,r11b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
+ jne $L$cmov2
+
+ mov r11,r9
+$L$cmov2::
+ add r13b,r9b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
+ jne $L$cmov3
+
+ mov r9,r11
+$L$cmov3::
+ add r13b,r11b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
+ jne $L$cmov4
+
+ mov r11,r9
+$L$cmov4::
+ add r13b,r9b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
+ jne $L$cmov5
+
+ mov r9,r11
+$L$cmov5::
+ add r13b,r11b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
+ jne $L$cmov6
+
+ mov r11,r9
+$L$cmov6::
+ add r13b,r9b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
+ jne $L$cmov7
+
+ mov r9,r11
+$L$cmov7::
+ add r13b,r11b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ lea rsi,QWORD PTR[((-8))+rsi]
+ mov DWORD PTR[rcx],eax
+ lea rdx,QWORD PTR[8+rdx]
+ mov DWORD PTR[4+rcx],ebx
+ lea rcx,QWORD PTR[8+rcx]
+
+ test rsi,-8
+ jnz $L$cloop8
+ cmp rsi,0
+ jne $L$cloop1
+ jmp $L$exit
+ALIGN 16
+$L$cloop1::
+ add r12b,r9b
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ mov BYTE PTR[r8*1+rdi],r13b
+ add r13b,r9b
+ add r8b,1
+ movzx r13d,r13b
+ movzx r8d,r8b
+ movzx r13d,BYTE PTR[r13*1+rdi]
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ xor r13b,BYTE PTR[rdx]
+ lea rdx,QWORD PTR[1+rdx]
+ mov BYTE PTR[rcx],r13b
+ lea rcx,QWORD PTR[1+rcx]
+ sub rsi,1
+ jnz $L$cloop1
+ jmp $L$exit
+
+ALIGN 16
+$L$exit::
+ sub r8b,1
+ mov DWORD PTR[((-8))+rdi],r8d
+ mov DWORD PTR[((-4))+rdi],r12d
+
+ mov r13,QWORD PTR[rsp]
+ mov r12,QWORD PTR[8+rsp]
+ mov rbx,QWORD PTR[16+rsp]
+ add rsp,24
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_RC4::
+RC4 ENDP
+EXTERN OPENSSL_ia32cap_P:NEAR
+PUBLIC RC4_set_key
+
+ALIGN 16
+RC4_set_key PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_RC4_set_key::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ lea rdi,QWORD PTR[8+rdi]
+ lea rdx,QWORD PTR[rsi*1+rdx]
+ neg rsi
+ mov rcx,rsi
+ xor eax,eax
+ xor r9,r9
+ xor r10,r10
+ xor r11,r11
+
+ mov r8d,DWORD PTR[OPENSSL_ia32cap_P]
+ bt r8d,20
+ jnc $L$w1stloop
+ bt r8d,30
+ setc r9b
+ mov DWORD PTR[260+rdi],r9d
+ jmp $L$c1stloop
+
+ALIGN 16
+$L$w1stloop::
+ mov DWORD PTR[rax*4+rdi],eax
+ add al,1
+ jnc $L$w1stloop
+
+ xor r9,r9
+ xor r8,r8
+ALIGN 16
+$L$w2ndloop::
+ mov r10d,DWORD PTR[r9*4+rdi]
+ add r8b,BYTE PTR[rsi*1+rdx]
+ add r8b,r10b
+ add rsi,1
+ mov r11d,DWORD PTR[r8*4+rdi]
+ cmovz rsi,rcx
+ mov DWORD PTR[r8*4+rdi],r10d
+ mov DWORD PTR[r9*4+rdi],r11d
+ add r9b,1
+ jnc $L$w2ndloop
+ jmp $L$exit_key
+
+ALIGN 16
+$L$c1stloop::
+ mov BYTE PTR[rax*1+rdi],al
+ add al,1
+ jnc $L$c1stloop
+
+ xor r9,r9
+ xor r8,r8
+ALIGN 16
+$L$c2ndloop::
+ mov r10b,BYTE PTR[r9*1+rdi]
+ add r8b,BYTE PTR[rsi*1+rdx]
+ add r8b,r10b
+ add rsi,1
+ mov r11b,BYTE PTR[r8*1+rdi]
+ jnz $L$cnowrap
+ mov rsi,rcx
+$L$cnowrap::
+ mov BYTE PTR[r8*1+rdi],r10b
+ mov BYTE PTR[r9*1+rdi],r11b
+ add r9b,1
+ jnc $L$c2ndloop
+ mov DWORD PTR[256+rdi],-1
+
+ALIGN 16
+$L$exit_key::
+ xor eax,eax
+ mov DWORD PTR[((-8))+rdi],eax
+ mov DWORD PTR[((-4))+rdi],eax
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_RC4_set_key::
+RC4_set_key ENDP
+
+PUBLIC RC4_options
+
+ALIGN 16
+RC4_options PROC PUBLIC
+ lea rax,QWORD PTR[$L$opts]
+ mov edx,DWORD PTR[OPENSSL_ia32cap_P]
+ bt edx,20
+ jnc $L$done
+ add rax,12
+ bt edx,30
+ jnc $L$done
+ add rax,13
+$L$done::
+ DB 0F3h,0C3h ;repret
+ALIGN 64
+$L$opts::
+DB 114,99,52,40,56,120,44,105,110,116,41,0
+DB 114,99,52,40,56,120,44,99,104,97,114,41,0
+DB 114,99,52,40,49,120,44,99,104,97,114,41,0
+DB 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32
+DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+DB 62,0
+ALIGN 64
+RC4_options ENDP
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+stream_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ lea rax,QWORD PTR[24+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov r12,QWORD PTR[((-16))+rax]
+ mov r13,QWORD PTR[((-24))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ jmp $L$common_seh_exit
+stream_se_handler ENDP
+
+
+ALIGN 16
+key_se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[152+r8]
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+$L$common_seh_exit::
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+key_se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_RC4
+ DD imagerel $L$SEH_end_RC4
+ DD imagerel $L$SEH_info_RC4
+
+ DD imagerel $L$SEH_begin_RC4_set_key
+ DD imagerel $L$SEH_end_RC4_set_key
+ DD imagerel $L$SEH_info_RC4_set_key
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_RC4::
+DB 9,0,0,0
+ DD imagerel stream_se_handler
+$L$SEH_info_RC4_set_key::
+DB 9,0,0,0
+ DD imagerel key_se_handler
+
+.xdata ENDS
+END
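For orientation: the $L$loop8 body above is eight iterations of the textbook
RC4 PRGA packed into one 64-bit store. One iteration in plain C (a sketch of
the standard algorithm, not a transcription of the assembly; rc4_prga is an
illustrative name):

    #include <stddef.h>
    #include <stdint.h>

    /* Per byte: i++, j += S[i], swap S[i] and S[j], emit in ^ S[S[i]+S[j]].
     * The assembly keeps i in r8 and j in r12, and rotates eight keystream
     * bytes into rax before a single 8-byte XOR-and-store. */
    void rc4_prga(uint8_t S[256], uint8_t *ip, uint8_t *jp,
                  const uint8_t *in, uint8_t *out, size_t len)
    {
        uint8_t i = *ip, j = *jp;
        while (len--) {
            i = (uint8_t)(i + 1);
            j = (uint8_t)(j + S[i]);
            uint8_t t = S[i]; S[i] = S[j]; S[j] = t;   /* swap */
            *out++ = *in++ ^ S[(uint8_t)(S[i] + S[j])];
        }
        *ip = i; *jp = j;
    }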
diff --git a/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm b/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
new file mode 100644
index 0000000000..9323f2b26a
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
@@ -0,0 +1,1394 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+PUBLIC sha1_block_data_order
+
+ALIGN 16
+sha1_block_data_order PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_sha1_block_data_order::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ mov r11,rsp
+ mov r8,rdi
+ sub rsp,72
+ mov r9,rsi
+ and rsp,-64
+ mov r10,rdx
+ mov QWORD PTR[64+rsp],r11
+$L$prologue::
+
+ mov edx,DWORD PTR[r8]
+ mov esi,DWORD PTR[4+r8]
+ mov edi,DWORD PTR[8+r8]
+ mov ebp,DWORD PTR[12+r8]
+ mov r11d,DWORD PTR[16+r8]
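+	;load the five SHA-1 chaining words h0..h4 (working a..e) into
+	;edx, esi, edi, ebp and r11d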
+ALIGN 4
+$L$loop::
+ mov eax,DWORD PTR[r9]
+ bswap eax
+ mov DWORD PTR[rsp],eax
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov ebx,edi
+ mov eax,DWORD PTR[4+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
+ and ebx,esi
+ mov DWORD PTR[4+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
+ rol esi,30
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov ebx,esi
+ mov eax,DWORD PTR[8+r9]
+ mov ebp,r12d
+ xor ebx,edi
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[8+rsp],eax
+ add r11d,ebp
+ xor ebx,edi
+ rol edx,30
+ add r11d,ebx
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[12+r9]
+ mov edi,r11d
+ xor ebx,esi
+ bswap eax
+ rol edi,5
+ and ebx,r12d
+ mov DWORD PTR[12+rsp],eax
+ add ebp,edi
+ xor ebx,esi
+ rol r12d,30
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
+ mov ebx,r12d
+ mov eax,DWORD PTR[16+r9]
+ mov esi,ebp
+ xor ebx,edx
+ bswap eax
+ rol esi,5
+ and ebx,r11d
+ mov DWORD PTR[16+rsp],eax
+ add edi,esi
+ xor ebx,edx
+ rol r11d,30
+ add edi,ebx
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
+ mov ebx,r11d
+ mov eax,DWORD PTR[20+r9]
+ mov edx,edi
+ xor ebx,r12d
+ bswap eax
+ rol edx,5
+ and ebx,ebp
+ mov DWORD PTR[20+rsp],eax
+ add esi,edx
+ xor ebx,r12d
+ rol ebp,30
+ add esi,ebx
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov ebx,ebp
+ mov eax,DWORD PTR[24+r9]
+ mov r12d,esi
+ xor ebx,r11d
+ bswap eax
+ rol r12d,5
+ and ebx,edi
+ mov DWORD PTR[24+rsp],eax
+ add edx,r12d
+ xor ebx,r11d
+ rol edi,30
+ add edx,ebx
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov ebx,edi
+ mov eax,DWORD PTR[28+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
+ and ebx,esi
+ mov DWORD PTR[28+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
+ rol esi,30
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov ebx,esi
+ mov eax,DWORD PTR[32+r9]
+ mov ebp,r12d
+ xor ebx,edi
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[32+rsp],eax
+ add r11d,ebp
+ xor ebx,edi
+ rol edx,30
+ add r11d,ebx
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[36+r9]
+ mov edi,r11d
+ xor ebx,esi
+ bswap eax
+ rol edi,5
+ and ebx,r12d
+ mov DWORD PTR[36+rsp],eax
+ add ebp,edi
+ xor ebx,esi
+ rol r12d,30
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
+ mov ebx,r12d
+ mov eax,DWORD PTR[40+r9]
+ mov esi,ebp
+ xor ebx,edx
+ bswap eax
+ rol esi,5
+ and ebx,r11d
+ mov DWORD PTR[40+rsp],eax
+ add edi,esi
+ xor ebx,edx
+ rol r11d,30
+ add edi,ebx
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
+ mov ebx,r11d
+ mov eax,DWORD PTR[44+r9]
+ mov edx,edi
+ xor ebx,r12d
+ bswap eax
+ rol edx,5
+ and ebx,ebp
+ mov DWORD PTR[44+rsp],eax
+ add esi,edx
+ xor ebx,r12d
+ rol ebp,30
+ add esi,ebx
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov ebx,ebp
+ mov eax,DWORD PTR[48+r9]
+ mov r12d,esi
+ xor ebx,r11d
+ bswap eax
+ rol r12d,5
+ and ebx,edi
+ mov DWORD PTR[48+rsp],eax
+ add edx,r12d
+ xor ebx,r11d
+ rol edi,30
+ add edx,ebx
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov ebx,edi
+ mov eax,DWORD PTR[52+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
+ and ebx,esi
+ mov DWORD PTR[52+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
+ rol esi,30
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov ebx,esi
+ mov eax,DWORD PTR[56+r9]
+ mov ebp,r12d
+ xor ebx,edi
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[56+rsp],eax
+ add r11d,ebp
+ xor ebx,edi
+ rol edx,30
+ add r11d,ebx
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[60+r9]
+ mov edi,r11d
+ xor ebx,esi
+ bswap eax
+ rol edi,5
+ and ebx,r12d
+ mov DWORD PTR[60+rsp],eax
+ add ebp,edi
+ xor ebx,esi
+ rol r12d,30
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edx
+ rol esi,5
+ xor eax,DWORD PTR[32+rsp]
+ and ebx,r11d
+ add edi,esi
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,edx
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,r12d
+ rol edx,5
+ xor eax,DWORD PTR[36+rsp]
+ and ebx,ebp
+ add esi,edx
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r12d
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,r11d
+ rol r12d,5
+ xor eax,DWORD PTR[40+rsp]
+ and ebx,edi
+ add edx,r12d
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,r11d
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ rol r11d,5
+ xor eax,DWORD PTR[44+rsp]
+ and ebx,esi
+ add r12d,r11d
+ xor eax,DWORD PTR[rsp]
+ xor ebx,ebp
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ rol ebp,5
+ xor eax,DWORD PTR[48+rsp]
+ and ebx,edx
+ add r11d,ebp
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,edi
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[8+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[12+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[16+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[20+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[24+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[36+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[28+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[40+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[32+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[44+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[36+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[48+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[40+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[52+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[44+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[56+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[48+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[60+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[52+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[56+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[60+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[4+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[8+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[12+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[16+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[20+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[44+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[4+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[24+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[36+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[48+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[8+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[28+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[40+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[52+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[12+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[32+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
+ rol esi,30
+ mov DWORD PTR[44+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[56+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[16+rsp]
+ or ecx,esi
+ rol ebp,5
+ xor eax,DWORD PTR[36+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[48+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r12d
+ mov ecx,r12d
+ xor eax,DWORD PTR[60+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[20+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[40+rsp]
+ and ecx,esi
+ add ebp,edi
+ rol eax,1
+ or ebx,ecx
+ rol r12d,30
+ mov DWORD PTR[52+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[rsp]
+ mov esi,ebp
+ and ebx,r12d
+ xor eax,DWORD PTR[24+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[44+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
+ rol r11d,30
+ mov DWORD PTR[56+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[4+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[28+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[48+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[60+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[8+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[32+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[52+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[12+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[36+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[56+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
+ rol esi,30
+ mov DWORD PTR[4+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[16+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[40+rsp]
+ or ecx,esi
+ rol ebp,5
+ xor eax,DWORD PTR[60+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[8+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,r12d
+ mov ecx,r12d
+ xor eax,DWORD PTR[20+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[44+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[rsp]
+ and ecx,esi
+ add ebp,edi
+ rol eax,1
+ or ebx,ecx
+ rol r12d,30
+ mov DWORD PTR[12+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[24+rsp]
+ mov esi,ebp
+ and ebx,r12d
+ xor eax,DWORD PTR[48+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[4+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
+ rol r11d,30
+ mov DWORD PTR[16+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[28+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[52+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[8+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[20+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[32+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[56+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[12+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[24+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[36+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[60+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[16+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
+ rol esi,30
+ mov DWORD PTR[28+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[40+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[rsp]
+ or ecx,esi
+ rol ebp,5
+ xor eax,DWORD PTR[20+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[32+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,r12d
+ mov ecx,r12d
+ xor eax,DWORD PTR[44+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[4+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[24+rsp]
+ and ecx,esi
+ add ebp,edi
+ rol eax,1
+ or ebx,ecx
+ rol r12d,30
+ mov DWORD PTR[36+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[48+rsp]
+ mov esi,ebp
+ and ebx,r12d
+ xor eax,DWORD PTR[8+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[28+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
+ rol r11d,30
+ mov DWORD PTR[40+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[52+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[12+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[32+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[44+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[56+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[16+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[36+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[48+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[40+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[52+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[44+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[56+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[48+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[60+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[52+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[56+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[60+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[4+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[8+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[12+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[16+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[20+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[24+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[36+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[28+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[40+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[32+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[44+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[36+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[48+rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[40+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[44+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[48+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov ebx,esi
+ mov ebp,r12d
+ xor ebx,edx
+ rol ebp,5
+ xor ebx,edi
+ add r11d,ebp
+ rol edx,30
+ add r11d,ebx
+ add r11d,DWORD PTR[r8]
+ add r12d,DWORD PTR[4+r8]
+ add edx,DWORD PTR[8+r8]
+ add esi,DWORD PTR[12+r8]
+ add edi,DWORD PTR[16+r8]
+ mov DWORD PTR[r8],r11d
+ mov DWORD PTR[4+r8],r12d
+ mov DWORD PTR[8+r8],edx
+ mov DWORD PTR[12+r8],esi
+ mov DWORD PTR[16+r8],edi
+
+ xchg edx,r11d
+ xchg esi,r12d
+ xchg edi,r11d
+ xchg ebp,r12d
+
+ lea r9,QWORD PTR[64+r9]
+ sub r10,1
+ jnz $L$loop
+ mov rsi,QWORD PTR[64+rsp]
+ mov r12,QWORD PTR[rsi]
+ mov rbp,QWORD PTR[8+rsi]
+ mov rbx,QWORD PTR[16+rsi]
+ lea rsp,QWORD PTR[24+rsi]
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_sha1_block_data_order::
+sha1_block_data_order ENDP
+DB 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+DB 102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44
+DB 32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60
+DB 97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114
+DB 103,62,0
+ALIGN 16
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ mov rax,QWORD PTR[64+rax]
+ lea rax,QWORD PTR[24+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_sha1_block_data_order
+ DD imagerel $L$SEH_end_sha1_block_data_order
+ DD imagerel $L$SEH_info_sha1_block_data_order
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_sha1_block_data_order::
+DB 9,0,0,0
+ DD imagerel se_handler
+
+.xdata ENDS
+END
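
For orientation only (not part of the generated file): a minimal C sketch of the SHA-1 block transform that the unrolled MASM rounds above implement. The four stage constants visible in the lea instructions -- 05a827999h, 1859775393 (06ed9eba1h), 08f1bbcdch, and 3395469782 (0ca62c1d6h) -- and the xor-chain-plus-rol-eax,1 message schedule map directly onto the loop below. Function and file names here are illustrative, not OpenSSL's API.

/* sha1_ref.c -- hypothetical scalar model of sha1_block_data_order */
#include <stdint.h>

static uint32_t rol32(uint32_t x, int n) { return (x << n) | (x >> (32 - n)); }

void sha1_block_ref(uint32_t h[5], const uint8_t in[64])
{
    uint32_t w[80], a = h[0], b = h[1], c = h[2], d = h[3], e = h[4];
    int t;

    for (t = 0; t < 16; t++)      /* big-endian load: the bswap eax above */
        w[t] = (uint32_t)in[4*t] << 24 | (uint32_t)in[4*t+1] << 16 |
               (uint32_t)in[4*t+2] << 8 | in[4*t+3];
    for (t = 16; t < 80; t++)     /* schedule: xor chain, then rol eax,1 */
        w[t] = rol32(w[t-3] ^ w[t-8] ^ w[t-14] ^ w[t-16], 1);

    for (t = 0; t < 80; t++) {
        uint32_t f, k;
        if (t < 20)      { f = ((c ^ d) & b) ^ d;       k = 0x5a827999; }
        else if (t < 40) { f = b ^ c ^ d;               k = 0x6ed9eba1; }
        else if (t < 60) { f = (b & c) | ((b | c) & d); k = 0x8f1bbcdc; }
        else             { f = b ^ c ^ d;               k = 0xca62c1d6; }
        uint32_t tmp = rol32(a, 5) + f + e + k + w[t];
        e = d; d = c; c = rol32(b, 30); b = a; a = tmp;
    }
    h[0] += a; h[1] += b; h[2] += c; h[3] += d; h[4] += e;
}

The Ch step appears in the assembly as the branch-free xor/and/xor form ((c^d)&b)^d, and Maj as (b&c)|((b|c)&d). The generated code rotates the five chaining values through six registers (edx, esi, edi, ebp, r11d, r12d), so after 80 rounds the assignment is shifted, which appears to be why the loop tail needs the four-xchg realignment before r9 advances to the next block.
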
diff --git a/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm b/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
new file mode 100644
index 0000000000..5ea4a6327a
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
@@ -0,0 +1,2085 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+PUBLIC sha256_block_data_order
+
+ALIGN 16
+sha256_block_data_order PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_sha256_block_data_order::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+ mov rcx,r9
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ mov r11,rsp
+ shl rdx,4
+ sub rsp,16*4+4*8
+ lea rdx,QWORD PTR[rdx*4+rsi]
+ and rsp,-64
+ mov QWORD PTR[((16*4+0*8))+rsp],rdi
+ mov QWORD PTR[((16*4+1*8))+rsp],rsi
+ mov QWORD PTR[((16*4+2*8))+rsp],rdx
+ mov QWORD PTR[((16*4+3*8))+rsp],r11
+$L$prologue::
+
+ lea rbp,QWORD PTR[K256]
+
+ mov eax,DWORD PTR[((4*0))+rdi]
+ mov ebx,DWORD PTR[((4*1))+rdi]
+ mov ecx,DWORD PTR[((4*2))+rdi]
+ mov edx,DWORD PTR[((4*3))+rdi]
+ mov r8d,DWORD PTR[((4*4))+rdi]
+ mov r9d,DWORD PTR[((4*5))+rdi]
+ mov r10d,DWORD PTR[((4*6))+rdi]
+ mov r11d,DWORD PTR[((4*7))+rdi]
+ jmp $L$loop
+
+ALIGN 16
+$L$loop::
+ xor rdi,rdi
+ mov r12d,DWORD PTR[((4*0))+rsi]
+ bswap r12d
+ mov r13d,r8d
+ mov r14d,r8d
+ mov r15d,r9d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r10d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r8d
+ mov DWORD PTR[rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,eax
+ mov r14d,eax
+
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+
+ xor r11d,r13d
+ and r15d,ecx
+ add edx,r12d
+
+ and r14d,ebx
+ add r11d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r11d,r14d
+ mov r12d,DWORD PTR[((4*1))+rsi]
+ bswap r12d
+ mov r13d,edx
+ mov r14d,edx
+ mov r15d,r8d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r9d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,edx
+ mov DWORD PTR[4+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r11d
+ mov r14d,r11d
+
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+
+ xor r10d,r13d
+ and r15d,ebx
+ add ecx,r12d
+
+ and r14d,eax
+ add r10d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r10d,r14d
+ mov r12d,DWORD PTR[((4*2))+rsi]
+ bswap r12d
+ mov r13d,ecx
+ mov r14d,ecx
+ mov r15d,edx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r8d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ecx
+ mov DWORD PTR[8+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r10d
+ mov r14d,r10d
+
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+
+ xor r9d,r13d
+ and r15d,eax
+ add ebx,r12d
+
+ and r14d,r11d
+ add r9d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r9d,r14d
+ mov r12d,DWORD PTR[((4*3))+rsi]
+ bswap r12d
+ mov r13d,ebx
+ mov r14d,ebx
+ mov r15d,ecx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,edx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ebx
+ mov DWORD PTR[12+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r9d
+ mov r14d,r9d
+
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+
+ xor r8d,r13d
+ and r15d,r11d
+ add eax,r12d
+
+ and r14d,r10d
+ add r8d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r8d,r14d
+ mov r12d,DWORD PTR[((4*4))+rsi]
+ bswap r12d
+ mov r13d,eax
+ mov r14d,eax
+ mov r15d,ebx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ecx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,eax
+ mov DWORD PTR[16+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r8d
+ mov r14d,r8d
+
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+
+ xor edx,r13d
+ and r15d,r10d
+ add r11d,r12d
+
+ and r14d,r9d
+ add edx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add edx,r14d
+ mov r12d,DWORD PTR[((4*5))+rsi]
+ bswap r12d
+ mov r13d,r11d
+ mov r14d,r11d
+ mov r15d,eax
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ebx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r11d
+ mov DWORD PTR[20+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,edx
+ mov r14d,edx
+
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+
+ xor ecx,r13d
+ and r15d,r9d
+ add r10d,r12d
+
+ and r14d,r8d
+ add ecx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ecx,r14d
+ mov r12d,DWORD PTR[((4*6))+rsi]
+ bswap r12d
+ mov r13d,r10d
+ mov r14d,r10d
+ mov r15d,r11d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,eax
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r10d
+ mov DWORD PTR[24+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ecx
+ mov r14d,ecx
+
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+
+ xor ebx,r13d
+ and r15d,r8d
+ add r9d,r12d
+
+ and r14d,edx
+ add ebx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ebx,r14d
+ mov r12d,DWORD PTR[((4*7))+rsi]
+ bswap r12d
+ mov r13d,r9d
+ mov r14d,r9d
+ mov r15d,r10d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r11d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r9d
+ mov DWORD PTR[28+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ebx
+ mov r14d,ebx
+
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+
+ xor eax,r13d
+ and r15d,edx
+ add r8d,r12d
+
+ and r14d,ecx
+ add eax,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add eax,r14d
+ mov r12d,DWORD PTR[((4*8))+rsi]
+ bswap r12d
+ mov r13d,r8d
+ mov r14d,r8d
+ mov r15d,r9d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r10d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r8d
+ mov DWORD PTR[32+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,eax
+ mov r14d,eax
+
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+
+ xor r11d,r13d
+ and r15d,ecx
+ add edx,r12d
+
+ and r14d,ebx
+ add r11d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r11d,r14d
+ mov r12d,DWORD PTR[((4*9))+rsi]
+ bswap r12d
+ mov r13d,edx
+ mov r14d,edx
+ mov r15d,r8d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r9d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,edx
+ mov DWORD PTR[36+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r11d
+ mov r14d,r11d
+
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+
+ xor r10d,r13d
+ and r15d,ebx
+ add ecx,r12d
+
+ and r14d,eax
+ add r10d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r10d,r14d
+ mov r12d,DWORD PTR[((4*10))+rsi]
+ bswap r12d
+ mov r13d,ecx
+ mov r14d,ecx
+ mov r15d,edx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r8d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ecx
+ mov DWORD PTR[40+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r10d
+ mov r14d,r10d
+
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+
+ xor r9d,r13d
+ and r15d,eax
+ add ebx,r12d
+
+ and r14d,r11d
+ add r9d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r9d,r14d
+ mov r12d,DWORD PTR[((4*11))+rsi]
+ bswap r12d
+ mov r13d,ebx
+ mov r14d,ebx
+ mov r15d,ecx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,edx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ebx
+ mov DWORD PTR[44+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r9d
+ mov r14d,r9d
+
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+
+ xor r8d,r13d
+ and r15d,r11d
+ add eax,r12d
+
+ and r14d,r10d
+ add r8d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r8d,r14d
+ mov r12d,DWORD PTR[((4*12))+rsi]
+ bswap r12d
+ mov r13d,eax
+ mov r14d,eax
+ mov r15d,ebx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ecx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,eax
+ mov DWORD PTR[48+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r8d
+ mov r14d,r8d
+
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+
+ xor edx,r13d
+ and r15d,r10d
+ add r11d,r12d
+
+ and r14d,r9d
+ add edx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add edx,r14d
+ mov r12d,DWORD PTR[((4*13))+rsi]
+ bswap r12d
+ mov r13d,r11d
+ mov r14d,r11d
+ mov r15d,eax
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ebx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r11d
+ mov DWORD PTR[52+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,edx
+ mov r14d,edx
+
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+
+ xor ecx,r13d
+ and r15d,r9d
+ add r10d,r12d
+
+ and r14d,r8d
+ add ecx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ecx,r14d
+ mov r12d,DWORD PTR[((4*14))+rsi]
+ bswap r12d
+ mov r13d,r10d
+ mov r14d,r10d
+ mov r15d,r11d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,eax
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r10d
+ mov DWORD PTR[56+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ecx
+ mov r14d,ecx
+
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+
+ xor ebx,r13d
+ and r15d,r8d
+ add r9d,r12d
+
+ and r14d,edx
+ add ebx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ebx,r14d
+ mov r12d,DWORD PTR[((4*15))+rsi]
+ bswap r12d
+ mov r13d,r9d
+ mov r14d,r9d
+ mov r15d,r10d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r11d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r9d
+ mov DWORD PTR[60+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ebx
+ mov r14d,ebx
+
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+
+ xor eax,r13d
+ and r15d,edx
+ add r8d,r12d
+
+ and r14d,ecx
+ add eax,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add eax,r14d
+ jmp $L$rounds_16_xx
+ALIGN 16
+$L$rounds_16_xx::
+ mov r13d,DWORD PTR[4+rsp]
+ mov r12d,DWORD PTR[56+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[36+rsp]
+
+ add r12d,DWORD PTR[rsp]
+ mov r13d,r8d
+ mov r14d,r8d
+ mov r15d,r9d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r10d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r8d
+ mov DWORD PTR[rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,eax
+ mov r14d,eax
+
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+
+ xor r11d,r13d
+ and r15d,ecx
+ add edx,r12d
+
+ and r14d,ebx
+ add r11d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r11d,r14d
+ mov r13d,DWORD PTR[8+rsp]
+ mov r12d,DWORD PTR[60+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[40+rsp]
+
+ add r12d,DWORD PTR[4+rsp]
+ mov r13d,edx
+ mov r14d,edx
+ mov r15d,r8d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r9d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,edx
+ mov DWORD PTR[4+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r11d
+ mov r14d,r11d
+
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+
+ xor r10d,r13d
+ and r15d,ebx
+ add ecx,r12d
+
+ and r14d,eax
+ add r10d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r10d,r14d
+ mov r13d,DWORD PTR[12+rsp]
+ mov r12d,DWORD PTR[rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[44+rsp]
+
+ add r12d,DWORD PTR[8+rsp]
+ mov r13d,ecx
+ mov r14d,ecx
+ mov r15d,edx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r8d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ecx
+ mov DWORD PTR[8+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r10d
+ mov r14d,r10d
+
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+
+ xor r9d,r13d
+ and r15d,eax
+ add ebx,r12d
+
+ and r14d,r11d
+ add r9d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r9d,r14d
+ mov r13d,DWORD PTR[16+rsp]
+ mov r12d,DWORD PTR[4+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[48+rsp]
+
+ add r12d,DWORD PTR[12+rsp]
+ mov r13d,ebx
+ mov r14d,ebx
+ mov r15d,ecx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,edx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ebx
+ mov DWORD PTR[12+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r9d
+ mov r14d,r9d
+
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+
+ xor r8d,r13d
+ and r15d,r11d
+ add eax,r12d
+
+ and r14d,r10d
+ add r8d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r8d,r14d
+ mov r13d,DWORD PTR[20+rsp]
+ mov r12d,DWORD PTR[8+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[52+rsp]
+
+ add r12d,DWORD PTR[16+rsp]
+ mov r13d,eax
+ mov r14d,eax
+ mov r15d,ebx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ecx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,eax
+ mov DWORD PTR[16+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r8d
+ mov r14d,r8d
+
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+
+ xor edx,r13d
+ and r15d,r10d
+ add r11d,r12d
+
+ and r14d,r9d
+ add edx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add edx,r14d
+ mov r13d,DWORD PTR[24+rsp]
+ mov r12d,DWORD PTR[12+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[56+rsp]
+
+ add r12d,DWORD PTR[20+rsp]
+ mov r13d,r11d
+ mov r14d,r11d
+ mov r15d,eax
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ebx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r11d
+ mov DWORD PTR[20+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,edx
+ mov r14d,edx
+
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+
+ xor ecx,r13d
+ and r15d,r9d
+ add r10d,r12d
+
+ and r14d,r8d
+ add ecx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ecx,r14d
+ mov r13d,DWORD PTR[28+rsp]
+ mov r12d,DWORD PTR[16+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[60+rsp]
+
+ add r12d,DWORD PTR[24+rsp]
+ mov r13d,r10d
+ mov r14d,r10d
+ mov r15d,r11d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,eax
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r10d
+ mov DWORD PTR[24+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ecx
+ mov r14d,ecx
+
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+
+ xor ebx,r13d
+ and r15d,r8d
+ add r9d,r12d
+
+ and r14d,edx
+ add ebx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ebx,r14d
+ mov r13d,DWORD PTR[32+rsp]
+ mov r12d,DWORD PTR[20+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[rsp]
+
+ add r12d,DWORD PTR[28+rsp]
+ mov r13d,r9d
+ mov r14d,r9d
+ mov r15d,r10d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r11d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r9d
+ mov DWORD PTR[28+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ebx
+ mov r14d,ebx
+
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+
+ xor eax,r13d
+ and r15d,edx
+ add r8d,r12d
+
+ and r14d,ecx
+ add eax,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add eax,r14d
+ mov r13d,DWORD PTR[36+rsp]
+ mov r12d,DWORD PTR[24+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[4+rsp]
+
+ add r12d,DWORD PTR[32+rsp]
+ mov r13d,r8d
+ mov r14d,r8d
+ mov r15d,r9d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r10d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r8d
+ mov DWORD PTR[32+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,eax
+ mov r14d,eax
+
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+
+ xor r11d,r13d
+ and r15d,ecx
+ add edx,r12d
+
+ and r14d,ebx
+ add r11d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r11d,r14d
+ mov r13d,DWORD PTR[40+rsp]
+ mov r12d,DWORD PTR[28+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[8+rsp]
+
+ add r12d,DWORD PTR[36+rsp]
+ mov r13d,edx
+ mov r14d,edx
+ mov r15d,r8d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r9d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,edx
+ mov DWORD PTR[36+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r11d
+ mov r14d,r11d
+
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+
+ xor r10d,r13d
+ and r15d,ebx
+ add ecx,r12d
+
+ and r14d,eax
+ add r10d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r10d,r14d
+ mov r13d,DWORD PTR[44+rsp]
+ mov r12d,DWORD PTR[32+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[12+rsp]
+
+ add r12d,DWORD PTR[40+rsp]
+ mov r13d,ecx
+ mov r14d,ecx
+ mov r15d,edx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r8d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ecx
+ mov DWORD PTR[40+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r10d
+ mov r14d,r10d
+
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+
+ xor r9d,r13d
+ and r15d,eax
+ add ebx,r12d
+
+ and r14d,r11d
+ add r9d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r9d,r14d
+ mov r13d,DWORD PTR[48+rsp]
+ mov r12d,DWORD PTR[36+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[16+rsp]
+
+ add r12d,DWORD PTR[44+rsp]
+ mov r13d,ebx
+ mov r14d,ebx
+ mov r15d,ecx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,edx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,ebx
+ mov DWORD PTR[44+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r9d
+ mov r14d,r9d
+
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+
+ xor r8d,r13d
+ and r15d,r11d
+ add eax,r12d
+
+ and r14d,r10d
+ add r8d,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add r8d,r14d
+ mov r13d,DWORD PTR[52+rsp]
+ mov r12d,DWORD PTR[40+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[20+rsp]
+
+ add r12d,DWORD PTR[48+rsp]
+ mov r13d,eax
+ mov r14d,eax
+ mov r15d,ebx
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ecx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,eax
+ mov DWORD PTR[48+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,r8d
+ mov r14d,r8d
+
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+
+ xor edx,r13d
+ and r15d,r10d
+ add r11d,r12d
+
+ and r14d,r9d
+ add edx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add edx,r14d
+ mov r13d,DWORD PTR[56+rsp]
+ mov r12d,DWORD PTR[44+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[24+rsp]
+
+ add r12d,DWORD PTR[52+rsp]
+ mov r13d,r11d
+ mov r14d,r11d
+ mov r15d,eax
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,ebx
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r11d
+ mov DWORD PTR[52+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,edx
+ mov r14d,edx
+
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+
+ xor ecx,r13d
+ and r15d,r9d
+ add r10d,r12d
+
+ and r14d,r8d
+ add ecx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ecx,r14d
+ mov r13d,DWORD PTR[60+rsp]
+ mov r12d,DWORD PTR[48+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[28+rsp]
+
+ add r12d,DWORD PTR[56+rsp]
+ mov r13d,r10d
+ mov r14d,r10d
+ mov r15d,r11d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,eax
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r10d
+ mov DWORD PTR[56+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ecx
+ mov r14d,ecx
+
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+
+ xor ebx,r13d
+ and r15d,r8d
+ add r9d,r12d
+
+ and r14d,edx
+ add ebx,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add ebx,r14d
+ mov r13d,DWORD PTR[rsp]
+ mov r12d,DWORD PTR[52+rsp]
+
+ mov r15d,r13d
+
+ shr r13d,3
+ ror r15d,7
+
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
+
+ add r12d,r13d
+
+ add r12d,DWORD PTR[32+rsp]
+
+ add r12d,DWORD PTR[60+rsp]
+ mov r13d,r9d
+ mov r14d,r9d
+ mov r15d,r10d
+
+ ror r13d,6
+ ror r14d,11
+ xor r15d,r11d
+
+ xor r13d,r14d
+ ror r14d,14
+ and r15d,r9d
+ mov DWORD PTR[60+rsp],r12d
+
+ xor r13d,r14d
+ xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
+
+ add r12d,r15d
+ mov r13d,ebx
+ mov r14d,ebx
+
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
+
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+
+ xor eax,r13d
+ and r15d,edx
+ add r8d,r12d
+
+ and r14d,ecx
+ add eax,r12d
+
+ or r14d,r15d
+ lea rdi,QWORD PTR[1+rdi]
+
+ add eax,r14d
+ cmp rdi,64
+ jb $L$rounds_16_xx
+
+ mov rdi,QWORD PTR[((16*4+0*8))+rsp]
+ lea rsi,QWORD PTR[((16*4))+rsi]
+
+ add eax,DWORD PTR[((4*0))+rdi]
+ add ebx,DWORD PTR[((4*1))+rdi]
+ add ecx,DWORD PTR[((4*2))+rdi]
+ add edx,DWORD PTR[((4*3))+rdi]
+ add r8d,DWORD PTR[((4*4))+rdi]
+ add r9d,DWORD PTR[((4*5))+rdi]
+ add r10d,DWORD PTR[((4*6))+rdi]
+ add r11d,DWORD PTR[((4*7))+rdi]
+
+ cmp rsi,QWORD PTR[((16*4+2*8))+rsp]
+
+ mov DWORD PTR[((4*0))+rdi],eax
+ mov DWORD PTR[((4*1))+rdi],ebx
+ mov DWORD PTR[((4*2))+rdi],ecx
+ mov DWORD PTR[((4*3))+rdi],edx
+ mov DWORD PTR[((4*4))+rdi],r8d
+ mov DWORD PTR[((4*5))+rdi],r9d
+ mov DWORD PTR[((4*6))+rdi],r10d
+ mov DWORD PTR[((4*7))+rdi],r11d
+ jb $L$loop
+
+ mov rsi,QWORD PTR[((16*4+3*8))+rsp]
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_sha256_block_data_order::
+sha256_block_data_order ENDP
+ALIGN 64
+
+K256::
+ DD 0428a2f98h,071374491h,0b5c0fbcfh,0e9b5dba5h
+ DD 03956c25bh,059f111f1h,0923f82a4h,0ab1c5ed5h
+ DD 0d807aa98h,012835b01h,0243185beh,0550c7dc3h
+ DD 072be5d74h,080deb1feh,09bdc06a7h,0c19bf174h
+ DD 0e49b69c1h,0efbe4786h,00fc19dc6h,0240ca1cch
+ DD 02de92c6fh,04a7484aah,05cb0a9dch,076f988dah
+ DD 0983e5152h,0a831c66dh,0b00327c8h,0bf597fc7h
+ DD 0c6e00bf3h,0d5a79147h,006ca6351h,014292967h
+ DD 027b70a85h,02e1b2138h,04d2c6dfch,053380d13h
+ DD 0650a7354h,0766a0abbh,081c2c92eh,092722c85h
+ DD 0a2bfe8a1h,0a81a664bh,0c24b8b70h,0c76c51a3h
+ DD 0d192e819h,0d6990624h,0f40e3585h,0106aa070h
+ DD 019a4c116h,01e376c08h,02748774ch,034b0bcb5h
+ DD 0391c0cb3h,04ed8aa4ah,05b9cca4fh,0682e6ff3h
+ DD 0748f82eeh,078a5636fh,084c87814h,08cc70208h
+ DD 090befffah,0a4506cebh,0bef9a3f7h,0c67178f2h
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ mov rax,QWORD PTR[((16*4+3*8))+rax]
+ lea rax,QWORD PTR[48+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_sha256_block_data_order
+ DD imagerel $L$SEH_end_sha256_block_data_order
+ DD imagerel $L$SEH_info_sha256_block_data_order
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_sha256_block_data_order::
+DB 9,0,0,0
+ DD imagerel se_handler
+
+.xdata ENDS
+END
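
Note that despite the sha512-x86_64.asm file name, the code above is the 32-bit SHA-256 transform (PUBLIC sha256_block_data_order, with the K256 table). For orientation only (not part of the generated file): a minimal C sketch of the same rounds. The rotation counts match the ror sequences above -- ror 6/11 then 14 more (= 25) for Sigma1, ror 2/13 then 9 more (= 22) for Sigma0, and in the $L$rounds_16_xx schedule shr 3 with ror 7 then 11 more (= 18), and shr 10 with ror 17 then 2 more (= 19). Names are illustrative, not OpenSSL's API.

/* sha256_ref.c -- hypothetical scalar model of sha256_block_data_order */
#include <stdint.h>

static uint32_t ror32(uint32_t x, int n) { return (x >> n) | (x << (32 - n)); }

static const uint32_t K256[64] = {   /* same table as emitted above */
    0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5,
    0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5,
    0xd807aa98,0x12835b01,0x243185be,0x550c7dc3,
    0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174,
    0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc,
    0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da,
    0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7,
    0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967,
    0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13,
    0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85,
    0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3,
    0xd192e819,0xd6990624,0xf40e3585,0x106aa070,
    0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5,
    0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3,
    0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208,
    0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2
};

void sha256_block_ref(uint32_t h[8], const uint8_t in[64])
{
    uint32_t w[64], a=h[0], b=h[1], c=h[2], d=h[3],
             e=h[4], f=h[5], g=h[6], hh=h[7];
    int t;

    for (t = 0; t < 16; t++)         /* big-endian load: bswap r12d */
        w[t] = (uint32_t)in[4*t] << 24 | (uint32_t)in[4*t+1] << 16 |
               (uint32_t)in[4*t+2] << 8 | in[4*t+3];
    for (t = 16; t < 64; t++) {      /* $L$rounds_16_xx message schedule */
        uint32_t s0 = ror32(w[t-15],7) ^ ror32(w[t-15],18) ^ (w[t-15] >> 3);
        uint32_t s1 = ror32(w[t-2],17) ^ ror32(w[t-2],19) ^ (w[t-2] >> 10);
        w[t] = w[t-16] + s0 + w[t-7] + s1;
    }
    for (t = 0; t < 64; t++) {
        uint32_t S1  = ror32(e,6) ^ ror32(e,11) ^ ror32(e,25);
        uint32_t ch  = ((f ^ g) & e) ^ g;        /* xor/and/xor, as above */
        uint32_t t1  = hh + S1 + ch + K256[t] + w[t];
        uint32_t S0  = ror32(a,2) ^ ror32(a,13) ^ ror32(a,22);
        uint32_t maj = ((a | c) & b) | (a & c);  /* or/and + and/or, as above */
        hh = g; g = f; f = e; e = d + t1;
        d = c; c = b; b = a; a = t1 + S0 + maj;
    }
    h[0]+=a; h[1]+=b; h[2]+=c; h[3]+=d; h[4]+=e; h[5]+=f; h[6]+=g; h[7]+=hh;
}

The assembly walks K256 with rdi as the round counter (lea rdi,[1+rdi]; cmp rdi,64), fully unrolling the first 16 rounds and looping the remaining 48 in blocks of 16.
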
diff --git a/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm b/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
new file mode 100644
index 0000000000..25337b2440
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
@@ -0,0 +1,972 @@
+OPTION DOTNAME
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+PUBLIC whirlpool_block
+
+ALIGN 16
+whirlpool_block PROC PUBLIC
+ mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
+ mov QWORD PTR[16+rsp],rsi
+ mov rax,rsp
+$L$SEH_begin_whirlpool_block::
+ mov rdi,rcx
+ mov rsi,rdx
+ mov rdx,r8
+
+
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+
+ mov r11,rsp
+ sub rsp,128+40
+ and rsp,-64
+
+ lea r10,QWORD PTR[128+rsp]
+ mov QWORD PTR[r10],rdi
+ mov QWORD PTR[8+r10],rsi
+ mov QWORD PTR[16+r10],rdx
+ mov QWORD PTR[32+r10],r11
+$L$prologue::
+
+ mov rbx,r10
+ lea rbp,QWORD PTR[$L$table]
+
+ xor rcx,rcx
+ xor rdx,rdx
+ mov r8,QWORD PTR[((0*8))+rdi]
+ mov r9,QWORD PTR[((1*8))+rdi]
+ mov r10,QWORD PTR[((2*8))+rdi]
+ mov r11,QWORD PTR[((3*8))+rdi]
+ mov r12,QWORD PTR[((4*8))+rdi]
+ mov r13,QWORD PTR[((5*8))+rdi]
+ mov r14,QWORD PTR[((6*8))+rdi]
+ mov r15,QWORD PTR[((7*8))+rdi]
+$L$outerloop::
+ mov QWORD PTR[((0*8))+rsp],r8
+ mov QWORD PTR[((1*8))+rsp],r9
+ mov QWORD PTR[((2*8))+rsp],r10
+ mov QWORD PTR[((3*8))+rsp],r11
+ mov QWORD PTR[((4*8))+rsp],r12
+ mov QWORD PTR[((5*8))+rsp],r13
+ mov QWORD PTR[((6*8))+rsp],r14
+ mov QWORD PTR[((7*8))+rsp],r15
+ xor r8,QWORD PTR[((0*8))+rsi]
+ xor r9,QWORD PTR[((1*8))+rsi]
+ xor r10,QWORD PTR[((2*8))+rsi]
+ xor r11,QWORD PTR[((3*8))+rsi]
+ xor r12,QWORD PTR[((4*8))+rsi]
+ xor r13,QWORD PTR[((5*8))+rsi]
+ xor r14,QWORD PTR[((6*8))+rsi]
+ xor r15,QWORD PTR[((7*8))+rsi]
+ mov QWORD PTR[((64+0*8))+rsp],r8
+ mov QWORD PTR[((64+1*8))+rsp],r9
+ mov QWORD PTR[((64+2*8))+rsp],r10
+ mov QWORD PTR[((64+3*8))+rsp],r11
+ mov QWORD PTR[((64+4*8))+rsp],r12
+ mov QWORD PTR[((64+5*8))+rsp],r13
+ mov QWORD PTR[((64+6*8))+rsp],r14
+ mov QWORD PTR[((64+7*8))+rsp],r15
+ xor rsi,rsi
+ mov QWORD PTR[24+rbx],rsi
+ALIGN 16
+$L$round::
+ mov r8,QWORD PTR[4096+rsi*8+rbp]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,DWORD PTR[4+rsp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r8,QWORD PTR[rsi*8+rbp]
+ mov r9,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((0*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ mov r10,QWORD PTR[6+rsi*8+rbp]
+ mov r11,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ mov r12,QWORD PTR[4+rsi*8+rbp]
+ mov r13,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((0*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ mov r14,QWORD PTR[2+rsi*8+rbp]
+ mov r15,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r9,QWORD PTR[rsi*8+rbp]
+ xor r10,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((1*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r11,QWORD PTR[6+rsi*8+rbp]
+ xor r12,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r13,QWORD PTR[4+rsi*8+rbp]
+ xor r14,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((1*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r15,QWORD PTR[2+rsi*8+rbp]
+ xor r8,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r10,QWORD PTR[rsi*8+rbp]
+ xor r11,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((2*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r12,QWORD PTR[6+rsi*8+rbp]
+ xor r13,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r14,QWORD PTR[4+rsi*8+rbp]
+ xor r15,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((2*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r8,QWORD PTR[2+rsi*8+rbp]
+ xor r9,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r11,QWORD PTR[rsi*8+rbp]
+ xor r12,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((3*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r13,QWORD PTR[6+rsi*8+rbp]
+ xor r14,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r15,QWORD PTR[4+rsi*8+rbp]
+ xor r8,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((3*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r9,QWORD PTR[2+rsi*8+rbp]
+ xor r10,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r12,QWORD PTR[rsi*8+rbp]
+ xor r13,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((4*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r14,QWORD PTR[6+rsi*8+rbp]
+ xor r15,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r8,QWORD PTR[4+rsi*8+rbp]
+ xor r9,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((4*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r10,QWORD PTR[2+rsi*8+rbp]
+ xor r11,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r13,QWORD PTR[rsi*8+rbp]
+ xor r14,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((5*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r15,QWORD PTR[6+rsi*8+rbp]
+ xor r8,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r9,QWORD PTR[4+rsi*8+rbp]
+ xor r10,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((5*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r11,QWORD PTR[2+rsi*8+rbp]
+ xor r12,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r14,QWORD PTR[rsi*8+rbp]
+ xor r15,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((6*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r8,QWORD PTR[6+rsi*8+rbp]
+ xor r9,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r10,QWORD PTR[4+rsi*8+rbp]
+ xor r11,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((6*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r12,QWORD PTR[2+rsi*8+rbp]
+ xor r13,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r15,QWORD PTR[rsi*8+rbp]
+ xor r8,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((7*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r9,QWORD PTR[6+rsi*8+rbp]
+ xor r10,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r11,QWORD PTR[4+rsi*8+rbp]
+ xor r12,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((7*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r13,QWORD PTR[2+rsi*8+rbp]
+ xor r14,QWORD PTR[1+rdi*8+rbp]
+ mov QWORD PTR[((0*8))+rsp],r8
+ mov QWORD PTR[((1*8))+rsp],r9
+ mov QWORD PTR[((2*8))+rsp],r10
+ mov QWORD PTR[((3*8))+rsp],r11
+ mov QWORD PTR[((4*8))+rsp],r12
+ mov QWORD PTR[((5*8))+rsp],r13
+ mov QWORD PTR[((6*8))+rsp],r14
+ mov QWORD PTR[((7*8))+rsp],r15
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r8,QWORD PTR[rsi*8+rbp]
+ xor r9,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+0*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r10,QWORD PTR[6+rsi*8+rbp]
+ xor r11,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r12,QWORD PTR[4+rsi*8+rbp]
+ xor r13,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+0*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r14,QWORD PTR[2+rsi*8+rbp]
+ xor r15,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r9,QWORD PTR[rsi*8+rbp]
+ xor r10,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+1*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r11,QWORD PTR[6+rsi*8+rbp]
+ xor r12,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r13,QWORD PTR[4+rsi*8+rbp]
+ xor r14,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+1*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r15,QWORD PTR[2+rsi*8+rbp]
+ xor r8,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r10,QWORD PTR[rsi*8+rbp]
+ xor r11,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+2*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r12,QWORD PTR[6+rsi*8+rbp]
+ xor r13,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r14,QWORD PTR[4+rsi*8+rbp]
+ xor r15,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+2*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r8,QWORD PTR[2+rsi*8+rbp]
+ xor r9,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r11,QWORD PTR[rsi*8+rbp]
+ xor r12,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+3*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r13,QWORD PTR[6+rsi*8+rbp]
+ xor r14,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r15,QWORD PTR[4+rsi*8+rbp]
+ xor r8,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+3*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r9,QWORD PTR[2+rsi*8+rbp]
+ xor r10,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r12,QWORD PTR[rsi*8+rbp]
+ xor r13,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+4*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r14,QWORD PTR[6+rsi*8+rbp]
+ xor r15,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r8,QWORD PTR[4+rsi*8+rbp]
+ xor r9,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+4*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r10,QWORD PTR[2+rsi*8+rbp]
+ xor r11,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r13,QWORD PTR[rsi*8+rbp]
+ xor r14,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+5*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r15,QWORD PTR[6+rsi*8+rbp]
+ xor r8,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r9,QWORD PTR[4+rsi*8+rbp]
+ xor r10,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+5*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r11,QWORD PTR[2+rsi*8+rbp]
+ xor r12,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r14,QWORD PTR[rsi*8+rbp]
+ xor r15,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR[((64+6*8+8))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r8,QWORD PTR[6+rsi*8+rbp]
+ xor r9,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r10,QWORD PTR[4+rsi*8+rbp]
+ xor r11,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR[((64+6*8+8+4))+rsp]
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r12,QWORD PTR[2+rsi*8+rbp]
+ xor r13,QWORD PTR[1+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr eax,16
+ xor r15,QWORD PTR[rsi*8+rbp]
+ xor r8,QWORD PTR[7+rdi*8+rbp]
+ mov cl,al
+ mov dl,ah
+
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r9,QWORD PTR[6+rsi*8+rbp]
+ xor r10,QWORD PTR[5+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ shr ebx,16
+ xor r11,QWORD PTR[4+rsi*8+rbp]
+ xor r12,QWORD PTR[3+rdi*8+rbp]
+ mov cl,bl
+ mov dl,bh
+
+ lea rsi,QWORD PTR[rcx*1+rcx]
+ lea rdi,QWORD PTR[rdx*1+rdx]
+ xor r13,QWORD PTR[2+rsi*8+rbp]
+ xor r14,QWORD PTR[1+rdi*8+rbp]
+ lea rbx,QWORD PTR[128+rsp]
+ mov rsi,QWORD PTR[24+rbx]
+ add rsi,1
+ cmp rsi,10
+ je $L$roundsdone
+
+ mov QWORD PTR[24+rbx],rsi
+ mov QWORD PTR[((64+0*8))+rsp],r8
+ mov QWORD PTR[((64+1*8))+rsp],r9
+ mov QWORD PTR[((64+2*8))+rsp],r10
+ mov QWORD PTR[((64+3*8))+rsp],r11
+ mov QWORD PTR[((64+4*8))+rsp],r12
+ mov QWORD PTR[((64+5*8))+rsp],r13
+ mov QWORD PTR[((64+6*8))+rsp],r14
+ mov QWORD PTR[((64+7*8))+rsp],r15
+ jmp $L$round
+ALIGN 16
+$L$roundsdone::
+ mov rdi,QWORD PTR[rbx]
+ mov rsi,QWORD PTR[8+rbx]
+ mov rax,QWORD PTR[16+rbx]
+ xor r8,QWORD PTR[((0*8))+rsi]
+ xor r9,QWORD PTR[((1*8))+rsi]
+ xor r10,QWORD PTR[((2*8))+rsi]
+ xor r11,QWORD PTR[((3*8))+rsi]
+ xor r12,QWORD PTR[((4*8))+rsi]
+ xor r13,QWORD PTR[((5*8))+rsi]
+ xor r14,QWORD PTR[((6*8))+rsi]
+ xor r15,QWORD PTR[((7*8))+rsi]
+ xor r8,QWORD PTR[((0*8))+rdi]
+ xor r9,QWORD PTR[((1*8))+rdi]
+ xor r10,QWORD PTR[((2*8))+rdi]
+ xor r11,QWORD PTR[((3*8))+rdi]
+ xor r12,QWORD PTR[((4*8))+rdi]
+ xor r13,QWORD PTR[((5*8))+rdi]
+ xor r14,QWORD PTR[((6*8))+rdi]
+ xor r15,QWORD PTR[((7*8))+rdi]
+ mov QWORD PTR[((0*8))+rdi],r8
+ mov QWORD PTR[((1*8))+rdi],r9
+ mov QWORD PTR[((2*8))+rdi],r10
+ mov QWORD PTR[((3*8))+rdi],r11
+ mov QWORD PTR[((4*8))+rdi],r12
+ mov QWORD PTR[((5*8))+rdi],r13
+ mov QWORD PTR[((6*8))+rdi],r14
+ mov QWORD PTR[((7*8))+rdi],r15
+ lea rsi,QWORD PTR[64+rsi]
+ sub rax,1
+ jz $L$alldone
+ mov QWORD PTR[8+rbx],rsi
+ mov QWORD PTR[16+rbx],rax
+ jmp $L$outerloop
+$L$alldone::
+ mov rsi,QWORD PTR[32+rbx]
+ mov r15,QWORD PTR[rsi]
+ mov r14,QWORD PTR[8+rsi]
+ mov r13,QWORD PTR[16+rsi]
+ mov r12,QWORD PTR[24+rsi]
+ mov rbp,QWORD PTR[32+rsi]
+ mov rbx,QWORD PTR[40+rsi]
+ lea rsp,QWORD PTR[48+rsi]
+$L$epilogue::
+ mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
+ mov rsi,QWORD PTR[16+rsp]
+ DB 0F3h,0C3h ;repret
+$L$SEH_end_whirlpool_block::
+whirlpool_block ENDP
+
+ALIGN 64
+
+$L$table::
+DB 24,24,96,24,192,120,48,216,24,24,96,24,192,120,48,216
+DB 35,35,140,35,5,175,70,38,35,35,140,35,5,175,70,38
+DB 198,198,63,198,126,249,145,184,198,198,63,198,126,249,145,184
+DB 232,232,135,232,19,111,205,251,232,232,135,232,19,111,205,251
+DB 135,135,38,135,76,161,19,203,135,135,38,135,76,161,19,203
+DB 184,184,218,184,169,98,109,17,184,184,218,184,169,98,109,17
+DB 1,1,4,1,8,5,2,9,1,1,4,1,8,5,2,9
+DB 79,79,33,79,66,110,158,13,79,79,33,79,66,110,158,13
+DB 54,54,216,54,173,238,108,155,54,54,216,54,173,238,108,155
+DB 166,166,162,166,89,4,81,255,166,166,162,166,89,4,81,255
+DB 210,210,111,210,222,189,185,12,210,210,111,210,222,189,185,12
+DB 245,245,243,245,251,6,247,14,245,245,243,245,251,6,247,14
+DB 121,121,249,121,239,128,242,150,121,121,249,121,239,128,242,150
+DB 111,111,161,111,95,206,222,48,111,111,161,111,95,206,222,48
+DB 145,145,126,145,252,239,63,109,145,145,126,145,252,239,63,109
+DB 82,82,85,82,170,7,164,248,82,82,85,82,170,7,164,248
+DB 96,96,157,96,39,253,192,71,96,96,157,96,39,253,192,71
+DB 188,188,202,188,137,118,101,53,188,188,202,188,137,118,101,53
+DB 155,155,86,155,172,205,43,55,155,155,86,155,172,205,43,55
+DB 142,142,2,142,4,140,1,138,142,142,2,142,4,140,1,138
+DB 163,163,182,163,113,21,91,210,163,163,182,163,113,21,91,210
+DB 12,12,48,12,96,60,24,108,12,12,48,12,96,60,24,108
+DB 123,123,241,123,255,138,246,132,123,123,241,123,255,138,246,132
+DB 53,53,212,53,181,225,106,128,53,53,212,53,181,225,106,128
+DB 29,29,116,29,232,105,58,245,29,29,116,29,232,105,58,245
+DB 224,224,167,224,83,71,221,179,224,224,167,224,83,71,221,179
+DB 215,215,123,215,246,172,179,33,215,215,123,215,246,172,179,33
+DB 194,194,47,194,94,237,153,156,194,194,47,194,94,237,153,156
+DB 46,46,184,46,109,150,92,67,46,46,184,46,109,150,92,67
+DB 75,75,49,75,98,122,150,41,75,75,49,75,98,122,150,41
+DB 254,254,223,254,163,33,225,93,254,254,223,254,163,33,225,93
+DB 87,87,65,87,130,22,174,213,87,87,65,87,130,22,174,213
+DB 21,21,84,21,168,65,42,189,21,21,84,21,168,65,42,189
+DB 119,119,193,119,159,182,238,232,119,119,193,119,159,182,238,232
+DB 55,55,220,55,165,235,110,146,55,55,220,55,165,235,110,146
+DB 229,229,179,229,123,86,215,158,229,229,179,229,123,86,215,158
+DB 159,159,70,159,140,217,35,19,159,159,70,159,140,217,35,19
+DB 240,240,231,240,211,23,253,35,240,240,231,240,211,23,253,35
+DB 74,74,53,74,106,127,148,32,74,74,53,74,106,127,148,32
+DB 218,218,79,218,158,149,169,68,218,218,79,218,158,149,169,68
+DB 88,88,125,88,250,37,176,162,88,88,125,88,250,37,176,162
+DB 201,201,3,201,6,202,143,207,201,201,3,201,6,202,143,207
+DB 41,41,164,41,85,141,82,124,41,41,164,41,85,141,82,124
+DB 10,10,40,10,80,34,20,90,10,10,40,10,80,34,20,90
+DB 177,177,254,177,225,79,127,80,177,177,254,177,225,79,127,80
+DB 160,160,186,160,105,26,93,201,160,160,186,160,105,26,93,201
+DB 107,107,177,107,127,218,214,20,107,107,177,107,127,218,214,20
+DB 133,133,46,133,92,171,23,217,133,133,46,133,92,171,23,217
+DB 189,189,206,189,129,115,103,60,189,189,206,189,129,115,103,60
+DB 93,93,105,93,210,52,186,143,93,93,105,93,210,52,186,143
+DB 16,16,64,16,128,80,32,144,16,16,64,16,128,80,32,144
+DB 244,244,247,244,243,3,245,7,244,244,247,244,243,3,245,7
+DB 203,203,11,203,22,192,139,221,203,203,11,203,22,192,139,221
+DB 62,62,248,62,237,198,124,211,62,62,248,62,237,198,124,211
+DB 5,5,20,5,40,17,10,45,5,5,20,5,40,17,10,45
+DB 103,103,129,103,31,230,206,120,103,103,129,103,31,230,206,120
+DB 228,228,183,228,115,83,213,151,228,228,183,228,115,83,213,151
+DB 39,39,156,39,37,187,78,2,39,39,156,39,37,187,78,2
+DB 65,65,25,65,50,88,130,115,65,65,25,65,50,88,130,115
+DB 139,139,22,139,44,157,11,167,139,139,22,139,44,157,11,167
+DB 167,167,166,167,81,1,83,246,167,167,166,167,81,1,83,246
+DB 125,125,233,125,207,148,250,178,125,125,233,125,207,148,250,178
+DB 149,149,110,149,220,251,55,73,149,149,110,149,220,251,55,73
+DB 216,216,71,216,142,159,173,86,216,216,71,216,142,159,173,86
+DB 251,251,203,251,139,48,235,112,251,251,203,251,139,48,235,112
+DB 238,238,159,238,35,113,193,205,238,238,159,238,35,113,193,205
+DB 124,124,237,124,199,145,248,187,124,124,237,124,199,145,248,187
+DB 102,102,133,102,23,227,204,113,102,102,133,102,23,227,204,113
+DB 221,221,83,221,166,142,167,123,221,221,83,221,166,142,167,123
+DB 23,23,92,23,184,75,46,175,23,23,92,23,184,75,46,175
+DB 71,71,1,71,2,70,142,69,71,71,1,71,2,70,142,69
+DB 158,158,66,158,132,220,33,26,158,158,66,158,132,220,33,26
+DB 202,202,15,202,30,197,137,212,202,202,15,202,30,197,137,212
+DB 45,45,180,45,117,153,90,88,45,45,180,45,117,153,90,88
+DB 191,191,198,191,145,121,99,46,191,191,198,191,145,121,99,46
+DB 7,7,28,7,56,27,14,63,7,7,28,7,56,27,14,63
+DB 173,173,142,173,1,35,71,172,173,173,142,173,1,35,71,172
+DB 90,90,117,90,234,47,180,176,90,90,117,90,234,47,180,176
+DB 131,131,54,131,108,181,27,239,131,131,54,131,108,181,27,239
+DB 51,51,204,51,133,255,102,182,51,51,204,51,133,255,102,182
+DB 99,99,145,99,63,242,198,92,99,99,145,99,63,242,198,92
+DB 2,2,8,2,16,10,4,18,2,2,8,2,16,10,4,18
+DB 170,170,146,170,57,56,73,147,170,170,146,170,57,56,73,147
+DB 113,113,217,113,175,168,226,222,113,113,217,113,175,168,226,222
+DB 200,200,7,200,14,207,141,198,200,200,7,200,14,207,141,198
+DB 25,25,100,25,200,125,50,209,25,25,100,25,200,125,50,209
+DB 73,73,57,73,114,112,146,59,73,73,57,73,114,112,146,59
+DB 217,217,67,217,134,154,175,95,217,217,67,217,134,154,175,95
+DB 242,242,239,242,195,29,249,49,242,242,239,242,195,29,249,49
+DB 227,227,171,227,75,72,219,168,227,227,171,227,75,72,219,168
+DB 91,91,113,91,226,42,182,185,91,91,113,91,226,42,182,185
+DB 136,136,26,136,52,146,13,188,136,136,26,136,52,146,13,188
+DB 154,154,82,154,164,200,41,62,154,154,82,154,164,200,41,62
+DB 38,38,152,38,45,190,76,11,38,38,152,38,45,190,76,11
+DB 50,50,200,50,141,250,100,191,50,50,200,50,141,250,100,191
+DB 176,176,250,176,233,74,125,89,176,176,250,176,233,74,125,89
+DB 233,233,131,233,27,106,207,242,233,233,131,233,27,106,207,242
+DB 15,15,60,15,120,51,30,119,15,15,60,15,120,51,30,119
+DB 213,213,115,213,230,166,183,51,213,213,115,213,230,166,183,51
+DB 128,128,58,128,116,186,29,244,128,128,58,128,116,186,29,244
+DB 190,190,194,190,153,124,97,39,190,190,194,190,153,124,97,39
+DB 205,205,19,205,38,222,135,235,205,205,19,205,38,222,135,235
+DB 52,52,208,52,189,228,104,137,52,52,208,52,189,228,104,137
+DB 72,72,61,72,122,117,144,50,72,72,61,72,122,117,144,50
+DB 255,255,219,255,171,36,227,84,255,255,219,255,171,36,227,84
+DB 122,122,245,122,247,143,244,141,122,122,245,122,247,143,244,141
+DB 144,144,122,144,244,234,61,100,144,144,122,144,244,234,61,100
+DB 95,95,97,95,194,62,190,157,95,95,97,95,194,62,190,157
+DB 32,32,128,32,29,160,64,61,32,32,128,32,29,160,64,61
+DB 104,104,189,104,103,213,208,15,104,104,189,104,103,213,208,15
+DB 26,26,104,26,208,114,52,202,26,26,104,26,208,114,52,202
+DB 174,174,130,174,25,44,65,183,174,174,130,174,25,44,65,183
+DB 180,180,234,180,201,94,117,125,180,180,234,180,201,94,117,125
+DB 84,84,77,84,154,25,168,206,84,84,77,84,154,25,168,206
+DB 147,147,118,147,236,229,59,127,147,147,118,147,236,229,59,127
+DB 34,34,136,34,13,170,68,47,34,34,136,34,13,170,68,47
+DB 100,100,141,100,7,233,200,99,100,100,141,100,7,233,200,99
+DB 241,241,227,241,219,18,255,42,241,241,227,241,219,18,255,42
+DB 115,115,209,115,191,162,230,204,115,115,209,115,191,162,230,204
+DB 18,18,72,18,144,90,36,130,18,18,72,18,144,90,36,130
+DB 64,64,29,64,58,93,128,122,64,64,29,64,58,93,128,122
+DB 8,8,32,8,64,40,16,72,8,8,32,8,64,40,16,72
+DB 195,195,43,195,86,232,155,149,195,195,43,195,86,232,155,149
+DB 236,236,151,236,51,123,197,223,236,236,151,236,51,123,197,223
+DB 219,219,75,219,150,144,171,77,219,219,75,219,150,144,171,77
+DB 161,161,190,161,97,31,95,192,161,161,190,161,97,31,95,192
+DB 141,141,14,141,28,131,7,145,141,141,14,141,28,131,7,145
+DB 61,61,244,61,245,201,122,200,61,61,244,61,245,201,122,200
+DB 151,151,102,151,204,241,51,91,151,151,102,151,204,241,51,91
+DB 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
+DB 207,207,27,207,54,212,131,249,207,207,27,207,54,212,131,249
+DB 43,43,172,43,69,135,86,110,43,43,172,43,69,135,86,110
+DB 118,118,197,118,151,179,236,225,118,118,197,118,151,179,236,225
+DB 130,130,50,130,100,176,25,230,130,130,50,130,100,176,25,230
+DB 214,214,127,214,254,169,177,40,214,214,127,214,254,169,177,40
+DB 27,27,108,27,216,119,54,195,27,27,108,27,216,119,54,195
+DB 181,181,238,181,193,91,119,116,181,181,238,181,193,91,119,116
+DB 175,175,134,175,17,41,67,190,175,175,134,175,17,41,67,190
+DB 106,106,181,106,119,223,212,29,106,106,181,106,119,223,212,29
+DB 80,80,93,80,186,13,160,234,80,80,93,80,186,13,160,234
+DB 69,69,9,69,18,76,138,87,69,69,9,69,18,76,138,87
+DB 243,243,235,243,203,24,251,56,243,243,235,243,203,24,251,56
+DB 48,48,192,48,157,240,96,173,48,48,192,48,157,240,96,173
+DB 239,239,155,239,43,116,195,196,239,239,155,239,43,116,195,196
+DB 63,63,252,63,229,195,126,218,63,63,252,63,229,195,126,218
+DB 85,85,73,85,146,28,170,199,85,85,73,85,146,28,170,199
+DB 162,162,178,162,121,16,89,219,162,162,178,162,121,16,89,219
+DB 234,234,143,234,3,101,201,233,234,234,143,234,3,101,201,233
+DB 101,101,137,101,15,236,202,106,101,101,137,101,15,236,202,106
+DB 186,186,210,186,185,104,105,3,186,186,210,186,185,104,105,3
+DB 47,47,188,47,101,147,94,74,47,47,188,47,101,147,94,74
+DB 192,192,39,192,78,231,157,142,192,192,39,192,78,231,157,142
+DB 222,222,95,222,190,129,161,96,222,222,95,222,190,129,161,96
+DB 28,28,112,28,224,108,56,252,28,28,112,28,224,108,56,252
+DB 253,253,211,253,187,46,231,70,253,253,211,253,187,46,231,70
+DB 77,77,41,77,82,100,154,31,77,77,41,77,82,100,154,31
+DB 146,146,114,146,228,224,57,118,146,146,114,146,228,224,57,118
+DB 117,117,201,117,143,188,234,250,117,117,201,117,143,188,234,250
+DB 6,6,24,6,48,30,12,54,6,6,24,6,48,30,12,54
+DB 138,138,18,138,36,152,9,174,138,138,18,138,36,152,9,174
+DB 178,178,242,178,249,64,121,75,178,178,242,178,249,64,121,75
+DB 230,230,191,230,99,89,209,133,230,230,191,230,99,89,209,133
+DB 14,14,56,14,112,54,28,126,14,14,56,14,112,54,28,126
+DB 31,31,124,31,248,99,62,231,31,31,124,31,248,99,62,231
+DB 98,98,149,98,55,247,196,85,98,98,149,98,55,247,196,85
+DB 212,212,119,212,238,163,181,58,212,212,119,212,238,163,181,58
+DB 168,168,154,168,41,50,77,129,168,168,154,168,41,50,77,129
+DB 150,150,98,150,196,244,49,82,150,150,98,150,196,244,49,82
+DB 249,249,195,249,155,58,239,98,249,249,195,249,155,58,239,98
+DB 197,197,51,197,102,246,151,163,197,197,51,197,102,246,151,163
+DB 37,37,148,37,53,177,74,16,37,37,148,37,53,177,74,16
+DB 89,89,121,89,242,32,178,171,89,89,121,89,242,32,178,171
+DB 132,132,42,132,84,174,21,208,132,132,42,132,84,174,21,208
+DB 114,114,213,114,183,167,228,197,114,114,213,114,183,167,228,197
+DB 57,57,228,57,213,221,114,236,57,57,228,57,213,221,114,236
+DB 76,76,45,76,90,97,152,22,76,76,45,76,90,97,152,22
+DB 94,94,101,94,202,59,188,148,94,94,101,94,202,59,188,148
+DB 120,120,253,120,231,133,240,159,120,120,253,120,231,133,240,159
+DB 56,56,224,56,221,216,112,229,56,56,224,56,221,216,112,229
+DB 140,140,10,140,20,134,5,152,140,140,10,140,20,134,5,152
+DB 209,209,99,209,198,178,191,23,209,209,99,209,198,178,191,23
+DB 165,165,174,165,65,11,87,228,165,165,174,165,65,11,87,228
+DB 226,226,175,226,67,77,217,161,226,226,175,226,67,77,217,161
+DB 97,97,153,97,47,248,194,78,97,97,153,97,47,248,194,78
+DB 179,179,246,179,241,69,123,66,179,179,246,179,241,69,123,66
+DB 33,33,132,33,21,165,66,52,33,33,132,33,21,165,66,52
+DB 156,156,74,156,148,214,37,8,156,156,74,156,148,214,37,8
+DB 30,30,120,30,240,102,60,238,30,30,120,30,240,102,60,238
+DB 67,67,17,67,34,82,134,97,67,67,17,67,34,82,134,97
+DB 199,199,59,199,118,252,147,177,199,199,59,199,118,252,147,177
+DB 252,252,215,252,179,43,229,79,252,252,215,252,179,43,229,79
+DB 4,4,16,4,32,20,8,36,4,4,16,4,32,20,8,36
+DB 81,81,89,81,178,8,162,227,81,81,89,81,178,8,162,227
+DB 153,153,94,153,188,199,47,37,153,153,94,153,188,199,47,37
+DB 109,109,169,109,79,196,218,34,109,109,169,109,79,196,218,34
+DB 13,13,52,13,104,57,26,101,13,13,52,13,104,57,26,101
+DB 250,250,207,250,131,53,233,121,250,250,207,250,131,53,233,121
+DB 223,223,91,223,182,132,163,105,223,223,91,223,182,132,163,105
+DB 126,126,229,126,215,155,252,169,126,126,229,126,215,155,252,169
+DB 36,36,144,36,61,180,72,25,36,36,144,36,61,180,72,25
+DB 59,59,236,59,197,215,118,254,59,59,236,59,197,215,118,254
+DB 171,171,150,171,49,61,75,154,171,171,150,171,49,61,75,154
+DB 206,206,31,206,62,209,129,240,206,206,31,206,62,209,129,240
+DB 17,17,68,17,136,85,34,153,17,17,68,17,136,85,34,153
+DB 143,143,6,143,12,137,3,131,143,143,6,143,12,137,3,131
+DB 78,78,37,78,74,107,156,4,78,78,37,78,74,107,156,4
+DB 183,183,230,183,209,81,115,102,183,183,230,183,209,81,115,102
+DB 235,235,139,235,11,96,203,224,235,235,139,235,11,96,203,224
+DB 60,60,240,60,253,204,120,193,60,60,240,60,253,204,120,193
+DB 129,129,62,129,124,191,31,253,129,129,62,129,124,191,31,253
+DB 148,148,106,148,212,254,53,64,148,148,106,148,212,254,53,64
+DB 247,247,251,247,235,12,243,28,247,247,251,247,235,12,243,28
+DB 185,185,222,185,161,103,111,24,185,185,222,185,161,103,111,24
+DB 19,19,76,19,152,95,38,139,19,19,76,19,152,95,38,139
+DB 44,44,176,44,125,156,88,81,44,44,176,44,125,156,88,81
+DB 211,211,107,211,214,184,187,5,211,211,107,211,214,184,187,5
+DB 231,231,187,231,107,92,211,140,231,231,187,231,107,92,211,140
+DB 110,110,165,110,87,203,220,57,110,110,165,110,87,203,220,57
+DB 196,196,55,196,110,243,149,170,196,196,55,196,110,243,149,170
+DB 3,3,12,3,24,15,6,27,3,3,12,3,24,15,6,27
+DB 86,86,69,86,138,19,172,220,86,86,69,86,138,19,172,220
+DB 68,68,13,68,26,73,136,94,68,68,13,68,26,73,136,94
+DB 127,127,225,127,223,158,254,160,127,127,225,127,223,158,254,160
+DB 169,169,158,169,33,55,79,136,169,169,158,169,33,55,79,136
+DB 42,42,168,42,77,130,84,103,42,42,168,42,77,130,84,103
+DB 187,187,214,187,177,109,107,10,187,187,214,187,177,109,107,10
+DB 193,193,35,193,70,226,159,135,193,193,35,193,70,226,159,135
+DB 83,83,81,83,162,2,166,241,83,83,81,83,162,2,166,241
+DB 220,220,87,220,174,139,165,114,220,220,87,220,174,139,165,114
+DB 11,11,44,11,88,39,22,83,11,11,44,11,88,39,22,83
+DB 157,157,78,157,156,211,39,1,157,157,78,157,156,211,39,1
+DB 108,108,173,108,71,193,216,43,108,108,173,108,71,193,216,43
+DB 49,49,196,49,149,245,98,164,49,49,196,49,149,245,98,164
+DB 116,116,205,116,135,185,232,243,116,116,205,116,135,185,232,243
+DB 246,246,255,246,227,9,241,21,246,246,255,246,227,9,241,21
+DB 70,70,5,70,10,67,140,76,70,70,5,70,10,67,140,76
+DB 172,172,138,172,9,38,69,165,172,172,138,172,9,38,69,165
+DB 137,137,30,137,60,151,15,181,137,137,30,137,60,151,15,181
+DB 20,20,80,20,160,68,40,180,20,20,80,20,160,68,40,180
+DB 225,225,163,225,91,66,223,186,225,225,163,225,91,66,223,186
+DB 22,22,88,22,176,78,44,166,22,22,88,22,176,78,44,166
+DB 58,58,232,58,205,210,116,247,58,58,232,58,205,210,116,247
+DB 105,105,185,105,111,208,210,6,105,105,185,105,111,208,210,6
+DB 9,9,36,9,72,45,18,65,9,9,36,9,72,45,18,65
+DB 112,112,221,112,167,173,224,215,112,112,221,112,167,173,224,215
+DB 182,182,226,182,217,84,113,111,182,182,226,182,217,84,113,111
+DB 208,208,103,208,206,183,189,30,208,208,103,208,206,183,189,30
+DB 237,237,147,237,59,126,199,214,237,237,147,237,59,126,199,214
+DB 204,204,23,204,46,219,133,226,204,204,23,204,46,219,133,226
+DB 66,66,21,66,42,87,132,104,66,66,21,66,42,87,132,104
+DB 152,152,90,152,180,194,45,44,152,152,90,152,180,194,45,44
+DB 164,164,170,164,73,14,85,237,164,164,170,164,73,14,85,237
+DB 40,40,160,40,93,136,80,117,40,40,160,40,93,136,80,117
+DB 92,92,109,92,218,49,184,134,92,92,109,92,218,49,184,134
+DB 248,248,199,248,147,63,237,107,248,248,199,248,147,63,237,107
+DB 134,134,34,134,68,164,17,194,134,134,34,134,68,164,17,194
+DB 24,35,198,232,135,184,1,79
+DB 54,166,210,245,121,111,145,82
+DB 96,188,155,142,163,12,123,53
+DB 29,224,215,194,46,75,254,87
+DB 21,119,55,229,159,240,74,218
+DB 88,201,41,10,177,160,107,133
+DB 189,93,16,244,203,62,5,103
+DB 228,39,65,139,167,125,149,216
+DB 251,238,124,102,221,23,71,158
+DB 202,45,191,7,173,90,131,51
+EXTERN __imp_RtlVirtualUnwind:NEAR
+
+ALIGN 16
+se_handler PROC PRIVATE
+ push rsi
+ push rdi
+ push rbx
+ push rbp
+ push r12
+ push r13
+ push r14
+ push r15
+ pushfq
+ sub rsp,64
+
+ mov rax,QWORD PTR[120+r8]
+ mov rbx,QWORD PTR[248+r8]
+
+ lea r10,QWORD PTR[$L$prologue]
+ cmp rbx,r10
+ jb $L$in_prologue
+
+ mov rax,QWORD PTR[152+r8]
+
+ lea r10,QWORD PTR[$L$epilogue]
+ cmp rbx,r10
+ jae $L$in_prologue
+
+ mov rax,QWORD PTR[((128+32))+rax]
+ lea rax,QWORD PTR[48+rax]
+
+ mov rbx,QWORD PTR[((-8))+rax]
+ mov rbp,QWORD PTR[((-16))+rax]
+ mov r12,QWORD PTR[((-24))+rax]
+ mov r13,QWORD PTR[((-32))+rax]
+ mov r14,QWORD PTR[((-40))+rax]
+ mov r15,QWORD PTR[((-48))+rax]
+ mov QWORD PTR[144+r8],rbx
+ mov QWORD PTR[160+r8],rbp
+ mov QWORD PTR[216+r8],r12
+ mov QWORD PTR[224+r8],r13
+ mov QWORD PTR[232+r8],r14
+ mov QWORD PTR[240+r8],r15
+
+$L$in_prologue::
+ mov rdi,QWORD PTR[8+rax]
+ mov rsi,QWORD PTR[16+rax]
+ mov QWORD PTR[152+r8],rax
+ mov QWORD PTR[168+r8],rsi
+ mov QWORD PTR[176+r8],rdi
+
+ mov rdi,QWORD PTR[40+r9]
+ mov rsi,r8
+ mov ecx,154
+ DD 0a548f3fch
+
+
+ mov rsi,r9
+ xor rcx,rcx
+ mov rdx,QWORD PTR[8+rsi]
+ mov r8,QWORD PTR[rsi]
+ mov r9,QWORD PTR[16+rsi]
+ mov r10,QWORD PTR[40+rsi]
+ lea r11,QWORD PTR[56+rsi]
+ lea r12,QWORD PTR[24+rsi]
+ mov QWORD PTR[32+rsp],r10
+ mov QWORD PTR[40+rsp],r11
+ mov QWORD PTR[48+rsp],r12
+ mov QWORD PTR[56+rsp],rcx
+ call QWORD PTR[__imp_RtlVirtualUnwind]
+
+ mov eax,1
+ add rsp,64
+ popfq
+ pop r15
+ pop r14
+ pop r13
+ pop r12
+ pop rbp
+ pop rbx
+ pop rdi
+ pop rsi
+ DB 0F3h,0C3h ;repret
+se_handler ENDP
+
+.text$ ENDS
+.pdata SEGMENT READONLY ALIGN(4)
+ALIGN 4
+ DD imagerel $L$SEH_begin_whirlpool_block
+ DD imagerel $L$SEH_end_whirlpool_block
+ DD imagerel $L$SEH_info_whirlpool_block
+
+.pdata ENDS
+.xdata SEGMENT READONLY ALIGN(8)
+ALIGN 8
+$L$SEH_info_whirlpool_block::
+DB 9,0,0,0
+ DD imagerel se_handler
+
+.xdata ENDS
+END
diff --git a/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm b/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
new file mode 100644
index 0000000000..db26276770
--- /dev/null
+++ b/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
@@ -0,0 +1,186 @@
+OPTION DOTNAME
+EXTERN OPENSSL_cpuid_setup:NEAR
+.CRT$XCU SEGMENT READONLY DWORD
+ DQ OPENSSL_cpuid_setup
+
+.CRT$XCU ENDS
+.text$ SEGMENT ALIGN(64) 'CODE'
+
+PUBLIC OPENSSL_atomic_add
+
+ALIGN 16
+OPENSSL_atomic_add PROC PUBLIC
+ mov eax,DWORD PTR[rcx]
+$L$spin:: lea r8,QWORD PTR[rax*1+rdx]
+DB 0f0h
+
+ cmpxchg DWORD PTR[rcx],r8d
+ jne $L$spin
+ mov eax,r8d
+DB 048h,098h
+
+ DB 0F3h,0C3h ;repret
+OPENSSL_atomic_add ENDP
+
+PUBLIC OPENSSL_rdtsc
+
+ALIGN 16
+OPENSSL_rdtsc PROC PUBLIC
+ rdtsc
+ shl rdx,32
+ or rax,rdx
+ DB 0F3h,0C3h ;repret
+OPENSSL_rdtsc ENDP
+
+PUBLIC OPENSSL_ia32_cpuid
+
+ALIGN 16
+OPENSSL_ia32_cpuid PROC PUBLIC
+ mov r8,rbx
+
+ xor eax,eax
+ cpuid
+ mov r11d,eax
+
+ xor eax,eax
+ cmp ebx,0756e6547h
+ setne al
+ mov r9d,eax
+ cmp edx,049656e69h
+ setne al
+ or r9d,eax
+ cmp ecx,06c65746eh
+ setne al
+ or r9d,eax
+ jz $L$intel
+
+ cmp ebx,068747541h
+ setne al
+ mov r10d,eax
+ cmp edx,069746E65h
+ setne al
+ or r10d,eax
+ cmp ecx,0444D4163h
+ setne al
+ or r10d,eax
+ jnz $L$intel
+
+
+ mov eax,080000000h
+ cpuid
+ cmp eax,080000008h
+ jb $L$intel
+
+ mov eax,080000008h
+ cpuid
+ movzx r10,cl
+ inc r10
+
+ mov eax,1
+ cpuid
+ bt edx,28
+ jnc $L$done
+ shr ebx,16
+ cmp bl,r10b
+ ja $L$done
+ and edx,0efffffffh
+ jmp $L$done
+
+$L$intel::
+ cmp r11d,4
+ mov r10d,-1
+ jb $L$nocacheinfo
+
+ mov eax,4
+ mov ecx,0
+ cpuid
+ mov r10d,eax
+ shr r10d,14
+ and r10d,0fffh
+
+$L$nocacheinfo::
+ mov eax,1
+ cpuid
+ cmp r9d,0
+ jne $L$notintel
+ or edx,000100000h
+ and ah,15
+ cmp ah,15
+ je $L$notintel
+ or edx,040000000h
+$L$notintel::
+ bt edx,28
+ jnc $L$done
+ and edx,0efffffffh
+ cmp r10d,0
+ je $L$done
+
+ or edx,010000000h
+ shr ebx,16
+ cmp bl,1
+ ja $L$done
+ and edx,0efffffffh
+$L$done::
+ shl rcx,32
+ mov eax,edx
+ mov rbx,r8
+ or rax,rcx
+ DB 0F3h,0C3h ;repret
+OPENSSL_ia32_cpuid ENDP
+
+PUBLIC OPENSSL_cleanse
+
+ALIGN 16
+OPENSSL_cleanse PROC PUBLIC
+ xor rax,rax
+ cmp rdx,15
+ jae $L$ot
+ cmp rdx,0
+ je $L$ret
+$L$ittle::
+ mov BYTE PTR[rcx],al
+ sub rdx,1
+ lea rcx,QWORD PTR[1+rcx]
+ jnz $L$ittle
+$L$ret::
+ DB 0F3h,0C3h ;repret
+ALIGN 16
+$L$ot::
+ test rcx,7
+ jz $L$aligned
+ mov BYTE PTR[rcx],al
+ lea rdx,QWORD PTR[((-1))+rdx]
+ lea rcx,QWORD PTR[1+rcx]
+ jmp $L$ot
+$L$aligned::
+ mov QWORD PTR[rcx],rax
+ lea rdx,QWORD PTR[((-8))+rdx]
+ test rdx,-8
+ lea rcx,QWORD PTR[8+rcx]
+ jnz $L$aligned
+ cmp rdx,0
+ jne $L$ittle
+ DB 0F3h,0C3h ;repret
+OPENSSL_cleanse ENDP
+PUBLIC OPENSSL_wipe_cpu
+
+ALIGN 16
+OPENSSL_wipe_cpu PROC PUBLIC
+ pxor xmm0,xmm0
+ pxor xmm1,xmm1
+ pxor xmm2,xmm2
+ pxor xmm3,xmm3
+ pxor xmm4,xmm4
+ pxor xmm5,xmm5
+ xor rcx,rcx
+ xor rdx,rdx
+ xor r8,r8
+ xor r9,r9
+ xor r10,r10
+ xor r11,r11
+ lea rax,QWORD PTR[8+rsp]
+ DB 0F3h,0C3h ;repret
+OPENSSL_wipe_cpu ENDP
+
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-elf-gas/aes/aes-586.s b/deps/openssl/asm/x86-elf-gas/aes/aes-586.s
new file mode 100644
index 0000000000..34c90a068b
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/aes/aes-586.s
@@ -0,0 +1,3234 @@
+.file "aes-586.s"
+.text
+.type _x86_AES_encrypt_compact,@function
+.align 16
+_x86_AES_encrypt_compact:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L000loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %ecx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%ecx
+ roll $24,%ecx
+ xorl %esi,%ecx
+ rorl $16,%ebp
+ xorl %ebp,%ecx
+ rorl $8,%ebp
+ xorl %ebp,%ecx
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %edx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%edx
+ roll $24,%edx
+ xorl %esi,%edx
+ rorl $16,%ebp
+ xorl %ebp,%edx
+ rorl $8,%ebp
+ xorl %ebp,%edx
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %eax,%ebp
+ xorl %edi,%esi
+ xorl %esi,%eax
+ roll $24,%eax
+ xorl %esi,%eax
+ rorl $16,%ebp
+ xorl %ebp,%eax
+ rorl $8,%ebp
+ xorl %ebp,%eax
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %ebx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%ebx
+ roll $24,%ebx
+ xorl %esi,%ebx
+ rorl $16,%ebp
+ xorl %ebp,%ebx
+ rorl $8,%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L000loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_encrypt_compact,.-_x86_AES_encrypt_compact
+.type _sse_AES_encrypt_compact,@function
+.align 16
+_sse_AES_encrypt_compact:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 16
+.L001loop:
+ pshufw $8,%mm0,%mm1
+ pshufw $13,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ pshufw $13,%mm0,%mm2
+ movzbl %ah,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ shll $8,%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ pshufw $8,%mm4,%mm6
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm0
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ movd %mm2,%eax
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ movd %mm6,%ebx
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm1
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ shrl $16,%ebx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ shrl $16,%eax
+ punpckldq %mm1,%mm0
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ andl $255,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $16,%eax
+ orl %eax,%edx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm4
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %ebx,%edx
+ movd %edx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L002out
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pcmpgtb %mm0,%mm3
+ pcmpgtb %mm4,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ pshufw $177,%mm0,%mm2
+ pshufw $177,%mm4,%mm6
+ paddb %mm0,%mm0
+ paddb %mm4,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pshufw $177,%mm2,%mm3
+ pshufw $177,%mm6,%mm7
+ pxor %mm0,%mm1
+ pxor %mm4,%mm5
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm3,%mm2
+ movq %mm7,%mm6
+ pslld $8,%mm3
+ pslld $8,%mm7
+ psrld $24,%mm2
+ psrld $24,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ psrld $8,%mm1
+ psrld $8,%mm5
+ movl -128(%ebp),%eax
+ pslld $24,%mm3
+ pslld $24,%mm7
+ movl -64(%ebp),%ebx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl (%ebp),%ecx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L001loop
+.align 16
+.L002out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_encrypt_compact,.-_sse_AES_encrypt_compact
+.type _x86_AES_encrypt,@function
+.align 16
+_x86_AES_encrypt:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L003loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl (%ebp,%esi,8),%esi
+ movzbl %ch,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movzbl %bh,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ movl (%ebp,%edx,8),%edx
+ movzbl %ah,%eax
+ xorl 3(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ xorl 1(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L003loop
+ movl %eax,%esi
+ andl $255,%esi
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %bh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %ch,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %dh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl 2(%ebp,%edx,8),%edx
+ andl $255,%edx
+ movzbl %ah,%eax
+ movl (%ebp,%eax,8),%eax
+ andl $65280,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movl (%ebp,%ebx,8),%ebx
+ andl $16711680,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movl 2(%ebp,%ecx,8),%ecx
+ andl $4278190080,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Te:
+.long 2774754246,2774754246
+.long 2222750968,2222750968
+.long 2574743534,2574743534
+.long 2373680118,2373680118
+.long 234025727,234025727
+.long 3177933782,3177933782
+.long 2976870366,2976870366
+.long 1422247313,1422247313
+.long 1345335392,1345335392
+.long 50397442,50397442
+.long 2842126286,2842126286
+.long 2099981142,2099981142
+.long 436141799,436141799
+.long 1658312629,1658312629
+.long 3870010189,3870010189
+.long 2591454956,2591454956
+.long 1170918031,1170918031
+.long 2642575903,2642575903
+.long 1086966153,1086966153
+.long 2273148410,2273148410
+.long 368769775,368769775
+.long 3948501426,3948501426
+.long 3376891790,3376891790
+.long 200339707,200339707
+.long 3970805057,3970805057
+.long 1742001331,1742001331
+.long 4255294047,4255294047
+.long 3937382213,3937382213
+.long 3214711843,3214711843
+.long 4154762323,4154762323
+.long 2524082916,2524082916
+.long 1539358875,1539358875
+.long 3266819957,3266819957
+.long 486407649,486407649
+.long 2928907069,2928907069
+.long 1780885068,1780885068
+.long 1513502316,1513502316
+.long 1094664062,1094664062
+.long 49805301,49805301
+.long 1338821763,1338821763
+.long 1546925160,1546925160
+.long 4104496465,4104496465
+.long 887481809,887481809
+.long 150073849,150073849
+.long 2473685474,2473685474
+.long 1943591083,1943591083
+.long 1395732834,1395732834
+.long 1058346282,1058346282
+.long 201589768,201589768
+.long 1388824469,1388824469
+.long 1696801606,1696801606
+.long 1589887901,1589887901
+.long 672667696,672667696
+.long 2711000631,2711000631
+.long 251987210,251987210
+.long 3046808111,3046808111
+.long 151455502,151455502
+.long 907153956,907153956
+.long 2608889883,2608889883
+.long 1038279391,1038279391
+.long 652995533,652995533
+.long 1764173646,1764173646
+.long 3451040383,3451040383
+.long 2675275242,2675275242
+.long 453576978,453576978
+.long 2659418909,2659418909
+.long 1949051992,1949051992
+.long 773462580,773462580
+.long 756751158,756751158
+.long 2993581788,2993581788
+.long 3998898868,3998898868
+.long 4221608027,4221608027
+.long 4132590244,4132590244
+.long 1295727478,1295727478
+.long 1641469623,1641469623
+.long 3467883389,3467883389
+.long 2066295122,2066295122
+.long 1055122397,1055122397
+.long 1898917726,1898917726
+.long 2542044179,2542044179
+.long 4115878822,4115878822
+.long 1758581177,1758581177
+.long 0,0
+.long 753790401,753790401
+.long 1612718144,1612718144
+.long 536673507,536673507
+.long 3367088505,3367088505
+.long 3982187446,3982187446
+.long 3194645204,3194645204
+.long 1187761037,1187761037
+.long 3653156455,3653156455
+.long 1262041458,1262041458
+.long 3729410708,3729410708
+.long 3561770136,3561770136
+.long 3898103984,3898103984
+.long 1255133061,1255133061
+.long 1808847035,1808847035
+.long 720367557,720367557
+.long 3853167183,3853167183
+.long 385612781,385612781
+.long 3309519750,3309519750
+.long 3612167578,3612167578
+.long 1429418854,1429418854
+.long 2491778321,2491778321
+.long 3477423498,3477423498
+.long 284817897,284817897
+.long 100794884,100794884
+.long 2172616702,2172616702
+.long 4031795360,4031795360
+.long 1144798328,1144798328
+.long 3131023141,3131023141
+.long 3819481163,3819481163
+.long 4082192802,4082192802
+.long 4272137053,4272137053
+.long 3225436288,3225436288
+.long 2324664069,2324664069
+.long 2912064063,2912064063
+.long 3164445985,3164445985
+.long 1211644016,1211644016
+.long 83228145,83228145
+.long 3753688163,3753688163
+.long 3249976951,3249976951
+.long 1977277103,1977277103
+.long 1663115586,1663115586
+.long 806359072,806359072
+.long 452984805,452984805
+.long 250868733,250868733
+.long 1842533055,1842533055
+.long 1288555905,1288555905
+.long 336333848,336333848
+.long 890442534,890442534
+.long 804056259,804056259
+.long 3781124030,3781124030
+.long 2727843637,2727843637
+.long 3427026056,3427026056
+.long 957814574,957814574
+.long 1472513171,1472513171
+.long 4071073621,4071073621
+.long 2189328124,2189328124
+.long 1195195770,1195195770
+.long 2892260552,2892260552
+.long 3881655738,3881655738
+.long 723065138,723065138
+.long 2507371494,2507371494
+.long 2690670784,2690670784
+.long 2558624025,2558624025
+.long 3511635870,3511635870
+.long 2145180835,2145180835
+.long 1713513028,1713513028
+.long 2116692564,2116692564
+.long 2878378043,2878378043
+.long 2206763019,2206763019
+.long 3393603212,3393603212
+.long 703524551,703524551
+.long 3552098411,3552098411
+.long 1007948840,1007948840
+.long 2044649127,2044649127
+.long 3797835452,3797835452
+.long 487262998,487262998
+.long 1994120109,1994120109
+.long 1004593371,1004593371
+.long 1446130276,1446130276
+.long 1312438900,1312438900
+.long 503974420,503974420
+.long 3679013266,3679013266
+.long 168166924,168166924
+.long 1814307912,1814307912
+.long 3831258296,3831258296
+.long 1573044895,1573044895
+.long 1859376061,1859376061
+.long 4021070915,4021070915
+.long 2791465668,2791465668
+.long 2828112185,2828112185
+.long 2761266481,2761266481
+.long 937747667,937747667
+.long 2339994098,2339994098
+.long 854058965,854058965
+.long 1137232011,1137232011
+.long 1496790894,1496790894
+.long 3077402074,3077402074
+.long 2358086913,2358086913
+.long 1691735473,1691735473
+.long 3528347292,3528347292
+.long 3769215305,3769215305
+.long 3027004632,3027004632
+.long 4199962284,4199962284
+.long 133494003,133494003
+.long 636152527,636152527
+.long 2942657994,2942657994
+.long 2390391540,2390391540
+.long 3920539207,3920539207
+.long 403179536,403179536
+.long 3585784431,3585784431
+.long 2289596656,2289596656
+.long 1864705354,1864705354
+.long 1915629148,1915629148
+.long 605822008,605822008
+.long 4054230615,4054230615
+.long 3350508659,3350508659
+.long 1371981463,1371981463
+.long 602466507,602466507
+.long 2094914977,2094914977
+.long 2624877800,2624877800
+.long 555687742,555687742
+.long 3712699286,3712699286
+.long 3703422305,3703422305
+.long 2257292045,2257292045
+.long 2240449039,2240449039
+.long 2423288032,2423288032
+.long 1111375484,1111375484
+.long 3300242801,3300242801
+.long 2858837708,2858837708
+.long 3628615824,3628615824
+.long 84083462,84083462
+.long 32962295,32962295
+.long 302911004,302911004
+.long 2741068226,2741068226
+.long 1597322602,1597322602
+.long 4183250862,4183250862
+.long 3501832553,3501832553
+.long 2441512471,2441512471
+.long 1489093017,1489093017
+.long 656219450,656219450
+.long 3114180135,3114180135
+.long 954327513,954327513
+.long 335083755,335083755
+.long 3013122091,3013122091
+.long 856756514,856756514
+.long 3144247762,3144247762
+.long 1893325225,1893325225
+.long 2307821063,2307821063
+.long 2811532339,2811532339
+.long 3063651117,3063651117
+.long 572399164,572399164
+.long 2458355477,2458355477
+.long 552200649,552200649
+.long 1238290055,1238290055
+.long 4283782570,4283782570
+.long 2015897680,2015897680
+.long 2061492133,2061492133
+.long 2408352771,2408352771
+.long 4171342169,4171342169
+.long 2156497161,2156497161
+.long 386731290,386731290
+.long 3669999461,3669999461
+.long 837215959,837215959
+.long 3326231172,3326231172
+.long 3093850320,3093850320
+.long 3275833730,3275833730
+.long 2962856233,2962856233
+.long 1999449434,1999449434
+.long 286199582,286199582
+.long 3417354363,3417354363
+.long 4233385128,4233385128
+.long 3602627437,3602627437
+.long 974525996,974525996
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.long 1,2,4,8
+.long 16,32,64,128
+.long 27,54,0,0
+.long 0,0,0,0
+.size _x86_AES_encrypt,.-_x86_AES_encrypt
+.globl AES_encrypt
+.type AES_encrypt,@function
+.align 16
+AES_encrypt:
+.L_AES_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L004pic_point
+.L004pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ leal .LAES_Te-.L004pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc .L005x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L005x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_encrypt,.-.L_AES_encrypt_begin
+.type _x86_AES_decrypt_compact,@function
+.align 16
+_x86_AES_decrypt_compact:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 16
+.L006loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%eax
+ subl %edi,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ecx,%eax
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ecx,%ebx
+ roll $8,%ecx
+ xorl %esi,%ebp
+ xorl %eax,%ecx
+ xorl %ebp,%eax
+ roll $24,%eax
+ xorl %ebx,%ecx
+ xorl %ebp,%ebx
+ roll $16,%ebx
+ xorl %ebp,%ecx
+ roll $8,%ebp
+ xorl %eax,%ecx
+ xorl %ebx,%ecx
+ movl 4(%esp),%eax
+ xorl %ebp,%ecx
+ movl %ecx,12(%esp)
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %edx,%ebx
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %edx,%ecx
+ roll $8,%edx
+ xorl %esi,%ebp
+ xorl %ebx,%edx
+ xorl %ebp,%ebx
+ roll $24,%ebx
+ xorl %ecx,%edx
+ xorl %ebp,%ecx
+ roll $16,%ecx
+ xorl %ebp,%edx
+ roll $8,%ebp
+ xorl %ebx,%edx
+ xorl %ecx,%edx
+ movl 8(%esp),%ebx
+ xorl %ebp,%edx
+ movl %edx,16(%esp)
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %eax,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %eax,%edx
+ roll $8,%eax
+ xorl %esi,%ebp
+ xorl %ecx,%eax
+ xorl %ebp,%ecx
+ roll $24,%ecx
+ xorl %edx,%eax
+ xorl %ebp,%edx
+ roll $16,%edx
+ xorl %ebp,%eax
+ roll $8,%ebp
+ xorl %ecx,%eax
+ xorl %edx,%eax
+ xorl %ebp,%eax
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ebx,%edx
+ roll $8,%ebx
+ xorl %esi,%ebp
+ xorl %ecx,%ebx
+ xorl %ebp,%ecx
+ roll $24,%ecx
+ xorl %edx,%ebx
+ xorl %ebp,%edx
+ roll $16,%edx
+ xorl %ebp,%ebx
+ roll $8,%ebp
+ xorl %ecx,%ebx
+ xorl %edx,%ebx
+ movl 12(%esp),%ecx
+ xorl %ebp,%ebx
+ movl 16(%esp),%edx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L006loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.size _x86_AES_decrypt_compact,.-_x86_AES_decrypt_compact
+.type _sse_AES_decrypt_compact,@function
+.align 16
+_sse_AES_decrypt_compact:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 16
+.L007loop:
+ pshufw $12,%mm0,%mm1
+ movd %mm1,%eax
+ pshufw $9,%mm4,%mm5
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ movd %mm5,%ebx
+ movzbl %ah,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ shll $8,%edx
+ pshufw $6,%mm0,%mm2
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ shrl $16,%eax
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ pshufw $3,%mm4,%mm6
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm0
+ movzbl %al,%esi
+ movd %mm2,%eax
+ movzbl -128(%ebp,%esi,1),%ecx
+ shll $16,%ecx
+ movzbl %bl,%esi
+ movd %mm6,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ orl %esi,%ecx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ orl %esi,%edx
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%edx
+ movd %edx,%mm1
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%edx
+ shll $8,%edx
+ movzbl %bh,%esi
+ shrl $16,%eax
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ punpckldq %mm1,%mm0
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %ebx,%edx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%edx
+ movd %edx,%mm4
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ orl %eax,%ecx
+ movd %ecx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja .L008out
+ movq %mm0,%mm3
+ movq %mm4,%mm7
+ pshufw $228,%mm0,%mm2
+ pshufw $228,%mm4,%mm6
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pshufw $177,%mm0,%mm0
+ pshufw $177,%mm4,%mm4
+ pslld $8,%mm2
+ pslld $8,%mm6
+ psrld $8,%mm3
+ psrld $8,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pslld $16,%mm2
+ pslld $16,%mm6
+ psrld $16,%mm3
+ psrld $16,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movq 8(%esp),%mm3
+ pxor %mm2,%mm2
+ pxor %mm6,%mm6
+ pcmpgtb %mm1,%mm2
+ pcmpgtb %mm5,%mm6
+ pand %mm3,%mm2
+ pand %mm3,%mm6
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm2,%mm1
+ pxor %mm6,%mm5
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq %mm1,%mm2
+ movq %mm5,%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pslld $24,%mm3
+ pslld $24,%mm7
+ psrld $8,%mm2
+ psrld $8,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pshufw $177,%mm1,%mm3
+ pshufw $177,%mm5,%mm7
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ pshufw $177,%mm1,%mm2
+ pshufw $177,%mm5,%mm6
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pslld $8,%mm1
+ pslld $8,%mm5
+ psrld $8,%mm3
+ psrld $8,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl -128(%ebp),%eax
+ pslld $16,%mm1
+ pslld $16,%mm5
+ movl -64(%ebp),%ebx
+ psrld $16,%mm3
+ psrld $16,%mm7
+ movl (%ebp),%ecx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp .L007loop
+.align 16
+.L008out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.size _sse_AES_decrypt_compact,.-_sse_AES_decrypt_compact
+.type _x86_AES_decrypt,@function
+.align 16
+_x86_AES_decrypt:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 16
+.L009loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+
+ movl %ebx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %ah,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+
+ movl %ecx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl (%ebp,%edx,8),%edx
+ movzbl %ch,%ecx
+ xorl 3(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ xorl 1(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb .L009loop
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+ leal -128(%ebp),%ebp
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl (%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl (%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl (%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl (%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ leal -2048(%ebp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 64
+.LAES_Td:
+.long 1353184337,1353184337
+.long 1399144830,1399144830
+.long 3282310938,3282310938
+.long 2522752826,2522752826
+.long 3412831035,3412831035
+.long 4047871263,4047871263
+.long 2874735276,2874735276
+.long 2466505547,2466505547
+.long 1442459680,1442459680
+.long 4134368941,4134368941
+.long 2440481928,2440481928
+.long 625738485,625738485
+.long 4242007375,4242007375
+.long 3620416197,3620416197
+.long 2151953702,2151953702
+.long 2409849525,2409849525
+.long 1230680542,1230680542
+.long 1729870373,1729870373
+.long 2551114309,2551114309
+.long 3787521629,3787521629
+.long 41234371,41234371
+.long 317738113,317738113
+.long 2744600205,2744600205
+.long 3338261355,3338261355
+.long 3881799427,3881799427
+.long 2510066197,2510066197
+.long 3950669247,3950669247
+.long 3663286933,3663286933
+.long 763608788,763608788
+.long 3542185048,3542185048
+.long 694804553,694804553
+.long 1154009486,1154009486
+.long 1787413109,1787413109
+.long 2021232372,2021232372
+.long 1799248025,1799248025
+.long 3715217703,3715217703
+.long 3058688446,3058688446
+.long 397248752,397248752
+.long 1722556617,1722556617
+.long 3023752829,3023752829
+.long 407560035,407560035
+.long 2184256229,2184256229
+.long 1613975959,1613975959
+.long 1165972322,1165972322
+.long 3765920945,3765920945
+.long 2226023355,2226023355
+.long 480281086,480281086
+.long 2485848313,2485848313
+.long 1483229296,1483229296
+.long 436028815,436028815
+.long 2272059028,2272059028
+.long 3086515026,3086515026
+.long 601060267,601060267
+.long 3791801202,3791801202
+.long 1468997603,1468997603
+.long 715871590,715871590
+.long 120122290,120122290
+.long 63092015,63092015
+.long 2591802758,2591802758
+.long 2768779219,2768779219
+.long 4068943920,4068943920
+.long 2997206819,2997206819
+.long 3127509762,3127509762
+.long 1552029421,1552029421
+.long 723308426,723308426
+.long 2461301159,2461301159
+.long 4042393587,4042393587
+.long 2715969870,2715969870
+.long 3455375973,3455375973
+.long 3586000134,3586000134
+.long 526529745,526529745
+.long 2331944644,2331944644
+.long 2639474228,2639474228
+.long 2689987490,2689987490
+.long 853641733,853641733
+.long 1978398372,1978398372
+.long 971801355,971801355
+.long 2867814464,2867814464
+.long 111112542,111112542
+.long 1360031421,1360031421
+.long 4186579262,4186579262
+.long 1023860118,1023860118
+.long 2919579357,2919579357
+.long 1186850381,1186850381
+.long 3045938321,3045938321
+.long 90031217,90031217
+.long 1876166148,1876166148
+.long 4279586912,4279586912
+.long 620468249,620468249
+.long 2548678102,2548678102
+.long 3426959497,3426959497
+.long 2006899047,2006899047
+.long 3175278768,3175278768
+.long 2290845959,2290845959
+.long 945494503,945494503
+.long 3689859193,3689859193
+.long 1191869601,1191869601
+.long 3910091388,3910091388
+.long 3374220536,3374220536
+.long 0,0
+.long 2206629897,2206629897
+.long 1223502642,1223502642
+.long 2893025566,2893025566
+.long 1316117100,1316117100
+.long 4227796733,4227796733
+.long 1446544655,1446544655
+.long 517320253,517320253
+.long 658058550,658058550
+.long 1691946762,1691946762
+.long 564550760,564550760
+.long 3511966619,3511966619
+.long 976107044,976107044
+.long 2976320012,2976320012
+.long 266819475,266819475
+.long 3533106868,3533106868
+.long 2660342555,2660342555
+.long 1338359936,1338359936
+.long 2720062561,2720062561
+.long 1766553434,1766553434
+.long 370807324,370807324
+.long 179999714,179999714
+.long 3844776128,3844776128
+.long 1138762300,1138762300
+.long 488053522,488053522
+.long 185403662,185403662
+.long 2915535858,2915535858
+.long 3114841645,3114841645
+.long 3366526484,3366526484
+.long 2233069911,2233069911
+.long 1275557295,1275557295
+.long 3151862254,3151862254
+.long 4250959779,4250959779
+.long 2670068215,2670068215
+.long 3170202204,3170202204
+.long 3309004356,3309004356
+.long 880737115,880737115
+.long 1982415755,1982415755
+.long 3703972811,3703972811
+.long 1761406390,1761406390
+.long 1676797112,1676797112
+.long 3403428311,3403428311
+.long 277177154,277177154
+.long 1076008723,1076008723
+.long 538035844,538035844
+.long 2099530373,2099530373
+.long 4164795346,4164795346
+.long 288553390,288553390
+.long 1839278535,1839278535
+.long 1261411869,1261411869
+.long 4080055004,4080055004
+.long 3964831245,3964831245
+.long 3504587127,3504587127
+.long 1813426987,1813426987
+.long 2579067049,2579067049
+.long 4199060497,4199060497
+.long 577038663,577038663
+.long 3297574056,3297574056
+.long 440397984,440397984
+.long 3626794326,3626794326
+.long 4019204898,4019204898
+.long 3343796615,3343796615
+.long 3251714265,3251714265
+.long 4272081548,4272081548
+.long 906744984,906744984
+.long 3481400742,3481400742
+.long 685669029,685669029
+.long 646887386,646887386
+.long 2764025151,2764025151
+.long 3835509292,3835509292
+.long 227702864,227702864
+.long 2613862250,2613862250
+.long 1648787028,1648787028
+.long 3256061430,3256061430
+.long 3904428176,3904428176
+.long 1593260334,1593260334
+.long 4121936770,4121936770
+.long 3196083615,3196083615
+.long 2090061929,2090061929
+.long 2838353263,2838353263
+.long 3004310991,3004310991
+.long 999926984,999926984
+.long 2809993232,2809993232
+.long 1852021992,1852021992
+.long 2075868123,2075868123
+.long 158869197,158869197
+.long 4095236462,4095236462
+.long 28809964,28809964
+.long 2828685187,2828685187
+.long 1701746150,1701746150
+.long 2129067946,2129067946
+.long 147831841,147831841
+.long 3873969647,3873969647
+.long 3650873274,3650873274
+.long 3459673930,3459673930
+.long 3557400554,3557400554
+.long 3598495785,3598495785
+.long 2947720241,2947720241
+.long 824393514,824393514
+.long 815048134,815048134
+.long 3227951669,3227951669
+.long 935087732,935087732
+.long 2798289660,2798289660
+.long 2966458592,2966458592
+.long 366520115,366520115
+.long 1251476721,1251476721
+.long 4158319681,4158319681
+.long 240176511,240176511
+.long 804688151,804688151
+.long 2379631990,2379631990
+.long 1303441219,1303441219
+.long 1414376140,1414376140
+.long 3741619940,3741619940
+.long 3820343710,3820343710
+.long 461924940,461924940
+.long 3089050817,3089050817
+.long 2136040774,2136040774
+.long 82468509,82468509
+.long 1563790337,1563790337
+.long 1937016826,1937016826
+.long 776014843,776014843
+.long 1511876531,1511876531
+.long 1389550482,1389550482
+.long 861278441,861278441
+.long 323475053,323475053
+.long 2355222426,2355222426
+.long 2047648055,2047648055
+.long 2383738969,2383738969
+.long 2302415851,2302415851
+.long 3995576782,3995576782
+.long 902390199,902390199
+.long 3991215329,3991215329
+.long 1018251130,1018251130
+.long 1507840668,1507840668
+.long 1064563285,1064563285
+.long 2043548696,2043548696
+.long 3208103795,3208103795
+.long 3939366739,3939366739
+.long 1537932639,1537932639
+.long 342834655,342834655
+.long 2262516856,2262516856
+.long 2180231114,2180231114
+.long 1053059257,1053059257
+.long 741614648,741614648
+.long 1598071746,1598071746
+.long 1925389590,1925389590
+.long 203809468,203809468
+.long 2336832552,2336832552
+.long 1100287487,1100287487
+.long 1895934009,1895934009
+.long 3736275976,3736275976
+.long 2632234200,2632234200
+.long 2428589668,2428589668
+.long 1636092795,1636092795
+.long 1890988757,1890988757
+.long 1952214088,1952214088
+.long 1113045200,1113045200
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.size _x86_AES_decrypt,.-_x86_AES_decrypt
+.globl AES_decrypt
+.type AES_decrypt,@function
+.align 16
+AES_decrypt:
+.L_AES_decrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call .L010pic_point
+.L010pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ leal .LAES_Td-.L010pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc .L011x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call _sse_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 16
+.L011x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call _x86_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_decrypt,.-.L_AES_decrypt_begin
+.globl AES_cbc_encrypt
+.type AES_cbc_encrypt,@function
+.align 16
+AES_cbc_encrypt:
+.L_AES_cbc_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je .L012drop_out
+ call .L013pic_point
+.L013pic_point:
+ popl %ebp
+ leal OPENSSL_ia32cap_P,%eax
+ cmpl $0,40(%esp)
+ leal .LAES_Te-.L013pic_point(%ebp),%ebp
+ jne .L014picked_te
+ leal .LAES_Td-.LAES_Te(%ebp),%ebp
+.L014picked_te:
+ pushfl
+ cld
+ cmpl $512,%ecx
+ jb .L015slow_way
+ testl $15,%ecx
+ jnz .L015slow_way
+ leal -324(%esp),%esi
+ andl $-64,%esi
+ movl %ebp,%eax
+ leal 2304(%ebp),%ebx
+ movl %esi,%edx
+ andl $4095,%eax
+ andl $4095,%ebx
+ andl $4095,%edx
+ cmpl %ebx,%edx
+ jb .L016tbl_break_out
+ subl %ebx,%edx
+ subl %edx,%esi
+ jmp .L017tbl_ok
+.align 4
+.L016tbl_break_out:
+ subl %eax,%edx
+ andl $4095,%edx
+ addl $384,%edx
+ subl %edx,%esi
+.align 4
+.L017tbl_ok:
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 12(%edx),%edi
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl $0,316(%esp)
+ movl %edi,%ebx
+ movl $61,%ecx
+ subl %ebp,%ebx
+ movl %edi,%esi
+ andl $4095,%ebx
+ leal 76(%esp),%edi
+ cmpl $2304,%ebx
+ jb .L018do_copy
+ cmpl $3852,%ebx
+ jb .L019skip_copy
+.align 4
+.L018do_copy:
+ movl %edi,44(%esp)
+.long 2784229001
+.L019skip_copy:
+ movl $16,%edi
+.align 4
+.L020prefetch_tbl:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%esi
+ leal 128(%ebp),%ebp
+ subl $1,%edi
+ jnz .L020prefetch_tbl
+ subl $2048,%ebp
+ movl 32(%esp),%esi
+ movl 48(%esp),%edi
+ cmpl $0,%edx
+ je .L021fast_decrypt
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 16
+.L022fast_enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%esi
+ movl 40(%esp),%ecx
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L022fast_enc_loop
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L023skip_ezero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
+.long 2884892297
+.L023skip_ezero:
+ movl 28(%esp),%esp
+ popfl
+.L012drop_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L021fast_decrypt:
+ cmpl 36(%esp),%esi
+ je .L024fast_dec_in_place
+ movl %edi,52(%esp)
+.align 4
+.align 16
+.L025fast_dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 52(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 36(%esp),%edi
+ movl 32(%esp),%esi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl %esi,52(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edi
+ movl %edi,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L025fast_dec_loop
+ movl 52(%esp),%edi
+ movl 48(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp .L026fast_dec_out
+.align 16
+.L024fast_dec_in_place:
+.L027fast_dec_in_place_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt
+ movl 48(%esp),%edi
+ movl 36(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz .L027fast_dec_in_place_loop
+.align 4
+.L026fast_dec_out:
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je .L028skip_dzero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 4
+.long 2884892297
+.L028skip_dzero:
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L015slow_way:
+ movl (%eax),%eax
+ movl 36(%esp),%edi
+ leal -80(%esp),%esi
+ andl $-64,%esi
+ leal -143(%edi),%ebx
+ subl %esi,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esi
+ leal 768(%esi),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl %eax,52(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl %esi,%edi
+ movl %eax,%esi
+ cmpl $0,%edx
+ je .L029slow_decrypt
+ cmpl $16,%ecx
+ movl %ebx,%edx
+ jb .L030slow_enc_tail
+ btl $25,52(%esp)
+ jnc .L031slow_enc_x86
+ movq (%edi),%mm0
+ movq 8(%edi),%mm4
+.align 16
+.L032slow_enc_loop_sse:
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl 40(%esp),%ecx
+ movq %mm0,(%edi)
+ movq %mm4,8(%edi)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L032slow_enc_loop_sse
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L031slow_enc_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 4
+.L033slow_enc_loop_x86:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call _x86_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae .L033slow_enc_loop_x86
+ testl $15,%ecx
+ jnz .L030slow_enc_tail
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L030slow_enc_tail:
+ emms
+ movl %edx,%edi
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je .L034enc_in_place
+.align 4
+.long 2767451785
+ jmp .L035enc_skip_in_place
+.L034enc_in_place:
+ leal (%edi,%ecx,1),%edi
+.L035enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 4
+.long 2868115081
+ movl 48(%esp),%edi
+ movl %edx,%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,40(%esp)
+ jmp .L033slow_enc_loop_x86
+.align 16
+.L029slow_decrypt:
+ btl $25,52(%esp)
+ jnc .L036slow_dec_loop_x86
+.align 4
+.L037slow_dec_loop_sse:
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call _sse_AES_decrypt_compact
+ movl 32(%esp),%esi
+ leal 60(%esp),%eax
+ movl 36(%esp),%ebx
+ movl 40(%esp),%ecx
+ movl 48(%esp),%edi
+ movq (%esi),%mm1
+ movq 8(%esi),%mm5
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movq %mm1,(%edi)
+ movq %mm5,8(%edi)
+ subl $16,%ecx
+ jc .L038slow_dec_partial_sse
+ movq %mm0,(%ebx)
+ movq %mm4,8(%ebx)
+ leal 16(%ebx),%ebx
+ movl %ebx,36(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ movl %ecx,40(%esp)
+ jnz .L037slow_dec_loop_sse
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L038slow_dec_partial_sse:
+ movq %mm0,(%eax)
+ movq %mm4,8(%eax)
+ emms
+ addl $16,%ecx
+ movl %ebx,%edi
+ movl %eax,%esi
+.align 4
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L036slow_dec_loop_x86:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call _x86_AES_decrypt_compact
+ movl 48(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc .L039slow_dec_partial_x86
+ movl %esi,40(%esp)
+ movl 36(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ jnz .L036slow_dec_loop_x86
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 16
+.L039slow_dec_partial_x86:
+ leal 60(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 32(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl 36(%esp),%edi
+ leal 60(%esp),%esi
+.align 4
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_cbc_encrypt,.-.L_AES_cbc_encrypt_begin
+.type _x86_AES_set_encrypt_key,@function
+.align 16
+_x86_AES_set_encrypt_key:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 24(%esp),%esi
+ movl 32(%esp),%edi
+ testl $-1,%esi
+ jz .L040badpointer
+ testl $-1,%edi
+ jz .L040badpointer
+ call .L041pic_point
+.L041pic_point:
+ popl %ebp
+ leal .LAES_Te-.L041pic_point(%ebp),%ebp
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ movl 28(%esp),%ecx
+ cmpl $128,%ecx
+ je .L04210rounds
+ cmpl $192,%ecx
+ je .L04312rounds
+ cmpl $256,%ecx
+ je .L04414rounds
+ movl $-2,%eax
+ jmp .L045exit
+.L04210rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ xorl %ecx,%ecx
+ jmp .L04610shortcut
+.align 4
+.L04710loop:
+ movl (%edi),%eax
+ movl 12(%edi),%edx
+.L04610shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,16(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,20(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,24(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,28(%edi)
+ incl %ecx
+ addl $16,%edi
+ cmpl $10,%ecx
+ jl .L04710loop
+ movl $10,80(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04312rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%ecx
+ movl 20(%esi),%edx
+ movl %ecx,16(%edi)
+ movl %edx,20(%edi)
+ xorl %ecx,%ecx
+ jmp .L04812shortcut
+.align 4
+.L04912loop:
+ movl (%edi),%eax
+ movl 20(%edi),%edx
+.L04812shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,24(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,28(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,32(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,36(%edi)
+ cmpl $7,%ecx
+ je .L05012break
+ incl %ecx
+ xorl 16(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,44(%edi)
+ addl $24,%edi
+ jmp .L04912loop
+.L05012break:
+ movl $12,72(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L04414rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ movl %eax,16(%edi)
+ movl %ebx,20(%edi)
+ movl %ecx,24(%edi)
+ movl %edx,28(%edi)
+ xorl %ecx,%ecx
+ jmp .L05114shortcut
+.align 4
+.L05214loop:
+ movl 28(%edi),%edx
+.L05114shortcut:
+ movl (%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,32(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,36(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,44(%edi)
+ cmpl $6,%ecx
+ je .L05314break
+ incl %ecx
+ movl %eax,%edx
+ movl 16(%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movl %eax,48(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,52(%edi)
+ xorl 24(%edi),%eax
+ movl %eax,56(%edi)
+ xorl 28(%edi),%eax
+ movl %eax,60(%edi)
+ addl $32,%edi
+ jmp .L05214loop
+.L05314break:
+ movl $14,48(%edi)
+ xorl %eax,%eax
+ jmp .L045exit
+.L040badpointer:
+ movl $-1,%eax
+.L045exit:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size _x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
+.align 16
+AES_set_encrypt_key:
+.L_AES_set_encrypt_key_begin:
+ call _x86_AES_set_encrypt_key
+ ret
+.size AES_set_encrypt_key,.-.L_AES_set_encrypt_key_begin
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
+.align 16
+AES_set_decrypt_key:
+.L_AES_set_decrypt_key_begin:
+ call _x86_AES_set_encrypt_key
+ cmpl $0,%eax
+ je .L054proceed
+ ret
+.L054proceed:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%esi
+ movl 240(%esi),%ecx
+ leal (,%ecx,4),%ecx
+ leal (%esi,%ecx,4),%edi
+.align 4
+.L055invert:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl (%edi),%ecx
+ movl 4(%edi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,(%esi)
+ movl %edx,4(%esi)
+ movl 8(%esi),%eax
+ movl 12(%esi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,8(%edi)
+ movl %ebx,12(%edi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ addl $16,%esi
+ subl $16,%edi
+ cmpl %edi,%esi
+ jne .L055invert
+ movl 28(%esp),%edi
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,28(%esp)
+ movl 16(%edi),%eax
+.align 4
+.L056permute:
+ addl $16,%edi
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %eax,%ebx
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ xorl %eax,%ecx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ roll $8,%eax
+ xorl %esi,%edx
+ movl 4(%edi),%ebp
+ xorl %ebx,%eax
+ xorl %edx,%ebx
+ xorl %ecx,%eax
+ roll $24,%ebx
+ xorl %edx,%ecx
+ xorl %edx,%eax
+ roll $16,%ecx
+ xorl %ebx,%eax
+ roll $8,%edx
+ xorl %ecx,%eax
+ movl %ebp,%ebx
+ xorl %edx,%eax
+ movl %eax,(%edi)
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ xorl %ebx,%edx
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ roll $8,%ebx
+ xorl %esi,%eax
+ movl 8(%edi),%ebp
+ xorl %ecx,%ebx
+ xorl %eax,%ecx
+ xorl %edx,%ebx
+ roll $24,%ecx
+ xorl %eax,%edx
+ xorl %eax,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%eax
+ xorl %edx,%ebx
+ movl %ebp,%ecx
+ xorl %eax,%ebx
+ movl %ebx,4(%edi)
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %ecx,%edx
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ xorl %ecx,%eax
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ roll $8,%ecx
+ xorl %esi,%ebx
+ movl 12(%edi),%ebp
+ xorl %edx,%ecx
+ xorl %ebx,%edx
+ xorl %eax,%ecx
+ roll $24,%edx
+ xorl %ebx,%eax
+ xorl %ebx,%ecx
+ roll $16,%eax
+ xorl %edx,%ecx
+ roll $8,%ebx
+ xorl %eax,%ecx
+ movl %ebp,%edx
+ xorl %ebx,%ecx
+ movl %ecx,8(%edi)
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %edx,%eax
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ xorl %edx,%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ roll $8,%edx
+ xorl %esi,%ecx
+ movl 16(%edi),%ebp
+ xorl %eax,%edx
+ xorl %ecx,%eax
+ xorl %ebx,%edx
+ roll $24,%eax
+ xorl %ecx,%ebx
+ xorl %ecx,%edx
+ roll $16,%ebx
+ xorl %eax,%edx
+ roll $8,%ecx
+ xorl %ebx,%edx
+ movl %ebp,%eax
+ xorl %ecx,%edx
+ movl %edx,12(%edi)
+ cmpl 28(%esp),%edi
+ jb .L056permute
+ xorl %eax,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size AES_set_decrypt_key,.-.L_AES_set_decrypt_key_begin
+.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm OPENSSL_ia32cap_P,4,4
diff --git a/deps/openssl/asm/x86-elf-gas/bf/bf-686.s b/deps/openssl/asm/x86-elf-gas/bf/bf-686.s
new file mode 100644
index 0000000000..69b0ce681b
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/bf/bf-686.s
@@ -0,0 +1,864 @@
+.file "bf-686.s"
+.text
+.globl BF_encrypt
+.type BF_encrypt,@function
+.align 16
+BF_encrypt:
+.L_BF_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ movl 20(%esp),%eax
+ movl (%eax),%ecx
+ movl 4(%eax),%edx
+
+
+ movl 24(%esp),%edi
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ xorl (%edi),%ecx
+
+
+ rorl $16,%ecx
+ movl 4(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 8(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 12(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 16(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 20(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 24(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 28(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 32(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 36(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 40(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 44(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 48(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 52(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 56(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 60(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 64(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+ xorl 68(%edi),%edx
+ movl 20(%esp),%eax
+ movl %edx,(%eax)
+ movl %ecx,4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size BF_encrypt,.-.L_BF_encrypt_begin
+.globl BF_decrypt
+.type BF_decrypt,@function
+.align 16
+BF_decrypt:
+.L_BF_decrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ movl 20(%esp),%eax
+ movl (%eax),%ecx
+ movl 4(%eax),%edx
+
+
+ movl 24(%esp),%edi
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ xorl 68(%edi),%ecx
+
+
+ rorl $16,%ecx
+ movl 64(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 60(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 56(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 52(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 48(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 44(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 40(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 36(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 32(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 28(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 24(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 20(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 16(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 12(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+
+ rorl $16,%ecx
+ movl 8(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+
+ rorl $16,%edx
+ movl 4(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+ xorl (%edi),%edx
+ movl 20(%esp),%eax
+ movl %edx,(%eax)
+ movl %ecx,4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size BF_decrypt,.-.L_BF_decrypt_begin
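+# BF_cbc_encrypt(const unsigned char *in, unsigned char *out, long length,
+#                const BF_KEY *schedule, unsigned char *ivec, int enc)
+# Blowfish in CBC mode: whole 8-byte blocks go through BF_encrypt or
+# BF_decrypt with the IV carried on the stack; a trailing partial block on
+# the encrypt side is assembled via the jump table at .L005cbc_enc_jmp_table.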
+.globl BF_cbc_encrypt
+.type BF_cbc_encrypt,@function
+.align 16
+BF_cbc_encrypt:
+.L_BF_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 56(%esp),%ecx
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L000decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz .L001encrypt_finish
+.L002encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L002encrypt_loop
+.L001encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L003finish
+ call .L004PIC_point
+.L004PIC_point:
+ popl %edx
+ leal .L005cbc_enc_jmp_table-.L004PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L006ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L007ej6:
+ movb 5(%esi),%dh
+.L008ej5:
+ movb 4(%esi),%dl
+.L009ej4:
+ movl (%esi),%ecx
+ jmp .L010ejend
+.L011ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L012ej2:
+ movb 1(%esi),%ch
+.L013ej1:
+ movb (%esi),%cl
+.L010ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L003finish
+.L000decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz .L014decrypt_finish
+.L015decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L015decrypt_loop
+.L014decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L003finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L016dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L017dj6:
+ movb %dh,5(%edi)
+.L018dj5:
+ movb %dl,4(%edi)
+.L019dj4:
+ movl %ecx,(%edi)
+ jmp .L020djend
+.L021dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shrl $16,%ecx
+.L022dj2:
+ movb %ch,1(%edi)
+.L023dj1:
+ movb %cl,(%edi)
+.L020djend:
+ jmp .L003finish
+.L003finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L005cbc_enc_jmp_table:
+.long 0
+.long .L013ej1-.L004PIC_point
+.long .L012ej2-.L004PIC_point
+.long .L011ej3-.L004PIC_point
+.long .L009ej4-.L004PIC_point
+.long .L008ej5-.L004PIC_point
+.long .L007ej6-.L004PIC_point
+.long .L006ej7-.L004PIC_point
+.align 64
+.size BF_cbc_encrypt,.-.L_BF_cbc_encrypt_begin
diff --git a/deps/openssl/asm/x86-elf-gas/bn/x86-mont.s b/deps/openssl/asm/x86-elf-gas/bn/x86-mont.s
new file mode 100644
index 0000000000..d71cc6441c
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/bn/x86-mont.s
@@ -0,0 +1,338 @@
+.file "../openssl/crypto/bn/asm/x86-mont.s"
+.text
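+# bn_mul_mont(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp,
+#             const BN_ULONG *np, const BN_ULONG *n0, int num)
+# Montgomery multiplication: rp = ap*bp*R^-1 mod np with R = 2^(32*num).
+# Returns 1 when the assembly path runs; for num < 4 it bails out through
+# .L000just_leave with 0 so the caller falls back to the portable C code.
+# When ap == bp (and num is even) the squaring path .L001bn_sqr_mont is used.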
+.globl bn_mul_mont
+.type bn_mul_mont,@function
+.align 16
+bn_mul_mont:
+.L_bn_mul_mont_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %eax,%eax
+ movl 40(%esp),%edi
+ cmpl $4,%edi
+ jl .L000just_leave
+ leal 20(%esp),%esi
+ leal 24(%esp),%edx
+ movl %esp,%ebp
+ addl $2,%edi
+ negl %edi
+ leal -32(%esp,%edi,4),%esp
+ negl %edi
+ movl %esp,%eax
+ subl %edx,%eax
+ andl $2047,%eax
+ subl %eax,%esp
+ xorl %esp,%edx
+ andl $2048,%edx
+ xorl $2048,%edx
+ subl %edx,%esp
+ andl $-64,%esp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 16(%esi),%esi
+ movl (%esi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+ movl %esi,20(%esp)
+ leal -3(%edi),%ebx
+ movl %ebp,24(%esp)
+ movl 8(%esp),%esi
+ leal 1(%ebx),%ebp
+ movl 12(%esp),%edi
+ xorl %ecx,%ecx
+ movl %esi,%edx
+ andl $1,%ebp
+ subl %edi,%edx
+ leal 4(%edi,%ebx,4),%eax
+ orl %edx,%ebp
+ movl (%edi),%edi
+ jz .L001bn_sqr_mont
+ movl %eax,28(%esp)
+ movl (%esi),%eax
+ xorl %edx,%edx
+.align 16
+.L002mull:
+ movl %edx,%ebp
+ mull %edi
+ addl %eax,%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ movl (%esi,%ecx,4),%eax
+ cmpl %ebx,%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ jl .L002mull
+ movl %edx,%ebp
+ mull %edi
+ movl 20(%esp),%edi
+ addl %ebp,%eax
+ movl 16(%esp),%esi
+ adcl $0,%edx
+ imull 32(%esp),%edi
+ movl %eax,32(%esp,%ebx,4)
+ xorl %ecx,%ecx
+ movl %edx,36(%esp,%ebx,4)
+ movl %ecx,40(%esp,%ebx,4)
+ movl (%esi),%eax
+ mull %edi
+ addl 32(%esp),%eax
+ movl 4(%esi),%eax
+ adcl $0,%edx
+ incl %ecx
+ jmp .L0032ndmadd
+.align 16
+.L0041stmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ jl .L0041stmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%eax
+ movl 20(%esp),%edi
+ adcl $0,%edx
+ movl 16(%esp),%esi
+ addl %eax,%ebp
+ adcl $0,%edx
+ imull 32(%esp),%edi
+ xorl %ecx,%ecx
+ addl 36(%esp,%ebx,4),%edx
+ movl %ebp,32(%esp,%ebx,4)
+ adcl $0,%ecx
+ movl (%esi),%eax
+ movl %edx,36(%esp,%ebx,4)
+ movl %ecx,40(%esp,%ebx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ movl 4(%esi),%eax
+ adcl $0,%edx
+ movl $1,%ecx
+.align 16
+.L0032ndmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,24(%esp,%ecx,4)
+ jl .L0032ndmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ebx,4)
+ xorl %eax,%eax
+ movl 12(%esp),%ecx
+ addl 36(%esp,%ebx,4),%edx
+ adcl 40(%esp,%ebx,4),%eax
+ leal 4(%ecx),%ecx
+ movl %edx,32(%esp,%ebx,4)
+ cmpl 28(%esp),%ecx
+ movl %eax,36(%esp,%ebx,4)
+ je .L005common_tail
+ movl (%ecx),%edi
+ movl 8(%esp),%esi
+ movl %ecx,12(%esp)
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ movl (%esi),%eax
+ jmp .L0041stmadd
+.align 16
+.L001bn_sqr_mont:
+ movl %ebx,(%esp)
+ movl %ecx,12(%esp)
+ movl %edi,%eax
+ mull %edi
+ movl %eax,32(%esp)
+ movl %edx,%ebx
+ shrl $1,%edx
+ andl $1,%ebx
+ incl %ecx
+.align 16
+.L006sqr:
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ leal (%ebx,%eax,2),%ebp
+ shrl $31,%eax
+ cmpl (%esp),%ecx
+ movl %eax,%ebx
+ movl %ebp,28(%esp,%ecx,4)
+ jl .L006sqr
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ movl 20(%esp),%edi
+ adcl $0,%edx
+ movl 16(%esp),%esi
+ leal (%ebx,%eax,2),%ebp
+ imull 32(%esp),%edi
+ shrl $31,%eax
+ movl %ebp,32(%esp,%ecx,4)
+ leal (%eax,%edx,2),%ebp
+ movl (%esi),%eax
+ shrl $31,%edx
+ movl %ebp,36(%esp,%ecx,4)
+ movl %edx,40(%esp,%ecx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ movl %ecx,%ebx
+ adcl $0,%edx
+ movl 4(%esi),%eax
+ movl $1,%ecx
+.align 16
+.L0073rdmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl 4(%esi,%ecx,4),%eax
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ecx,4)
+ movl %edx,%ebp
+ mull %edi
+ addl 36(%esp,%ecx,4),%ebp
+ leal 2(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,24(%esp,%ecx,4)
+ jl .L0073rdmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ebx,4)
+ movl 12(%esp),%ecx
+ xorl %eax,%eax
+ movl 8(%esp),%esi
+ addl 36(%esp,%ebx,4),%edx
+ adcl 40(%esp,%ebx,4),%eax
+ movl %edx,32(%esp,%ebx,4)
+ cmpl %ebx,%ecx
+ movl %eax,36(%esp,%ebx,4)
+ je .L005common_tail
+ movl 4(%esi,%ecx,4),%edi
+ leal 1(%ecx),%ecx
+ movl %edi,%eax
+ movl %ecx,12(%esp)
+ mull %edi
+ addl 32(%esp,%ecx,4),%eax
+ adcl $0,%edx
+ movl %eax,32(%esp,%ecx,4)
+ xorl %ebp,%ebp
+ cmpl %ebx,%ecx
+ leal 1(%ecx),%ecx
+ je .L008sqrlast
+ movl %edx,%ebx
+ shrl $1,%edx
+ andl $1,%ebx
+.align 16
+.L009sqradd:
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ leal (%eax,%eax,1),%ebp
+ adcl $0,%edx
+ shrl $31,%eax
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%eax
+ addl %ebx,%ebp
+ adcl $0,%eax
+ cmpl (%esp),%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ movl %eax,%ebx
+ jle .L009sqradd
+ movl %edx,%ebp
+ addl %edx,%edx
+ shrl $31,%ebp
+ addl %ebx,%edx
+ adcl $0,%ebp
+.L008sqrlast:
+ movl 20(%esp),%edi
+ movl 16(%esp),%esi
+ imull 32(%esp),%edi
+ addl 32(%esp,%ecx,4),%edx
+ movl (%esi),%eax
+ adcl $0,%ebp
+ movl %edx,32(%esp,%ecx,4)
+ movl %ebp,36(%esp,%ecx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ leal -1(%ecx),%ebx
+ adcl $0,%edx
+ movl $1,%ecx
+ movl 4(%esi),%eax
+ jmp .L0073rdmadd
+.align 16
+.L005common_tail:
+ movl 16(%esp),%ebp
+ movl 4(%esp),%edi
+ leal 32(%esp),%esi
+ movl (%esi),%eax
+ movl %ebx,%ecx
+ xorl %edx,%edx
+.align 16
+.L010sub:
+ sbbl (%ebp,%edx,4),%eax
+ movl %eax,(%edi,%edx,4)
+ decl %ecx
+ movl 4(%esi,%edx,4),%eax
+ leal 1(%edx),%edx
+ jge .L010sub
+ sbbl $0,%eax
+ andl %eax,%esi
+ notl %eax
+ movl %edi,%ebp
+ andl %eax,%ebp
+ orl %ebp,%esi
+.align 16
+.L011copy:
+ movl (%esi,%ebx,4),%eax
+ movl %eax,(%edi,%ebx,4)
+ movl %ecx,32(%esp,%ebx,4)
+ decl %ebx
+ jge .L011copy
+ movl 24(%esp),%esp
+ movl $1,%eax
+.L000just_leave:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_mul_mont,.-.L_bn_mul_mont_begin
+.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
+.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
+.byte 111,114,103,62,0
diff --git a/deps/openssl/asm/x86-elf-gas/bn/x86.s b/deps/openssl/asm/x86-elf-gas/bn/x86.s
new file mode 100644
index 0000000000..c41c8e917e
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/bn/x86.s
@@ -0,0 +1,2114 @@
+.file "../openssl/crypto/bn/asm/x86.s"
+.text
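+# bn_mul_add_words(BN_ULONG *rp, const BN_ULONG *ap, int num, BN_ULONG w)
+# rp[i] += ap[i]*w for i < num; the final carry word is returned in %eax.
+# The main loop handles eight 32-bit words per pass; .L002maw_finish2 mops
+# up the remaining num%8 words.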
+.globl bn_mul_add_words
+.type bn_mul_add_words,@function
+.align 16
+bn_mul_add_words:
+.L_bn_mul_add_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ xorl %esi,%esi
+ movl 20(%esp),%edi
+ movl 28(%esp),%ecx
+ movl 24(%esp),%ebx
+ andl $4294967288,%ecx
+ movl 32(%esp),%ebp
+ pushl %ecx
+ jz .L000maw_finish
+.L001maw_loop:
+ movl %ecx,(%esp)
+
+ movl (%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl (%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+
+ movl 4(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 4(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+
+ movl 8(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 8(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+
+ movl 12(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 12(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+
+ movl 16(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 16(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+
+ movl 20(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 20(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+
+ movl 24(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 24(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+
+ movl 28(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 28(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,28(%edi)
+ movl %edx,%esi
+
+ movl (%esp),%ecx
+ addl $32,%ebx
+ addl $32,%edi
+ subl $8,%ecx
+ jnz .L001maw_loop
+.L000maw_finish:
+ movl 32(%esp),%ecx
+ andl $7,%ecx
+ jnz .L002maw_finish2
+ jmp .L003maw_end
+.L002maw_finish2:
+
+ movl (%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl (%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 4(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 4(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 8(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 8(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 12(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 12(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 16(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 16(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 20(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 20(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ jz .L003maw_end
+
+ movl 24(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 24(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+.L003maw_end:
+ movl %esi,%eax
+ popl %ecx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_mul_add_words,.-.L_bn_mul_add_words_begin
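+# bn_mul_words(BN_ULONG *rp, const BN_ULONG *ap, int num, BN_ULONG w)
+# rp[i] = ap[i]*w + carry, propagating the carry between words; the final
+# carry is returned in %eax. Unrolled eight words per pass like the above.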
+.globl bn_mul_words
+.type bn_mul_words,@function
+.align 16
+bn_mul_words:
+.L_bn_mul_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ xorl %esi,%esi
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ebp
+ movl 32(%esp),%ecx
+ andl $4294967288,%ebp
+ jz .L004mw_finish
+.L005mw_loop:
+
+ movl (%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+
+ movl 4(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+
+ movl 8(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+
+ movl 12(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+
+ movl 16(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+
+ movl 20(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+
+ movl 24(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+
+ movl 28(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,28(%edi)
+ movl %edx,%esi
+
+ addl $32,%ebx
+ addl $32,%edi
+ subl $8,%ebp
+ jz .L004mw_finish
+ jmp .L005mw_loop
+.L004mw_finish:
+ movl 28(%esp),%ebp
+ andl $7,%ebp
+ jnz .L006mw_finish2
+ jmp .L007mw_end
+.L006mw_finish2:
+
+ movl (%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 4(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 8(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 12(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 16(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 20(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz .L007mw_end
+
+ movl 24(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+.L007mw_end:
+ movl %esi,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_mul_words,.-.L_bn_mul_words_begin
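+# bn_sqr_words(BN_ULONG *rp, const BN_ULONG *ap, int num)
+# rp[2i] and rp[2i+1] receive the low and high halves of ap[i]^2; the cross
+# products are handled by the caller, so rp must hold 2*num words.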
+.globl bn_sqr_words
+.type bn_sqr_words,@function
+.align 16
+bn_sqr_words:
+.L_bn_sqr_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%ebx
+ andl $4294967288,%ebx
+ jz .L008sw_finish
+.L009sw_loop:
+
+ movl (%edi),%eax
+ mull %eax
+ movl %eax,(%esi)
+ movl %edx,4(%esi)
+
+ movl 4(%edi),%eax
+ mull %eax
+ movl %eax,8(%esi)
+ movl %edx,12(%esi)
+
+ movl 8(%edi),%eax
+ mull %eax
+ movl %eax,16(%esi)
+ movl %edx,20(%esi)
+
+ movl 12(%edi),%eax
+ mull %eax
+ movl %eax,24(%esi)
+ movl %edx,28(%esi)
+
+ movl 16(%edi),%eax
+ mull %eax
+ movl %eax,32(%esi)
+ movl %edx,36(%esi)
+
+ movl 20(%edi),%eax
+ mull %eax
+ movl %eax,40(%esi)
+ movl %edx,44(%esi)
+
+ movl 24(%edi),%eax
+ mull %eax
+ movl %eax,48(%esi)
+ movl %edx,52(%esi)
+
+ movl 28(%edi),%eax
+ mull %eax
+ movl %eax,56(%esi)
+ movl %edx,60(%esi)
+
+ addl $32,%edi
+ addl $64,%esi
+ subl $8,%ebx
+ jnz .L009sw_loop
+.L008sw_finish:
+ movl 28(%esp),%ebx
+ andl $7,%ebx
+ jz .L010sw_end
+
+ movl (%edi),%eax
+ mull %eax
+ movl %eax,(%esi)
+ decl %ebx
+ movl %edx,4(%esi)
+ jz .L010sw_end
+
+ movl 4(%edi),%eax
+ mull %eax
+ movl %eax,8(%esi)
+ decl %ebx
+ movl %edx,12(%esi)
+ jz .L010sw_end
+
+ movl 8(%edi),%eax
+ mull %eax
+ movl %eax,16(%esi)
+ decl %ebx
+ movl %edx,20(%esi)
+ jz .L010sw_end
+
+ movl 12(%edi),%eax
+ mull %eax
+ movl %eax,24(%esi)
+ decl %ebx
+ movl %edx,28(%esi)
+ jz .L010sw_end
+
+ movl 16(%edi),%eax
+ mull %eax
+ movl %eax,32(%esi)
+ decl %ebx
+ movl %edx,36(%esi)
+ jz .L010sw_end
+
+ movl 20(%edi),%eax
+ mull %eax
+ movl %eax,40(%esi)
+ decl %ebx
+ movl %edx,44(%esi)
+ jz .L010sw_end
+
+ movl 24(%edi),%eax
+ mull %eax
+ movl %eax,48(%esi)
+ movl %edx,52(%esi)
+.L010sw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_sqr_words,.-.L_bn_sqr_words_begin
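+# bn_div_words(BN_ULONG h, BN_ULONG l, BN_ULONG d)
+# Returns the 32-bit quotient of the 64-bit value h:l divided by d via a
+# single divl; the caller must ensure h < d or the division faults (#DE).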
+.globl bn_div_words
+.type bn_div_words,@function
+.align 16
+bn_div_words:
+.L_bn_div_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edx
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ divl %ebx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_div_words,.-.L_bn_div_words_begin
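+# bn_add_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, int num)
+# rp = ap + bp over num words; the carry out of the top word is returned in
+# %eax. Unrolled eight words per pass with a scalar tail.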
+.globl bn_add_words
+.type bn_add_words,@function
+.align 16
+bn_add_words:
+.L_bn_add_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%ebx
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ andl $4294967288,%ebp
+ jz .L011aw_finish
+.L012aw_loop:
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,(%ebx)
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,4(%ebx)
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,8(%ebx)
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,12(%ebx)
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,16(%ebx)
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,20(%ebx)
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+
+ movl 28(%esi),%ecx
+ movl 28(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,28(%ebx)
+
+ addl $32,%esi
+ addl $32,%edi
+ addl $32,%ebx
+ subl $8,%ebp
+ jnz .L012aw_loop
+.L011aw_finish:
+ movl 32(%esp),%ebp
+ andl $7,%ebp
+ jz .L013aw_end
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,(%ebx)
+ jz .L013aw_end
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,4(%ebx)
+ jz .L013aw_end
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,8(%ebx)
+ jz .L013aw_end
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,12(%ebx)
+ jz .L013aw_end
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,16(%ebx)
+ jz .L013aw_end
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,20(%ebx)
+ jz .L013aw_end
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+.L013aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_add_words,.-.L_bn_add_words_begin
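+# bn_sub_words(BN_ULONG *rp, const BN_ULONG *ap, const BN_ULONG *bp, int num)
+# rp = ap - bp over num words; the borrow out of the top word is returned in
+# %eax. Structure mirrors bn_add_words with subl in place of addl.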
+.globl bn_sub_words
+.type bn_sub_words,@function
+.align 16
+bn_sub_words:
+.L_bn_sub_words_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%ebx
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ andl $4294967288,%ebp
+ jz .L014aw_finish
+.L015aw_loop:
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,(%ebx)
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,4(%ebx)
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,8(%ebx)
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,12(%ebx)
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,16(%ebx)
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,20(%ebx)
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+
+ movl 28(%esi),%ecx
+ movl 28(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,28(%ebx)
+
+ addl $32,%esi
+ addl $32,%edi
+ addl $32,%ebx
+ subl $8,%ebp
+ jnz .L015aw_loop
+.L014aw_finish:
+ movl 32(%esp),%ebp
+ andl $7,%ebp
+ jz .L016aw_end
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,(%ebx)
+ jz .L016aw_end
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,4(%ebx)
+ jz .L016aw_end
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,8(%ebx)
+ jz .L016aw_end
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,12(%ebx)
+ jz .L016aw_end
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,16(%ebx)
+ jz .L016aw_end
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,20(%ebx)
+ jz .L016aw_end
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+.L016aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size bn_sub_words,.-.L_bn_sub_words_begin
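+# bn_mul_comba8(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b)
+# Fully unrolled Comba (column-wise) multiplication of two 8-word numbers
+# into a 16-word result; each column is accumulated in the rotating
+# %ebx/%ecx/%ebp triple before being stored to r.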
+.globl bn_mul_comba8
+.type bn_mul_comba8,@function
+.align 16
+bn_mul_comba8:
+.L_bn_mul_comba8_begin:
+ pushl %esi
+ movl 12(%esp),%esi
+ pushl %edi
+ movl 20(%esp),%edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx,%ebx
+ movl (%esi),%eax
+ xorl %ecx,%ecx
+ movl (%edi),%edx
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%eax)
+ movl 4(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,4(%eax)
+ movl 8(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%eax)
+ movl 12(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,12(%eax)
+ movl 16(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%eax)
+ movl 20(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 12(%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 16(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,20(%eax)
+ movl 24(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 16(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 12(%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 16(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 20(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,24(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 16(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 20(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 24(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,28(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 24(%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 16(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 12(%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 24(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 28(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,32(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 24(%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 16(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 16(%esi),%eax
+ adcl %edx,%ecx
+ movl 20(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 12(%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 28(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,36(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esi),%eax
+ adcl %edx,%ebp
+ movl 20(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 16(%esi),%eax
+ adcl %edx,%ebp
+ movl 24(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,40(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 24(%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esi),%eax
+ adcl %edx,%ebx
+ movl 24(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 28(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,44(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 24(%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 28(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,48(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,52(%eax)
+ movl 28(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ adcl $0,%ecx
+ movl %ebp,56(%eax)
+
+
+ movl %ebx,60(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size bn_mul_comba8,.-.L_bn_mul_comba8_begin
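+# bn_mul_comba4(BN_ULONG *r, BN_ULONG *a, BN_ULONG *b)
+# 4x4-word Comba multiplication producing an 8-word result; same column
+# accumulation scheme as bn_mul_comba8.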
+.globl bn_mul_comba4
+.type bn_mul_comba4,@function
+.align 16
+bn_mul_comba4:
+.L_bn_mul_comba4_begin:
+ pushl %esi
+ movl 12(%esp),%esi
+ pushl %edi
+ movl 20(%esp),%edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx,%ebx
+ movl (%esi),%eax
+ xorl %ecx,%ecx
+ movl (%edi),%edx
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%eax)
+ movl 4(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,4(%eax)
+ movl 8(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%eax)
+ movl 12(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,12(%eax)
+ movl 12(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%eax)
+ movl 12(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,20(%eax)
+ movl 12(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ adcl $0,%ebp
+ movl %ebx,24(%eax)
+
+
+ movl %ecx,28(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size bn_mul_comba4,.-.L_bn_mul_comba4_begin
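+# bn_sqr_comba8(BN_ULONG *r, BN_ULONG *a)
+# Comba squaring of an 8-word number into 16 words: off-diagonal products
+# are doubled in-register (addl %eax,%eax / adcl %edx,%edx) and the diagonal
+# squares (mull %eax) are added once per column.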
+.globl bn_sqr_comba8
+.type bn_sqr_comba8,@function
+.align 16
+bn_sqr_comba8:
+.L_bn_sqr_comba8_begin:
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp),%edi
+ movl 24(%esp),%esi
+ xorl %ebx,%ebx
+ xorl %ecx,%ecx
+ movl (%esi),%eax
+
+ xorl %ebp,%ebp
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%edi)
+ movl 4(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,4(%edi)
+ movl (%esi),%edx
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 4(%esi),%eax
+ adcl $0,%ecx
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl (%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%edi)
+ movl 12(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 8(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 16(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,12(%edi)
+ movl (%esi),%edx
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 12(%esi),%eax
+ adcl $0,%ebx
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl (%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%edi)
+ movl 20(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 16(%esi),%eax
+ adcl $0,%ecx
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 12(%esi),%eax
+ adcl $0,%ecx
+ movl 8(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,20(%edi)
+ movl (%esi),%edx
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 20(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 16(%esi),%eax
+ adcl $0,%ebp
+ movl 8(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 12(%esi),%eax
+ adcl $0,%ebp
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,24(%edi)
+ movl 28(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 24(%esi),%eax
+ adcl $0,%ebx
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 20(%esi),%eax
+ adcl $0,%ebx
+ movl 8(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 16(%esi),%eax
+ adcl $0,%ebx
+ movl 12(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 28(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,28(%edi)
+ movl 4(%esi),%edx
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl 8(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 20(%esi),%eax
+ adcl $0,%ecx
+ movl 12(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 16(%esi),%eax
+ adcl $0,%ecx
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 8(%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,32(%edi)
+ movl 28(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%eax
+ adcl $0,%ebp
+ movl 12(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 20(%esi),%eax
+ adcl $0,%ebp
+ movl 16(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 28(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,36(%edi)
+ movl 12(%esi),%edx
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 24(%esi),%eax
+ adcl $0,%ebx
+ movl 16(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 20(%esi),%eax
+ adcl $0,%ebx
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 16(%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,40(%edi)
+ movl 28(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl 20(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 28(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,44(%edi)
+ movl 20(%esi),%edx
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%eax
+ adcl $0,%ebp
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,48(%edi)
+ movl 28(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 28(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,52(%edi)
+
+
+ xorl %ecx,%ecx
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ adcl $0,%ecx
+ movl %ebp,56(%edi)
+
+ movl %ebx,60(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size bn_sqr_comba8,.-.L_bn_sqr_comba8_begin
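+# bn_sqr_comba4(BN_ULONG *r, BN_ULONG *a)
+# 4-word Comba squaring into 8 words, same doubling scheme as above.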
+.globl bn_sqr_comba4
+.type bn_sqr_comba4,@function
+.align 16
+bn_sqr_comba4:
+.L_bn_sqr_comba4_begin:
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp),%edi
+ movl 24(%esp),%esi
+ xorl %ebx,%ebx
+ xorl %ecx,%ecx
+ movl (%esi),%eax
+
+ xorl %ebp,%ebp
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%edi)
+ movl 4(%esi),%eax
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,4(%edi)
+ movl (%esi),%edx
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 4(%esi),%eax
+ adcl $0,%ecx
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl (%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%edi)
+ movl 12(%esi),%eax
+
+
+ xorl %ebp,%ebp
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 8(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 12(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,12(%edi)
+ movl 4(%esi),%edx
+
+
+ xorl %ebx,%ebx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%edi)
+ movl 12(%esi),%eax
+
+
+ xorl %ecx,%ecx
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 12(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,20(%edi)
+
+
+ xorl %ebp,%ebp
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ adcl $0,%ebp
+ movl %ebx,24(%edi)
+
+ movl %ecx,28(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size bn_sqr_comba4,.-.L_bn_sqr_comba4_begin
diff --git a/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s b/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
new file mode 100644
index 0000000000..a896314cfe
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
@@ -0,0 +1,2375 @@
+.file "cmll-586.s"
+.text
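+# Camellia_EncryptBlock_Rounds(int grandRounds, const u8 *plaintext,
+#                              const CAMELLIA_KEY *keyTable, u8 *ciphertext)
+# Loads the 128-bit block big-endian (bswap), carves out a stack frame whose
+# low address bits avoid aliasing the key schedule in the cache, and calls
+# _x86_Camellia_encrypt; grandRounds is 3 for 128-bit keys and 4 otherwise.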
+.globl Camellia_EncryptBlock_Rounds
+.type Camellia_EncryptBlock_Rounds,@function
+.align 16
+Camellia_EncryptBlock_Rounds:
+.L_Camellia_EncryptBlock_Rounds_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%eax
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ leal (%edi,%eax,1),%eax
+ movl %ebx,20(%esp)
+ movl %eax,16(%esp)
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L000pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_encrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 32(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_EncryptBlock_Rounds,.-.L_Camellia_EncryptBlock_Rounds_begin
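+# Camellia_EncryptBlock(int keyBitLength, const u8 *plaintext,
+#                       const CAMELLIA_KEY *keyTable, u8 *ciphertext)
+# Rewrites its first argument from a key size to a round count: eax = 3 plus
+# the borrow of 128-keyBitLength (3 for 128-bit keys, 4 for 192/256), then
+# tail-jumps into Camellia_EncryptBlock_Rounds.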
+.globl Camellia_EncryptBlock
+.type Camellia_EncryptBlock,@function
+.align 16
+Camellia_EncryptBlock:
+.L_Camellia_EncryptBlock_begin:
+ movl $128,%eax
+ subl 4(%esp),%eax
+ movl $3,%eax
+ adcl $0,%eax
+ movl %eax,4(%esp)
+ jmp .L_Camellia_EncryptBlock_Rounds_begin
+.size Camellia_EncryptBlock,.-.L_Camellia_EncryptBlock_begin
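+# Camellia_encrypt(const u8 *in, u8 *out, const CAMELLIA_KEY *key)
+# Same as the Rounds entry but reads the grand-round count from the key
+# structure itself (272(%edi)) instead of taking it as an argument.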
+.globl Camellia_encrypt
+.type Camellia_encrypt,@function
+.align 16
+Camellia_encrypt:
+.L_Camellia_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ movl 272(%edi),%eax
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ leal (%edi,%eax,1),%eax
+ movl %ebx,20(%esp)
+ movl %eax,16(%esp)
+ call .L001pic_point
+.L001pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L001pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_encrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 24(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_encrypt,.-.L_Camellia_encrypt_begin
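+# _x86_Camellia_encrypt: inner routine; expects the block in %eax..%edx, the
+# key schedule in %edi and the S-box tables (.LCamellia_SBOX) in %ebp. Each
+# pass of .L002loop runs six Feistel rounds (48 bytes of subkeys); between
+# passes the FL/FL^-1 layer (the and/or/rotate block before the jmp) is
+# applied until the key-schedule limit at 20(%esp) is reached.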
+.type _x86_Camellia_encrypt,@function
+.align 16
+_x86_Camellia_encrypt:
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 16(%edi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+.align 16
+.L002loop:
+ xorl %esi,%eax
+ xorl 20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 24(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 32(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl 36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 40(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 48(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl 52(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 56(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 60(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 64(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ addl $64,%edi
+ cmpl 20(%esp),%edi
+ je .L003done
+ andl %eax,%esi
+ movl 16(%esp),%edx
+ roll $1,%esi
+ movl %edx,%ecx
+ xorl %esi,%ebx
+ orl 12(%edi),%ecx
+ movl %ebx,8(%esp)
+ xorl 12(%esp),%ecx
+ movl 4(%edi),%esi
+ movl %ecx,12(%esp)
+ orl %ebx,%esi
+ andl 8(%edi),%ecx
+ xorl %esi,%eax
+ roll $1,%ecx
+ movl %eax,4(%esp)
+ xorl %ecx,%edx
+ movl 16(%edi),%esi
+ movl %edx,16(%esp)
+ jmp .L002loop
+.align 8
+.L003done:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ xorl %esi,%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.size _x86_Camellia_encrypt,.-_x86_Camellia_encrypt
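+# Camellia_DecryptBlock_Rounds(int grandRounds, const u8 *ciphertext,
+#                              const CAMELLIA_KEY *keyTable, u8 *plaintext)
+# Mirror image of the encrypt entry: the key pointer starts at the end of
+# the schedule and _x86_Camellia_decrypt walks it backwards, 64 bytes per
+# six-round pass.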
+.globl Camellia_DecryptBlock_Rounds
+.type Camellia_DecryptBlock_Rounds,@function
+.align 16
+Camellia_DecryptBlock_Rounds:
+.L_Camellia_DecryptBlock_Rounds_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%eax
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ movl %edi,16(%esp)
+ leal (%edi,%eax,1),%edi
+ movl %ebx,20(%esp)
+ call .L004pic_point
+.L004pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L004pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_decrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 32(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_DecryptBlock_Rounds,.-.L_Camellia_DecryptBlock_Rounds_begin
+.globl Camellia_DecryptBlock
+.type Camellia_DecryptBlock,@function
+.align 16
+Camellia_DecryptBlock:
+.L_Camellia_DecryptBlock_begin:
+ movl $128,%eax
+ subl 4(%esp),%eax
+ movl $3,%eax
+ adcl $0,%eax
+ movl %eax,4(%esp)
+ jmp .L_Camellia_DecryptBlock_Rounds_begin
+.size Camellia_DecryptBlock,.-.L_Camellia_DecryptBlock_begin
+.globl Camellia_decrypt
+.type Camellia_decrypt,@function
+.align 16
+Camellia_decrypt:
+.L_Camellia_decrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ movl 272(%edi),%eax
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ movl %edi,16(%esp)
+ leal (%edi,%eax,1),%edi
+ movl %ebx,20(%esp)
+ call .L005pic_point
+.L005pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L005pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_decrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 24(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_decrypt,.-.L_Camellia_decrypt_begin
+.type _x86_Camellia_decrypt,@function
+.align 16
+_x86_Camellia_decrypt:
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl -8(%edi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+.align 16
+.L006loop:
+ xorl %esi,%eax
+ xorl -4(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -16(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -12(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -24(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl -20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -32(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -40(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl -36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -48(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -56(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ subl $64,%edi
+ cmpl 20(%esp),%edi
+ je .L007done
+ andl %eax,%esi
+ movl 16(%esp),%edx
+ roll $1,%esi
+ movl %edx,%ecx
+ xorl %esi,%ebx
+ orl 4(%edi),%ecx
+ movl %ebx,8(%esp)
+ xorl 12(%esp),%ecx
+ movl 12(%edi),%esi
+ movl %ecx,12(%esp)
+ orl %ebx,%esi
+ andl (%edi),%ecx
+ xorl %esi,%eax
+ roll $1,%ecx
+ movl %eax,4(%esp)
+ xorl %ecx,%edx
+ movl -8(%edi),%esi
+ movl %edx,16(%esp)
+ jmp .L006loop
+.align 8
+.L007done:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ xorl %esi,%ecx
+ xorl 12(%edi),%edx
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ ret
+.size _x86_Camellia_decrypt,.-_x86_Camellia_decrypt
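+# Camellia_Ekeygen expands the raw user key (args at 36/40/44(%esp): key bits,
+# key, output schedule) into the round-key table. The bswap runs load the
+# big-endian key words; 192-bit keys derive the missing KR half by bitwise
+# complement (the notl pair under .L0091st192). Returns the grand-rounds
+# count in %eax: 3 for 128-bit keys, 4 for 192/256.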
+.globl Camellia_Ekeygen
+.type Camellia_Ekeygen,@function
+.align 16
+Camellia_Ekeygen:
+.L_Camellia_Ekeygen_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ subl $16,%esp
+ movl 36(%esp),%ebp
+ movl 40(%esp),%esi
+ movl 44(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ cmpl $128,%ebp
+ je .L0081st128
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ cmpl $192,%ebp
+ je .L0091st192
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ jmp .L0101st256
+.align 4
+.L0091st192:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ notl %ecx
+ notl %edx
+.align 4
+.L0101st256:
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%edi)
+ movl %ebx,36(%edi)
+ movl %ecx,40(%edi)
+ movl %edx,44(%edi)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+.align 4
+.L0081st128:
+ call .L011pic_point
+.L011pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L011pic_point(%ebp),%ebp
+ leal .LCamellia_SIGMA-.LCamellia_SBOX(%ebp),%edi
+ movl (%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 4(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 8(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 12(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 16(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 44(%esp),%esi
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 16(%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 24(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 32(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 36(%esp),%esi
+ cmpl $128,%esi
+ jne .L0122nd256
+ movl 44(%esp),%edi
+ leal 128(%edi),%edi
+ movl %eax,-112(%edi)
+ movl %ebx,-108(%edi)
+ movl %ecx,-104(%edi)
+ movl %edx,-100(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-80(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-76(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-72(%edi)
+ movl %edx,-68(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-64(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-60(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-56(%edi)
+ movl %edx,-52(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-32(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-28(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-16(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-12(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-8(%edi)
+ movl %edx,-4(%edi)
+ movl %ebx,%ebp
+ shll $2,%ebx
+ movl %ecx,%esi
+ shrl $30,%esi
+ shll $2,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $2,%edx
+ movl %ebx,32(%edi)
+ shrl $30,%esi
+ orl %esi,%ecx
+ shrl $30,%ebp
+ movl %eax,%esi
+ shrl $30,%esi
+ movl %ecx,36(%edi)
+ shll $2,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,40(%edi)
+ movl %eax,44(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,64(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,68(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,72(%edi)
+ movl %eax,76(%edi)
+ movl -128(%edi),%ebx
+ movl -124(%edi),%ecx
+ movl -120(%edi),%edx
+ movl -116(%edi),%eax
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-96(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-92(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-88(%edi)
+ movl %eax,-84(%edi)
+ movl %ebx,%ebp
+ shll $30,%ebx
+ movl %ecx,%esi
+ shrl $2,%esi
+ shll $30,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $30,%edx
+ movl %ebx,-48(%edi)
+ shrl $2,%esi
+ orl %esi,%ecx
+ shrl $2,%ebp
+ movl %eax,%esi
+ shrl $2,%esi
+ movl %ecx,-44(%edi)
+ shll $30,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-40(%edi)
+ movl %eax,-36(%edi)
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-24(%edi)
+ movl %eax,-20(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,4(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,8(%edi)
+ movl %eax,12(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,16(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,20(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,24(%edi)
+ movl %eax,28(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,48(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,52(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,56(%edi)
+ movl %eax,60(%edi)
+ movl $3,%eax
+ jmp .L013done
+.align 16
+.L0122nd256:
+ movl 44(%esp),%esi
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ xorl 32(%esi),%eax
+ xorl 36(%esi),%ebx
+ xorl 40(%esi),%ecx
+ xorl 44(%esi),%edx
+ movl 32(%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 40(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 48(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 44(%esp),%edi
+ leal 128(%edi),%edi
+ movl %eax,-112(%edi)
+ movl %ebx,-108(%edi)
+ movl %ecx,-104(%edi)
+ movl %edx,-100(%edi)
+ movl %eax,%ebp
+ shll $30,%eax
+ movl %ebx,%esi
+ shrl $2,%esi
+ shll $30,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $30,%ecx
+ movl %eax,-48(%edi)
+ shrl $2,%esi
+ orl %esi,%ebx
+ shrl $2,%ebp
+ movl %edx,%esi
+ shrl $2,%esi
+ movl %ebx,-44(%edi)
+ shll $30,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-40(%edi)
+ movl %edx,-36(%edi)
+ movl %eax,%ebp
+ shll $30,%eax
+ movl %ebx,%esi
+ shrl $2,%esi
+ shll $30,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $30,%ecx
+ movl %eax,32(%edi)
+ shrl $2,%esi
+ orl %esi,%ebx
+ shrl $2,%ebp
+ movl %edx,%esi
+ shrl $2,%esi
+ movl %ebx,36(%edi)
+ shll $30,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,40(%edi)
+ movl %edx,44(%edi)
+ movl %ebx,%ebp
+ shll $19,%ebx
+ movl %ecx,%esi
+ shrl $13,%esi
+ shll $19,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $19,%edx
+ movl %ebx,128(%edi)
+ shrl $13,%esi
+ orl %esi,%ecx
+ shrl $13,%ebp
+ movl %eax,%esi
+ shrl $13,%esi
+ movl %ecx,132(%edi)
+ shll $19,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,136(%edi)
+ movl %eax,140(%edi)
+ movl -96(%edi),%ebx
+ movl -92(%edi),%ecx
+ movl -88(%edi),%edx
+ movl -84(%edi),%eax
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-96(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-92(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-88(%edi)
+ movl %eax,-84(%edi)
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-64(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-60(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-56(%edi)
+ movl %eax,-52(%edi)
+ movl %ebx,%ebp
+ shll $30,%ebx
+ movl %ecx,%esi
+ shrl $2,%esi
+ shll $30,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $30,%edx
+ movl %ebx,16(%edi)
+ shrl $2,%esi
+ orl %esi,%ecx
+ shrl $2,%ebp
+ movl %eax,%esi
+ shrl $2,%esi
+ movl %ecx,20(%edi)
+ shll $30,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,24(%edi)
+ movl %eax,28(%edi)
+ movl %ecx,%ebp
+ shll $2,%ecx
+ movl %edx,%esi
+ shrl $30,%esi
+ shll $2,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $2,%eax
+ movl %ecx,80(%edi)
+ shrl $30,%esi
+ orl %esi,%edx
+ shrl $30,%ebp
+ movl %ebx,%esi
+ shrl $30,%esi
+ movl %edx,84(%edi)
+ shll $2,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,88(%edi)
+ movl %ebx,92(%edi)
+ movl -80(%edi),%ecx
+ movl -76(%edi),%edx
+ movl -72(%edi),%eax
+ movl -68(%edi),%ebx
+ movl %ecx,%ebp
+ shll $15,%ecx
+ movl %edx,%esi
+ shrl $17,%esi
+ shll $15,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $15,%eax
+ movl %ecx,-80(%edi)
+ shrl $17,%esi
+ orl %esi,%edx
+ shrl $17,%ebp
+ movl %ebx,%esi
+ shrl $17,%esi
+ movl %edx,-76(%edi)
+ shll $15,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,-72(%edi)
+ movl %ebx,-68(%edi)
+ movl %ecx,%ebp
+ shll $30,%ecx
+ movl %edx,%esi
+ shrl $2,%esi
+ shll $30,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $30,%eax
+ movl %ecx,-16(%edi)
+ shrl $2,%esi
+ orl %esi,%edx
+ shrl $2,%ebp
+ movl %ebx,%esi
+ shrl $2,%esi
+ movl %edx,-12(%edi)
+ shll $30,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,-8(%edi)
+ movl %ebx,-4(%edi)
+ movl %edx,64(%edi)
+ movl %eax,68(%edi)
+ movl %ebx,72(%edi)
+ movl %ecx,76(%edi)
+ movl %edx,%ebp
+ shll $17,%edx
+ movl %eax,%esi
+ shrl $15,%esi
+ shll $17,%eax
+ orl %esi,%edx
+ movl %ebx,%esi
+ shll $17,%ebx
+ movl %edx,96(%edi)
+ shrl $15,%esi
+ orl %esi,%eax
+ shrl $15,%ebp
+ movl %ecx,%esi
+ shrl $15,%esi
+ movl %eax,100(%edi)
+ shll $17,%ecx
+ orl %esi,%ebx
+ orl %ebp,%ecx
+ movl %ebx,104(%edi)
+ movl %ecx,108(%edi)
+ movl -128(%edi),%edx
+ movl -124(%edi),%eax
+ movl -120(%edi),%ebx
+ movl -116(%edi),%ecx
+ movl %eax,%ebp
+ shll $13,%eax
+ movl %ebx,%esi
+ shrl $19,%esi
+ shll $13,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $13,%ecx
+ movl %eax,-32(%edi)
+ shrl $19,%esi
+ orl %esi,%ebx
+ shrl $19,%ebp
+ movl %edx,%esi
+ shrl $19,%esi
+ movl %ebx,-28(%edi)
+ shll $13,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-24(%edi)
+ movl %edx,-20(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,4(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl %eax,%ebp
+ shll $17,%eax
+ movl %ebx,%esi
+ shrl $15,%esi
+ shll $17,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $17,%ecx
+ movl %eax,48(%edi)
+ shrl $15,%esi
+ orl %esi,%ebx
+ shrl $15,%ebp
+ movl %edx,%esi
+ shrl $15,%esi
+ movl %ebx,52(%edi)
+ shll $17,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,56(%edi)
+ movl %edx,60(%edi)
+ movl %ebx,%ebp
+ shll $2,%ebx
+ movl %ecx,%esi
+ shrl $30,%esi
+ shll $2,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $2,%edx
+ movl %ebx,112(%edi)
+ shrl $30,%esi
+ orl %esi,%ecx
+ shrl $30,%ebp
+ movl %eax,%esi
+ shrl $30,%esi
+ movl %ecx,116(%edi)
+ shll $2,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,120(%edi)
+ movl %eax,124(%edi)
+ movl $4,%eax
+.L013done:
+ leal 144(%edi),%edx
+ addl $16,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
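+# Camellia_set_key is an argument-checking wrapper around Camellia_Ekeygen:
+# -1 for a NULL key or schedule pointer, -2 for a key length other than
+# 128/192/256 bits, 0 on success. Ekeygen leaves a pointer just past the
+# schedule in %edx, where the grand-rounds count is stored (read back from
+# offset 272 by the CBC routine below).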
+.globl Camellia_set_key
+.type Camellia_set_key,@function
+.align 16
+Camellia_set_key:
+.L_Camellia_set_key_begin:
+ pushl %ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%edx
+ movl $-1,%eax
+ testl %ecx,%ecx
+ jz .L014done
+ testl %edx,%edx
+ jz .L014done
+ movl $-2,%eax
+ cmpl $256,%ebx
+ je .L015arg_ok
+ cmpl $192,%ebx
+ je .L015arg_ok
+ cmpl $128,%ebx
+ jne .L014done
+.align 4
+.L015arg_ok:
+ pushl %edx
+ pushl %ecx
+ pushl %ebx
+ call .L_Camellia_Ekeygen_begin
+ addl $12,%esp
+ movl %eax,(%edx)
+ xorl %eax,%eax
+.align 4
+.L014done:
+ popl %ebx
+ ret
+.size Camellia_set_key,.-.L_Camellia_set_key_begin
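+# .LCamellia_SIGMA holds the six 64-bit SIGMA key-schedule constants from the
+# Camellia specification, zero-padded to the alignment boundary.
+# .LCamellia_SBOX packs four 256-entry 32-bit S-box tables pairwise: the
+# round code indexes it with scale 8 at offsets 0/4 and 2048/2052, i.e. two
+# interleaved 2KB halves.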
+.align 64
+.LCamellia_SIGMA:
+.long 2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
+.align 64
+.LCamellia_SBOX:
+.long 1886416896,1886388336
+.long 2189591040,741081132
+.long 741092352,3014852787
+.long 3974949888,3233808576
+.long 3014898432,3840147684
+.long 656877312,1465319511
+.long 3233857536,3941204202
+.long 3857048832,2930639022
+.long 3840205824,589496355
+.long 2240120064,1802174571
+.long 1465341696,1162149957
+.long 892679424,2779054245
+.long 3941263872,3991732461
+.long 202116096,1330577487
+.long 2930683392,488439837
+.long 1094795520,2459041938
+.long 589505280,2256928902
+.long 4025478912,2947481775
+.long 1802201856,2088501372
+.long 2475922176,522125343
+.long 1162167552,1044250686
+.long 421075200,3705405660
+.long 2779096320,1583218782
+.long 555819264,185270283
+.long 3991792896,2795896998
+.long 235802112,960036921
+.long 1330597632,3587506389
+.long 1313754624,1566376029
+.long 488447232,3654877401
+.long 1701143808,1515847770
+.long 2459079168,1364262993
+.long 3183328512,1819017324
+.long 2256963072,2341142667
+.long 3099113472,2593783962
+.long 2947526400,4227531003
+.long 2408550144,2964324528
+.long 2088532992,1953759348
+.long 3958106880,724238379
+.long 522133248,4042260720
+.long 3469659648,2223243396
+.long 1044266496,3755933919
+.long 808464384,3419078859
+.long 3705461760,875823156
+.long 1600085760,1987444854
+.long 1583242752,1835860077
+.long 3318072576,2846425257
+.long 185273088,3520135377
+.long 437918208,67371012
+.long 2795939328,336855060
+.long 3789676800,976879674
+.long 960051456,3739091166
+.long 3402287616,286326801
+.long 3587560704,842137650
+.long 1195853568,2627469468
+.long 1566399744,1397948499
+.long 1027423488,4075946226
+.long 3654932736,4278059262
+.long 16843008,3486449871
+.long 1515870720,3284336835
+.long 3604403712,2054815866
+.long 1364283648,606339108
+.long 1448498688,3907518696
+.long 1819044864,1616904288
+.long 1296911616,1768489065
+.long 2341178112,2863268010
+.long 218959104,2694840480
+.long 2593823232,2711683233
+.long 1717986816,1650589794
+.long 4227595008,1414791252
+.long 3435973632,505282590
+.long 2964369408,3772776672
+.long 757935360,1684275300
+.long 1953788928,269484048
+.long 303174144,0
+.long 724249344,2745368739
+.long 538976256,1970602101
+.long 4042321920,2324299914
+.long 2981212416,3873833190
+.long 2223277056,151584777
+.long 2576980224,3722248413
+.long 3755990784,2273771655
+.long 1280068608,2206400643
+.long 3419130624,3452764365
+.long 3267543552,2425356432
+.long 875836416,1936916595
+.long 2122219008,4143317238
+.long 1987474944,2644312221
+.long 84215040,3216965823
+.long 1835887872,1381105746
+.long 3082270464,3638034648
+.long 2846468352,3368550600
+.long 825307392,3334865094
+.long 3520188672,2172715137
+.long 387389184,1869545583
+.long 67372032,320012307
+.long 3621246720,1667432547
+.long 336860160,3924361449
+.long 1482184704,2812739751
+.long 976894464,2677997727
+.long 1633771776,3166437564
+.long 3739147776,690552873
+.long 454761216,4193845497
+.long 286331136,791609391
+.long 471604224,3031695540
+.long 842150400,2021130360
+.long 252645120,101056518
+.long 2627509248,3890675943
+.long 370546176,1903231089
+.long 1397969664,3570663636
+.long 404232192,2880110763
+.long 4076007936,2290614408
+.long 572662272,2374828173
+.long 4278124032,1920073842
+.long 1145324544,3115909305
+.long 3486502656,4177002744
+.long 2998055424,2896953516
+.long 3284386560,909508662
+.long 3048584448,707395626
+.long 2054846976,1010565180
+.long 2442236160,4059103473
+.long 606348288,1077936192
+.long 134744064,3553820883
+.long 3907577856,3149594811
+.long 2829625344,1128464451
+.long 1616928768,353697813
+.long 4244438016,2913796269
+.long 1768515840,2004287607
+.long 1347440640,2155872384
+.long 2863311360,2189557890
+.long 3503345664,3974889708
+.long 2694881280,656867367
+.long 2105376000,3856990437
+.long 2711724288,2240086149
+.long 2307492096,892665909
+.long 1650614784,202113036
+.long 2543294208,1094778945
+.long 1414812672,4025417967
+.long 1532713728,2475884691
+.long 505290240,421068825
+.long 2509608192,555810849
+.long 3772833792,235798542
+.long 4294967040,1313734734
+.long 1684300800,1701118053
+.long 3537031680,3183280317
+.long 269488128,3099066552
+.long 3301229568,2408513679
+.long 0,3958046955
+.long 1212696576,3469607118
+.long 2745410304,808452144
+.long 4160222976,1600061535
+.long 1970631936,3318022341
+.long 3688618752,437911578
+.long 2324335104,3789619425
+.long 50529024,3402236106
+.long 3873891840,1195835463
+.long 3671775744,1027407933
+.long 151587072,16842753
+.long 1061109504,3604349142
+.long 3722304768,1448476758
+.long 2492765184,1296891981
+.long 2273806080,218955789
+.long 1549556736,1717960806
+.long 2206434048,3435921612
+.long 33686016,757923885
+.long 3452816640,303169554
+.long 1246382592,538968096
+.long 2425393152,2981167281
+.long 858993408,2576941209
+.long 1936945920,1280049228
+.long 1734829824,3267494082
+.long 4143379968,2122186878
+.long 4092850944,84213765
+.long 2644352256,3082223799
+.long 2139062016,825294897
+.long 3217014528,387383319
+.long 3806519808,3621191895
+.long 1381126656,1482162264
+.long 2610666240,1633747041
+.long 3638089728,454754331
+.long 640034304,471597084
+.long 3368601600,252641295
+.long 926365440,370540566
+.long 3334915584,404226072
+.long 993737472,572653602
+.long 2172748032,1145307204
+.long 2526451200,2998010034
+.long 1869573888,3048538293
+.long 1263225600,2442199185
+.long 320017152,134742024
+.long 3200171520,2829582504
+.long 1667457792,4244373756
+.long 774778368,1347420240
+.long 3924420864,3503292624
+.long 2038003968,2105344125
+.long 2812782336,2307457161
+.long 2358021120,2543255703
+.long 2678038272,1532690523
+.long 1852730880,2509570197
+.long 3166485504,4294902015
+.long 2391707136,3536978130
+.long 690563328,3301179588
+.long 4126536960,1212678216
+.long 4193908992,4160159991
+.long 3065427456,3688562907
+.long 791621376,50528259
+.long 4261281024,3671720154
+.long 3031741440,1061093439
+.long 1499027712,2492727444
+.long 2021160960,1549533276
+.long 2560137216,33685506
+.long 101058048,1246363722
+.long 1785358848,858980403
+.long 3890734848,1734803559
+.long 1179010560,4092788979
+.long 1903259904,2139029631
+.long 3132799488,3806462178
+.long 3570717696,2610626715
+.long 623191296,640024614
+.long 2880154368,926351415
+.long 1111638528,993722427
+.long 2290649088,2526412950
+.long 2728567296,1263206475
+.long 2374864128,3200123070
+.long 4210752000,774766638
+.long 1920102912,2037973113
+.long 117901056,2357985420
+.long 3115956480,1852702830
+.long 1431655680,2391670926
+.long 4177065984,4126474485
+.long 4008635904,3065381046
+.long 2896997376,4261216509
+.long 168430080,1499005017
+.long 909522432,2560098456
+.long 1229539584,1785331818
+.long 707406336,1178992710
+.long 1751672832,3132752058
+.long 1010580480,623181861
+.long 943208448,1111621698
+.long 4059164928,2728525986
+.long 2762253312,4210688250
+.long 1077952512,117899271
+.long 673720320,1431634005
+.long 3553874688,4008575214
+.long 2071689984,168427530
+.long 3149642496,1229520969
+.long 3385444608,1751646312
+.long 1128481536,943194168
+.long 3250700544,2762211492
+.long 353703168,673710120
+.long 3823362816,2071658619
+.long 2913840384,3385393353
+.long 4109693952,3250651329
+.long 2004317952,3823304931
+.long 3351758592,4109631732
+.long 2155905024,3351707847
+.long 2661195264,2661154974
+.long 14737632,939538488
+.long 328965,1090535745
+.long 5789784,369104406
+.long 14277081,1979741814
+.long 6776679,3640711641
+.long 5131854,2466288531
+.long 8487297,1610637408
+.long 13355979,4060148466
+.long 13224393,1912631922
+.long 723723,3254829762
+.long 11447982,2868947883
+.long 6974058,2583730842
+.long 14013909,1962964341
+.long 1579032,100664838
+.long 6118749,1459640151
+.long 8553090,2684395680
+.long 4605510,2432733585
+.long 14671839,4144035831
+.long 14079702,3036722613
+.long 2565927,3372272073
+.long 9079434,2717950626
+.long 3289650,2348846220
+.long 4934475,3523269330
+.long 4342338,2415956112
+.long 14408667,4127258358
+.long 1842204,117442311
+.long 10395294,2801837991
+.long 10263708,654321447
+.long 3815994,2382401166
+.long 13290186,2986390194
+.long 2434341,1224755529
+.long 8092539,3724599006
+.long 855309,1124090691
+.long 7434609,1543527516
+.long 6250335,3607156695
+.long 2039583,3338717127
+.long 16316664,1040203326
+.long 14145495,4110480885
+.long 4079166,2399178639
+.long 10329501,1728079719
+.long 8158332,520101663
+.long 6316128,402659352
+.long 12171705,1845522030
+.long 12500670,2936057775
+.long 12369084,788541231
+.long 9145227,3791708898
+.long 1447446,2231403909
+.long 3421236,218107149
+.long 5066061,1392530259
+.long 12829635,4026593520
+.long 7500402,2617285788
+.long 9803157,1694524773
+.long 11250603,3925928682
+.long 9342606,2734728099
+.long 12237498,2919280302
+.long 8026746,2650840734
+.long 11776947,3959483628
+.long 131586,2147516544
+.long 11842740,754986285
+.long 11382189,1795189611
+.long 10658466,2818615464
+.long 11316396,721431339
+.long 14211288,905983542
+.long 10132122,2785060518
+.long 1513239,3305162181
+.long 1710618,2248181382
+.long 3487029,1291865421
+.long 13421772,855651123
+.long 16250871,4244700669
+.long 10066329,1711302246
+.long 6381921,1476417624
+.long 5921370,2516620950
+.long 15263976,973093434
+.long 2368548,150997257
+.long 5658198,2499843477
+.long 4210752,268439568
+.long 14803425,2013296760
+.long 6513507,3623934168
+.long 592137,1107313218
+.long 3355443,3422604492
+.long 12566463,4009816047
+.long 10000536,637543974
+.long 9934743,3842041317
+.long 8750469,1627414881
+.long 6842472,436214298
+.long 16579836,1056980799
+.long 15527148,989870907
+.long 657930,2181071490
+.long 14342874,3053500086
+.long 7303023,3674266587
+.long 5460819,3556824276
+.long 6447714,2550175896
+.long 10724259,3892373736
+.long 3026478,2332068747
+.long 526344,33554946
+.long 11513775,3942706155
+.long 2631720,167774730
+.long 11579568,738208812
+.long 7631988,486546717
+.long 12763842,2952835248
+.long 12434877,1862299503
+.long 3552822,2365623693
+.long 2236962,2281736328
+.long 3684408,234884622
+.long 6579300,419436825
+.long 1973790,2264958855
+.long 3750201,1308642894
+.long 2894892,184552203
+.long 10921638,2835392937
+.long 3158064,201329676
+.long 15066597,2030074233
+.long 4473924,285217041
+.long 16645629,2130739071
+.long 8947848,570434082
+.long 10461087,3875596263
+.long 6645093,1493195097
+.long 8882055,3774931425
+.long 7039851,3657489114
+.long 16053492,1023425853
+.long 2302755,3355494600
+.long 4737096,301994514
+.long 1052688,67109892
+.long 13750737,1946186868
+.long 5329233,1409307732
+.long 12632256,805318704
+.long 16382457,2113961598
+.long 13816530,3019945140
+.long 10526880,671098920
+.long 5592405,1426085205
+.long 10592673,1744857192
+.long 4276545,1342197840
+.long 16448250,3187719870
+.long 4408131,3489714384
+.long 1250067,3288384708
+.long 12895428,822096177
+.long 3092271,3405827019
+.long 11053224,704653866
+.long 11974326,2902502829
+.long 3947580,251662095
+.long 2829099,3389049546
+.long 12698049,1879076976
+.long 16777215,4278255615
+.long 13158600,838873650
+.long 10855845,1761634665
+.long 2105376,134219784
+.long 9013641,1644192354
+.long 0,0
+.long 9474192,603989028
+.long 4671303,3506491857
+.long 15724527,4211145723
+.long 15395562,3120609978
+.long 12040119,3976261101
+.long 1381653,1157645637
+.long 394758,2164294017
+.long 13487565,1929409395
+.long 11908533,1828744557
+.long 1184274,2214626436
+.long 8289918,2667618207
+.long 12303291,3993038574
+.long 2697513,1241533002
+.long 986895,3271607235
+.long 12105912,771763758
+.long 460551,3238052289
+.long 263172,16777473
+.long 10197915,3858818790
+.long 9737364,620766501
+.long 2171169,1207978056
+.long 6710886,2566953369
+.long 15132390,3103832505
+.long 13553358,3003167667
+.long 15592941,2063629179
+.long 15198183,4177590777
+.long 3881787,3456159438
+.long 16711422,3204497343
+.long 8355711,3741376479
+.long 12961221,1895854449
+.long 10790052,687876393
+.long 3618615,3439381965
+.long 11645361,1811967084
+.long 5000268,318771987
+.long 9539985,1677747300
+.long 7237230,2600508315
+.long 9276813,1660969827
+.long 7763574,2634063261
+.long 197379,3221274816
+.long 2960685,1258310475
+.long 14606046,3070277559
+.long 9868950,2768283045
+.long 2500134,2298513801
+.long 8224125,1593859935
+.long 13027014,2969612721
+.long 6052956,385881879
+.long 13882323,4093703412
+.long 15921906,3154164924
+.long 5197647,3540046803
+.long 1644825,1174423110
+.long 4144959,3472936911
+.long 14474460,922761015
+.long 7960953,1577082462
+.long 1907997,1191200583
+.long 5395026,2483066004
+.long 15461355,4194368250
+.long 15987699,4227923196
+.long 7171437,1526750043
+.long 6184542,2533398423
+.long 16514043,4261478142
+.long 6908265,1509972570
+.long 11711154,2885725356
+.long 15790320,1006648380
+.long 3223857,1275087948
+.long 789516,50332419
+.long 13948116,889206069
+.long 13619151,4076925939
+.long 9211020,587211555
+.long 14869218,3087055032
+.long 7697781,1560304989
+.long 11119017,1778412138
+.long 4868682,2449511058
+.long 5723991,3573601749
+.long 8684676,553656609
+.long 1118481,1140868164
+.long 4539717,1358975313
+.long 1776411,3321939654
+.long 16119285,2097184125
+.long 15000804,956315961
+.long 921102,2197848963
+.long 7566195,3691044060
+.long 11184810,2852170410
+.long 15856113,2080406652
+.long 14540253,1996519287
+.long 5855577,1442862678
+.long 1315860,83887365
+.long 7105644,452991771
+.long 9605778,2751505572
+.long 5526612,352326933
+.long 13684944,872428596
+.long 7895160,503324190
+.long 7368816,469769244
+.long 14935011,4160813304
+.long 4802889,1375752786
+.long 8421504,536879136
+.long 5263440,335549460
+.long 10987431,3909151209
+.long 16185078,3170942397
+.long 7829367,3707821533
+.long 9671571,3825263844
+.long 8816262,2701173153
+.long 8618883,3758153952
+.long 2763306,2315291274
+.long 13092807,4043370993
+.long 5987163,3590379222
+.long 15329769,2046851706
+.long 15658734,3137387451
+.long 9408399,3808486371
+.long 65793,1073758272
+.long 4013373,1325420367
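+# Camellia_cbc_encrypt(in, out, len, schedule, ivec, enc) handles CBC in both
+# directions. The prologue carves a 64-byte-aligned scratch frame below %esp,
+# biased relative to the key-schedule address (the andl $-64/andl $960
+# sequence), apparently to avoid cache aliasing between frame and schedule.
+# The grand-rounds count at offset 272 of the schedule, scaled by 64
+# (shll $6), locates the end of the round keys.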
+.globl Camellia_cbc_encrypt
+.type Camellia_cbc_encrypt,@function
+.align 16
+Camellia_cbc_encrypt:
+.L_Camellia_cbc_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je .L016enc_out
+ pushfl
+ cld
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 36(%esp),%edx
+ movl 40(%esp),%ebp
+ leal -64(%esp),%esi
+ andl $-64,%esi
+ leal -127(%edx),%edi
+ subl %esi,%edi
+ negl %edi
+ andl $960,%edi
+ subl %edi,%esi
+ movl 44(%esp),%edi
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %esi,20(%esp)
+ movl %eax,24(%esp)
+ movl %ebx,28(%esp)
+ movl %ecx,32(%esp)
+ movl %edx,36(%esp)
+ movl %ebp,40(%esp)
+ call .L017pic_point
+.L017pic_point:
+ popl %ebp
+ leal .LCamellia_SBOX-.L017pic_point(%ebp),%ebp
+ movl $32,%esi
+.align 4
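+# touch all 4KB of the S-box (32 iterations, 128 bytes each) so later
+# data-dependent lookups hit a warm cache, limiting timing leakage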
+.L018prefetch_sbox:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ leal 128(%ebp),%ebp
+ decl %esi
+ jnz .L018prefetch_sbox
+ movl 36(%esp),%eax
+ subl $4096,%ebp
+ movl 24(%esp),%esi
+ movl 272(%eax),%edx
+ cmpl $0,%edi
+ je .L019DECRYPT
+ movl 32(%esp),%ecx
+ movl 40(%esp),%edi
+ shll $6,%edx
+ leal (%eax,%edx,1),%edx
+ movl %edx,16(%esp)
+ testl $4294967280,%ecx
+ jz .L020enc_tail
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 4
+.L021enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ bswap %eax
+ xorl 12(%esi),%edx
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_encrypt
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ movl %eax,(%edi)
+ bswap %edx
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ leal 16(%edi),%edx
+ movl %edx,28(%esp)
+ subl $16,%ecx
+ testl $4294967280,%ecx
+ movl %ecx,32(%esp)
+ jnz .L021enc_loop
+ testl $15,%ecx
+ jnz .L020enc_tail
+ movl 40(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 20(%esp),%esp
+ popfl
+.L016enc_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4
+.L020enc_tail:
+ movl %edi,%eax
+ movl 28(%esp),%edi
+ pushl %eax
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je .L022enc_in_place
+.align 4
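+# the data word below is a pre-assembled string op, effectively "rep movsb"
+# plus two filler bytes: copy the partial tail block from input to output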
+.long 2767451785
+ jmp .L023enc_skip_in_place
+.L022enc_in_place:
+ leal (%edi,%ecx,1),%edi
+.L023enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 4
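+# the next data word assembles to "rep stosb": zero-pad the tail block
+# with %al = 0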
+.long 2868115081
+ popl %edi
+ movl 28(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,32(%esp)
+ jmp .L021enc_loop
+.align 16
+.L019DECRYPT:
+ shll $6,%edx
+ leal (%eax,%edx,1),%edx
+ movl %eax,16(%esp)
+ movl %edx,36(%esp)
+ cmpl 28(%esp),%esi
+ je .L024dec_in_place
+ movl 40(%esp),%edi
+ movl %edi,44(%esp)
+.align 4
+.L025dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_decrypt
+ movl 44(%esp),%edi
+ movl 32(%esp),%esi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ xorl (%edi),%eax
+ bswap %edx
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc .L026dec_partial
+ movl %esi,32(%esp)
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl %esi,44(%esp)
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ leal 16(%edi),%edi
+ movl %edi,28(%esp)
+ jnz .L025dec_loop
+ movl 44(%esp),%edi
+.L027dec_end:
+ movl 40(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp .L028dec_out
+.align 4
+.L026dec_partial:
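+# partial final block: stash the decrypted words on the stack, then the
+# pre-assembled "rep movsb" data word copies only the remaining bytes out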
+ leal 44(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%ecx
+ movl %edi,%esi
+ movl 28(%esp),%edi
+.long 2767451785
+ movl 24(%esp),%edi
+ jmp .L027dec_end
+.align 4
+.L024dec_in_place:
+.L029dec_in_place_loop:
+ leal 44(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ bswap %eax
+ movl %edx,12(%edi)
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call _x86_Camellia_decrypt
+ movl 40(%esp),%edi
+ movl 28(%esp),%esi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ xorl (%edi),%eax
+ bswap %edx
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,28(%esp)
+ leal 44(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 24(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ movl 32(%esp),%ecx
+ subl $16,%ecx
+ jc .L030dec_in_place_partial
+ movl %ecx,32(%esp)
+ jnz .L029dec_in_place_loop
+ jmp .L028dec_out
+.align 4
+.L030dec_in_place_partial:
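+# in-place tail: again a pre-assembled "rep movsb" moves the final bytes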
+ movl 28(%esp),%edi
+ leal 44(%esp),%esi
+ leal (%edi,%ecx,1),%edi
+ leal 16(%esi,%ecx,1),%esi
+ negl %ecx
+.long 2767451785
+.align 4
+.L028dec_out:
+ movl 20(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size Camellia_cbc_encrypt,.-.L_Camellia_cbc_encrypt_begin
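+# the .byte sequence spells the identification string
+# "Camellia for x86 by <appro@openssl.org>"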
+.byte 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte 115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x86-elf-gas/cast/cast-586.s b/deps/openssl/asm/x86-elf-gas/cast/cast-586.s
new file mode 100644
index 0000000000..3ef7fe32b7
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/cast/cast-586.s
@@ -0,0 +1,933 @@
+.file "cast-586.s"
+.text
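+# CAST-128 (RFC 2144). The sixteen unrolled blocks alternate the three round
+# types: type 1 mixes the subkey with addl and combines ((S1^S2)-S3)+S4,
+# type 2 mixes with xorl and combines ((S1-S2)+S3)^S4, type 3 mixes with
+# subl and combines ((S1+S2)^S3)-S4; the rotate count comes from the odd
+# subkey words. The flag at 128(%ebp) marks a short (<=80-bit) key, for
+# which only 12 of the 16 rounds are used.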
+.globl CAST_encrypt
+.type CAST_encrypt,@function
+.align 16
+CAST_encrypt:
+.L_CAST_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+ movl 128(%ebp),%eax
+ pushl %eax
+ xorl %eax,%eax
+
+ movl (%ebp),%edx
+ movl 4(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 8(%ebp),%edx
+ movl 12(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 16(%ebp),%edx
+ movl 20(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 24(%ebp),%edx
+ movl 28(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 32(%ebp),%edx
+ movl 36(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 40(%ebp),%edx
+ movl 44(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 48(%ebp),%edx
+ movl 52(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 56(%ebp),%edx
+ movl 60(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 64(%ebp),%edx
+ movl 68(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 72(%ebp),%edx
+ movl 76(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 80(%ebp),%edx
+ movl 84(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 88(%ebp),%edx
+ movl 92(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+
+ popl %edx
+ orl %edx,%edx
+ jnz .L000cast_enc_done
+
+ movl 96(%ebp),%edx
+ movl 100(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 104(%ebp),%edx
+ movl 108(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 112(%ebp),%edx
+ movl 116(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 120(%ebp),%edx
+ movl 124(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+.L000cast_enc_done:
+ nop
+ movl 20(%esp),%eax
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size CAST_encrypt,.-.L_CAST_encrypt_begin
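+# CAST_decrypt runs the rounds in reverse order; when the short-key flag at
+# 128(%ebp) is set it jumps straight to .L001cast_dec_skip, bypassing rounds
+# 16 through 13.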
+.globl CAST_decrypt
+.type CAST_decrypt,@function
+.align 16
+CAST_decrypt:
+.L_CAST_decrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+ movl 128(%ebp),%eax
+ orl %eax,%eax
+ jnz .L001cast_dec_skip
+ xorl %eax,%eax
+
+ movl 120(%ebp),%edx
+ movl 124(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 112(%ebp),%edx
+ movl 116(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 104(%ebp),%edx
+ movl 108(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 96(%ebp),%edx
+ movl 100(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+.L001cast_dec_skip:
+
+ movl 88(%ebp),%edx
+ movl 92(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 80(%ebp),%edx
+ movl 84(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 72(%ebp),%edx
+ movl 76(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 64(%ebp),%edx
+ movl 68(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 56(%ebp),%edx
+ movl 60(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 48(%ebp),%edx
+ movl 52(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 40(%ebp),%edx
+ movl 44(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 32(%ebp),%edx
+ movl 36(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 24(%ebp),%edx
+ movl 28(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl 16(%ebp),%edx
+ movl 20(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+
+ movl 8(%ebp),%edx
+ movl 12(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+
+ movl (%ebp),%edx
+ movl 4(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+ nop
+ movl 20(%esp),%eax
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size CAST_decrypt,.-.L_CAST_decrypt_begin
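+# CAST_cbc_encrypt: the standard perlasm CBC wrapper. Whole blocks go through
+# the loops above; a 1..7-byte encrypt tail is dispatched via the computed
+# jump through .L007cbc_enc_jmp_table into the ej7..ej1 ladder, while the
+# decrypt tail falls through the dj7..dj1 ladder (whose last two stores,
+# dj2/dj1, go through %esi where the rest of the ladder writes through %edi).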
+.globl CAST_cbc_encrypt
+.type CAST_cbc_encrypt,@function
+.align 16
+CAST_cbc_encrypt:
+.L_CAST_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 56(%esp),%ecx
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L002decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz .L003encrypt_finish
+.L004encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_CAST_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L004encrypt_loop
+.L003encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L005finish
+ call .L006PIC_point
+.L006PIC_point:
+ popl %edx
+ leal .L007cbc_enc_jmp_table-.L006PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L008ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L009ej6:
+ movb 5(%esi),%dh
+.L010ej5:
+ movb 4(%esi),%dl
+.L011ej4:
+ movl (%esi),%ecx
+ jmp .L012ejend
+.L013ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L014ej2:
+ movb 1(%esi),%ch
+.L015ej1:
+ movb (%esi),%cl
+.L012ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_CAST_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L005finish
+.L002decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz .L016decrypt_finish
+.L017decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_CAST_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L017decrypt_loop
+.L016decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L005finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_CAST_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L018dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L019dj6:
+ movb %dh,5(%edi)
+.L020dj5:
+ movb %dl,4(%edi)
+.L021dj4:
+ movl %ecx,(%edi)
+ jmp .L022djend
+.L023dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+.L024dj2:
+ movb %ch,1(%esi)
+.L025dj1:
+ movb %cl,(%esi)
+.L022djend:
+ jmp .L005finish
+.L005finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L007cbc_enc_jmp_table:
+.long 0
+.long .L015ej1-.L006PIC_point
+.long .L014ej2-.L006PIC_point
+.long .L013ej3-.L006PIC_point
+.long .L011ej4-.L006PIC_point
+.long .L010ej5-.L006PIC_point
+.long .L009ej6-.L006PIC_point
+.long .L008ej7-.L006PIC_point
+.align 64
+.size CAST_cbc_encrypt,.-.L_CAST_cbc_encrypt_begin
diff --git a/deps/openssl/asm/x86-elf-gas/des/crypt586.s b/deps/openssl/asm/x86-elf-gas/des/crypt586.s
new file mode 100644
index 0000000000..46c81c493d
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/des/crypt586.s
@@ -0,0 +1,875 @@
+.file "crypt586.s"
+.text
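+# fcrypt_body is the DES engine behind the traditional Unix crypt(3): 25
+# complete DES encryptions (the $25 pushed below) over the caller's key
+# schedule, with the salt-dependent E-bit swap masks passed in at
+# 36/40(%esp). Lookups go through the eight DES_SPtrans tables spaced 0x100
+# apart; note the absolute reference to DES_SPtrans, so this code is not
+# position-independent.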
+.globl fcrypt_body
+.type fcrypt_body,@function
+.align 16
+fcrypt_body:
+.L_fcrypt_body_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+
+ xorl %edi,%edi
+ xorl %esi,%esi
+ leal DES_SPtrans,%edx
+ pushl %edx
+ movl 28(%esp),%ebp
+ pushl $25
+.L000start:
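+# each unrolled block below is one DES round: the xor/and/shift dance applies
+# the salt masks to swap paired expansion bits, two subkey words are mixed
+# in, the 0xfcfcfcfc/0xcfcfcfcf masks cut out the six-bit S-box indices, and
+# eight table lookups fold the result into the opposite half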
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl (%ebp),%ebx
+ xorl %ebx,%eax
+ movl 4(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 8(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 12(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 16(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 20(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 24(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 28(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 32(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 36(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 40(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 44(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 48(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 52(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 56(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 60(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 64(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 68(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 72(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 76(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 80(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 84(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 88(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 92(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 96(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 100(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 104(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 108(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 112(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 116(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 120(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 124(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+ movl (%esp),%ebx
+ movl %edi,%eax
+ decl %ebx
+ movl %esi,%edi
+ movl %eax,%esi
+ movl %ebx,(%esp)
+ jnz .L000start
+
+
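+# All 25 iterations done: the masked swap ladder below is DES's final
+# permutation (the inverse of IP); the result is stored through the output
+# pointer fetched into %edx.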
+ movl 28(%esp),%edx
+ rorl $1,%edi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0xaaaaaaaa,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $23,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $10,%esi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0x33333333,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $18,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xfff0000f,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ roll $12,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xf0f0f0f0,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%edx)
+ movl %edi,4(%edx)
+ addl $8,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size fcrypt_body,.-.L_fcrypt_body_begin
diff --git a/deps/openssl/asm/x86-elf-gas/des/des-586.s b/deps/openssl/asm/x86-elf-gas/des/des-586.s
new file mode 100644
index 0000000000..2fbd340dae
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/des/des-586.s
@@ -0,0 +1,1837 @@
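+# DES primitives generated from OpenSSL's crypto/des/asm/des-586.pl:
+# _x86_DES_encrypt/_x86_DES_decrypt are the shared 16-round cores (the
+# decrypt core simply walks the key schedule backwards), DES_encrypt1/2/3
+# and DES_decrypt3 wrap them, DES_ncbc_encrypt and DES_ede3_cbc_encrypt add
+# CBC chaining, and DES_SPtrans at the end holds the combined S-box/P tables.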
+.file "des-586.s"
+.text
+.globl DES_SPtrans
+.type _x86_DES_encrypt,@function
+.align 16
+_x86_DES_encrypt:
+ pushl %ecx
+
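+# Sixteen unrolled rounds follow. Each round XORs two subkey words into the
+# active half, isolates eight 6-bit S-box indices (the 0xfcfcfcfc/0xcfcfcfcf
+# masks and the rorl $4 select alternating bit groups), and XORs the matching
+# DES_SPtrans entries (tables at %ebp+0x000..0x700) into the other half.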
+ movl (%ecx),%eax
+ xorl %ebx,%ebx
+ movl 4(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 8(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 12(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 16(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 20(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 24(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 28(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 32(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 36(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 40(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 44(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 48(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 52(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 56(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 60(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 64(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 68(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 72(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 76(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 80(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 84(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 88(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 92(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 96(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 100(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 104(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 108(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 112(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 116(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 120(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 124(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ addl $4,%esp
+ ret
+.size _x86_DES_encrypt,.-_x86_DES_encrypt
+.type _x86_DES_decrypt,@function
+.align 16
+_x86_DES_decrypt:
+ pushl %ecx
+
+ movl 120(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 124(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 112(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 116(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 104(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 108(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 96(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 100(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 88(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 92(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 80(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 84(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 72(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 76(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 64(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 68(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 56(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 60(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 48(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 52(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 40(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 44(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 32(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 36(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 24(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 28(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl 16(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 20(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+
+ movl 8(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 12(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+
+ movl (%ecx),%eax
+ xorl %ebx,%ebx
+ movl 4(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ addl $4,%esp
+ ret
+.size _x86_DES_decrypt,.-_x86_DES_decrypt
+.globl DES_encrypt1
+.type DES_encrypt1,@function
+.align 16
+DES_encrypt1:
+.L_DES_encrypt1_begin:
+ pushl %esi
+ pushl %edi
+
+
+ movl 12(%esp),%esi
+ xorl %ecx,%ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%esi),%eax
+ movl 28(%esp),%ebx
+ movl 4(%esi),%edi
+
+
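+# Initial permutation (IP), done as the usual sequence of rotates and
+# masked half-swaps rather than bit by bit.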
+ roll $4,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0xf0f0f0f0,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $20,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xfff0000f,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $14,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x33333333,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $22,%esi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0x03fc03fc,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $9,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0xaaaaaaaa,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $1,%edi
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal DES_SPtrans-.L000pic_point(%ebp),%ebp
+ movl 24(%esp),%ecx
+ cmpl $0,%ebx
+ je .L001decrypt
+ call _x86_DES_encrypt
+ jmp .L002done
+.L001decrypt:
+ call _x86_DES_decrypt
+.L002done:
+
+
+ movl 20(%esp),%edx
+ rorl $1,%esi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%edx)
+ movl %esi,4(%edx)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.size DES_encrypt1,.-.L_DES_encrypt1_begin
+.globl DES_encrypt2
+.type DES_encrypt2,@function
+.align 16
+DES_encrypt2:
+.L_DES_encrypt2_begin:
+ pushl %esi
+ pushl %edi
+
+
+ movl 12(%esp),%eax
+ xorl %ecx,%ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%eax),%esi
+ movl 28(%esp),%ebx
+ roll $3,%esi
+ movl 4(%eax),%edi
+ roll $3,%edi
+ call .L003pic_point
+.L003pic_point:
+ popl %ebp
+ leal DES_SPtrans-.L003pic_point(%ebp),%ebp
+ movl 24(%esp),%ecx
+ cmpl $0,%ebx
+ je .L004decrypt
+ call _x86_DES_encrypt
+ jmp .L005done
+.L004decrypt:
+ call _x86_DES_decrypt
+.L005done:
+
+
+ rorl $3,%edi
+ movl 20(%esp),%eax
+ rorl $3,%esi
+ movl %edi,(%eax)
+ movl %esi,4(%eax)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.size DES_encrypt2,.-.L_DES_encrypt2_begin
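+# DES_encrypt3 implements triple DES as E(ks1)-D(ks2)-E(ks3). DES_encrypt2
+# above deliberately omits IP/FP (it only rotates the halves by 3), so IP is
+# applied once here before the three calls and FP once afterwards;
+# DES_decrypt3 below is the mirror image, D(ks3)-E(ks2)-D(ks1).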
+.globl DES_encrypt3
+.type DES_encrypt3,@function
+.align 16
+DES_encrypt3:
+.L_DES_encrypt3_begin:
+ pushl %ebx
+ movl 8(%esp),%ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ subl $12,%esp
+
+
+ roll $4,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ roll $20,%esi
+ movl %esi,%edi
+ xorl %edx,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%edx
+
+ roll $14,%edi
+ movl %edi,%esi
+ xorl %edx,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%esi
+ xorl %edi,%edx
+
+ roll $22,%edx
+ movl %edx,%edi
+ xorl %esi,%edx
+ andl $0x03fc03fc,%edx
+ xorl %edx,%edi
+ xorl %edx,%esi
+
+ roll $9,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ rorl $3,%edx
+ rorl $2,%esi
+ movl %esi,4(%ebx)
+ movl 36(%esp),%eax
+ movl %edx,(%ebx)
+ movl 40(%esp),%edi
+ movl 44(%esp),%esi
+ movl $1,8(%esp)
+ movl %eax,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ movl $0,8(%esp)
+ movl %edi,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ movl $1,8(%esp)
+ movl %esi,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ addl $12,%esp
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+
+ roll $2,%esi
+ roll $3,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%ebx)
+ movl %esi,4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.size DES_encrypt3,.-.L_DES_encrypt3_begin
+.globl DES_decrypt3
+.type DES_decrypt3,@function
+.align 16
+DES_decrypt3:
+.L_DES_decrypt3_begin:
+ pushl %ebx
+ movl 8(%esp),%ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ subl $12,%esp
+
+
+ roll $4,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ roll $20,%esi
+ movl %esi,%edi
+ xorl %edx,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%edx
+
+ roll $14,%edi
+ movl %edi,%esi
+ xorl %edx,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%esi
+ xorl %edi,%edx
+
+ roll $22,%edx
+ movl %edx,%edi
+ xorl %esi,%edx
+ andl $0x03fc03fc,%edx
+ xorl %edx,%edi
+ xorl %edx,%esi
+
+ roll $9,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ rorl $3,%edx
+ rorl $2,%esi
+ movl %esi,4(%ebx)
+ movl 36(%esp),%esi
+ movl %edx,(%ebx)
+ movl 40(%esp),%edi
+ movl 44(%esp),%eax
+ movl $0,8(%esp)
+ movl %eax,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ movl $1,8(%esp)
+ movl %edi,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ movl $0,8(%esp)
+ movl %esi,4(%esp)
+ movl %ebx,(%esp)
+ call .L_DES_encrypt2_begin
+ addl $12,%esp
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+
+ roll $2,%esi
+ roll $3,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%ebx)
+ movl %esi,4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.size DES_decrypt3,.-.L_DES_decrypt3_begin
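+# DES_ncbc_encrypt: CBC mode around DES_encrypt1. The byte count is rounded
+# down to whole 8-byte blocks (andl $4294967288 is andl $0xfffffff8) for the
+# main loops; an encrypt-side tail of 1-7 bytes is dispatched through the
+# PIC jump table .L011cbc_enc_jmp_table via the ej* labels.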
+.globl DES_ncbc_encrypt
+.type DES_ncbc_encrypt,@function
+.align 16
+DES_ncbc_encrypt:
+.L_DES_ncbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 56(%esp),%ecx
+
+ pushl %ecx
+
+ movl 52(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L006decrypt
+ andl $4294967288,%ebp
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ jz .L007encrypt_finish
+.L008encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call .L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L008encrypt_loop
+.L007encrypt_finish:
+ movl 56(%esp),%ebp
+ andl $7,%ebp
+ jz .L009finish
+ call .L010PIC_point
+.L010PIC_point:
+ popl %edx
+ leal .L011cbc_enc_jmp_table-.L010PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L012ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L013ej6:
+ movb 5(%esi),%dh
+.L014ej5:
+ movb 4(%esi),%dl
+.L015ej4:
+ movl (%esi),%ecx
+ jmp .L016ejend
+.L017ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L018ej2:
+ movb 1(%esi),%ch
+.L019ej1:
+ movb (%esi),%cl
+.L016ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call .L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L009finish
+.L006decrypt:
+ andl $4294967288,%ebp
+ movl 20(%esp),%eax
+ movl 24(%esp),%ebx
+ jz .L020decrypt_finish
+.L021decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call .L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl 20(%esp),%ecx
+ movl 24(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,20(%esp)
+ movl %ebx,24(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L021decrypt_loop
+.L020decrypt_finish:
+ movl 56(%esp),%ebp
+ andl $7,%ebp
+ jz .L009finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call .L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl 20(%esp),%ecx
+ movl 24(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L022dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L023dj6:
+ movb %dh,5(%edi)
+.L024dj5:
+ movb %dl,4(%edi)
+.L025dj4:
+ movl %ecx,(%edi)
+ jmp .L026djend
+.L027dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+.L028dj2:
+	movb	%ch,1(%edi)
+.L029dj1:
+	movb	%cl,(%edi)
+.L026djend:
+ jmp .L009finish
+.L009finish:
+ movl 64(%esp),%ecx
+ addl $28,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L011cbc_enc_jmp_table:
+.long 0
+.long .L019ej1-.L010PIC_point
+.long .L018ej2-.L010PIC_point
+.long .L017ej3-.L010PIC_point
+.long .L015ej4-.L010PIC_point
+.long .L014ej5-.L010PIC_point
+.long .L013ej6-.L010PIC_point
+.long .L012ej7-.L010PIC_point
+.align 64
+.size DES_ncbc_encrypt,.-.L_DES_ncbc_encrypt_begin
+.globl DES_ede3_cbc_encrypt
+.type DES_ede3_cbc_encrypt,@function
+.align 16
+DES_ede3_cbc_encrypt:
+.L_DES_ede3_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 44(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 64(%esp),%ecx
+
+ movl 56(%esp),%eax
+ pushl %eax
+
+ movl 56(%esp),%eax
+ pushl %eax
+
+ movl 56(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L030decrypt
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz .L031encrypt_finish
+.L032encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call .L_DES_encrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L032encrypt_loop
+.L031encrypt_finish:
+ movl 60(%esp),%ebp
+ andl $7,%ebp
+ jz .L033finish
+ call .L034PIC_point
+.L034PIC_point:
+ popl %edx
+ leal .L035cbc_enc_jmp_table-.L034PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L036ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L037ej6:
+ movb 5(%esi),%dh
+.L038ej5:
+ movb 4(%esi),%dl
+.L039ej4:
+ movl (%esi),%ecx
+ jmp .L040ejend
+.L041ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L042ej2:
+ movb 1(%esi),%ch
+.L043ej1:
+ movb (%esi),%cl
+.L040ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call .L_DES_encrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L033finish
+.L030decrypt:
+ andl $4294967288,%ebp
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ jz .L044decrypt_finish
+.L045decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call .L_DES_decrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl 24(%esp),%ecx
+ movl 28(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,24(%esp)
+ movl %ebx,28(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L045decrypt_loop
+.L044decrypt_finish:
+ movl 60(%esp),%ebp
+ andl $7,%ebp
+ jz .L033finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call .L_DES_decrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl 24(%esp),%ecx
+ movl 28(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L046dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L047dj6:
+ movb %dh,5(%edi)
+.L048dj5:
+ movb %dl,4(%edi)
+.L049dj4:
+ movl %ecx,(%edi)
+ jmp .L050djend
+.L051dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+.L052dj2:
+	movb	%ch,1(%edi)
+.L053dj1:
+	movb	%cl,(%edi)
+.L050djend:
+ jmp .L033finish
+.L033finish:
+ movl 76(%esp),%ecx
+ addl $32,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L035cbc_enc_jmp_table:
+.long 0
+.long .L043ej1-.L034PIC_point
+.long .L042ej2-.L034PIC_point
+.long .L041ej3-.L034PIC_point
+.long .L039ej4-.L034PIC_point
+.long .L038ej5-.L034PIC_point
+.long .L037ej6-.L034PIC_point
+.long .L036ej7-.L034PIC_point
+.align 64
+.size DES_ede3_cbc_encrypt,.-.L_DES_ede3_cbc_encrypt_begin
+.align 64
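+# Eight combined S-box/P-permutation tables, 64 32-bit entries apiece (2KB
+# in total); the round code above indexes them at %ebp offsets
+# 0x000,0x100,...,0x700.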
+DES_SPtrans:
+.long 34080768,524288,33554434,34080770
+.long 33554432,526338,524290,33554434
+.long 526338,34080768,34078720,2050
+.long 33556482,33554432,0,524290
+.long 524288,2,33556480,526336
+.long 34080770,34078720,2050,33556480
+.long 2,2048,526336,34078722
+.long 2048,33556482,34078722,0
+.long 0,34080770,33556480,524290
+.long 34080768,524288,2050,33556480
+.long 34078722,2048,526336,33554434
+.long 526338,2,33554434,34078720
+.long 34080770,526336,34078720,33556482
+.long 33554432,2050,524290,0
+.long 524288,33554432,33556482,34080768
+.long 2,34078722,2048,526338
+.long 1074823184,0,1081344,1074790400
+.long 1073741840,32784,1073774592,1081344
+.long 32768,1074790416,16,1073774592
+.long 1048592,1074823168,1074790400,16
+.long 1048576,1073774608,1074790416,32768
+.long 1081360,1073741824,0,1048592
+.long 1073774608,1081360,1074823168,1073741840
+.long 1073741824,1048576,32784,1074823184
+.long 1048592,1074823168,1073774592,1081360
+.long 1074823184,1048592,1073741840,0
+.long 1073741824,32784,1048576,1074790416
+.long 32768,1073741824,1081360,1073774608
+.long 1074823168,32768,0,1073741840
+.long 16,1074823184,1081344,1074790400
+.long 1074790416,1048576,32784,1073774592
+.long 1073774608,16,1074790400,1081344
+.long 67108865,67371264,256,67109121
+.long 262145,67108864,67109121,262400
+.long 67109120,262144,67371008,1
+.long 67371265,257,1,67371009
+.long 0,262145,67371264,256
+.long 257,67371265,262144,67108865
+.long 67371009,67109120,262401,67371008
+.long 262400,0,67108864,262401
+.long 67371264,256,1,262144
+.long 257,262145,67371008,67109121
+.long 0,67371264,262400,67371009
+.long 262145,67108864,67371265,1
+.long 262401,67108865,67108864,67371265
+.long 262144,67109120,67109121,262400
+.long 67109120,0,67371009,257
+.long 67108865,262401,256,67371008
+.long 4198408,268439552,8,272633864
+.long 0,272629760,268439560,4194312
+.long 272633856,268435464,268435456,4104
+.long 268435464,4198408,4194304,268435456
+.long 272629768,4198400,4096,8
+.long 4198400,268439560,272629760,4096
+.long 4104,0,4194312,272633856
+.long 268439552,272629768,272633864,4194304
+.long 272629768,4104,4194304,268435464
+.long 4198400,268439552,8,272629760
+.long 268439560,0,4096,4194312
+.long 0,272629768,272633856,4096
+.long 268435456,272633864,4198408,4194304
+.long 272633864,8,268439552,4198408
+.long 4194312,4198400,272629760,268439560
+.long 4104,268435456,268435464,272633856
+.long 134217728,65536,1024,134284320
+.long 134283296,134218752,66592,134283264
+.long 65536,32,134217760,66560
+.long 134218784,134283296,134284288,0
+.long 66560,134217728,65568,1056
+.long 134218752,66592,0,134217760
+.long 32,134218784,134284320,65568
+.long 134283264,1024,1056,134284288
+.long 134284288,134218784,65568,134283264
+.long 65536,32,134217760,134218752
+.long 134217728,66560,134284320,0
+.long 66592,134217728,1024,65568
+.long 134218784,1024,0,134284320
+.long 134283296,134284288,1056,65536
+.long 66560,134283296,134218752,1056
+.long 32,66592,134283264,134217760
+.long 2147483712,2097216,0,2149588992
+.long 2097216,8192,2147491904,2097152
+.long 8256,2149589056,2105344,2147483648
+.long 2147491840,2147483712,2149580800,2105408
+.long 2097152,2147491904,2149580864,0
+.long 8192,64,2149588992,2149580864
+.long 2149589056,2149580800,2147483648,8256
+.long 64,2105344,2105408,2147491840
+.long 8256,2147483648,2147491840,2105408
+.long 2149588992,2097216,0,2147491840
+.long 2147483648,8192,2149580864,2097152
+.long 2097216,2149589056,2105344,64
+.long 2149589056,2105344,2097152,2147491904
+.long 2147483712,2149580800,2105408,0
+.long 8192,2147483712,2147491904,2149588992
+.long 2149580800,8256,64,2149580864
+.long 16384,512,16777728,16777220
+.long 16794116,16388,16896,0
+.long 16777216,16777732,516,16793600
+.long 4,16794112,16793600,516
+.long 16777732,16384,16388,16794116
+.long 0,16777728,16777220,16896
+.long 16793604,16900,16794112,4
+.long 16900,16793604,512,16777216
+.long 16900,16793600,16793604,516
+.long 16384,512,16777216,16793604
+.long 16777732,16900,16896,0
+.long 512,16777220,4,16777728
+.long 0,16777732,16777728,16896
+.long 516,16384,16794116,16777216
+.long 16794112,4,16388,16794116
+.long 16777220,16794112,16793600,16388
+.long 545259648,545390592,131200,0
+.long 537001984,8388736,545259520,545390720
+.long 128,536870912,8519680,131200
+.long 8519808,537002112,536871040,545259520
+.long 131072,8519808,8388736,537001984
+.long 545390720,536871040,0,8519680
+.long 536870912,8388608,537002112,545259648
+.long 8388608,131072,545390592,128
+.long 8388608,131072,536871040,545390720
+.long 131200,536870912,0,8519680
+.long 545259648,537002112,537001984,8388736
+.long 545390592,128,8388736,537001984
+.long 545390720,8388608,545259520,536871040
+.long 8519680,131200,537002112,545259520
+.long 128,545390592,8519808,0
+.long 536870912,545259648,131072,8519808
diff --git a/deps/openssl/asm/x86-elf-gas/md5/md5-586.s b/deps/openssl/asm/x86-elf-gas/md5/md5-586.s
new file mode 100644
index 0000000000..e354c4ebcd
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/md5/md5-586.s
@@ -0,0 +1,679 @@
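+# md5_block_asm_data_order(ctx, buf, num): MD5 compression over num 64-byte
+# blocks, generated from OpenSSL's crypto/md5/asm/md5-586.pl. The shll $6
+# below scales the block count to bytes so the address of the final block
+# can be computed; %eax/%ebx/%ecx/%edx carry the A/B/C/D state words
+# through the four rounds.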
+.file "../openssl/crypto/md5/asm/md5-586.s"
+.text
+.globl md5_block_asm_data_order
+.type md5_block_asm_data_order,@function
+.align 16
+md5_block_asm_data_order:
+.L_md5_block_asm_data_order_begin:
+ pushl %esi
+ pushl %edi
+ movl 12(%esp),%edi
+ movl 16(%esp),%esi
+ movl 20(%esp),%ecx
+ pushl %ebp
+ shll $6,%ecx
+ pushl %ebx
+ addl %esi,%ecx
+ subl $64,%ecx
+ movl (%edi),%eax
+ pushl %ecx
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+.L000start:
+
+
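+# Round 1: sixteen F steps, F(x,y,z) = (x AND y) OR (NOT x AND z), computed
+# with two XORs and an AND as ((y XOR z) AND x) XOR z. Rotations 7/12/17/22;
+# the large addends (3614090360 = 0xd76aa478, ...) are the standard MD5
+# constants T[i] = floor(2^32 * abs(sin(i))).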
+ movl %ecx,%edi
+ movl (%esi),%ebp
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 3614090360(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 4(%esi),%ebp
+ addl %ebx,%eax
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 3905402710(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 8(%esi),%ebp
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 606105819(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 12(%esi),%ebp
+ addl %edx,%ecx
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 3250441966(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 16(%esi),%ebp
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 4118548399(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 20(%esi),%ebp
+ addl %ebx,%eax
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 1200080426(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 24(%esi),%ebp
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 2821735955(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 28(%esi),%ebp
+ addl %edx,%ecx
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 4249261313(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 32(%esi),%ebp
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 1770035416(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 36(%esi),%ebp
+ addl %ebx,%eax
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 2336552879(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 40(%esi),%ebp
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 4294925233(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 44(%esi),%ebp
+ addl %edx,%ecx
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 2304563134(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 48(%esi),%ebp
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 1804603682(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 52(%esi),%ebp
+ addl %ebx,%eax
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 4254626195(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 56(%esi),%ebp
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 2792965006(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 60(%esi),%ebp
+ addl %edx,%ecx
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 1236535329(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 4(%esi),%ebp
+ addl %ecx,%ebx
+
+
+
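+# Round 2: sixteen G steps, G(x,y,z) = (x AND z) OR (y AND NOT z), computed
+# as ((x XOR y) AND z) XOR y. Rotations 5/9/14/20; message words are taken
+# in the (5i+1 mod 16) order.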
+ leal 4129170786(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 24(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+
+ leal 3225465664(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 44(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+
+ leal 643717713(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl (%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+
+ leal 3921069994(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 20(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+
+ leal 3593408605(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 40(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+
+ leal 38016083(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 60(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+
+ leal 3634488961(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 16(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+
+ leal 3889429448(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 36(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+
+ leal 568446438(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 56(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+
+ leal 3275163606(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 12(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+
+ leal 4107603335(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 32(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+
+ leal 1163531501(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 52(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+
+ leal 2850285829(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 8(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+
+ leal 4243563512(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 28(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+
+ leal 1735328473(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 48(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+
+ leal 2368359562(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 20(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+
+
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 4294588738(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 32(%esi),%ebp
+ movl %ebx,%edi
+
+ leal 2272392833(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 44(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 1839030562(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 56(%esi),%ebp
+ movl %edx,%edi
+
+ leal 4259657740(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 4(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 2763975236(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 16(%esi),%ebp
+ movl %ebx,%edi
+
+ leal 1272893353(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 28(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 4139469664(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 40(%esi),%ebp
+ movl %edx,%edi
+
+ leal 3200236656(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 52(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 681279174(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl (%esi),%ebp
+ movl %ebx,%edi
+
+ leal 3936430074(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 12(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 3572445317(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 24(%esi),%ebp
+ movl %edx,%edi
+
+ leal 76029189(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 36(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 3654602809(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 48(%esi),%ebp
+ movl %ebx,%edi
+
+ leal 3873151461(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 60(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 530742520(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 8(%esi),%ebp
+ movl %edx,%edi
+
+ leal 3299628645(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl (%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+
+
+
+ xorl %edx,%edi
+ orl %ebx,%edi
+ leal 4096336452(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 28(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+
+ orl %eax,%edi
+ leal 1126891415(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 56(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+
+ orl %edx,%edi
+ leal 2878612391(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 20(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+
+ orl %ecx,%edi
+ leal 4237533241(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 48(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+
+ orl %ebx,%edi
+ leal 1700485571(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 12(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+
+ orl %eax,%edi
+ leal 2399980690(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 40(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+
+ orl %edx,%edi
+ leal 4293915773(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 4(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+
+ orl %ecx,%edi
+ leal 2240044497(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 32(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+
+ orl %ebx,%edi
+ leal 1873313359(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 60(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+
+ orl %eax,%edi
+ leal 4264355552(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 24(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+
+ orl %edx,%edi
+ leal 2734768916(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 52(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+
+ orl %ecx,%edi
+ leal 1309151649(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 16(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+
+ orl %ebx,%edi
+ leal 4149444226(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 44(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+
+ orl %eax,%edi
+ leal 3174756917(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 8(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+
+ orl %edx,%edi
+ leal 718787259(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 36(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+
+ orl %ecx,%edi
+ leal 3951481745(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 24(%esp),%ebp
+ addl %edi,%ebx
+ addl $64,%esi
+ roll $21,%ebx
+ movl (%ebp),%edi
+ addl %ecx,%ebx
+ addl %edi,%eax
+ movl 4(%ebp),%edi
+ addl %edi,%ebx
+ movl 8(%ebp),%edi
+ addl %edi,%ecx
+ movl 12(%ebp),%edi
+ addl %edi,%edx
+ movl %eax,(%ebp)
+ movl %ebx,4(%ebp)
+ movl (%esp),%edi
+ movl %ecx,8(%ebp)
+ movl %edx,12(%ebp)
+ cmpl %esi,%edi
+ jae .L000start
+ popl %eax
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size md5_block_asm_data_order,.-.L_md5_block_asm_data_order_begin
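For reference, the code above is the tail of md5_block_asm_data_order: the remaining MD5 round groups run over the state in %eax/%ebx/%ecx/%edx, and the block is then folded into the chaining values reloaded from 24(%esp). A minimal C sketch of one step from the final round group (ROTL32 and MD5_STEP_I are illustrative names, not symbols from this file):

    #include <stdint.h>

    #define ROTL32(v, n) (((v) << (n)) | ((v) >> (32 - (n))))

    /* Round-4 step: a = b + ROTL32(a + I(b,c,d) + X[k] + T, s), with
       I(x,y,z) = y ^ (x | ~z). In the assembly, the movl $-1 / xorl
       pair materializes ~z and the following orl / xorl complete I,
       avoiding a separate not instruction. */
    #define MD5_STEP_I(a, b, c, d, x, t, s) \
        ((a) += ((c) ^ ((b) | ~(d))) + (uint32_t)(x) + (uint32_t)(t), \
         (a) = ROTL32((a), (s)), (a) += (b))
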
diff --git a/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s b/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
new file mode 100644
index 0000000000..9ba94e4b1a
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
@@ -0,0 +1,230 @@
+.file "rc4-586.s"
+.text
+.globl RC4
+.type RC4,@function
+.align 16
+RC4:
+.L_RC4_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edi
+ movl 24(%esp),%edx
+ movl 28(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ cmpl $0,%edx
+ je .L000abort
+ movb (%edi),%al
+ movb 4(%edi),%bl
+ addl $8,%edi
+ leal (%esi,%edx,1),%ecx
+ subl %esi,%ebp
+ movl %ecx,24(%esp)
+ incb %al
+ cmpl $-1,256(%edi)
+ je .L001RC4_CHAR
+ movl (%edi,%eax,4),%ecx
+ andl $-4,%edx
+ jz .L002loop1
+ leal -4(%esi,%edx,1),%edx
+ movl %edx,28(%esp)
+ movl %ebp,32(%esp)
+.align 16
+.L003loop4:
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ movl (%edi,%eax,4),%ecx
+ movl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl (%edi,%eax,4),%ecx
+ orl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl (%edi,%eax,4),%ecx
+ orl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl 32(%esp),%ecx
+ orl (%edi,%edx,4),%ebp
+ rorl $8,%ebp
+ xorl (%esi),%ebp
+ cmpl 28(%esp),%esi
+ movl %ebp,(%ecx,%esi,1)
+ leal 4(%esi),%esi
+ movl (%edi,%eax,4),%ecx
+ jb .L003loop4
+ cmpl 24(%esp),%esi
+ je .L004done
+ movl 32(%esp),%ebp
+.align 16
+.L002loop1:
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ movl (%edi,%edx,4),%edx
+ xorb (%esi),%dl
+ leal 1(%esi),%esi
+ movl (%edi,%eax,4),%ecx
+ cmpl 24(%esp),%esi
+ movb %dl,-1(%ebp,%esi,1)
+ jb .L002loop1
+ jmp .L004done
+.align 16
+.L001RC4_CHAR:
+ movzbl (%edi,%eax,1),%ecx
+.L005cloop1:
+ addb %cl,%bl
+ movzbl (%edi,%ebx,1),%edx
+ movb %cl,(%edi,%ebx,1)
+ movb %dl,(%edi,%eax,1)
+ addb %cl,%dl
+ movzbl (%edi,%edx,1),%edx
+ addb $1,%al
+ xorb (%esi),%dl
+ leal 1(%esi),%esi
+ movzbl (%edi,%eax,1),%ecx
+ cmpl 24(%esp),%esi
+ movb %dl,-1(%ebp,%esi,1)
+ jb .L005cloop1
+.L004done:
+ decb %al
+ movb %bl,-4(%edi)
+ movb %al,-8(%edi)
+.L000abort:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size RC4,.-.L_RC4_begin
+.globl RC4_set_key
+.type RC4_set_key,@function
+.align 16
+RC4_set_key:
+.L_RC4_set_key_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebp
+ movl 28(%esp),%esi
+ leal OPENSSL_ia32cap_P,%edx
+ leal 8(%edi),%edi
+ leal (%esi,%ebp,1),%esi
+ negl %ebp
+ xorl %eax,%eax
+ movl %ebp,-4(%edi)
+ btl $20,(%edx)
+ jc .L006c1stloop
+.align 16
+.L007w1stloop:
+ movl %eax,(%edi,%eax,4)
+ addb $1,%al
+ jnc .L007w1stloop
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+.align 16
+.L008w2ndloop:
+ movl (%edi,%ecx,4),%eax
+ addb (%esi,%ebp,1),%dl
+ addb %al,%dl
+ addl $1,%ebp
+ movl (%edi,%edx,4),%ebx
+ jnz .L009wnowrap
+ movl -4(%edi),%ebp
+.L009wnowrap:
+ movl %eax,(%edi,%edx,4)
+ movl %ebx,(%edi,%ecx,4)
+ addb $1,%cl
+ jnc .L008w2ndloop
+ jmp .L010exit
+.align 16
+.L006c1stloop:
+ movb %al,(%edi,%eax,1)
+ addb $1,%al
+ jnc .L006c1stloop
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ xorl %ebx,%ebx
+.align 16
+.L011c2ndloop:
+ movb (%edi,%ecx,1),%al
+ addb (%esi,%ebp,1),%dl
+ addb %al,%dl
+ addl $1,%ebp
+ movb (%edi,%edx,1),%bl
+ jnz .L012cnowrap
+ movl -4(%edi),%ebp
+.L012cnowrap:
+ movb %al,(%edi,%edx,1)
+ movb %bl,(%edi,%ecx,1)
+ addb $1,%cl
+ jnc .L011c2ndloop
+ movl $-1,256(%edi)
+.L010exit:
+ xorl %eax,%eax
+ movl %eax,-8(%edi)
+ movl %eax,-4(%edi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size RC4_set_key,.-.L_RC4_set_key_begin
+.globl RC4_options
+.type RC4_options,@function
+.align 16
+RC4_options:
+.L_RC4_options_begin:
+ call .L013pic_point
+.L013pic_point:
+ popl %eax
+ leal .L014opts-.L013pic_point(%eax),%eax
+ leal OPENSSL_ia32cap_P,%edx
+ btl $20,(%edx)
+ jnc .L015skip
+ addl $12,%eax
+.L015skip:
+ ret
+.align 64
+.L014opts:
+.byte 114,99,52,40,52,120,44,105,110,116,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 64
+.size RC4_options,.-.L_RC4_options_begin
+.comm OPENSSL_ia32cap_P,4,4
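For reference, the RC4 unit above carries the S-box in one of two layouts: a 32-bit word per entry (the unrolled .L003loop4 and .L002loop1 paths) or a byte per entry (.L001RC4_CHAR / .L005cloop1), selected in RC4_set_key from an OPENSSL_ia32cap_P feature bit; RC4_options reports which is active ("rc4(4x,int)" versus "rc4(1x,char)", the strings encoded in the .byte data at .L014opts). One iteration of .L002loop1 is the standard RC4 PRGA step; a self-contained C sketch (rc4_sketch_key is an illustrative layout, not OpenSSL's RC4_KEY):

    #include <stddef.h>
    #include <stdint.h>

    typedef struct { uint32_t x, y, s[256]; } rc4_sketch_key;

    /* Advance x, mix y, swap S[x] and S[y], then xor the keystream
       byte S[(old S[x] + old S[y]) & 255] into the input stream. */
    static void rc4_sketch(rc4_sketch_key *k, size_t len,
                           const unsigned char *in, unsigned char *out)
    {
        uint32_t x = k->x, y = k->y;
        while (len--) {
            x = (x + 1) & 0xff;
            y = (y + k->s[x]) & 0xff;
            uint32_t tx = k->s[x], ty = k->s[y];
            k->s[x] = ty;
            k->s[y] = tx;
            *out++ = *in++ ^ (unsigned char)k->s[(tx + ty) & 0xff];
        }
        k->x = x;
        k->y = y;
    }
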
diff --git a/deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s b/deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s
new file mode 100644
index 0000000000..ff8a4929ab
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/rc5/rc5-586.s
@@ -0,0 +1,564 @@
+.file "rc5-586.s"
+.text
+.globl RC5_32_encrypt
+.type RC5_32_encrypt,@function
+.align 16
+RC5_32_encrypt:
+.L_RC5_32_encrypt_begin:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp),%edx
+ movl 20(%esp),%ebp
+
+ movl (%edx),%edi
+ movl 4(%edx),%esi
+ pushl %ebx
+ movl (%ebp),%ebx
+ addl 4(%ebp),%edi
+ addl 8(%ebp),%esi
+ xorl %esi,%edi
+ movl 12(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 16(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 20(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 24(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 28(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 32(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 36(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 40(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 44(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 48(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 52(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 56(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 60(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 64(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 68(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 72(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ cmpl $8,%ebx
+ je .L000rc5_exit
+ xorl %esi,%edi
+ movl 76(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 80(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 84(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 88(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 92(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 96(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 100(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 104(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ cmpl $12,%ebx
+ je .L000rc5_exit
+ xorl %esi,%edi
+ movl 108(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 112(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 116(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 120(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 124(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 128(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 132(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 136(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+.L000rc5_exit:
+ movl %edi,(%edx)
+ movl %esi,4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.size RC5_32_encrypt,.-.L_RC5_32_encrypt_begin
+.globl RC5_32_decrypt
+.type RC5_32_decrypt,@function
+.align 16
+RC5_32_decrypt:
+.L_RC5_32_decrypt_begin:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp),%edx
+ movl 20(%esp),%ebp
+
+ movl (%edx),%edi
+ movl 4(%edx),%esi
+ pushl %ebx
+ movl (%ebp),%ebx
+ cmpl $12,%ebx
+ je .L001rc5_dec_12
+ cmpl $8,%ebx
+ je .L002rc5_dec_8
+ movl 136(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 132(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 128(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 124(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 120(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 116(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 112(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 108(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+.L001rc5_dec_12:
+ movl 104(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 100(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 96(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 92(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 88(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 84(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 80(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 76(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+.L002rc5_dec_8:
+ movl 72(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 68(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 64(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 60(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 56(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 52(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 48(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 44(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 40(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 36(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 32(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 28(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 24(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 20(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 16(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 12(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ subl 8(%ebp),%esi
+ subl 4(%ebp),%edi
+.L003rc5_exit:
+ movl %edi,(%edx)
+ movl %esi,4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.size RC5_32_decrypt,.-.L_RC5_32_decrypt_begin
+.globl RC5_32_cbc_encrypt
+.type RC5_32_cbc_encrypt,@function
+.align 16
+RC5_32_cbc_encrypt:
+.L_RC5_32_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+
+ movl 56(%esp),%ecx
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz .L004decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz .L005encrypt_finish
+.L006encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_RC5_32_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L006encrypt_loop
+.L005encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L007finish
+ call .L008PIC_point
+.L008PIC_point:
+ popl %edx
+ leal .L009cbc_enc_jmp_table-.L008PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+.L010ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+.L011ej6:
+ movb 5(%esi),%dh
+.L012ej5:
+ movb 4(%esi),%dl
+.L013ej4:
+ movl (%esi),%ecx
+ jmp .L014ejend
+.L015ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+.L016ej2:
+ movb 1(%esi),%ch
+.L017ej1:
+ movb (%esi),%cl
+.L014ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_RC5_32_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp .L007finish
+.L004decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz .L018decrypt_finish
+.L019decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_RC5_32_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz .L019decrypt_loop
+.L018decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz .L007finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call .L_RC5_32_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+.L020dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+.L021dj6:
+ movb %dh,5(%edi)
+.L022dj5:
+ movb %dl,4(%edi)
+.L023dj4:
+ movl %ecx,(%edi)
+ jmp .L024djend
+.L025dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+.L026dj2:
+ movb %ch,1(%edi)
+.L027dj1:
+ movb %cl,(%edi)
+.L024djend:
+ jmp .L007finish
+.L007finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L009cbc_enc_jmp_table:
+.long 0
+.long .L017ej1-.L008PIC_point
+.long .L016ej2-.L008PIC_point
+.long .L015ej3-.L008PIC_point
+.long .L013ej4-.L008PIC_point
+.long .L012ej5-.L008PIC_point
+.long .L011ej6-.L008PIC_point
+.long .L010ej7-.L008PIC_point
+.align 64
+.size RC5_32_cbc_encrypt,.-.L_RC5_32_cbc_encrypt_begin
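For reference, RC5_32_encrypt and RC5_32_decrypt above are fully unrolled, with the cmpl $8 / cmpl $12 checks dispatching on the round count stored in the first word of the key schedule (8, 12 or 16 rounds), and RC5_32_cbc_encrypt wraps them in CBC chaining plus the usual partial-block jump table. Each half-round is a data-dependent rotate; a C sketch of the encrypt structure (the schedule layout beyond "round count first, subkeys after" is assumed here, and the names are illustrative):

    #include <stdint.h>

    static inline uint32_t rotl32(uint32_t v, uint32_t n)
    {
        n &= 31;  /* roll %cl rotates by %cl mod 32; the mask matches */
        return n ? (v << n) | (v >> (32 - n)) : v;
    }

    /* A = ROTL(A ^ B, B) + S[2i]; B = ROTL(B ^ A, A) + S[2i+1] */
    static void rc5_32_encrypt_sketch(uint32_t d[2], const uint32_t *S, int r)
    {
        uint32_t a = d[0] + S[0];
        uint32_t b = d[1] + S[1];
        for (int i = 1; i <= r; i++) {
            a = rotl32(a ^ b, b) + S[2 * i];
            b = rotl32(b ^ a, a) + S[2 * i + 1];
        }
        d[0] = a;
        d[1] = b;
    }
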
diff --git a/deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s b/deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s
new file mode 100644
index 0000000000..3c45fb91d0
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/ripemd/rmd-586.s
@@ -0,0 +1,1965 @@
+.file "../openssl/crypto/ripemd/asm/rmd-586.s"
+.text
+.globl ripemd160_block_asm_data_order
+.type ripemd160_block_asm_data_order,@function
+.align 16
+ripemd160_block_asm_data_order:
+.L_ripemd160_block_asm_data_order_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%eax
+ pushl %esi
+ movl (%edx),%ecx
+ pushl %edi
+ movl 4(%edx),%esi
+ pushl %ebp
+ movl 8(%edx),%edi
+ pushl %ebx
+ subl $108,%esp
+.L000start:
+
+ movl (%eax),%ebx
+ movl 4(%eax),%ebp
+ movl %ebx,(%esp)
+ movl %ebp,4(%esp)
+ movl 8(%eax),%ebx
+ movl 12(%eax),%ebp
+ movl %ebx,8(%esp)
+ movl %ebp,12(%esp)
+ movl 16(%eax),%ebx
+ movl 20(%eax),%ebp
+ movl %ebx,16(%esp)
+ movl %ebp,20(%esp)
+ movl 24(%eax),%ebx
+ movl 28(%eax),%ebp
+ movl %ebx,24(%esp)
+ movl %ebp,28(%esp)
+ movl 32(%eax),%ebx
+ movl 36(%eax),%ebp
+ movl %ebx,32(%esp)
+ movl %ebp,36(%esp)
+ movl 40(%eax),%ebx
+ movl 44(%eax),%ebp
+ movl %ebx,40(%esp)
+ movl %ebp,44(%esp)
+ movl 48(%eax),%ebx
+ movl 52(%eax),%ebp
+ movl %ebx,48(%esp)
+ movl %ebp,52(%esp)
+ movl 56(%eax),%ebx
+ movl 60(%eax),%ebp
+ movl %ebx,56(%esp)
+ movl %ebp,60(%esp)
+ movl %edi,%eax
+ movl 12(%edx),%ebx
+ movl 16(%edx),%ebp
+
+ xorl %ebx,%eax
+ movl (%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $11,%ecx
+ addl %ebp,%ecx
+
+ xorl %edi,%eax
+ movl 4(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $14,%ebp
+ addl %ebx,%ebp
+
+ movl 8(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $15,%ebx
+ addl %edi,%ebx
+
+ xorl %ecx,%eax
+ movl 12(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $12,%edi
+ addl %esi,%edi
+
+ movl 16(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+
+ xorl %ebx,%eax
+ movl 20(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ movl 24(%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+
+ xorl %esi,%eax
+ movl 28(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ movl 32(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $11,%edi
+ addl %esi,%edi
+
+ xorl %ebp,%eax
+ movl 36(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $10,%ebx
+ addl %edx,%esi
+ xorl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+
+ movl 40(%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ xorl %edi,%eax
+ movl 44(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+
+ movl 48(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $6,%ebx
+ addl %edi,%ebx
+
+ xorl %ecx,%eax
+ movl 52(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $7,%edi
+ addl %esi,%edi
+
+ movl 56(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+
+ xorl %ebx,%eax
+ movl 60(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl $-1,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ movl 28(%esp),%edx
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $7,%ebp
+ addl %ebx,%ebp
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 52(%esp),%edx
+ roll $10,%ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $6,%ebx
+ addl %edi,%ebx
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $8,%edi
+ addl %esi,%edi
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 40(%esp),%edx
+ roll $10,%ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $11,%ecx
+ addl %ebp,%ecx
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 60(%esp),%edx
+ roll $10,%esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ecx
+ leal 1518500249(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $7,%ebx
+ addl %edi,%ebx
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %edx,%eax
+ movl 48(%esp),%edx
+ roll $10,%ebp
+ leal 1518500249(%edi,%eax,1),%edi
+ movl $-1,%eax
+ roll $15,%edi
+ addl %esi,%edi
+
+ addl %edx,%esi
+ movl %ebx,%edx
+ subl %edi,%eax
+ andl %edi,%edx
+ andl %ebp,%eax
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ebx
+ leal 1518500249(%esi,%edx,1),%esi
+ movl $-1,%edx
+ roll $7,%esi
+ addl %ecx,%esi
+
+ addl %eax,%ecx
+ movl %edi,%eax
+ subl %esi,%edx
+ andl %esi,%eax
+ andl %ebx,%edx
+ orl %edx,%eax
+ movl 36(%esp),%edx
+ roll $10,%edi
+ leal 1518500249(%ecx,%eax,1),%ecx
+ movl $-1,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $15,%ebp
+ addl %ebx,%ebp
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 8(%esp),%edx
+ roll $10,%ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $11,%edi
+ addl %esi,%edi
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 44(%esp),%edx
+ roll $10,%ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $13,%ecx
+ addl %ebp,%ecx
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $12,%ebp
+ addl %ebx,%ebp
+
+ movl 12(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $11,%ebx
+ addl %edi,%ebx
+
+ movl 40(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $13,%edi
+ addl %esi,%edi
+
+ movl 56(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+
+ movl 16(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $7,%ecx
+ addl %ebp,%ecx
+
+ movl 36(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $14,%ebp
+ addl %ebx,%ebp
+
+ movl 60(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ subl %ebp,%edx
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ movl 32(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%edi
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ subl %ebx,%eax
+ roll $13,%edi
+ addl %esi,%edi
+
+ movl 4(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%esi
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1859775393(%esi,%eax,1),%esi
+ subl %edi,%edx
+ roll $15,%esi
+ addl %ecx,%esi
+
+ movl 8(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ecx
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1859775393(%ecx,%edx,1),%ecx
+ subl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ movl 28(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebp
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1859775393(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $8,%ebp
+ addl %ebx,%ebp
+
+ movl (%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+
+ movl 24(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $6,%edi
+ addl %esi,%edi
+
+ movl 52(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+
+ movl 44(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $12,%ecx
+ addl %ebp,%ecx
+
+ movl 20(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+
+ movl 48(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $11,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 36(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $12,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 40(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $15,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $5,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 60(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $6,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 8(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ subl %ebp,%edx
+ roll $12,%edi
+ addl %esi,%edi
+
+ movl 16(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+
+ movl (%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $15,%ecx
+ addl %ebp,%ecx
+
+ movl 20(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $5,%ebp
+ addl %ebx,%ebp
+
+ movl 36(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $11,%ebx
+ addl %edi,%ebx
+
+ movl 28(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $6,%edi
+ addl %esi,%edi
+
+ movl 48(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ subl %ebx,%edx
+ roll $8,%esi
+ addl %ecx,%esi
+
+ movl 8(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 2840853838(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $13,%ecx
+ addl %ebp,%ecx
+
+ movl 40(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 2840853838(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $12,%ebp
+ addl %ebx,%ebp
+
+ movl 56(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 2840853838(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ movl 4(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 2840853838(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $12,%edi
+ addl %esi,%edi
+
+ movl 12(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+
+ movl 32(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $11,%ebp
+ addl %ebx,%ebp
+
+ movl 24(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $8,%ebx
+ addl %edi,%ebx
+
+ movl 60(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $5,%edi
+ addl %esi,%edi
+
+ movl 52(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl 128(%esp),%edx
+ roll $10,%ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ movl %ecx,64(%esp)
+ roll $6,%esi
+ addl %ecx,%esi
+ movl (%edx),%ecx
+ movl %esi,68(%esp)
+ movl %edi,72(%esp)
+ movl 4(%edx),%esi
+ movl %ebx,76(%esp)
+ movl 8(%edx),%edi
+ movl %ebp,80(%esp)
+ movl 12(%edx),%ebx
+ movl 16(%edx),%ebp
+
+ movl $-1,%edx
+ subl %ebx,%edx
+ movl 20(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ movl 56(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $9,%ebp
+ addl %ebx,%ebp
+
+ movl 28(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ movl (%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $11,%edi
+ addl %esi,%edi
+
+ movl 36(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+
+ movl 8(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $15,%ecx
+ addl %ebp,%ecx
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1352829926(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+
+ movl 16(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1352829926(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ movl 52(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1352829926(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $7,%edi
+ addl %esi,%edi
+
+ movl 24(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1352829926(%esi,%eax,1),%esi
+ subl %ebx,%edx
+ roll $7,%esi
+ addl %ecx,%esi
+
+ movl 60(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ movl 32(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $11,%ebp
+ addl %ebx,%ebp
+
+ movl 4(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+
+ movl 40(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $14,%edi
+ addl %esi,%edi
+
+ movl 12(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $12,%esi
+ addl %ecx,%esi
+
+ movl 48(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ movl %edi,%eax
+ roll $6,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $15,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $8,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 40(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $11,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 60(%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $7,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $12,%ebp
+ addl %ebx,%ebp
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $7,%ebx
+ addl %edi,%ebx
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $6,%edi
+ addl %esi,%edi
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 36(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $15,%esi
+ addl %ecx,%esi
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $13,%ecx
+ addl %ebp,%ecx
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 8(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ subl %ecx,%edx
+ roll $11,%ebp
+ addl %ebx,%ebp
+
+ movl 60(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ movl 20(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $7,%edi
+ addl %esi,%edi
+
+ movl 4(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $15,%esi
+ addl %ecx,%esi
+
+ movl 12(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $11,%ecx
+ addl %ebp,%ecx
+
+ movl 28(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $8,%ebp
+ addl %ebx,%ebp
+
+ movl 56(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ subl %ebp,%edx
+ roll $6,%ebx
+ addl %edi,%ebx
+
+ movl 24(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%edi
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1836072691(%edi,%edx,1),%edi
+ subl %ebx,%eax
+ roll $6,%edi
+ addl %esi,%edi
+
+ movl 36(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%esi
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1836072691(%esi,%eax,1),%esi
+ subl %edi,%edx
+ roll $14,%esi
+ addl %ecx,%esi
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ecx
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1836072691(%ecx,%edx,1),%ecx
+ subl %esi,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+
+ movl 32(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebp
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1836072691(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $13,%ebp
+ addl %ebx,%ebp
+
+ movl 48(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ movl 8(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $14,%edi
+ addl %esi,%edi
+
+ movl 40(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+
+ movl (%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $13,%ecx
+ addl %ebp,%ecx
+
+ movl 16(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+
+ movl 52(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl 32(%esp),%edx
+ roll $10,%ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $15,%edi
+ addl %esi,%edi
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 16(%esp),%edx
+ roll $10,%ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $8,%ecx
+ addl %ebp,%ecx
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 12(%esp),%edx
+ roll $10,%esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $11,%ebp
+ addl %ebx,%ebp
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $14,%ebx
+ addl %edi,%ebx
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %edx,%eax
+ movl 60(%esp),%edx
+ roll $10,%ebp
+ leal 2053994217(%edi,%eax,1),%edi
+ movl $-1,%eax
+ roll $14,%edi
+ addl %esi,%edi
+
+ addl %edx,%esi
+ movl %ebx,%edx
+ subl %edi,%eax
+ andl %edi,%edx
+ andl %ebp,%eax
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ebx
+ leal 2053994217(%esi,%edx,1),%esi
+ movl $-1,%edx
+ roll $6,%esi
+ addl %ecx,%esi
+
+ addl %eax,%ecx
+ movl %edi,%eax
+ subl %esi,%edx
+ andl %esi,%eax
+ andl %ebx,%edx
+ orl %edx,%eax
+ movl 20(%esp),%edx
+ roll $10,%edi
+ leal 2053994217(%ecx,%eax,1),%ecx
+ movl $-1,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%esi
+ leal 2053994217(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $6,%ebp
+ addl %ebx,%ebp
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 8(%esp),%edx
+ roll $10,%ecx
+ leal 2053994217(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $12,%edi
+ addl %esi,%edi
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 36(%esp),%edx
+ roll $10,%ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $12,%ecx
+ addl %ebp,%ecx
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 40(%esp),%edx
+ roll $10,%esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $5,%ebp
+ addl %ebx,%ebp
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $15,%ebx
+ addl %edi,%ebx
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl %ebx,%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ xorl %ebp,%eax
+ roll $8,%edi
+ addl %esi,%edi
+
+ movl 48(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $8,%esi
+ addl %ecx,%esi
+
+ xorl %ebx,%eax
+ movl 60(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $5,%ecx
+ addl %ebp,%ecx
+
+ movl 40(%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $12,%ebp
+ addl %ebx,%ebp
+
+ xorl %esi,%eax
+ movl 16(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+
+ movl 4(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $12,%edi
+ addl %esi,%edi
+
+ xorl %ebp,%eax
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $10,%ebx
+ addl %edx,%esi
+ xorl %ebx,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+
+ movl 32(%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+
+ xorl %edi,%eax
+ movl 28(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $6,%ebp
+ addl %ebx,%ebp
+
+ movl 24(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $8,%ebx
+ addl %edi,%ebx
+
+ xorl %ecx,%eax
+ movl 8(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $13,%edi
+ addl %esi,%edi
+
+ movl 52(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+
+ xorl %ebx,%eax
+ movl 56(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $5,%ecx
+ addl %ebp,%ecx
+
+ movl (%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+
+ xorl %esi,%eax
+ movl 12(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+
+ movl 36(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $11,%edi
+ addl %esi,%edi
+
+ xorl %ebp,%eax
+ movl 44(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ roll $10,%ebx
+ addl %edx,%esi
+ movl 128(%esp),%edx
+ roll $11,%esi
+ addl %ecx,%esi
+ movl 4(%edx),%eax
+ addl %eax,%ebx
+ movl 72(%esp),%eax
+ addl %eax,%ebx
+ movl 8(%edx),%eax
+ addl %eax,%ebp
+ movl 76(%esp),%eax
+ addl %eax,%ebp
+ movl 12(%edx),%eax
+ addl %eax,%ecx
+ movl 80(%esp),%eax
+ addl %eax,%ecx
+ movl 16(%edx),%eax
+ addl %eax,%esi
+ movl 64(%esp),%eax
+ addl %eax,%esi
+ movl (%edx),%eax
+ addl %eax,%edi
+ movl 68(%esp),%eax
+ addl %eax,%edi
+ movl 136(%esp),%eax
+ movl %ebx,(%edx)
+ movl %ebp,4(%edx)
+ movl %ecx,8(%edx)
+ subl $1,%eax
+ movl %esi,12(%edx)
+ movl %edi,16(%edx)
+ jle .L001get_out
+ movl %eax,136(%esp)
+ movl %ecx,%edi
+ movl 132(%esp),%eax
+ movl %ebx,%ecx
+ addl $64,%eax
+ movl %ebp,%esi
+ movl %eax,132(%esp)
+ jmp .L000start
+.L001get_out:
+ addl $108,%esp
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.size ripemd160_block_asm_data_order,.-.L_ripemd160_block_asm_data_order_begin
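For reference, ripemd160_block_asm_data_order above runs the two RIPEMD-160 lines in sequence: the left line (constants 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E, written in decimal in the leal operands) is parked at 64(%esp)..80(%esp), the state is reloaded from the context pointer at 128(%esp), the right line (0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9) reprocesses the same block, and the tail cross-combines both halves into the five chaining words. The recurring movl $-1 / subl / andl / orl pattern is the selection function; a C sketch of the trick (rmd_mux is an illustrative name):

    #include <stdint.h>

    /* f(x,y,z) = (x & y) | (~x & z). The code above builds ~x as
       (-1 - x) -- the two's-complement identity behind movl $-1
       followed by subl -- so no separate not instruction is needed. */
    static inline uint32_t rmd_mux(uint32_t x, uint32_t y, uint32_t z)
    {
        uint32_t nx = 0xFFFFFFFFu - x;   /* == ~x */
        return (x & y) | (nx & z);
    }
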
diff --git a/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s b/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
new file mode 100644
index 0000000000..cccb1aba85
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
@@ -0,0 +1,1442 @@
+.file "sha1-586.s"
+.text
+.globl sha1_block_data_order
+.type sha1_block_data_order,@function
+.align 16
+sha1_block_data_order:
+.L_sha1_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%ebp
+ movl 24(%esp),%esi
+ movl 28(%esp),%eax
+ subl $64,%esp
+ shll $6,%eax
+ addl %esi,%eax
+ movl %eax,92(%esp)
+ movl 16(%ebp),%edi
+.align 16
+.L000loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edx,28(%esp)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edx,44(%esp)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,48(%esp)
+ movl %ebx,52(%esp)
+ movl %ecx,56(%esp)
+ movl %edx,60(%esp)
+ movl %esi,88(%esp)
+ movl (%ebp),%eax
+ movl 4(%ebp),%ebx
+ movl 8(%ebp),%ecx
+ movl 12(%ebp),%edx
+
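+# rounds 0..15: F=Ch(b,c,d)=(b&c)|(~b&d), computed as ((c^d)&b)^d,
+# K=0x5A827999 (1518500249)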
+ movl %ecx,%esi
+ movl %eax,%ebp
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl (%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 4(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 8(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 12(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ andl %edx,%ebx
+ movl 16(%esp),%eax
+ xorl %esi,%ebx
+ rorl $2,%edx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ andl %ecx,%eax
+ movl 20(%esp),%esi
+ xorl %edi,%eax
+ rorl $2,%ecx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl 24(%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 28(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 32(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 36(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ andl %edx,%ebx
+ movl 40(%esp),%eax
+ xorl %esi,%ebx
+ rorl $2,%edx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ andl %ecx,%eax
+ movl 44(%esp),%esi
+ xorl %edi,%eax
+ rorl $2,%ecx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl 48(%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 52(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 56(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 60(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ebp,%ecx
+
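+# rounds 16..19: the message schedule starts here,
+# W[t]=rol(W[t-3]^W[t-8]^W[t-14]^W[t-16],1), same F and K as above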
+ movl (%esp),%ebx
+ movl %edi,%ebp
+ xorl 8(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 32(%esp),%ebx
+ andl %edx,%ebp
+ rorl $2,%edx
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ xorl %esi,%ebp
+ movl %ebx,(%esp)
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+
+ movl 4(%esp),%eax
+ movl %edx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edi,%ebp
+ xorl 36(%esp),%eax
+ andl %ecx,%ebp
+ rorl $2,%ecx
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ xorl %edi,%ebp
+ movl %eax,4(%esp)
+ leal 1518500249(%eax,%esi,1),%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+
+ movl 8(%esp),%esi
+ movl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 40(%esp),%esi
+ andl %ebx,%ebp
+ rorl $2,%ebx
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ xorl %edx,%ebp
+ movl %esi,8(%esp)
+ leal 1518500249(%esi,%edi,1),%esi
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+
+ movl 12(%esp),%edi
+ movl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 44(%esp),%edi
+ andl %eax,%ebp
+ rorl $2,%eax
+ xorl (%esp),%edi
+ roll $1,%edi
+ xorl %ecx,%ebp
+ movl %edi,12(%esp)
+ leal 1518500249(%edi,%edx,1),%edi
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+
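+# rounds 20..39: F=b^c^d, K=0x6ED9EBA1 (1859775393)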
+ movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,16(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,20(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,24(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,28(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,32(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,36(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,40(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,44(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,48(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl (%esp),%edx
+ rorl $2,%esi
+ xorl 8(%esp),%edx
+ xorl %eax,%ebp
+ xorl 32(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 4(%esp),%ecx
+ rorl $2,%edi
+ xorl 12(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 56(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,4(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl 8(%esp),%ebx
+ rorl $2,%edx
+ xorl 16(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,8(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 12(%esp),%eax
+ rorl $2,%ecx
+ xorl 20(%esp),%eax
+ xorl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl (%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,12(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 16(%esp),%esi
+ rorl $2,%ebx
+ xorl 24(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 4(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,16(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 20(%esp),%edi
+ rorl $2,%eax
+ xorl 28(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 8(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,20(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl 24(%esp),%edx
+ rorl $2,%esi
+ xorl 32(%esp),%edx
+ xorl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 12(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,24(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 28(%esp),%ecx
+ rorl $2,%edi
+ xorl 36(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 16(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,28(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
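+# rounds 40..59: F=Maj(b,c,d), computed as ((b|c)&d)|(b&c),
+# K=0x8F1BBCDC (2400959708)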
+ movl 32(%esp),%ebx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,32(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+
+ movl 36(%esp),%eax
+ movl 44(%esp),%ebp
+ xorl %ebp,%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 24(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,36(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+
+ movl 40(%esp),%esi
+ movl 48(%esp),%ebp
+ xorl %ebp,%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 28(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,40(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+
+ movl 44(%esp),%edi
+ movl 52(%esp),%ebp
+ xorl %ebp,%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 32(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,44(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+
+ movl 48(%esp),%edx
+ movl 56(%esp),%ebp
+ xorl %ebp,%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 36(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,48(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+
+ movl 52(%esp),%ecx
+ movl 60(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,52(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+
+ movl 56(%esp),%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,56(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+
+ movl 60(%esp),%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 48(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,60(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+
+ movl (%esp),%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 52(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+
+ movl 4(%esp),%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 56(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,4(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+
+ movl 8(%esp),%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl 60(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,8(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+
+ movl 12(%esp),%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl (%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,12(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+
+ movl 16(%esp),%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,16(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+
+ movl 20(%esp),%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 8(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,20(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+
+ movl 24(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 56(%esp),%ebp
+ xorl %ebp,%esi
+ movl 12(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,24(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+
+ movl 28(%esp),%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 60(%esp),%ebp
+ xorl %ebp,%edi
+ movl 16(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,28(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+
+ movl 32(%esp),%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl (%esp),%ebp
+ xorl %ebp,%edx
+ movl 20(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,32(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+
+ movl 36(%esp),%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,36(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+
+ movl 40(%esp),%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 8(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 28(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,40(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+
+ movl 44(%esp),%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 12(%esp),%ebp
+ xorl %ebp,%eax
+ movl 32(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,44(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+
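+# rounds 60..79: F=b^c^d again, K=0xCA62C1D6 (3395469782)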
+ movl %ebx,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ebx
+ xorl 56(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 36(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,48(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 52(%esp),%edi
+ rorl $2,%eax
+ xorl 60(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,52(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl 56(%esp),%edx
+ rorl $2,%esi
+ xorl (%esp),%edx
+ xorl %eax,%ebp
+ xorl 24(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,56(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 60(%esp),%ecx
+ rorl $2,%edi
+ xorl 4(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 48(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,60(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl (%esp),%ebx
+ rorl $2,%edx
+ xorl 8(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 32(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 4(%esp),%eax
+ rorl $2,%ecx
+ xorl 12(%esp),%eax
+ xorl %edx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edi,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,4(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 8(%esp),%esi
+ rorl $2,%ebx
+ xorl 16(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%esi
+ xorl %edx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,8(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 12(%esp),%edi
+ rorl $2,%eax
+ xorl 20(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edi
+ xorl %ecx,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,12(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,16(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,20(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,24(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,28(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,32(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,36(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+
+ movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,40(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+
+ movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,44(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+
+ movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,48(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+
+ movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+
+ movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+
+ movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
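+# 80 rounds done: add the working variables back into the state
+# (ctx at 84(%esp)), advance the data pointer and loop while below
+# the end pointer at 92(%esp)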
+ movl 84(%esp),%ebp
+ movl 88(%esp),%edx
+ addl (%ebp),%edi
+ addl 4(%ebp),%esi
+ addl 8(%ebp),%eax
+ addl 12(%ebp),%ebx
+ addl 16(%ebp),%ecx
+ movl %edi,(%ebp)
+ addl $64,%edx
+ movl %esi,4(%ebp)
+ cmpl 92(%esp),%edx
+ movl %eax,8(%ebp)
+ movl %ecx,%edi
+ movl %ebx,12(%ebp)
+ movl %edx,%esi
+ movl %ecx,16(%ebp)
+ jb .L000loop
+ addl $64,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size sha1_block_data_order,.-.L_sha1_block_data_order_begin
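+# the .byte lines below spell "SHA1 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"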
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s b/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
new file mode 100644
index 0000000000..973e50d198
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
@@ -0,0 +1,261 @@
+.file "sha512-586.s"
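+# (the .file name above is inherited from the upstream generator script;
+# the code below is the SHA-256 block function)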
+.text
+.globl sha256_block_data_order
+.type sha256_block_data_order,@function
+.align 16
+sha256_block_data_order:
+.L_sha256_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K256-.L000pic_point(%ebp),%ebp
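+# %ebp now addresses the K256 constant table (position-independent via call/pop)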
+ subl $16,%esp
+ andl $-64,%esp
+ shll $6,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 16
+.L002loop:
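+# byte-swap the sixteen input words onto the stack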
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ subl $32,%esp
+ movl %edi,100(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edi,12(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edi,28(%esp)
+.align 16
+.L00300_15:
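+# rounds 0..15, one loop iteration per round: Sigma1/Ch then Sigma0/Maj,
+# with K pulled from (%ebp); exits after K[15]=0xC19BF174 (3248222580)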
+ movl 92(%esp),%ebx
+ movl %edx,%ecx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ xorl %edi,%ecx
+ movl 24(%esp),%edi
+ addl %ecx,%ebx
+ movl %edx,16(%esp)
+ xorl %edi,%esi
+ movl %eax,%ecx
+ andl %edx,%esi
+ movl 12(%esp),%edx
+ xorl %edi,%esi
+ movl %eax,%edi
+ addl %esi,%ebx
+ rorl $2,%ecx
+ addl 28(%esp),%ebx
+ rorl $13,%edi
+ movl 4(%esp),%esi
+ xorl %edi,%ecx
+ rorl $9,%edi
+ addl %ebx,%edx
+ xorl %edi,%ecx
+ movl 8(%esp),%edi
+ addl %ecx,%ebx
+ movl %eax,(%esp)
+ movl %eax,%ecx
+ subl $4,%esp
+ orl %esi,%eax
+ andl %esi,%ecx
+ andl %edi,%eax
+ movl (%ebp),%esi
+ orl %ecx,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ addl %esi,%edx
+ addl %esi,%eax
+ cmpl $3248222580,%esi
+ jne .L00300_15
+ movl 152(%esp),%ebx
+.align 16
+.L00416_63:
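+# rounds 16..63: sigma0/sigma1 extend the message schedule in place;
+# exits after K[63]=0xC67178F2 (3329325298)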
+ movl %ebx,%esi
+ movl 100(%esp),%ecx
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ movl 156(%esp),%esi
+ rorl $17,%edi
+ xorl %edi,%ecx
+ rorl $2,%edi
+ addl %esi,%ebx
+ xorl %ecx,%edi
+ addl %edi,%ebx
+ movl %edx,%ecx
+ addl 120(%esp),%ebx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ movl 24(%esp),%edi
+ addl %ecx,%ebx
+ movl %edx,16(%esp)
+ xorl %edi,%esi
+ movl %eax,%ecx
+ andl %edx,%esi
+ movl 12(%esp),%edx
+ xorl %edi,%esi
+ movl %eax,%edi
+ addl %esi,%ebx
+ rorl $2,%ecx
+ addl 28(%esp),%ebx
+ rorl $13,%edi
+ movl 4(%esp),%esi
+ xorl %edi,%ecx
+ rorl $9,%edi
+ addl %ebx,%edx
+ xorl %edi,%ecx
+ movl 8(%esp),%edi
+ addl %ecx,%ebx
+ movl %eax,(%esp)
+ movl %eax,%ecx
+ subl $4,%esp
+ orl %esi,%eax
+ andl %esi,%ecx
+ andl %edi,%eax
+ movl (%ebp),%esi
+ orl %ecx,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ movl 152(%esp),%ebx
+ addl %esi,%edx
+ addl %esi,%eax
+ cmpl $3329325298,%esi
+ jne .L00416_63
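+# add the working variables into the state (ctx at 352(%esp)) and
+# loop to the next 64-byte block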
+ movl 352(%esp),%esi
+ movl 4(%esp),%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edi
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%ecx
+ addl 12(%esi),%edi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edi,12(%esi)
+ movl 20(%esp),%eax
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ecx
+ movl 356(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ addl $352,%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K256:
+.long 1116352408,1899447441,3049323471,3921009573
+.long 961987163,1508970993,2453635748,2870763221
+.long 3624381080,310598401,607225278,1426881987
+.long 1925078388,2162078206,2614888103,3248222580
+.long 3835390401,4022224774,264347078,604807628
+.long 770255983,1249150122,1555081692,1996064986
+.long 2554220882,2821834349,2952996808,3210313671
+.long 3336571891,3584528711,113926993,338241895
+.long 666307205,773529912,1294757372,1396182291
+.long 1695183700,1986661051,2177026350,2456956037
+.long 2730485921,2820302411,3259730800,3345764771
+.long 3516065817,3600352804,4094571909,275423344
+.long 430227734,506948616,659060556,883997877
+.long 958139571,1322822218,1537002063,1747873779
+.long 1955562222,2024104815,2227730452,2361852424
+.long 2428436474,2756734187,3204031479,3329325298
+.size sha256_block_data_order,.-.L_sha256_block_data_order_begin
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
diff --git a/deps/openssl/asm/x86-elf-gas/sha/sha512-586.s b/deps/openssl/asm/x86-elf-gas/sha/sha512-586.s
new file mode 100644
index 0000000000..4b806f352e
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/sha/sha512-586.s
@@ -0,0 +1,563 @@
+.file "sha512-586.s"
+.text
+.globl sha512_block_data_order
+.type sha512_block_data_order,@function
+.align 16
+sha512_block_data_order:
+.L_sha512_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001K512-.L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $7,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 16
+.L002loop_x86:
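+# byte-swap the 128-byte input block onto the stack as 32-bit halves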
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 64(%edi),%eax
+ movl 68(%edi),%ebx
+ movl 72(%edi),%ecx
+ movl 76(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 80(%edi),%eax
+ movl 84(%edi),%ebx
+ movl 88(%edi),%ecx
+ movl 92(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 96(%edi),%eax
+ movl 100(%edi),%ebx
+ movl 104(%edi),%ecx
+ movl 108(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 112(%edi),%eax
+ movl 116(%edi),%ebx
+ movl 120(%edi),%ecx
+ movl 124(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $128,%edi
+ subl $72,%esp
+ movl %edi,204(%esp)
+ leal 8(%esp),%edi
+ movl $16,%ecx
+.long 2784229001
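+# the .long above encodes "mov %esi,%esi; rep movsl": with %ecx=16 it
+# copies the eight 64-bit state words from the ctx to the stack frame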
+.align 16
+.L00300_15_x86:
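+# rounds 0..15: 64-bit arithmetic emulated with 32-bit register pairs
+# (addl/adcl, paired shifts for Sigma); exits once the K word just used
+# has low byte 0x94, i.e. K[15]=0xC19BF174CF692694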
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $148,%dl
+ jne .L00300_15_x86
+.align 16
+.L00416_79_x86:
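+# rounds 16..79: sigma0/sigma1 schedule update plus the round itself;
+# exits once the K word has low byte 0x17, i.e. K[79]=0x6C44198C4A475817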
+ movl 312(%esp),%ecx
+ movl 316(%esp),%edx
+ movl %ecx,%esi
+ shrl $1,%ecx
+ movl %edx,%edi
+ shrl $1,%edx
+ movl %ecx,%eax
+ shll $24,%esi
+ movl %edx,%ebx
+ shll $24,%edi
+ xorl %esi,%ebx
+ shrl $6,%ecx
+ xorl %edi,%eax
+ shrl $6,%edx
+ xorl %ecx,%eax
+ shll $7,%esi
+ xorl %edx,%ebx
+ shll $1,%edi
+ xorl %esi,%ebx
+ shrl $1,%ecx
+ xorl %edi,%eax
+ shrl $1,%edx
+ xorl %ecx,%eax
+ shll $6,%edi
+ xorl %edx,%ebx
+ xorl %edi,%eax
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl 208(%esp),%ecx
+ movl 212(%esp),%edx
+ movl %ecx,%esi
+ shrl $6,%ecx
+ movl %edx,%edi
+ shrl $6,%edx
+ movl %ecx,%eax
+ shll $3,%esi
+ movl %edx,%ebx
+ shll $3,%edi
+ xorl %esi,%eax
+ shrl $13,%ecx
+ xorl %edi,%ebx
+ shrl $13,%edx
+ xorl %ecx,%eax
+ shll $10,%esi
+ xorl %edx,%ebx
+ shll $10,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ xorl %edi,%eax
+ shrl $10,%edx
+ xorl %ecx,%ebx
+ shll $13,%edi
+ xorl %edx,%eax
+ xorl %edi,%eax
+ movl 320(%esp),%ecx
+ movl 324(%esp),%edx
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ movl 248(%esp),%esi
+ movl 252(%esp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,192(%esp)
+ movl %ebx,196(%esp)
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $23,%dl
+ jne .L00416_79_x86
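+# all 80 rounds done: add the working values into the eight 64-bit
+# state words (addl/adcl pairs) and loop while input remains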
+ movl 840(%esp),%esi
+ movl 844(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ addl 8(%esp),%eax
+ adcl 12(%esp),%ebx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ addl 16(%esp),%ecx
+ adcl 20(%esp),%edx
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ addl 24(%esp),%eax
+ adcl 28(%esp),%ebx
+ movl %eax,16(%esi)
+ movl %ebx,20(%esi)
+ addl 32(%esp),%ecx
+ adcl 36(%esp),%edx
+ movl %ecx,24(%esi)
+ movl %edx,28(%esi)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ addl 40(%esp),%eax
+ adcl 44(%esp),%ebx
+ movl %eax,32(%esi)
+ movl %ebx,36(%esi)
+ addl 48(%esp),%ecx
+ adcl 52(%esp),%edx
+ movl %ecx,40(%esi)
+ movl %edx,44(%esi)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ addl 56(%esp),%eax
+ adcl 60(%esp),%ebx
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ addl 64(%esp),%ecx
+ adcl 68(%esp),%edx
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ addl $840,%esp
+ subl $640,%ebp
+ cmpl 8(%esp),%edi
+ jb .L002loop_x86
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001K512:
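+# the 80 SHA-512 round constants, each stored low 32-bit word first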
+.long 3609767458,1116352408
+.long 602891725,1899447441
+.long 3964484399,3049323471
+.long 2173295548,3921009573
+.long 4081628472,961987163
+.long 3053834265,1508970993
+.long 2937671579,2453635748
+.long 3664609560,2870763221
+.long 2734883394,3624381080
+.long 1164996542,310598401
+.long 1323610764,607225278
+.long 3590304994,1426881987
+.long 4068182383,1925078388
+.long 991336113,2162078206
+.long 633803317,2614888103
+.long 3479774868,3248222580
+.long 2666613458,3835390401
+.long 944711139,4022224774
+.long 2341262773,264347078
+.long 2007800933,604807628
+.long 1495990901,770255983
+.long 1856431235,1249150122
+.long 3175218132,1555081692
+.long 2198950837,1996064986
+.long 3999719339,2554220882
+.long 766784016,2821834349
+.long 2566594879,2952996808
+.long 3203337956,3210313671
+.long 1034457026,3336571891
+.long 2466948901,3584528711
+.long 3758326383,113926993
+.long 168717936,338241895
+.long 1188179964,666307205
+.long 1546045734,773529912
+.long 1522805485,1294757372
+.long 2643833823,1396182291
+.long 2343527390,1695183700
+.long 1014477480,1986661051
+.long 1206759142,2177026350
+.long 344077627,2456956037
+.long 1290863460,2730485921
+.long 3158454273,2820302411
+.long 3505952657,3259730800
+.long 106217008,3345764771
+.long 3606008344,3516065817
+.long 1432725776,3600352804
+.long 1467031594,4094571909
+.long 851169720,275423344
+.long 3100823752,430227734
+.long 1363258195,506948616
+.long 3750685593,659060556
+.long 3785050280,883997877
+.long 3318307427,958139571
+.long 3812723403,1322822218
+.long 2003034995,1537002063
+.long 3602036899,1747873779
+.long 1575990012,1955562222
+.long 1125592928,2024104815
+.long 2716904306,2227730452
+.long 442776044,2361852424
+.long 593698344,2428436474
+.long 3733110249,2756734187
+.long 2999351573,3204031479
+.long 3815920427,3329325298
+.long 3928383900,3391569614
+.long 566280711,3515267271
+.long 3454069534,3940187606
+.long 4000239992,4118630271
+.long 1914138554,116418474
+.long 2731055270,174292421
+.long 3203993006,289380356
+.long 320620315,460393269
+.long 587496836,685471733
+.long 1086792851,852142971
+.long 365543100,1017036298
+.long 2618297676,1126000580
+.long 3409855158,1288033470
+.long 4234509866,1501505948
+.long 987167468,1607167915
+.long 1246189591,1816402316
+.size sha512_block_data_order,.-.L_sha512_block_data_order_begin
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
diff --git a/deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s b/deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s
new file mode 100644
index 0000000000..c03f3f6cb7
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/whrlpool/wp-mmx.s
@@ -0,0 +1,1105 @@
+.file "wp-mmx.s"
+.text
+.globl whirlpool_block_mmx
+.type whirlpool_block_mmx,@function
+.align 16
+whirlpool_block_mmx:
+.L_whirlpool_block_mmx_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%ebp
+ movl %esp,%eax
+ subl $148,%esp
+ andl $-64,%esp
+ leal 128(%esp),%ebx
+ movl %esi,(%ebx)
+ movl %edi,4(%ebx)
+ movl %ebp,8(%ebx)
+ movl %eax,16(%ebx)
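+# args: hash state (%esi), input (%edi), block count (%ebp);
+# 64-byte-aligned frame, original %esp saved at 16(%ebx)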
+ call .L000pic_point
+.L000pic_point:
+ popl %ebp
+ leal .L001table-.L000pic_point(%ebp),%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ movq (%esi),%mm0
+ movq 8(%esi),%mm1
+ movq 16(%esi),%mm2
+ movq 24(%esi),%mm3
+ movq 32(%esi),%mm4
+ movq 40(%esi),%mm5
+ movq 48(%esi),%mm6
+ movq 56(%esi),%mm7
+.L002outerloop:
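+# per block: the chaining value in %mm0-7 doubles as the round key,
+# saved at (%esp); key^message becomes the state, saved at 64(%esp)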
+ movq %mm0,(%esp)
+ movq %mm1,8(%esp)
+ movq %mm2,16(%esp)
+ movq %mm3,24(%esp)
+ movq %mm4,32(%esp)
+ movq %mm5,40(%esp)
+ movq %mm6,48(%esp)
+ movq %mm7,56(%esp)
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm1
+ pxor 16(%edi),%mm2
+ pxor 24(%edi),%mm3
+ pxor 32(%edi),%mm4
+ pxor 40(%edi),%mm5
+ pxor 48(%edi),%mm6
+ pxor 56(%edi),%mm7
+ movq %mm0,64(%esp)
+ movq %mm1,72(%esp)
+ movq %mm2,80(%esp)
+ movq %mm3,88(%esp)
+ movq %mm4,96(%esp)
+ movq %mm5,104(%esp)
+ movq %mm6,112(%esp)
+ movq %mm7,120(%esp)
+ xorl %esi,%esi
+ movl %esi,12(%ebx)
+.align 16
+.L003round:
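+# one table-driven round for key and state: each byte selects an 8-byte
+# table row; the index is doubled because every row is stored twice,
+# so the seven rotated views load with movq at byte offsets 1..7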
+ movq 4096(%ebp,%esi,8),%mm0
+ movl (%esp),%eax
+ movl 4(%esp),%ebx
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm0
+ movq 7(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ movl 8(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ movq 6(%ebp,%esi,8),%mm2
+ movq 5(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ movq 4(%ebp,%esi,8),%mm4
+ movq 3(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 12(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ movq 2(%ebp,%esi,8),%mm6
+ movq 1(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm1
+ pxor 7(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ movl 16(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm3
+ pxor 5(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm5
+ pxor 3(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 20(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm7
+ pxor 1(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm2
+ pxor 7(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ movl 24(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm4
+ pxor 5(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm6
+ pxor 3(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 28(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm0
+ pxor 1(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm3
+ pxor 7(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ movl 32(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm5
+ pxor 5(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm7
+ pxor 3(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 36(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm1
+ pxor 1(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm4
+ pxor 7(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ movl 40(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm6
+ pxor 5(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm0
+ pxor 3(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 44(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm2
+ pxor 1(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm5
+ pxor 7(%ebp,%edi,8),%mm6
+ movb %al,%cl
+ movb %ah,%dl
+ movl 48(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm7
+ pxor 5(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm1
+ pxor 3(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 52(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm3
+ pxor 1(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm6
+ pxor 7(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ movl 56(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm0
+ pxor 5(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm2
+ pxor 3(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 60(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm4
+ pxor 1(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm7
+ pxor 7(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm1
+ pxor 5(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm3
+ pxor 3(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 68(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm5
+ pxor 1(%ebp,%edi,8),%mm6
+ movq %mm0,(%esp)
+ movq %mm1,8(%esp)
+ movq %mm2,16(%esp)
+ movq %mm3,24(%esp)
+ movq %mm4,32(%esp)
+ movq %mm5,40(%esp)
+ movq %mm6,48(%esp)
+ movq %mm7,56(%esp)
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm0
+ pxor 7(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ movl 72(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm2
+ pxor 5(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm4
+ pxor 3(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 76(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm6
+ pxor 1(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm1
+ pxor 7(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ movl 80(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm3
+ pxor 5(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm5
+ pxor 3(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 84(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm7
+ pxor 1(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm2
+ pxor 7(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ movl 88(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm4
+ pxor 5(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm6
+ pxor 3(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 92(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm0
+ pxor 1(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm3
+ pxor 7(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ movl 96(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm5
+ pxor 5(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm7
+ pxor 3(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 100(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm1
+ pxor 1(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm4
+ pxor 7(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ movl 104(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm6
+ pxor 5(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm0
+ pxor 3(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 108(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm2
+ pxor 1(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm5
+ pxor 7(%ebp,%edi,8),%mm6
+ movb %al,%cl
+ movb %ah,%dl
+ movl 112(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm7
+ pxor 5(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm1
+ pxor 3(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 116(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm3
+ pxor 1(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm6
+ pxor 7(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ movl 120(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm0
+ pxor 5(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm2
+ pxor 3(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 124(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm4
+ pxor 1(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm7
+ pxor 7(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm1
+ pxor 5(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm3
+ pxor 3(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm5
+ pxor 1(%ebp,%edi,8),%mm6
+ leal 128(%esp),%ebx
+ movl 12(%ebx),%esi
+ addl $1,%esi
+ cmpl $10,%esi
+ je .L004roundsdone
+ movl %esi,12(%ebx)
+ movq %mm0,64(%esp)
+ movq %mm1,72(%esp)
+ movq %mm2,80(%esp)
+ movq %mm3,88(%esp)
+ movq %mm4,96(%esp)
+ movq %mm5,104(%esp)
+ movq %mm6,112(%esp)
+ movq %mm7,120(%esp)
+ jmp .L003round
+.align 16
+.L004roundsdone:
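+# Miyaguchi-Preneel feed-forward: hash = round output ^ message ^ previous hash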
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ movl 8(%ebx),%eax
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm1
+ pxor 16(%edi),%mm2
+ pxor 24(%edi),%mm3
+ pxor 32(%edi),%mm4
+ pxor 40(%edi),%mm5
+ pxor 48(%edi),%mm6
+ pxor 56(%edi),%mm7
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm1
+ pxor 16(%esi),%mm2
+ pxor 24(%esi),%mm3
+ pxor 32(%esi),%mm4
+ pxor 40(%esi),%mm5
+ pxor 48(%esi),%mm6
+ pxor 56(%esi),%mm7
+ movq %mm0,(%esi)
+ movq %mm1,8(%esi)
+ movq %mm2,16(%esi)
+ movq %mm3,24(%esi)
+ movq %mm4,32(%esi)
+ movq %mm5,40(%esi)
+ movq %mm6,48(%esi)
+ movq %mm7,56(%esi)
+ leal 64(%edi),%edi
+ subl $1,%eax
+ jz .L005alldone
+ movl %edi,4(%ebx)
+ movl %eax,8(%ebx)
+ jmp .L002outerloop
+.L005alldone:
+ emms
+ movl 16(%ebx),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 64
+.L001table:
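+# each 64-bit row is emitted twice so misaligned movq loads yield its rotations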
+.byte 24,24,96,24,192,120,48,216
+.byte 24,24,96,24,192,120,48,216
+.byte 35,35,140,35,5,175,70,38
+.byte 35,35,140,35,5,175,70,38
+.byte 198,198,63,198,126,249,145,184
+.byte 198,198,63,198,126,249,145,184
+.byte 232,232,135,232,19,111,205,251
+.byte 232,232,135,232,19,111,205,251
+.byte 135,135,38,135,76,161,19,203
+.byte 135,135,38,135,76,161,19,203
+.byte 184,184,218,184,169,98,109,17
+.byte 184,184,218,184,169,98,109,17
+.byte 1,1,4,1,8,5,2,9
+.byte 1,1,4,1,8,5,2,9
+.byte 79,79,33,79,66,110,158,13
+.byte 79,79,33,79,66,110,158,13
+.byte 54,54,216,54,173,238,108,155
+.byte 54,54,216,54,173,238,108,155
+.byte 166,166,162,166,89,4,81,255
+.byte 166,166,162,166,89,4,81,255
+.byte 210,210,111,210,222,189,185,12
+.byte 210,210,111,210,222,189,185,12
+.byte 245,245,243,245,251,6,247,14
+.byte 245,245,243,245,251,6,247,14
+.byte 121,121,249,121,239,128,242,150
+.byte 121,121,249,121,239,128,242,150
+.byte 111,111,161,111,95,206,222,48
+.byte 111,111,161,111,95,206,222,48
+.byte 145,145,126,145,252,239,63,109
+.byte 145,145,126,145,252,239,63,109
+.byte 82,82,85,82,170,7,164,248
+.byte 82,82,85,82,170,7,164,248
+.byte 96,96,157,96,39,253,192,71
+.byte 96,96,157,96,39,253,192,71
+.byte 188,188,202,188,137,118,101,53
+.byte 188,188,202,188,137,118,101,53
+.byte 155,155,86,155,172,205,43,55
+.byte 155,155,86,155,172,205,43,55
+.byte 142,142,2,142,4,140,1,138
+.byte 142,142,2,142,4,140,1,138
+.byte 163,163,182,163,113,21,91,210
+.byte 163,163,182,163,113,21,91,210
+.byte 12,12,48,12,96,60,24,108
+.byte 12,12,48,12,96,60,24,108
+.byte 123,123,241,123,255,138,246,132
+.byte 123,123,241,123,255,138,246,132
+.byte 53,53,212,53,181,225,106,128
+.byte 53,53,212,53,181,225,106,128
+.byte 29,29,116,29,232,105,58,245
+.byte 29,29,116,29,232,105,58,245
+.byte 224,224,167,224,83,71,221,179
+.byte 224,224,167,224,83,71,221,179
+.byte 215,215,123,215,246,172,179,33
+.byte 215,215,123,215,246,172,179,33
+.byte 194,194,47,194,94,237,153,156
+.byte 194,194,47,194,94,237,153,156
+.byte 46,46,184,46,109,150,92,67
+.byte 46,46,184,46,109,150,92,67
+.byte 75,75,49,75,98,122,150,41
+.byte 75,75,49,75,98,122,150,41
+.byte 254,254,223,254,163,33,225,93
+.byte 254,254,223,254,163,33,225,93
+.byte 87,87,65,87,130,22,174,213
+.byte 87,87,65,87,130,22,174,213
+.byte 21,21,84,21,168,65,42,189
+.byte 21,21,84,21,168,65,42,189
+.byte 119,119,193,119,159,182,238,232
+.byte 119,119,193,119,159,182,238,232
+.byte 55,55,220,55,165,235,110,146
+.byte 55,55,220,55,165,235,110,146
+.byte 229,229,179,229,123,86,215,158
+.byte 229,229,179,229,123,86,215,158
+.byte 159,159,70,159,140,217,35,19
+.byte 159,159,70,159,140,217,35,19
+.byte 240,240,231,240,211,23,253,35
+.byte 240,240,231,240,211,23,253,35
+.byte 74,74,53,74,106,127,148,32
+.byte 74,74,53,74,106,127,148,32
+.byte 218,218,79,218,158,149,169,68
+.byte 218,218,79,218,158,149,169,68
+.byte 88,88,125,88,250,37,176,162
+.byte 88,88,125,88,250,37,176,162
+.byte 201,201,3,201,6,202,143,207
+.byte 201,201,3,201,6,202,143,207
+.byte 41,41,164,41,85,141,82,124
+.byte 41,41,164,41,85,141,82,124
+.byte 10,10,40,10,80,34,20,90
+.byte 10,10,40,10,80,34,20,90
+.byte 177,177,254,177,225,79,127,80
+.byte 177,177,254,177,225,79,127,80
+.byte 160,160,186,160,105,26,93,201
+.byte 160,160,186,160,105,26,93,201
+.byte 107,107,177,107,127,218,214,20
+.byte 107,107,177,107,127,218,214,20
+.byte 133,133,46,133,92,171,23,217
+.byte 133,133,46,133,92,171,23,217
+.byte 189,189,206,189,129,115,103,60
+.byte 189,189,206,189,129,115,103,60
+.byte 93,93,105,93,210,52,186,143
+.byte 93,93,105,93,210,52,186,143
+.byte 16,16,64,16,128,80,32,144
+.byte 16,16,64,16,128,80,32,144
+.byte 244,244,247,244,243,3,245,7
+.byte 244,244,247,244,243,3,245,7
+.byte 203,203,11,203,22,192,139,221
+.byte 203,203,11,203,22,192,139,221
+.byte 62,62,248,62,237,198,124,211
+.byte 62,62,248,62,237,198,124,211
+.byte 5,5,20,5,40,17,10,45
+.byte 5,5,20,5,40,17,10,45
+.byte 103,103,129,103,31,230,206,120
+.byte 103,103,129,103,31,230,206,120
+.byte 228,228,183,228,115,83,213,151
+.byte 228,228,183,228,115,83,213,151
+.byte 39,39,156,39,37,187,78,2
+.byte 39,39,156,39,37,187,78,2
+.byte 65,65,25,65,50,88,130,115
+.byte 65,65,25,65,50,88,130,115
+.byte 139,139,22,139,44,157,11,167
+.byte 139,139,22,139,44,157,11,167
+.byte 167,167,166,167,81,1,83,246
+.byte 167,167,166,167,81,1,83,246
+.byte 125,125,233,125,207,148,250,178
+.byte 125,125,233,125,207,148,250,178
+.byte 149,149,110,149,220,251,55,73
+.byte 149,149,110,149,220,251,55,73
+.byte 216,216,71,216,142,159,173,86
+.byte 216,216,71,216,142,159,173,86
+.byte 251,251,203,251,139,48,235,112
+.byte 251,251,203,251,139,48,235,112
+.byte 238,238,159,238,35,113,193,205
+.byte 238,238,159,238,35,113,193,205
+.byte 124,124,237,124,199,145,248,187
+.byte 124,124,237,124,199,145,248,187
+.byte 102,102,133,102,23,227,204,113
+.byte 102,102,133,102,23,227,204,113
+.byte 221,221,83,221,166,142,167,123
+.byte 221,221,83,221,166,142,167,123
+.byte 23,23,92,23,184,75,46,175
+.byte 23,23,92,23,184,75,46,175
+.byte 71,71,1,71,2,70,142,69
+.byte 71,71,1,71,2,70,142,69
+.byte 158,158,66,158,132,220,33,26
+.byte 158,158,66,158,132,220,33,26
+.byte 202,202,15,202,30,197,137,212
+.byte 202,202,15,202,30,197,137,212
+.byte 45,45,180,45,117,153,90,88
+.byte 45,45,180,45,117,153,90,88
+.byte 191,191,198,191,145,121,99,46
+.byte 191,191,198,191,145,121,99,46
+.byte 7,7,28,7,56,27,14,63
+.byte 7,7,28,7,56,27,14,63
+.byte 173,173,142,173,1,35,71,172
+.byte 173,173,142,173,1,35,71,172
+.byte 90,90,117,90,234,47,180,176
+.byte 90,90,117,90,234,47,180,176
+.byte 131,131,54,131,108,181,27,239
+.byte 131,131,54,131,108,181,27,239
+.byte 51,51,204,51,133,255,102,182
+.byte 51,51,204,51,133,255,102,182
+.byte 99,99,145,99,63,242,198,92
+.byte 99,99,145,99,63,242,198,92
+.byte 2,2,8,2,16,10,4,18
+.byte 2,2,8,2,16,10,4,18
+.byte 170,170,146,170,57,56,73,147
+.byte 170,170,146,170,57,56,73,147
+.byte 113,113,217,113,175,168,226,222
+.byte 113,113,217,113,175,168,226,222
+.byte 200,200,7,200,14,207,141,198
+.byte 200,200,7,200,14,207,141,198
+.byte 25,25,100,25,200,125,50,209
+.byte 25,25,100,25,200,125,50,209
+.byte 73,73,57,73,114,112,146,59
+.byte 73,73,57,73,114,112,146,59
+.byte 217,217,67,217,134,154,175,95
+.byte 217,217,67,217,134,154,175,95
+.byte 242,242,239,242,195,29,249,49
+.byte 242,242,239,242,195,29,249,49
+.byte 227,227,171,227,75,72,219,168
+.byte 227,227,171,227,75,72,219,168
+.byte 91,91,113,91,226,42,182,185
+.byte 91,91,113,91,226,42,182,185
+.byte 136,136,26,136,52,146,13,188
+.byte 136,136,26,136,52,146,13,188
+.byte 154,154,82,154,164,200,41,62
+.byte 154,154,82,154,164,200,41,62
+.byte 38,38,152,38,45,190,76,11
+.byte 38,38,152,38,45,190,76,11
+.byte 50,50,200,50,141,250,100,191
+.byte 50,50,200,50,141,250,100,191
+.byte 176,176,250,176,233,74,125,89
+.byte 176,176,250,176,233,74,125,89
+.byte 233,233,131,233,27,106,207,242
+.byte 233,233,131,233,27,106,207,242
+.byte 15,15,60,15,120,51,30,119
+.byte 15,15,60,15,120,51,30,119
+.byte 213,213,115,213,230,166,183,51
+.byte 213,213,115,213,230,166,183,51
+.byte 128,128,58,128,116,186,29,244
+.byte 128,128,58,128,116,186,29,244
+.byte 190,190,194,190,153,124,97,39
+.byte 190,190,194,190,153,124,97,39
+.byte 205,205,19,205,38,222,135,235
+.byte 205,205,19,205,38,222,135,235
+.byte 52,52,208,52,189,228,104,137
+.byte 52,52,208,52,189,228,104,137
+.byte 72,72,61,72,122,117,144,50
+.byte 72,72,61,72,122,117,144,50
+.byte 255,255,219,255,171,36,227,84
+.byte 255,255,219,255,171,36,227,84
+.byte 122,122,245,122,247,143,244,141
+.byte 122,122,245,122,247,143,244,141
+.byte 144,144,122,144,244,234,61,100
+.byte 144,144,122,144,244,234,61,100
+.byte 95,95,97,95,194,62,190,157
+.byte 95,95,97,95,194,62,190,157
+.byte 32,32,128,32,29,160,64,61
+.byte 32,32,128,32,29,160,64,61
+.byte 104,104,189,104,103,213,208,15
+.byte 104,104,189,104,103,213,208,15
+.byte 26,26,104,26,208,114,52,202
+.byte 26,26,104,26,208,114,52,202
+.byte 174,174,130,174,25,44,65,183
+.byte 174,174,130,174,25,44,65,183
+.byte 180,180,234,180,201,94,117,125
+.byte 180,180,234,180,201,94,117,125
+.byte 84,84,77,84,154,25,168,206
+.byte 84,84,77,84,154,25,168,206
+.byte 147,147,118,147,236,229,59,127
+.byte 147,147,118,147,236,229,59,127
+.byte 34,34,136,34,13,170,68,47
+.byte 34,34,136,34,13,170,68,47
+.byte 100,100,141,100,7,233,200,99
+.byte 100,100,141,100,7,233,200,99
+.byte 241,241,227,241,219,18,255,42
+.byte 241,241,227,241,219,18,255,42
+.byte 115,115,209,115,191,162,230,204
+.byte 115,115,209,115,191,162,230,204
+.byte 18,18,72,18,144,90,36,130
+.byte 18,18,72,18,144,90,36,130
+.byte 64,64,29,64,58,93,128,122
+.byte 64,64,29,64,58,93,128,122
+.byte 8,8,32,8,64,40,16,72
+.byte 8,8,32,8,64,40,16,72
+.byte 195,195,43,195,86,232,155,149
+.byte 195,195,43,195,86,232,155,149
+.byte 236,236,151,236,51,123,197,223
+.byte 236,236,151,236,51,123,197,223
+.byte 219,219,75,219,150,144,171,77
+.byte 219,219,75,219,150,144,171,77
+.byte 161,161,190,161,97,31,95,192
+.byte 161,161,190,161,97,31,95,192
+.byte 141,141,14,141,28,131,7,145
+.byte 141,141,14,141,28,131,7,145
+.byte 61,61,244,61,245,201,122,200
+.byte 61,61,244,61,245,201,122,200
+.byte 151,151,102,151,204,241,51,91
+.byte 151,151,102,151,204,241,51,91
+.byte 0,0,0,0,0,0,0,0
+.byte 0,0,0,0,0,0,0,0
+.byte 207,207,27,207,54,212,131,249
+.byte 207,207,27,207,54,212,131,249
+.byte 43,43,172,43,69,135,86,110
+.byte 43,43,172,43,69,135,86,110
+.byte 118,118,197,118,151,179,236,225
+.byte 118,118,197,118,151,179,236,225
+.byte 130,130,50,130,100,176,25,230
+.byte 130,130,50,130,100,176,25,230
+.byte 214,214,127,214,254,169,177,40
+.byte 214,214,127,214,254,169,177,40
+.byte 27,27,108,27,216,119,54,195
+.byte 27,27,108,27,216,119,54,195
+.byte 181,181,238,181,193,91,119,116
+.byte 181,181,238,181,193,91,119,116
+.byte 175,175,134,175,17,41,67,190
+.byte 175,175,134,175,17,41,67,190
+.byte 106,106,181,106,119,223,212,29
+.byte 106,106,181,106,119,223,212,29
+.byte 80,80,93,80,186,13,160,234
+.byte 80,80,93,80,186,13,160,234
+.byte 69,69,9,69,18,76,138,87
+.byte 69,69,9,69,18,76,138,87
+.byte 243,243,235,243,203,24,251,56
+.byte 243,243,235,243,203,24,251,56
+.byte 48,48,192,48,157,240,96,173
+.byte 48,48,192,48,157,240,96,173
+.byte 239,239,155,239,43,116,195,196
+.byte 239,239,155,239,43,116,195,196
+.byte 63,63,252,63,229,195,126,218
+.byte 63,63,252,63,229,195,126,218
+.byte 85,85,73,85,146,28,170,199
+.byte 85,85,73,85,146,28,170,199
+.byte 162,162,178,162,121,16,89,219
+.byte 162,162,178,162,121,16,89,219
+.byte 234,234,143,234,3,101,201,233
+.byte 234,234,143,234,3,101,201,233
+.byte 101,101,137,101,15,236,202,106
+.byte 101,101,137,101,15,236,202,106
+.byte 186,186,210,186,185,104,105,3
+.byte 186,186,210,186,185,104,105,3
+.byte 47,47,188,47,101,147,94,74
+.byte 47,47,188,47,101,147,94,74
+.byte 192,192,39,192,78,231,157,142
+.byte 192,192,39,192,78,231,157,142
+.byte 222,222,95,222,190,129,161,96
+.byte 222,222,95,222,190,129,161,96
+.byte 28,28,112,28,224,108,56,252
+.byte 28,28,112,28,224,108,56,252
+.byte 253,253,211,253,187,46,231,70
+.byte 253,253,211,253,187,46,231,70
+.byte 77,77,41,77,82,100,154,31
+.byte 77,77,41,77,82,100,154,31
+.byte 146,146,114,146,228,224,57,118
+.byte 146,146,114,146,228,224,57,118
+.byte 117,117,201,117,143,188,234,250
+.byte 117,117,201,117,143,188,234,250
+.byte 6,6,24,6,48,30,12,54
+.byte 6,6,24,6,48,30,12,54
+.byte 138,138,18,138,36,152,9,174
+.byte 138,138,18,138,36,152,9,174
+.byte 178,178,242,178,249,64,121,75
+.byte 178,178,242,178,249,64,121,75
+.byte 230,230,191,230,99,89,209,133
+.byte 230,230,191,230,99,89,209,133
+.byte 14,14,56,14,112,54,28,126
+.byte 14,14,56,14,112,54,28,126
+.byte 31,31,124,31,248,99,62,231
+.byte 31,31,124,31,248,99,62,231
+.byte 98,98,149,98,55,247,196,85
+.byte 98,98,149,98,55,247,196,85
+.byte 212,212,119,212,238,163,181,58
+.byte 212,212,119,212,238,163,181,58
+.byte 168,168,154,168,41,50,77,129
+.byte 168,168,154,168,41,50,77,129
+.byte 150,150,98,150,196,244,49,82
+.byte 150,150,98,150,196,244,49,82
+.byte 249,249,195,249,155,58,239,98
+.byte 249,249,195,249,155,58,239,98
+.byte 197,197,51,197,102,246,151,163
+.byte 197,197,51,197,102,246,151,163
+.byte 37,37,148,37,53,177,74,16
+.byte 37,37,148,37,53,177,74,16
+.byte 89,89,121,89,242,32,178,171
+.byte 89,89,121,89,242,32,178,171
+.byte 132,132,42,132,84,174,21,208
+.byte 132,132,42,132,84,174,21,208
+.byte 114,114,213,114,183,167,228,197
+.byte 114,114,213,114,183,167,228,197
+.byte 57,57,228,57,213,221,114,236
+.byte 57,57,228,57,213,221,114,236
+.byte 76,76,45,76,90,97,152,22
+.byte 76,76,45,76,90,97,152,22
+.byte 94,94,101,94,202,59,188,148
+.byte 94,94,101,94,202,59,188,148
+.byte 120,120,253,120,231,133,240,159
+.byte 120,120,253,120,231,133,240,159
+.byte 56,56,224,56,221,216,112,229
+.byte 56,56,224,56,221,216,112,229
+.byte 140,140,10,140,20,134,5,152
+.byte 140,140,10,140,20,134,5,152
+.byte 209,209,99,209,198,178,191,23
+.byte 209,209,99,209,198,178,191,23
+.byte 165,165,174,165,65,11,87,228
+.byte 165,165,174,165,65,11,87,228
+.byte 226,226,175,226,67,77,217,161
+.byte 226,226,175,226,67,77,217,161
+.byte 97,97,153,97,47,248,194,78
+.byte 97,97,153,97,47,248,194,78
+.byte 179,179,246,179,241,69,123,66
+.byte 179,179,246,179,241,69,123,66
+.byte 33,33,132,33,21,165,66,52
+.byte 33,33,132,33,21,165,66,52
+.byte 156,156,74,156,148,214,37,8
+.byte 156,156,74,156,148,214,37,8
+.byte 30,30,120,30,240,102,60,238
+.byte 30,30,120,30,240,102,60,238
+.byte 67,67,17,67,34,82,134,97
+.byte 67,67,17,67,34,82,134,97
+.byte 199,199,59,199,118,252,147,177
+.byte 199,199,59,199,118,252,147,177
+.byte 252,252,215,252,179,43,229,79
+.byte 252,252,215,252,179,43,229,79
+.byte 4,4,16,4,32,20,8,36
+.byte 4,4,16,4,32,20,8,36
+.byte 81,81,89,81,178,8,162,227
+.byte 81,81,89,81,178,8,162,227
+.byte 153,153,94,153,188,199,47,37
+.byte 153,153,94,153,188,199,47,37
+.byte 109,109,169,109,79,196,218,34
+.byte 109,109,169,109,79,196,218,34
+.byte 13,13,52,13,104,57,26,101
+.byte 13,13,52,13,104,57,26,101
+.byte 250,250,207,250,131,53,233,121
+.byte 250,250,207,250,131,53,233,121
+.byte 223,223,91,223,182,132,163,105
+.byte 223,223,91,223,182,132,163,105
+.byte 126,126,229,126,215,155,252,169
+.byte 126,126,229,126,215,155,252,169
+.byte 36,36,144,36,61,180,72,25
+.byte 36,36,144,36,61,180,72,25
+.byte 59,59,236,59,197,215,118,254
+.byte 59,59,236,59,197,215,118,254
+.byte 171,171,150,171,49,61,75,154
+.byte 171,171,150,171,49,61,75,154
+.byte 206,206,31,206,62,209,129,240
+.byte 206,206,31,206,62,209,129,240
+.byte 17,17,68,17,136,85,34,153
+.byte 17,17,68,17,136,85,34,153
+.byte 143,143,6,143,12,137,3,131
+.byte 143,143,6,143,12,137,3,131
+.byte 78,78,37,78,74,107,156,4
+.byte 78,78,37,78,74,107,156,4
+.byte 183,183,230,183,209,81,115,102
+.byte 183,183,230,183,209,81,115,102
+.byte 235,235,139,235,11,96,203,224
+.byte 235,235,139,235,11,96,203,224
+.byte 60,60,240,60,253,204,120,193
+.byte 60,60,240,60,253,204,120,193
+.byte 129,129,62,129,124,191,31,253
+.byte 129,129,62,129,124,191,31,253
+.byte 148,148,106,148,212,254,53,64
+.byte 148,148,106,148,212,254,53,64
+.byte 247,247,251,247,235,12,243,28
+.byte 247,247,251,247,235,12,243,28
+.byte 185,185,222,185,161,103,111,24
+.byte 185,185,222,185,161,103,111,24
+.byte 19,19,76,19,152,95,38,139
+.byte 19,19,76,19,152,95,38,139
+.byte 44,44,176,44,125,156,88,81
+.byte 44,44,176,44,125,156,88,81
+.byte 211,211,107,211,214,184,187,5
+.byte 211,211,107,211,214,184,187,5
+.byte 231,231,187,231,107,92,211,140
+.byte 231,231,187,231,107,92,211,140
+.byte 110,110,165,110,87,203,220,57
+.byte 110,110,165,110,87,203,220,57
+.byte 196,196,55,196,110,243,149,170
+.byte 196,196,55,196,110,243,149,170
+.byte 3,3,12,3,24,15,6,27
+.byte 3,3,12,3,24,15,6,27
+.byte 86,86,69,86,138,19,172,220
+.byte 86,86,69,86,138,19,172,220
+.byte 68,68,13,68,26,73,136,94
+.byte 68,68,13,68,26,73,136,94
+.byte 127,127,225,127,223,158,254,160
+.byte 127,127,225,127,223,158,254,160
+.byte 169,169,158,169,33,55,79,136
+.byte 169,169,158,169,33,55,79,136
+.byte 42,42,168,42,77,130,84,103
+.byte 42,42,168,42,77,130,84,103
+.byte 187,187,214,187,177,109,107,10
+.byte 187,187,214,187,177,109,107,10
+.byte 193,193,35,193,70,226,159,135
+.byte 193,193,35,193,70,226,159,135
+.byte 83,83,81,83,162,2,166,241
+.byte 83,83,81,83,162,2,166,241
+.byte 220,220,87,220,174,139,165,114
+.byte 220,220,87,220,174,139,165,114
+.byte 11,11,44,11,88,39,22,83
+.byte 11,11,44,11,88,39,22,83
+.byte 157,157,78,157,156,211,39,1
+.byte 157,157,78,157,156,211,39,1
+.byte 108,108,173,108,71,193,216,43
+.byte 108,108,173,108,71,193,216,43
+.byte 49,49,196,49,149,245,98,164
+.byte 49,49,196,49,149,245,98,164
+.byte 116,116,205,116,135,185,232,243
+.byte 116,116,205,116,135,185,232,243
+.byte 246,246,255,246,227,9,241,21
+.byte 246,246,255,246,227,9,241,21
+.byte 70,70,5,70,10,67,140,76
+.byte 70,70,5,70,10,67,140,76
+.byte 172,172,138,172,9,38,69,165
+.byte 172,172,138,172,9,38,69,165
+.byte 137,137,30,137,60,151,15,181
+.byte 137,137,30,137,60,151,15,181
+.byte 20,20,80,20,160,68,40,180
+.byte 20,20,80,20,160,68,40,180
+.byte 225,225,163,225,91,66,223,186
+.byte 225,225,163,225,91,66,223,186
+.byte 22,22,88,22,176,78,44,166
+.byte 22,22,88,22,176,78,44,166
+.byte 58,58,232,58,205,210,116,247
+.byte 58,58,232,58,205,210,116,247
+.byte 105,105,185,105,111,208,210,6
+.byte 105,105,185,105,111,208,210,6
+.byte 9,9,36,9,72,45,18,65
+.byte 9,9,36,9,72,45,18,65
+.byte 112,112,221,112,167,173,224,215
+.byte 112,112,221,112,167,173,224,215
+.byte 182,182,226,182,217,84,113,111
+.byte 182,182,226,182,217,84,113,111
+.byte 208,208,103,208,206,183,189,30
+.byte 208,208,103,208,206,183,189,30
+.byte 237,237,147,237,59,126,199,214
+.byte 237,237,147,237,59,126,199,214
+.byte 204,204,23,204,46,219,133,226
+.byte 204,204,23,204,46,219,133,226
+.byte 66,66,21,66,42,87,132,104
+.byte 66,66,21,66,42,87,132,104
+.byte 152,152,90,152,180,194,45,44
+.byte 152,152,90,152,180,194,45,44
+.byte 164,164,170,164,73,14,85,237
+.byte 164,164,170,164,73,14,85,237
+.byte 40,40,160,40,93,136,80,117
+.byte 40,40,160,40,93,136,80,117
+.byte 92,92,109,92,218,49,184,134
+.byte 92,92,109,92,218,49,184,134
+.byte 248,248,199,248,147,63,237,107
+.byte 248,248,199,248,147,63,237,107
+.byte 134,134,34,134,68,164,17,194
+.byte 134,134,34,134,68,164,17,194
+.byte 24,35,198,232,135,184,1,79
+.byte 54,166,210,245,121,111,145,82
+.byte 96,188,155,142,163,12,123,53
+.byte 29,224,215,194,46,75,254,87
+.byte 21,119,55,229,159,240,74,218
+.byte 88,201,41,10,177,160,107,133
+.byte 189,93,16,244,203,62,5,103
+.byte 228,39,65,139,167,125,149,216
+.byte 251,238,124,102,221,23,71,158
+.byte 202,45,191,7,173,90,131,51
+.size whirlpool_block_mmx,.-.L_whirlpool_block_mmx_begin
diff --git a/deps/openssl/asm/x86-elf-gas/x86cpuid.s b/deps/openssl/asm/x86-elf-gas/x86cpuid.s
new file mode 100644
index 0000000000..56a92bfcbe
--- /dev/null
+++ b/deps/openssl/asm/x86-elf-gas/x86cpuid.s
@@ -0,0 +1,279 @@
+.file "x86cpuid.s"
+.text
+.globl OPENSSL_ia32_cpuid
+.type OPENSSL_ia32_cpuid,@function
+.align 16
+OPENSSL_ia32_cpuid:
+.L_OPENSSL_ia32_cpuid_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %edx,%edx
+ pushfl
+ popl %eax
+ movl %eax,%ecx
+ xorl $2097152,%eax
+ pushl %eax
+ popfl
+ pushfl
+ popl %eax
+ xorl %eax,%ecx
+ btl $21,%ecx
+ jnc .L000done
+ xorl %eax,%eax
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%ebp
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%ebp
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%ebp
+ jz .L001intel
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%esi
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%esi
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%esi
+ jnz .L001intel
+ movl $2147483648,%eax
+ .byte 0x0f,0xa2
+ cmpl $2147483656,%eax
+ jb .L001intel
+ movl $2147483656,%eax
+ .byte 0x0f,0xa2
+ movzbl %cl,%esi
+ incl %esi
+ movl $1,%eax
+ .byte 0x0f,0xa2
+ btl $28,%edx
+ jnc .L000done
+ shrl $16,%ebx
+ andl $255,%ebx
+ cmpl %esi,%ebx
+ ja .L000done
+ andl $4026531839,%edx
+ jmp .L000done
+.L001intel:
+ cmpl $4,%edi
+ movl $-1,%edi
+ jb .L002nocacheinfo
+ movl $4,%eax
+ movl $0,%ecx
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ shrl $14,%edi
+ andl $4095,%edi
+.L002nocacheinfo:
+ movl $1,%eax
+ .byte 0x0f,0xa2
+ cmpl $0,%ebp
+ jne .L003notP4
+ andb $15,%ah
+ cmpb $15,%ah
+ jne .L003notP4
+ orl $1048576,%edx
+.L003notP4:
+ btl $28,%edx
+ jnc .L000done
+ andl $4026531839,%edx
+ cmpl $0,%edi
+ je .L000done
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja .L000done
+ andl $4026531839,%edx
+.L000done:
+ movl %edx,%eax
+ movl %ecx,%edx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.size OPENSSL_ia32_cpuid,.-.L_OPENSSL_ia32_cpuid_begin
+.globl OPENSSL_rdtsc
+.type OPENSSL_rdtsc,@function
+.align 16
+OPENSSL_rdtsc:
+.L_OPENSSL_rdtsc_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal OPENSSL_ia32cap_P,%ecx
+ btl $4,(%ecx)
+ jnc .L004notsc
+ .byte 0x0f,0x31
+.L004notsc:
+ ret
+.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
+.globl OPENSSL_instrument_halt
+.type OPENSSL_instrument_halt,@function
+.align 16
+OPENSSL_instrument_halt:
+.L_OPENSSL_instrument_halt_begin:
+ leal OPENSSL_ia32cap_P,%ecx
+ btl $4,(%ecx)
+ jnc .L005nohalt
+.long 2421723150
+ andl $3,%eax
+ jnz .L005nohalt
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc .L005nohalt
+ .byte 0x0f,0x31
+ pushl %edx
+ pushl %eax
+ hlt
+ .byte 0x0f,0x31
+ subl (%esp),%eax
+ sbbl 4(%esp),%edx
+ addl $8,%esp
+ ret
+.L005nohalt:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.size OPENSSL_instrument_halt,.-.L_OPENSSL_instrument_halt_begin
+.globl OPENSSL_far_spin
+.type OPENSSL_far_spin,@function
+.align 16
+OPENSSL_far_spin:
+.L_OPENSSL_far_spin_begin:
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc .L006nospin
+ movl 4(%esp),%eax
+ movl 8(%esp),%ecx
+.long 2430111262
+ xorl %eax,%eax
+ movl (%ecx),%edx
+ jmp .L007spin
+.align 16
+.L007spin:
+ incl %eax
+ cmpl (%ecx),%edx
+ je .L007spin
+.long 529567888
+ ret
+.L006nospin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.size OPENSSL_far_spin,.-.L_OPENSSL_far_spin_begin
+.globl OPENSSL_wipe_cpu
+.type OPENSSL_wipe_cpu,@function
+.align 16
+OPENSSL_wipe_cpu:
+.L_OPENSSL_wipe_cpu_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal OPENSSL_ia32cap_P,%ecx
+ movl (%ecx),%ecx
+ btl $1,(%ecx)
+ jnc .L008no_x87
+.long 4007259865,4007259865,4007259865,4007259865,2430851995
+.L008no_x87:
+ leal 4(%esp),%eax
+ ret
+.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
+.globl OPENSSL_atomic_add
+.type OPENSSL_atomic_add,@function
+.align 16
+OPENSSL_atomic_add:
+.L_OPENSSL_atomic_add_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ pushl %ebx
+ nop
+ movl (%edx),%eax
+.L009spin:
+ leal (%eax,%ecx,1),%ebx
+ nop
+.long 447811568
+ jne .L009spin
+ movl %ebx,%eax
+ popl %ebx
+ ret
+.size OPENSSL_atomic_add,.-.L_OPENSSL_atomic_add_begin
+.globl OPENSSL_indirect_call
+.type OPENSSL_indirect_call,@function
+.align 16
+OPENSSL_indirect_call:
+.L_OPENSSL_indirect_call_begin:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $28,%esp
+ movl 12(%ebp),%ecx
+ movl %ecx,(%esp)
+ movl 16(%ebp),%edx
+ movl %edx,4(%esp)
+ movl 20(%ebp),%eax
+ movl %eax,8(%esp)
+ movl 24(%ebp),%eax
+ movl %eax,12(%esp)
+ movl 28(%ebp),%eax
+ movl %eax,16(%esp)
+ movl 32(%ebp),%eax
+ movl %eax,20(%esp)
+ movl 36(%ebp),%eax
+ movl %eax,24(%esp)
+ call *8(%ebp)
+ movl %ebp,%esp
+ popl %ebp
+ ret
+.size OPENSSL_indirect_call,.-.L_OPENSSL_indirect_call_begin
+.globl OPENSSL_cleanse
+.type OPENSSL_cleanse,@function
+.align 16
+OPENSSL_cleanse:
+.L_OPENSSL_cleanse_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ xorl %eax,%eax
+ cmpl $7,%ecx
+ jae .L010lot
+ cmpl $0,%ecx
+ je .L011ret
+.L012little:
+ movb %al,(%edx)
+ subl $1,%ecx
+ leal 1(%edx),%edx
+ jnz .L012little
+.L011ret:
+ ret
+.align 16
+.L010lot:
+ testl $3,%edx
+ jz .L013aligned
+ movb %al,(%edx)
+ leal -1(%ecx),%ecx
+ leal 1(%edx),%edx
+ jmp .L010lot
+.L013aligned:
+ movl %eax,(%edx)
+ leal -4(%ecx),%ecx
+ testl $-4,%ecx
+ leal 4(%edx),%edx
+ jnz .L013aligned
+ cmpl $0,%ecx
+ jne .L012little
+ ret
+.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
+.comm OPENSSL_ia32cap_P,4,4
+.section .init
+ call OPENSSL_cpuid_setup
+ jmp .Linitalign
+.align 16
+.Linitalign:
diff --git a/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s b/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
new file mode 100644
index 0000000000..ff56a4bef7
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
@@ -0,0 +1,3194 @@
+.file "aes-586.s"
+.text
+.align 4
+__x86_AES_encrypt_compact:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 4,0x90
+L000loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %ecx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%ecx
+ roll $24,%ecx
+ xorl %esi,%ecx
+ rorl $16,%ebp
+ xorl %ebp,%ecx
+ rorl $8,%ebp
+ xorl %ebp,%ecx
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %edx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%edx
+ roll $24,%edx
+ xorl %esi,%edx
+ rorl $16,%ebp
+ xorl %ebp,%edx
+ rorl $8,%ebp
+ xorl %ebp,%edx
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %eax,%ebp
+ xorl %edi,%esi
+ xorl %esi,%eax
+ roll $24,%eax
+ xorl %esi,%eax
+ rorl $16,%ebp
+ xorl %ebp,%eax
+ rorl $8,%ebp
+ xorl %ebp,%eax
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%edi
+ subl %ebp,%esi
+ andl $4278124286,%edi
+ andl $454761243,%esi
+ movl %ebx,%ebp
+ xorl %edi,%esi
+ xorl %esi,%ebx
+ roll $24,%ebx
+ xorl %esi,%ebx
+ rorl $16,%ebp
+ xorl %ebp,%ebx
+ rorl $8,%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb L000loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ch,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $8,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $24,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.align 4
+__sse_AES_encrypt_compact:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 4,0x90
+L001loop:
+ pshufw $8,%mm0,%mm1
+ pshufw $13,%mm4,%mm5
+ movd %mm1,%eax
+ movd %mm5,%ebx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ pshufw $13,%mm0,%mm2
+ movzbl %ah,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ shll $8,%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ pshufw $8,%mm4,%mm6
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm0
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ movd %mm2,%eax
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ movd %mm6,%ebx
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm1
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ shrl $16,%ebx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ shrl $16,%eax
+ punpckldq %mm1,%mm0
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ andl $255,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $16,%eax
+ orl %eax,%edx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm4
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %ebx,%edx
+ movd %edx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja L002out
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pcmpgtb %mm0,%mm3
+ pcmpgtb %mm4,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ pshufw $177,%mm0,%mm2
+ pshufw $177,%mm4,%mm6
+ paddb %mm0,%mm0
+ paddb %mm4,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pshufw $177,%mm2,%mm3
+ pshufw $177,%mm6,%mm7
+ pxor %mm0,%mm1
+ pxor %mm4,%mm5
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm3,%mm2
+ movq %mm7,%mm6
+ pslld $8,%mm3
+ pslld $8,%mm7
+ psrld $24,%mm2
+ psrld $24,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ psrld $8,%mm1
+ psrld $8,%mm5
+ movl -128(%ebp),%eax
+ pslld $24,%mm3
+ pslld $24,%mm7
+ movl -64(%ebp),%ebx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl (%ebp),%ecx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp L001loop
+.align 4,0x90
+L002out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.align 4
+__x86_AES_encrypt:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 4,0x90
+L003loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl (%ebp,%esi,8),%esi
+ movzbl %ch,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movzbl %bh,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl 20(%esp),%edi
+ movl (%ebp,%edx,8),%edx
+ movzbl %ah,%eax
+ xorl 3(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ xorl 1(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb L003loop
+ movl %eax,%esi
+ andl $255,%esi
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %bh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ shrl $16,%ebx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %ch,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $24,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ shrl $24,%ecx
+ movl 2(%ebp,%esi,8),%esi
+ andl $255,%esi
+ movzbl %dh,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $65280,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edx
+ andl $255,%edi
+ movl (%ebp,%edi,8),%edi
+ andl $16711680,%edi
+ xorl %edi,%esi
+ movzbl %bh,%edi
+ movl 2(%ebp,%edi,8),%edi
+ andl $4278190080,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl 2(%ebp,%edx,8),%edx
+ andl $255,%edx
+ movzbl %ah,%eax
+ movl (%ebp,%eax,8),%eax
+ andl $65280,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ andl $255,%ebx
+ movl (%ebp,%ebx,8),%ebx
+ andl $16711680,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ movl 2(%ebp,%ecx,8),%ecx
+ andl $4278190080,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 6,0x90
+LAES_Te:
+.long 2774754246,2774754246
+.long 2222750968,2222750968
+.long 2574743534,2574743534
+.long 2373680118,2373680118
+.long 234025727,234025727
+.long 3177933782,3177933782
+.long 2976870366,2976870366
+.long 1422247313,1422247313
+.long 1345335392,1345335392
+.long 50397442,50397442
+.long 2842126286,2842126286
+.long 2099981142,2099981142
+.long 436141799,436141799
+.long 1658312629,1658312629
+.long 3870010189,3870010189
+.long 2591454956,2591454956
+.long 1170918031,1170918031
+.long 2642575903,2642575903
+.long 1086966153,1086966153
+.long 2273148410,2273148410
+.long 368769775,368769775
+.long 3948501426,3948501426
+.long 3376891790,3376891790
+.long 200339707,200339707
+.long 3970805057,3970805057
+.long 1742001331,1742001331
+.long 4255294047,4255294047
+.long 3937382213,3937382213
+.long 3214711843,3214711843
+.long 4154762323,4154762323
+.long 2524082916,2524082916
+.long 1539358875,1539358875
+.long 3266819957,3266819957
+.long 486407649,486407649
+.long 2928907069,2928907069
+.long 1780885068,1780885068
+.long 1513502316,1513502316
+.long 1094664062,1094664062
+.long 49805301,49805301
+.long 1338821763,1338821763
+.long 1546925160,1546925160
+.long 4104496465,4104496465
+.long 887481809,887481809
+.long 150073849,150073849
+.long 2473685474,2473685474
+.long 1943591083,1943591083
+.long 1395732834,1395732834
+.long 1058346282,1058346282
+.long 201589768,201589768
+.long 1388824469,1388824469
+.long 1696801606,1696801606
+.long 1589887901,1589887901
+.long 672667696,672667696
+.long 2711000631,2711000631
+.long 251987210,251987210
+.long 3046808111,3046808111
+.long 151455502,151455502
+.long 907153956,907153956
+.long 2608889883,2608889883
+.long 1038279391,1038279391
+.long 652995533,652995533
+.long 1764173646,1764173646
+.long 3451040383,3451040383
+.long 2675275242,2675275242
+.long 453576978,453576978
+.long 2659418909,2659418909
+.long 1949051992,1949051992
+.long 773462580,773462580
+.long 756751158,756751158
+.long 2993581788,2993581788
+.long 3998898868,3998898868
+.long 4221608027,4221608027
+.long 4132590244,4132590244
+.long 1295727478,1295727478
+.long 1641469623,1641469623
+.long 3467883389,3467883389
+.long 2066295122,2066295122
+.long 1055122397,1055122397
+.long 1898917726,1898917726
+.long 2542044179,2542044179
+.long 4115878822,4115878822
+.long 1758581177,1758581177
+.long 0,0
+.long 753790401,753790401
+.long 1612718144,1612718144
+.long 536673507,536673507
+.long 3367088505,3367088505
+.long 3982187446,3982187446
+.long 3194645204,3194645204
+.long 1187761037,1187761037
+.long 3653156455,3653156455
+.long 1262041458,1262041458
+.long 3729410708,3729410708
+.long 3561770136,3561770136
+.long 3898103984,3898103984
+.long 1255133061,1255133061
+.long 1808847035,1808847035
+.long 720367557,720367557
+.long 3853167183,3853167183
+.long 385612781,385612781
+.long 3309519750,3309519750
+.long 3612167578,3612167578
+.long 1429418854,1429418854
+.long 2491778321,2491778321
+.long 3477423498,3477423498
+.long 284817897,284817897
+.long 100794884,100794884
+.long 2172616702,2172616702
+.long 4031795360,4031795360
+.long 1144798328,1144798328
+.long 3131023141,3131023141
+.long 3819481163,3819481163
+.long 4082192802,4082192802
+.long 4272137053,4272137053
+.long 3225436288,3225436288
+.long 2324664069,2324664069
+.long 2912064063,2912064063
+.long 3164445985,3164445985
+.long 1211644016,1211644016
+.long 83228145,83228145
+.long 3753688163,3753688163
+.long 3249976951,3249976951
+.long 1977277103,1977277103
+.long 1663115586,1663115586
+.long 806359072,806359072
+.long 452984805,452984805
+.long 250868733,250868733
+.long 1842533055,1842533055
+.long 1288555905,1288555905
+.long 336333848,336333848
+.long 890442534,890442534
+.long 804056259,804056259
+.long 3781124030,3781124030
+.long 2727843637,2727843637
+.long 3427026056,3427026056
+.long 957814574,957814574
+.long 1472513171,1472513171
+.long 4071073621,4071073621
+.long 2189328124,2189328124
+.long 1195195770,1195195770
+.long 2892260552,2892260552
+.long 3881655738,3881655738
+.long 723065138,723065138
+.long 2507371494,2507371494
+.long 2690670784,2690670784
+.long 2558624025,2558624025
+.long 3511635870,3511635870
+.long 2145180835,2145180835
+.long 1713513028,1713513028
+.long 2116692564,2116692564
+.long 2878378043,2878378043
+.long 2206763019,2206763019
+.long 3393603212,3393603212
+.long 703524551,703524551
+.long 3552098411,3552098411
+.long 1007948840,1007948840
+.long 2044649127,2044649127
+.long 3797835452,3797835452
+.long 487262998,487262998
+.long 1994120109,1994120109
+.long 1004593371,1004593371
+.long 1446130276,1446130276
+.long 1312438900,1312438900
+.long 503974420,503974420
+.long 3679013266,3679013266
+.long 168166924,168166924
+.long 1814307912,1814307912
+.long 3831258296,3831258296
+.long 1573044895,1573044895
+.long 1859376061,1859376061
+.long 4021070915,4021070915
+.long 2791465668,2791465668
+.long 2828112185,2828112185
+.long 2761266481,2761266481
+.long 937747667,937747667
+.long 2339994098,2339994098
+.long 854058965,854058965
+.long 1137232011,1137232011
+.long 1496790894,1496790894
+.long 3077402074,3077402074
+.long 2358086913,2358086913
+.long 1691735473,1691735473
+.long 3528347292,3528347292
+.long 3769215305,3769215305
+.long 3027004632,3027004632
+.long 4199962284,4199962284
+.long 133494003,133494003
+.long 636152527,636152527
+.long 2942657994,2942657994
+.long 2390391540,2390391540
+.long 3920539207,3920539207
+.long 403179536,403179536
+.long 3585784431,3585784431
+.long 2289596656,2289596656
+.long 1864705354,1864705354
+.long 1915629148,1915629148
+.long 605822008,605822008
+.long 4054230615,4054230615
+.long 3350508659,3350508659
+.long 1371981463,1371981463
+.long 602466507,602466507
+.long 2094914977,2094914977
+.long 2624877800,2624877800
+.long 555687742,555687742
+.long 3712699286,3712699286
+.long 3703422305,3703422305
+.long 2257292045,2257292045
+.long 2240449039,2240449039
+.long 2423288032,2423288032
+.long 1111375484,1111375484
+.long 3300242801,3300242801
+.long 2858837708,2858837708
+.long 3628615824,3628615824
+.long 84083462,84083462
+.long 32962295,32962295
+.long 302911004,302911004
+.long 2741068226,2741068226
+.long 1597322602,1597322602
+.long 4183250862,4183250862
+.long 3501832553,3501832553
+.long 2441512471,2441512471
+.long 1489093017,1489093017
+.long 656219450,656219450
+.long 3114180135,3114180135
+.long 954327513,954327513
+.long 335083755,335083755
+.long 3013122091,3013122091
+.long 856756514,856756514
+.long 3144247762,3144247762
+.long 1893325225,1893325225
+.long 2307821063,2307821063
+.long 2811532339,2811532339
+.long 3063651117,3063651117
+.long 572399164,572399164
+.long 2458355477,2458355477
+.long 552200649,552200649
+.long 1238290055,1238290055
+.long 4283782570,4283782570
+.long 2015897680,2015897680
+.long 2061492133,2061492133
+.long 2408352771,2408352771
+.long 4171342169,4171342169
+.long 2156497161,2156497161
+.long 386731290,386731290
+.long 3669999461,3669999461
+.long 837215959,837215959
+.long 3326231172,3326231172
+.long 3093850320,3093850320
+.long 3275833730,3275833730
+.long 2962856233,2962856233
+.long 1999449434,1999449434
+.long 286199582,286199582
+.long 3417354363,3417354363
+.long 4233385128,4233385128
+.long 3602627437,3602627437
+.long 974525996,974525996
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.byte 99,124,119,123,242,107,111,197
+.byte 48,1,103,43,254,215,171,118
+.byte 202,130,201,125,250,89,71,240
+.byte 173,212,162,175,156,164,114,192
+.byte 183,253,147,38,54,63,247,204
+.byte 52,165,229,241,113,216,49,21
+.byte 4,199,35,195,24,150,5,154
+.byte 7,18,128,226,235,39,178,117
+.byte 9,131,44,26,27,110,90,160
+.byte 82,59,214,179,41,227,47,132
+.byte 83,209,0,237,32,252,177,91
+.byte 106,203,190,57,74,76,88,207
+.byte 208,239,170,251,67,77,51,133
+.byte 69,249,2,127,80,60,159,168
+.byte 81,163,64,143,146,157,56,245
+.byte 188,182,218,33,16,255,243,210
+.byte 205,12,19,236,95,151,68,23
+.byte 196,167,126,61,100,93,25,115
+.byte 96,129,79,220,34,42,144,136
+.byte 70,238,184,20,222,94,11,219
+.byte 224,50,58,10,73,6,36,92
+.byte 194,211,172,98,145,149,228,121
+.byte 231,200,55,109,141,213,78,169
+.byte 108,86,244,234,101,122,174,8
+.byte 186,120,37,46,28,166,180,198
+.byte 232,221,116,31,75,189,139,138
+.byte 112,62,181,102,72,3,246,14
+.byte 97,53,87,185,134,193,29,158
+.byte 225,248,152,17,105,217,142,148
+.byte 155,30,135,233,206,85,40,223
+.byte 140,161,137,13,191,230,66,104
+.byte 65,153,45,15,176,84,187,22
+.long 1,2,4,8
+.long 16,32,64,128
+.long 27,54,0,0
+.long 0,0,0,0
+.globl _AES_encrypt
+.align 4
+_AES_encrypt:
+L_AES_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call L004pic_point
+L004pic_point:
+ popl %ebp
+ leal _OPENSSL_ia32cap_P,%eax
+ leal LAES_Te-L004pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc L005x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call __sse_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4,0x90
+L005x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call __x86_AES_encrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4
+__x86_AES_decrypt_compact:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+.align 4,0x90
+L006loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%eax
+ subl %edi,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ecx,%eax
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ecx,%ebx
+ roll $8,%ecx
+ xorl %esi,%ebp
+ xorl %eax,%ecx
+ xorl %ebp,%eax
+ roll $24,%eax
+ xorl %ebx,%ecx
+ xorl %ebp,%ebx
+ roll $16,%ebx
+ xorl %ebp,%ecx
+ roll $8,%ebp
+ xorl %eax,%ecx
+ xorl %ebx,%ecx
+ movl 4(%esp),%eax
+ xorl %ebp,%ecx
+ movl %ecx,12(%esp)
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebx
+ subl %edi,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %edx,%ebx
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %edx,%ecx
+ roll $8,%edx
+ xorl %esi,%ebp
+ xorl %ebx,%edx
+ xorl %ebp,%ebx
+ roll $24,%ebx
+ xorl %ecx,%edx
+ xorl %ebp,%ecx
+ roll $16,%ecx
+ xorl %ebp,%edx
+ roll $8,%ebp
+ xorl %ebx,%edx
+ xorl %ecx,%edx
+ movl 8(%esp),%ebx
+ xorl %ebp,%edx
+ movl %edx,16(%esp)
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%eax,%eax,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %eax,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %eax,%edx
+ roll $8,%eax
+ xorl %esi,%ebp
+ xorl %ecx,%eax
+ xorl %ebp,%ecx
+ roll $24,%ecx
+ xorl %edx,%eax
+ xorl %ebp,%edx
+ roll $16,%edx
+ xorl %ebp,%eax
+ roll $8,%ebp
+ xorl %ecx,%eax
+ xorl %edx,%eax
+ xorl %ebp,%eax
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ebx,%ebx,1),%ecx
+ subl %edi,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%ecx,%ecx,1),%edx
+ subl %edi,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%edi
+ shrl $7,%edi
+ leal (%edx,%edx,1),%ebp
+ subl %edi,%esi
+ andl $4278124286,%ebp
+ andl $454761243,%esi
+ xorl %ebx,%edx
+ roll $8,%ebx
+ xorl %esi,%ebp
+ xorl %ecx,%ebx
+ xorl %ebp,%ecx
+ roll $24,%ecx
+ xorl %edx,%ebx
+ xorl %ebp,%edx
+ roll $16,%edx
+ xorl %ebp,%ebx
+ roll $8,%ebp
+ xorl %ecx,%ebx
+ xorl %edx,%ebx
+ movl 12(%esp),%ecx
+ xorl %ebp,%ebx
+ movl 16(%esp),%edx
+ movl 20(%esp),%edi
+ movl 28(%esp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb L006loop
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl -128(%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl -128(%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ xorl 16(%edi),%eax
+ xorl 20(%edi),%ebx
+ xorl 24(%edi),%ecx
+ xorl 28(%edi),%edx
+ ret
+.align 4
+__sse_AES_decrypt_compact:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+ movl $454761243,%eax
+ movl %eax,8(%esp)
+ movl %eax,12(%esp)
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+.align 4,0x90
+L007loop:
+ pshufw $12,%mm0,%mm1
+ movd %mm1,%eax
+ pshufw $9,%mm4,%mm5
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%ecx
+ movd %mm5,%ebx
+ movzbl %ah,%edx
+ movzbl -128(%ebp,%edx,1),%edx
+ shll $8,%edx
+ pshufw $6,%mm0,%mm2
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%ecx
+ shrl $16,%eax
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ pshufw $3,%mm4,%mm6
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%ecx
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ movd %ecx,%mm0
+ movzbl %al,%esi
+ movd %mm2,%eax
+ movzbl -128(%ebp,%esi,1),%ecx
+ shll $16,%ecx
+ movzbl %bl,%esi
+ movd %mm6,%ebx
+ movzbl -128(%ebp,%esi,1),%esi
+ orl %esi,%ecx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ orl %esi,%edx
+ movzbl %bl,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%edx
+ movd %edx,%mm1
+ movzbl %ah,%esi
+ movzbl -128(%ebp,%esi,1),%edx
+ shll $8,%edx
+ movzbl %bh,%esi
+ shrl $16,%eax
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $24,%esi
+ orl %esi,%edx
+ shrl $16,%ebx
+ punpckldq %mm1,%mm0
+ movzbl %bh,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $8,%esi
+ orl %esi,%ecx
+ andl $255,%ebx
+ movzbl -128(%ebp,%ebx,1),%ebx
+ orl %ebx,%edx
+ movzbl %al,%esi
+ movzbl -128(%ebp,%esi,1),%esi
+ shll $16,%esi
+ orl %esi,%edx
+ movd %edx,%mm4
+ movzbl %ah,%eax
+ movzbl -128(%ebp,%eax,1),%eax
+ shll $24,%eax
+ orl %eax,%ecx
+ movd %ecx,%mm5
+ punpckldq %mm5,%mm4
+ addl $16,%edi
+ cmpl 24(%esp),%edi
+ ja L008out
+ movq %mm0,%mm3
+ movq %mm4,%mm7
+ pshufw $228,%mm0,%mm2
+ pshufw $228,%mm4,%mm6
+ movq %mm0,%mm1
+ movq %mm4,%mm5
+ pshufw $177,%mm0,%mm0
+ pshufw $177,%mm4,%mm4
+ pslld $8,%mm2
+ pslld $8,%mm6
+ psrld $8,%mm3
+ psrld $8,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pslld $16,%mm2
+ pslld $16,%mm6
+ psrld $16,%mm3
+ psrld $16,%mm7
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movq 8(%esp),%mm3
+ pxor %mm2,%mm2
+ pxor %mm6,%mm6
+ pcmpgtb %mm1,%mm2
+ pcmpgtb %mm5,%mm6
+ pand %mm3,%mm2
+ pand %mm3,%mm6
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm2,%mm1
+ pxor %mm6,%mm5
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ movq %mm1,%mm2
+ movq %mm5,%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pslld $24,%mm3
+ pslld $24,%mm7
+ psrld $8,%mm2
+ psrld $8,%mm6
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ movq 8(%esp),%mm2
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pshufw $177,%mm1,%mm3
+ pshufw $177,%mm5,%mm7
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm3,%mm3
+ pxor %mm7,%mm7
+ pcmpgtb %mm1,%mm3
+ pcmpgtb %mm5,%mm7
+ pand %mm2,%mm3
+ pand %mm2,%mm7
+ paddb %mm1,%mm1
+ paddb %mm5,%mm5
+ pxor %mm3,%mm1
+ pxor %mm7,%mm5
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movq %mm1,%mm3
+ movq %mm5,%mm7
+ pshufw $177,%mm1,%mm2
+ pshufw $177,%mm5,%mm6
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ pslld $8,%mm1
+ pslld $8,%mm5
+ psrld $8,%mm3
+ psrld $8,%mm7
+ movq (%edi),%mm2
+ movq 8(%edi),%mm6
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ movl -128(%ebp),%eax
+ pslld $16,%mm1
+ pslld $16,%mm5
+ movl -64(%ebp),%ebx
+ psrld $16,%mm3
+ psrld $16,%mm7
+ movl (%ebp),%ecx
+ pxor %mm1,%mm0
+ pxor %mm5,%mm4
+ movl 64(%ebp),%edx
+ pxor %mm3,%mm0
+ pxor %mm7,%mm4
+ pxor %mm2,%mm0
+ pxor %mm6,%mm4
+ jmp L007loop
+.align 4,0x90
+L008out:
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ ret
+.align 4
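+	# __x86_AES_decrypt: big-table decryption of one block held in %eax..%edx;
+	# %edi -> round keys (240(%edi) = round count), %ebp -> LAES_Td; the final
+	# round falls through to byte lookups in the Td4 copies after the table.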
+__x86_AES_decrypt:
+ movl %edi,20(%esp)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,24(%esp)
+.align 4,0x90
+L009loop:
+ movl %eax,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %dh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %ah,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movl (%ebp,%esi,8),%esi
+ movzbl %bh,%edi
+ xorl 3(%ebp,%edi,8),%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ xorl 2(%ebp,%edi,8),%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ xorl 1(%ebp,%edi,8),%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movl (%ebp,%edx,8),%edx
+ movzbl %ch,%ecx
+ xorl 3(%ebp,%ecx,8),%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ xorl 2(%ebp,%ebx,8),%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ xorl 1(%ebp,%eax,8),%edx
+ movl 4(%esp),%eax
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ cmpl 24(%esp),%edi
+ movl %edi,20(%esp)
+ jb L009loop
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%edi
+ movl -96(%ebp),%esi
+ movl -64(%ebp),%edi
+ movl -32(%ebp),%esi
+ movl (%ebp),%edi
+ movl 32(%ebp),%esi
+ movl 64(%ebp),%edi
+ movl 96(%ebp),%esi
+ leal -128(%ebp),%ebp
+ movl %eax,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %dh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ebx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,4(%esp)
+ movl %ebx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %ah,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %ecx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl %esi,8(%esp)
+ movl %ecx,%esi
+ andl $255,%esi
+ movzbl (%ebp,%esi,1),%esi
+ movzbl %bh,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $8,%edi
+ xorl %edi,%esi
+ movl %eax,%edi
+ shrl $16,%edi
+ andl $255,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $16,%edi
+ xorl %edi,%esi
+ movl %edx,%edi
+ shrl $24,%edi
+ movzbl (%ebp,%edi,1),%edi
+ shll $24,%edi
+ xorl %edi,%esi
+ movl 20(%esp),%edi
+ andl $255,%edx
+ movzbl (%ebp,%edx,1),%edx
+ movzbl %ch,%ecx
+ movzbl (%ebp,%ecx,1),%ecx
+ shll $8,%ecx
+ xorl %ecx,%edx
+ movl %esi,%ecx
+ shrl $16,%ebx
+ andl $255,%ebx
+ movzbl (%ebp,%ebx,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%edx
+ movl 8(%esp),%ebx
+ shrl $24,%eax
+ movzbl (%ebp,%eax,1),%eax
+ shll $24,%eax
+ xorl %eax,%edx
+ movl 4(%esp),%eax
+ leal -2048(%ebp),%ebp
+ addl $16,%edi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.align 6,0x90
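+	# LAES_Td: AES decryption table; every word is stored twice (entries 8
+	# bytes apart) so a 4-byte load at offset 0..3 yields a byte-rotated variant.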
+LAES_Td:
+.long 1353184337,1353184337
+.long 1399144830,1399144830
+.long 3282310938,3282310938
+.long 2522752826,2522752826
+.long 3412831035,3412831035
+.long 4047871263,4047871263
+.long 2874735276,2874735276
+.long 2466505547,2466505547
+.long 1442459680,1442459680
+.long 4134368941,4134368941
+.long 2440481928,2440481928
+.long 625738485,625738485
+.long 4242007375,4242007375
+.long 3620416197,3620416197
+.long 2151953702,2151953702
+.long 2409849525,2409849525
+.long 1230680542,1230680542
+.long 1729870373,1729870373
+.long 2551114309,2551114309
+.long 3787521629,3787521629
+.long 41234371,41234371
+.long 317738113,317738113
+.long 2744600205,2744600205
+.long 3338261355,3338261355
+.long 3881799427,3881799427
+.long 2510066197,2510066197
+.long 3950669247,3950669247
+.long 3663286933,3663286933
+.long 763608788,763608788
+.long 3542185048,3542185048
+.long 694804553,694804553
+.long 1154009486,1154009486
+.long 1787413109,1787413109
+.long 2021232372,2021232372
+.long 1799248025,1799248025
+.long 3715217703,3715217703
+.long 3058688446,3058688446
+.long 397248752,397248752
+.long 1722556617,1722556617
+.long 3023752829,3023752829
+.long 407560035,407560035
+.long 2184256229,2184256229
+.long 1613975959,1613975959
+.long 1165972322,1165972322
+.long 3765920945,3765920945
+.long 2226023355,2226023355
+.long 480281086,480281086
+.long 2485848313,2485848313
+.long 1483229296,1483229296
+.long 436028815,436028815
+.long 2272059028,2272059028
+.long 3086515026,3086515026
+.long 601060267,601060267
+.long 3791801202,3791801202
+.long 1468997603,1468997603
+.long 715871590,715871590
+.long 120122290,120122290
+.long 63092015,63092015
+.long 2591802758,2591802758
+.long 2768779219,2768779219
+.long 4068943920,4068943920
+.long 2997206819,2997206819
+.long 3127509762,3127509762
+.long 1552029421,1552029421
+.long 723308426,723308426
+.long 2461301159,2461301159
+.long 4042393587,4042393587
+.long 2715969870,2715969870
+.long 3455375973,3455375973
+.long 3586000134,3586000134
+.long 526529745,526529745
+.long 2331944644,2331944644
+.long 2639474228,2639474228
+.long 2689987490,2689987490
+.long 853641733,853641733
+.long 1978398372,1978398372
+.long 971801355,971801355
+.long 2867814464,2867814464
+.long 111112542,111112542
+.long 1360031421,1360031421
+.long 4186579262,4186579262
+.long 1023860118,1023860118
+.long 2919579357,2919579357
+.long 1186850381,1186850381
+.long 3045938321,3045938321
+.long 90031217,90031217
+.long 1876166148,1876166148
+.long 4279586912,4279586912
+.long 620468249,620468249
+.long 2548678102,2548678102
+.long 3426959497,3426959497
+.long 2006899047,2006899047
+.long 3175278768,3175278768
+.long 2290845959,2290845959
+.long 945494503,945494503
+.long 3689859193,3689859193
+.long 1191869601,1191869601
+.long 3910091388,3910091388
+.long 3374220536,3374220536
+.long 0,0
+.long 2206629897,2206629897
+.long 1223502642,1223502642
+.long 2893025566,2893025566
+.long 1316117100,1316117100
+.long 4227796733,4227796733
+.long 1446544655,1446544655
+.long 517320253,517320253
+.long 658058550,658058550
+.long 1691946762,1691946762
+.long 564550760,564550760
+.long 3511966619,3511966619
+.long 976107044,976107044
+.long 2976320012,2976320012
+.long 266819475,266819475
+.long 3533106868,3533106868
+.long 2660342555,2660342555
+.long 1338359936,1338359936
+.long 2720062561,2720062561
+.long 1766553434,1766553434
+.long 370807324,370807324
+.long 179999714,179999714
+.long 3844776128,3844776128
+.long 1138762300,1138762300
+.long 488053522,488053522
+.long 185403662,185403662
+.long 2915535858,2915535858
+.long 3114841645,3114841645
+.long 3366526484,3366526484
+.long 2233069911,2233069911
+.long 1275557295,1275557295
+.long 3151862254,3151862254
+.long 4250959779,4250959779
+.long 2670068215,2670068215
+.long 3170202204,3170202204
+.long 3309004356,3309004356
+.long 880737115,880737115
+.long 1982415755,1982415755
+.long 3703972811,3703972811
+.long 1761406390,1761406390
+.long 1676797112,1676797112
+.long 3403428311,3403428311
+.long 277177154,277177154
+.long 1076008723,1076008723
+.long 538035844,538035844
+.long 2099530373,2099530373
+.long 4164795346,4164795346
+.long 288553390,288553390
+.long 1839278535,1839278535
+.long 1261411869,1261411869
+.long 4080055004,4080055004
+.long 3964831245,3964831245
+.long 3504587127,3504587127
+.long 1813426987,1813426987
+.long 2579067049,2579067049
+.long 4199060497,4199060497
+.long 577038663,577038663
+.long 3297574056,3297574056
+.long 440397984,440397984
+.long 3626794326,3626794326
+.long 4019204898,4019204898
+.long 3343796615,3343796615
+.long 3251714265,3251714265
+.long 4272081548,4272081548
+.long 906744984,906744984
+.long 3481400742,3481400742
+.long 685669029,685669029
+.long 646887386,646887386
+.long 2764025151,2764025151
+.long 3835509292,3835509292
+.long 227702864,227702864
+.long 2613862250,2613862250
+.long 1648787028,1648787028
+.long 3256061430,3256061430
+.long 3904428176,3904428176
+.long 1593260334,1593260334
+.long 4121936770,4121936770
+.long 3196083615,3196083615
+.long 2090061929,2090061929
+.long 2838353263,2838353263
+.long 3004310991,3004310991
+.long 999926984,999926984
+.long 2809993232,2809993232
+.long 1852021992,1852021992
+.long 2075868123,2075868123
+.long 158869197,158869197
+.long 4095236462,4095236462
+.long 28809964,28809964
+.long 2828685187,2828685187
+.long 1701746150,1701746150
+.long 2129067946,2129067946
+.long 147831841,147831841
+.long 3873969647,3873969647
+.long 3650873274,3650873274
+.long 3459673930,3459673930
+.long 3557400554,3557400554
+.long 3598495785,3598495785
+.long 2947720241,2947720241
+.long 824393514,824393514
+.long 815048134,815048134
+.long 3227951669,3227951669
+.long 935087732,935087732
+.long 2798289660,2798289660
+.long 2966458592,2966458592
+.long 366520115,366520115
+.long 1251476721,1251476721
+.long 4158319681,4158319681
+.long 240176511,240176511
+.long 804688151,804688151
+.long 2379631990,2379631990
+.long 1303441219,1303441219
+.long 1414376140,1414376140
+.long 3741619940,3741619940
+.long 3820343710,3820343710
+.long 461924940,461924940
+.long 3089050817,3089050817
+.long 2136040774,2136040774
+.long 82468509,82468509
+.long 1563790337,1563790337
+.long 1937016826,1937016826
+.long 776014843,776014843
+.long 1511876531,1511876531
+.long 1389550482,1389550482
+.long 861278441,861278441
+.long 323475053,323475053
+.long 2355222426,2355222426
+.long 2047648055,2047648055
+.long 2383738969,2383738969
+.long 2302415851,2302415851
+.long 3995576782,3995576782
+.long 902390199,902390199
+.long 3991215329,3991215329
+.long 1018251130,1018251130
+.long 1507840668,1507840668
+.long 1064563285,1064563285
+.long 2043548696,2043548696
+.long 3208103795,3208103795
+.long 3939366739,3939366739
+.long 1537932639,1537932639
+.long 342834655,342834655
+.long 2262516856,2262516856
+.long 2180231114,2180231114
+.long 1053059257,1053059257
+.long 741614648,741614648
+.long 1598071746,1598071746
+.long 1925389590,1925389590
+.long 203809468,203809468
+.long 2336832552,2336832552
+.long 1100287487,1100287487
+.long 1895934009,1895934009
+.long 3736275976,3736275976
+.long 2632234200,2632234200
+.long 2428589668,2428589668
+.long 1636092795,1636092795
+.long 1890988757,1890988757
+.long 1952214088,1952214088
+.long 1113045200,1113045200
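+	# Four 256-byte copies of the AES inverse S-box (Td4); the compact code
+	# picks a copy by stack alignment, presumably as a cache-timing mitigation.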
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.byte 82,9,106,213,48,54,165,56
+.byte 191,64,163,158,129,243,215,251
+.byte 124,227,57,130,155,47,255,135
+.byte 52,142,67,68,196,222,233,203
+.byte 84,123,148,50,166,194,35,61
+.byte 238,76,149,11,66,250,195,78
+.byte 8,46,161,102,40,217,36,178
+.byte 118,91,162,73,109,139,209,37
+.byte 114,248,246,100,134,104,152,22
+.byte 212,164,92,204,93,101,182,146
+.byte 108,112,72,80,253,237,185,218
+.byte 94,21,70,87,167,141,157,132
+.byte 144,216,171,0,140,188,211,10
+.byte 247,228,88,5,184,179,69,6
+.byte 208,44,30,143,202,63,15,2
+.byte 193,175,189,3,1,19,138,107
+.byte 58,145,17,65,79,103,220,234
+.byte 151,242,207,206,240,180,230,115
+.byte 150,172,116,34,231,173,53,133
+.byte 226,249,55,232,28,117,223,110
+.byte 71,241,26,113,29,41,197,137
+.byte 111,183,98,14,170,24,190,27
+.byte 252,86,62,75,198,210,121,32
+.byte 154,219,192,254,120,205,90,244
+.byte 31,221,168,51,136,7,199,49
+.byte 177,18,16,89,39,128,236,95
+.byte 96,81,127,169,25,181,74,13
+.byte 45,229,122,159,147,201,156,239
+.byte 160,224,59,77,174,42,245,176
+.byte 200,235,187,60,131,83,153,97
+.byte 23,43,4,126,186,119,214,38
+.byte 225,105,20,99,85,33,12,125
+.globl _AES_decrypt
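+	# AES_decrypt(in, out, key): one 16-byte block; bit 25 (SSE) of
+	# _OPENSSL_ia32cap_P selects the MMX/SSE compact path over the integer one.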
+.align 4
+_AES_decrypt:
+L_AES_decrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%eax
+ subl $36,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ebx
+ subl %esp,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esp
+ addl $4,%esp
+ movl %eax,28(%esp)
+ call L010pic_point
+L010pic_point:
+ popl %ebp
+ leal _OPENSSL_ia32cap_P,%eax
+ leal LAES_Td-L010pic_point(%ebp),%ebp
+ leal 764(%esp),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ btl $25,(%eax)
+ jnc L011x86
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ call __sse_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4,0x90
+L011x86:
+ movl %ebp,24(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ call __x86_AES_decrypt_compact
+ movl 28(%esp),%esp
+ movl 24(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _AES_cbc_encrypt
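+	# AES_cbc_encrypt(in, out, len, key, ivec, enc): lengths of at least 512
+	# bytes that are a multiple of 16 take the fast big-table path; anything
+	# else falls to the compact slow path below.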
+.align 4
+_AES_cbc_encrypt:
+L_AES_cbc_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je L012drop_out
+ call L013pic_point
+L013pic_point:
+ popl %ebp
+ leal _OPENSSL_ia32cap_P,%eax
+ cmpl $0,40(%esp)
+ leal LAES_Te-L013pic_point(%ebp),%ebp
+ jne L014picked_te
+ leal LAES_Td-LAES_Te(%ebp),%ebp
+L014picked_te:
+ pushfl
+ cld
+ cmpl $512,%ecx
+ jb L015slow_way
+ testl $15,%ecx
+ jnz L015slow_way
+ leal -324(%esp),%esi
+ andl $-64,%esi
+ movl %ebp,%eax
+ leal 2304(%ebp),%ebx
+ movl %esi,%edx
+ andl $4095,%eax
+ andl $4095,%ebx
+ andl $4095,%edx
+ cmpl %ebx,%edx
+ jb L016tbl_break_out
+ subl %ebx,%edx
+ subl %edx,%esi
+ jmp L017tbl_ok
+.align 2,0x90
+L016tbl_break_out:
+ subl %eax,%edx
+ andl $4095,%edx
+ addl $384,%edx
+ subl %edx,%esi
+.align 2,0x90
+L017tbl_ok:
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 12(%edx),%edi
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl $0,316(%esp)
+ movl %edi,%ebx
+ movl $61,%ecx
+ subl %ebp,%ebx
+ movl %edi,%esi
+ andl $4095,%ebx
+ leal 76(%esp),%edi
+ cmpl $2304,%ebx
+ jb L018do_copy
+ cmpl $3852,%ebx
+ jb L019skip_copy
+.align 2,0x90
+L018do_copy:
+ movl %edi,44(%esp)
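+	# data encoding "mov %esi,%esi; rep movsl": copy the 61-dword key schedule
+	# onto the stack, apparently to keep it clear of the tables' cache lines.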
+.long 2784229001
+L019skip_copy:
+ movl $16,%edi
+.align 2,0x90
+L020prefetch_tbl:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%esi
+ leal 128(%ebp),%ebp
+ subl $1,%edi
+ jnz L020prefetch_tbl
+ subl $2048,%ebp
+ movl 32(%esp),%esi
+ movl 48(%esp),%edi
+ cmpl $0,%edx
+ je L021fast_decrypt
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 4,0x90
+L022fast_enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call __x86_AES_encrypt
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%esi
+ movl 40(%esp),%ecx
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz L022fast_enc_loop
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je L023skip_ezero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 2,0x90
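+	# data encoding "mov %esi,%esi; rep stosl": wipe the 60-dword key copy.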
+.long 2884892297
+L023skip_ezero:
+ movl 28(%esp),%esp
+ popfl
+L012drop_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L021fast_decrypt:
+ cmpl 36(%esp),%esi
+ je L024fast_dec_in_place
+ movl %edi,52(%esp)
+.align 2,0x90
+.align 4,0x90
+L025fast_dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call __x86_AES_decrypt
+ movl 52(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 36(%esp),%edi
+ movl 32(%esp),%esi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl %esi,52(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edi
+ movl %edi,36(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz L025fast_dec_loop
+ movl 52(%esp),%edi
+ movl 48(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp L026fast_dec_out
+.align 4,0x90
+L024fast_dec_in_place:
+L027fast_dec_in_place_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call __x86_AES_decrypt
+ movl 48(%esp),%edi
+ movl 36(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ subl $16,%ecx
+ movl %ecx,40(%esp)
+ jnz L027fast_dec_in_place_loop
+.align 2,0x90
+L026fast_dec_out:
+ cmpl $0,316(%esp)
+ movl 44(%esp),%edi
+ je L028skip_dzero
+ movl $60,%ecx
+ xorl %eax,%eax
+.align 2,0x90
+.long 2884892297
+L028skip_dzero:
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L015slow_way:
+ movl (%eax),%eax
+ movl 36(%esp),%edi
+ leal -80(%esp),%esi
+ andl $-64,%esi
+ leal -143(%edi),%ebx
+ subl %esi,%ebx
+ negl %ebx
+ andl $960,%ebx
+ subl %ebx,%esi
+ leal 768(%esi),%ebx
+ subl %ebp,%ebx
+ andl $768,%ebx
+ leal 2176(%ebp,%ebx,1),%ebp
+ leal 24(%esp),%edx
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %ebp,24(%esp)
+ movl %esi,28(%esp)
+ movl %eax,52(%esp)
+ movl (%edx),%eax
+ movl 4(%edx),%ebx
+ movl 16(%edx),%esi
+ movl 20(%edx),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edi,44(%esp)
+ movl %esi,48(%esp)
+ movl %esi,%edi
+ movl %eax,%esi
+ cmpl $0,%edx
+ je L029slow_decrypt
+ cmpl $16,%ecx
+ movl %ebx,%edx
+ jb L030slow_enc_tail
+ btl $25,52(%esp)
+ jnc L031slow_enc_x86
+ movq (%edi),%mm0
+ movq 8(%edi),%mm4
+.align 4,0x90
+L032slow_enc_loop_sse:
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call __sse_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl 40(%esp),%ecx
+ movq %mm0,(%edi)
+ movq %mm4,8(%edi)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae L032slow_enc_loop_sse
+ testl $15,%ecx
+ jnz L030slow_enc_tail
+ movl 48(%esp),%esi
+ movq %mm0,(%esi)
+ movq %mm4,8(%esi)
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L031slow_enc_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 2,0x90
+L033slow_enc_loop_x86:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 44(%esp),%edi
+ call __x86_AES_encrypt_compact
+ movl 32(%esp),%esi
+ movl 36(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ leal 16(%edi),%edx
+ movl %edx,36(%esp)
+ subl $16,%ecx
+ cmpl $16,%ecx
+ movl %ecx,40(%esp)
+ jae L033slow_enc_loop_x86
+ testl $15,%ecx
+ jnz L030slow_enc_tail
+ movl 48(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L030slow_enc_tail:
+ emms
+ movl %edx,%edi
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je L034enc_in_place
+.align 2,0x90
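+	# data encoding "mov %esi,%esi; rep movsb": copy the partial final block.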
+.long 2767451785
+ jmp L035enc_skip_in_place
+L034enc_in_place:
+ leal (%edi,%ecx,1),%edi
+L035enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 2,0x90
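+	# data encoding "mov %esi,%esi; rep stosb": zero-pad the last block.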
+.long 2868115081
+ movl 48(%esp),%edi
+ movl %edx,%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,40(%esp)
+ jmp L033slow_enc_loop_x86
+.align 4,0x90
+L029slow_decrypt:
+ btl $25,52(%esp)
+ jnc L036slow_dec_loop_x86
+.align 2,0x90
+L037slow_dec_loop_sse:
+ movq (%esi),%mm0
+ movq 8(%esi),%mm4
+ movl 44(%esp),%edi
+ call __sse_AES_decrypt_compact
+ movl 32(%esp),%esi
+ leal 60(%esp),%eax
+ movl 36(%esp),%ebx
+ movl 40(%esp),%ecx
+ movl 48(%esp),%edi
+ movq (%esi),%mm1
+ movq 8(%esi),%mm5
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm4
+ movq %mm1,(%edi)
+ movq %mm5,8(%edi)
+ subl $16,%ecx
+ jc L038slow_dec_partial_sse
+ movq %mm0,(%ebx)
+ movq %mm4,8(%ebx)
+ leal 16(%ebx),%ebx
+ movl %ebx,36(%esp)
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ movl %ecx,40(%esp)
+ jnz L037slow_dec_loop_sse
+ emms
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L038slow_dec_partial_sse:
+ movq %mm0,(%eax)
+ movq %mm4,8(%eax)
+ emms
+ addl $16,%ecx
+ movl %ebx,%edi
+ movl %eax,%esi
+.align 2,0x90
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L036slow_dec_loop_x86:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ leal 60(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 44(%esp),%edi
+ call __x86_AES_decrypt_compact
+ movl 48(%esp),%edi
+ movl 40(%esp),%esi
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc L039slow_dec_partial_x86
+ movl %esi,40(%esp)
+ movl 36(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,36(%esp)
+ leal 60(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,32(%esp)
+ jnz L036slow_dec_loop_x86
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 4,0x90
+L039slow_dec_partial_x86:
+ leal 60(%esp),%esi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 32(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 40(%esp),%ecx
+ movl 36(%esp),%edi
+ leal 60(%esp),%esi
+.align 2,0x90
+.long 2767451785
+ movl 28(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4
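+	# __x86_AES_set_encrypt_key: key expansion; dispatches on 128/192/256-bit
+	# key lengths and returns 0 on success, -1 for a NULL pointer, -2 otherwise.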
+__x86_AES_set_encrypt_key:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 24(%esp),%esi
+ movl 32(%esp),%edi
+ testl $-1,%esi
+ jz L040badpointer
+ testl $-1,%edi
+ jz L040badpointer
+ call L041pic_point
+L041pic_point:
+ popl %ebp
+ leal LAES_Te-L041pic_point(%ebp),%ebp
+ leal 2176(%ebp),%ebp
+ movl -128(%ebp),%eax
+ movl -96(%ebp),%ebx
+ movl -64(%ebp),%ecx
+ movl -32(%ebp),%edx
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ movl 28(%esp),%ecx
+ cmpl $128,%ecx
+ je L04210rounds
+ cmpl $192,%ecx
+ je L04312rounds
+ cmpl $256,%ecx
+ je L04414rounds
+ movl $-2,%eax
+ jmp L045exit
+L04210rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ xorl %ecx,%ecx
+ jmp L04610shortcut
+.align 2,0x90
+L04710loop:
+ movl (%edi),%eax
+ movl 12(%edi),%edx
+L04610shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,16(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,20(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,24(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,28(%edi)
+ incl %ecx
+ addl $16,%edi
+ cmpl $10,%ecx
+ jl L04710loop
+ movl $10,80(%edi)
+ xorl %eax,%eax
+ jmp L045exit
+L04312rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%ecx
+ movl 20(%esi),%edx
+ movl %ecx,16(%edi)
+ movl %edx,20(%edi)
+ xorl %ecx,%ecx
+ jmp L04812shortcut
+.align 2,0x90
+L04912loop:
+ movl (%edi),%eax
+ movl 20(%edi),%edx
+L04812shortcut:
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,24(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,28(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,32(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,36(%edi)
+ cmpl $7,%ecx
+ je L05012break
+ incl %ecx
+ xorl 16(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,44(%edi)
+ addl $24,%edi
+ jmp L04912loop
+L05012break:
+ movl $12,72(%edi)
+ xorl %eax,%eax
+ jmp L045exit
+L04414rounds:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ movl %eax,16(%edi)
+ movl %ebx,20(%edi)
+ movl %ecx,24(%edi)
+ movl %edx,28(%edi)
+ xorl %ecx,%ecx
+ jmp L05114shortcut
+.align 2,0x90
+L05214loop:
+ movl 28(%edi),%edx
+L05114shortcut:
+ movl (%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $8,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $16,%ebx
+ xorl %ebx,%eax
+ xorl 896(%ebp,%ecx,4),%eax
+ movl %eax,32(%edi)
+ xorl 4(%edi),%eax
+ movl %eax,36(%edi)
+ xorl 8(%edi),%eax
+ movl %eax,40(%edi)
+ xorl 12(%edi),%eax
+ movl %eax,44(%edi)
+ cmpl $6,%ecx
+ je L05314break
+ incl %ecx
+ movl %eax,%edx
+ movl 16(%edi),%eax
+ movzbl %dl,%esi
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shrl $16,%edx
+ shll $8,%ebx
+ movzbl %dl,%esi
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ movzbl %dh,%esi
+ shll $16,%ebx
+ xorl %ebx,%eax
+ movzbl -128(%ebp,%esi,1),%ebx
+ shll $24,%ebx
+ xorl %ebx,%eax
+ movl %eax,48(%edi)
+ xorl 20(%edi),%eax
+ movl %eax,52(%edi)
+ xorl 24(%edi),%eax
+ movl %eax,56(%edi)
+ xorl 28(%edi),%eax
+ movl %eax,60(%edi)
+ addl $32,%edi
+ jmp L05214loop
+L05314break:
+ movl $14,48(%edi)
+ xorl %eax,%eax
+ jmp L045exit
+L040badpointer:
+ movl $-1,%eax
+L045exit:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _AES_set_encrypt_key
+.align 4
+_AES_set_encrypt_key:
+L_AES_set_encrypt_key_begin:
+ call __x86_AES_set_encrypt_key
+ ret
+.globl _AES_set_decrypt_key
+.align 4
+_AES_set_decrypt_key:
+L_AES_set_decrypt_key_begin:
+ call __x86_AES_set_encrypt_key
+ cmpl $0,%eax
+ je L054proceed
+ ret
+L054proceed:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%esi
+ movl 240(%esi),%ecx
+ leal (,%ecx,4),%ecx
+ leal (%esi,%ecx,4),%edi
+.align 2,0x90
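+	# L055invert: swap round keys end-for-end so decryption can walk the
+	# schedule forward.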
+L055invert:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl (%edi),%ecx
+ movl 4(%edi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,(%esi)
+ movl %edx,4(%esi)
+ movl 8(%esi),%eax
+ movl 12(%esi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,8(%edi)
+ movl %ebx,12(%edi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ addl $16,%esi
+ subl $16,%edi
+ cmpl %edi,%esi
+ jne L055invert
+ movl 28(%esp),%edi
+ movl 240(%edi),%esi
+ leal -2(%esi,%esi,1),%esi
+ leal (%edi,%esi,8),%esi
+ movl %esi,28(%esp)
+ movl 16(%edi),%eax
+.align 2,0x90
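+	# L056permute: apply InvMixColumns to the inner round keys; the masks
+	# 0x80808080/0xfefefefe/0x1b1b1b1b implement word-parallel xtime in GF(2^8).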
+L056permute:
+ addl $16,%edi
+ movl %eax,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %eax,%ebx
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ xorl %eax,%ecx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ roll $8,%eax
+ xorl %esi,%edx
+ movl 4(%edi),%ebp
+ xorl %ebx,%eax
+ xorl %edx,%ebx
+ xorl %ecx,%eax
+ roll $24,%ebx
+ xorl %edx,%ecx
+ xorl %edx,%eax
+ roll $16,%ecx
+ xorl %ebx,%eax
+ roll $8,%edx
+ xorl %ecx,%eax
+ movl %ebp,%ebx
+ xorl %edx,%eax
+ movl %eax,(%edi)
+ movl %ebx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ xorl %ecx,%esi
+ movl %esi,%ecx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %ebx,%ecx
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ xorl %ebx,%edx
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ roll $8,%ebx
+ xorl %esi,%eax
+ movl 8(%edi),%ebp
+ xorl %ecx,%ebx
+ xorl %eax,%ecx
+ xorl %edx,%ebx
+ roll $24,%ecx
+ xorl %eax,%edx
+ xorl %eax,%ebx
+ roll $16,%edx
+ xorl %ecx,%ebx
+ roll $8,%eax
+ xorl %edx,%ebx
+ movl %ebp,%ecx
+ xorl %eax,%ebx
+ movl %ebx,4(%edi)
+ movl %ecx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ecx,%ecx,1),%edx
+ subl %ebp,%esi
+ andl $4278124286,%edx
+ andl $454761243,%esi
+ xorl %edx,%esi
+ movl %esi,%edx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %ecx,%edx
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ xorl %ecx,%eax
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ roll $8,%ecx
+ xorl %esi,%ebx
+ movl 12(%edi),%ebp
+ xorl %edx,%ecx
+ xorl %ebx,%edx
+ xorl %eax,%ecx
+ roll $24,%edx
+ xorl %ebx,%eax
+ xorl %ebx,%ecx
+ roll $16,%eax
+ xorl %edx,%ecx
+ roll $8,%ebx
+ xorl %eax,%ecx
+ movl %ebp,%edx
+ xorl %ebx,%ecx
+ movl %ecx,8(%edi)
+ movl %edx,%esi
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%edx,%edx,1),%eax
+ subl %ebp,%esi
+ andl $4278124286,%eax
+ andl $454761243,%esi
+ xorl %eax,%esi
+ movl %esi,%eax
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%eax,%eax,1),%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ebx
+ andl $454761243,%esi
+ xorl %edx,%eax
+ xorl %ebx,%esi
+ movl %esi,%ebx
+ andl $2155905152,%esi
+ movl %esi,%ebp
+ shrl $7,%ebp
+ leal (%ebx,%ebx,1),%ecx
+ xorl %edx,%ebx
+ subl %ebp,%esi
+ andl $4278124286,%ecx
+ andl $454761243,%esi
+ roll $8,%edx
+ xorl %esi,%ecx
+ movl 16(%edi),%ebp
+ xorl %eax,%edx
+ xorl %ecx,%eax
+ xorl %ebx,%edx
+ roll $24,%eax
+ xorl %ecx,%ebx
+ xorl %ecx,%edx
+ roll $16,%ebx
+ xorl %eax,%edx
+ roll $8,%ecx
+ xorl %ebx,%edx
+ movl %ebp,%eax
+ xorl %ecx,%edx
+ movl %edx,12(%edi)
+ cmpl 28(%esp),%edi
+ jb L056permute
+ xorl %eax,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.comm _OPENSSL_ia32cap_P,4
diff --git a/deps/openssl/asm/x86-macosx-gas/bf/bf-686.s b/deps/openssl/asm/x86-macosx-gas/bf/bf-686.s
new file mode 100644
index 0000000000..013d2dec8e
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/bf/bf-686.s
@@ -0,0 +1,897 @@
+.file "bf-686.s"
+.text
+.globl _BF_encrypt
+.align 4
+_BF_encrypt:
+L_BF_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+
+ movl 20(%esp),%eax
+ movl (%eax),%ecx
+ movl 4(%eax),%edx
+
+	# P pointer, S-boxes and enc flag
+
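+	# (BF_KEY layout: P[0..17] at 0(%edi), S-boxes at 72, 1096, 2120, 3144)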
+ movl 24(%esp),%edi
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ xorl (%edi),%ecx
+
+ # Round 0
+
+ rorl $16,%ecx
+ movl 4(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 1
+
+ rorl $16,%edx
+ movl 8(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 2
+
+ rorl $16,%ecx
+ movl 12(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 3
+
+ rorl $16,%edx
+ movl 16(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 4
+
+ rorl $16,%ecx
+ movl 20(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 5
+
+ rorl $16,%edx
+ movl 24(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 6
+
+ rorl $16,%ecx
+ movl 28(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 7
+
+ rorl $16,%edx
+ movl 32(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 8
+
+ rorl $16,%ecx
+ movl 36(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 9
+
+ rorl $16,%edx
+ movl 40(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 10
+
+ rorl $16,%ecx
+ movl 44(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 11
+
+ rorl $16,%edx
+ movl 48(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 12
+
+ rorl $16,%ecx
+ movl 52(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 13
+
+ rorl $16,%edx
+ movl 56(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 14
+
+ rorl $16,%ecx
+ movl 60(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 15
+
+ rorl $16,%edx
+ movl 64(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+ xorl 68(%edi),%edx
+ movl 20(%esp),%eax
+ movl %edx,(%eax)
+ movl %ecx,4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _BF_decrypt
+.align 4
+_BF_decrypt:
+L_BF_decrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+
+ movl 20(%esp),%eax
+ movl (%eax),%ecx
+ movl 4(%eax),%edx
+
+	# P pointer, S-boxes and enc flag
+
+ movl 24(%esp),%edi
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ xorl 68(%edi),%ecx
+
+ # Round 16
+
+ rorl $16,%ecx
+ movl 64(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 15
+
+ rorl $16,%edx
+ movl 60(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 14
+
+ rorl $16,%ecx
+ movl 56(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 13
+
+ rorl $16,%edx
+ movl 52(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 12
+
+ rorl $16,%ecx
+ movl 48(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 11
+
+ rorl $16,%edx
+ movl 44(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 10
+
+ rorl $16,%ecx
+ movl 40(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 9
+
+ rorl $16,%edx
+ movl 36(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 8
+
+ rorl $16,%ecx
+ movl 32(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 7
+
+ rorl $16,%edx
+ movl 28(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 6
+
+ rorl $16,%ecx
+ movl 24(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 5
+
+ rorl $16,%edx
+ movl 20(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 4
+
+ rorl $16,%ecx
+ movl 16(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 3
+
+ rorl $16,%edx
+ movl 12(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+
+ # Round 2
+
+ rorl $16,%ecx
+ movl 8(%edi),%esi
+ movb %ch,%al
+ movb %cl,%bl
+ rorl $16,%ecx
+ xorl %esi,%edx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %ch,%al
+ movb %cl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%edx
+
+ # Round 1
+
+ rorl $16,%edx
+ movl 4(%edi),%esi
+ movb %dh,%al
+ movb %dl,%bl
+ rorl $16,%edx
+ xorl %esi,%ecx
+ movl 72(%edi,%eax,4),%esi
+ movl 1096(%edi,%ebx,4),%ebp
+ movb %dh,%al
+ movb %dl,%bl
+ addl %ebp,%esi
+ movl 2120(%edi,%eax,4),%eax
+ xorl %eax,%esi
+ movl 3144(%edi,%ebx,4),%ebp
+ addl %ebp,%esi
+ xorl %eax,%eax
+ xorl %esi,%ecx
+ xorl (%edi),%edx
+ movl 20(%esp),%eax
+ movl %edx,(%eax)
+ movl %ecx,4(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _BF_cbc_encrypt
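+	# BF_cbc_encrypt(in, out, length, schedule, ivec, enc)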
+.align 4
+_BF_cbc_encrypt:
+L_BF_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+ # getting iv ptr from parameter 4
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+ # getting encrypt flag from parameter 5
+
+ movl 56(%esp),%ecx
+ # get and push parameter 3
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz L000decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz L001encrypt_finish
+L002encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L002encrypt_loop
+L001encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L003finish
+ call L004PIC_point
+L004PIC_point:
+ popl %edx
+ leal L005cbc_enc_jmp_table-L004PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+L006ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+L007ej6:
+ movb 5(%esi),%dh
+L008ej5:
+ movb 4(%esi),%dl
+L009ej4:
+ movl (%esi),%ecx
+ jmp L010ejend
+L011ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+L012ej2:
+ movb 1(%esi),%ch
+L013ej1:
+ movb (%esi),%cl
+L010ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_BF_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp L003finish
+L000decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz L014decrypt_finish
+L015decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L015decrypt_loop
+L014decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L003finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_BF_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+L016dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+L017dj6:
+ movb %dh,5(%edi)
+L018dj5:
+ movb %dl,4(%edi)
+L019dj4:
+ movl %ecx,(%edi)
+ jmp L020djend
+L021dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+L022dj2:
+ movb %ch,1(%esi)
+L023dj1:
+ movb %cl,(%esi)
+L020djend:
+ jmp L003finish
+L003finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L005cbc_enc_jmp_table:
+.long 0
+.long L013ej1-L004PIC_point
+.long L012ej2-L004PIC_point
+.long L011ej3-L004PIC_point
+.long L009ej4-L004PIC_point
+.long L008ej5-L004PIC_point
+.long L007ej6-L004PIC_point
+.long L006ej7-L004PIC_point
+.align 6,0x90
diff --git a/deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s b/deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s
new file mode 100644
index 0000000000..48598cc62d
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/bn/x86-mont.s
@@ -0,0 +1,336 @@
+.file "../openssl/crypto/bn/asm/x86-mont.s"
+.text
+.globl _bn_mul_mont
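+	# bn_mul_mont(rp, ap, bp, np, n0, num): Montgomery multiplication; returns
+	# 1 when performed, 0 when num < 4 (caller falls back); identical ap/bp
+	# pointers are routed to the squaring path at L001bn_sqr_mont.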
+.align 4
+_bn_mul_mont:
+L_bn_mul_mont_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %eax,%eax
+ movl 40(%esp),%edi
+ cmpl $4,%edi
+ jl L000just_leave
+ leal 20(%esp),%esi
+ leal 24(%esp),%edx
+ movl %esp,%ebp
+ addl $2,%edi
+ negl %edi
+ leal -32(%esp,%edi,4),%esp
+ negl %edi
+ movl %esp,%eax
+ subl %edx,%eax
+ andl $2047,%eax
+ subl %eax,%esp
+ xorl %esp,%edx
+ andl $2048,%edx
+ xorl $2048,%edx
+ subl %edx,%esp
+ andl $-64,%esp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl 16(%esi),%esi
+ movl (%esi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+ movl %esi,20(%esp)
+ leal -3(%edi),%ebx
+ movl %ebp,24(%esp)
+ movl 8(%esp),%esi
+ leal 1(%ebx),%ebp
+ movl 12(%esp),%edi
+ xorl %ecx,%ecx
+ movl %esi,%edx
+ andl $1,%ebp
+ subl %edi,%edx
+ leal 4(%edi,%ebx,4),%eax
+ orl %edx,%ebp
+ movl (%edi),%edi
+ jz L001bn_sqr_mont
+ movl %eax,28(%esp)
+ movl (%esi),%eax
+ xorl %edx,%edx
+.align 4,0x90
+L002mull:
+ movl %edx,%ebp
+ mull %edi
+ addl %eax,%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ movl (%esi,%ecx,4),%eax
+ cmpl %ebx,%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ jl L002mull
+ movl %edx,%ebp
+ mull %edi
+ movl 20(%esp),%edi
+ addl %ebp,%eax
+ movl 16(%esp),%esi
+ adcl $0,%edx
+ imull 32(%esp),%edi
+ movl %eax,32(%esp,%ebx,4)
+ xorl %ecx,%ecx
+ movl %edx,36(%esp,%ebx,4)
+ movl %ecx,40(%esp,%ebx,4)
+ movl (%esi),%eax
+ mull %edi
+ addl 32(%esp),%eax
+ movl 4(%esi),%eax
+ adcl $0,%edx
+ incl %ecx
+ jmp L0032ndmadd
+.align 4,0x90
+L0041stmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ jl L0041stmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%eax
+ movl 20(%esp),%edi
+ adcl $0,%edx
+ movl 16(%esp),%esi
+ addl %eax,%ebp
+ adcl $0,%edx
+ imull 32(%esp),%edi
+ xorl %ecx,%ecx
+ addl 36(%esp,%ebx,4),%edx
+ movl %ebp,32(%esp,%ebx,4)
+ adcl $0,%ecx
+ movl (%esi),%eax
+ movl %edx,36(%esp,%ebx,4)
+ movl %ecx,40(%esp,%ebx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ movl 4(%esi),%eax
+ adcl $0,%edx
+ movl $1,%ecx
+.align 4,0x90
+L0032ndmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,24(%esp,%ecx,4)
+ jl L0032ndmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ebx,4)
+ xorl %eax,%eax
+ movl 12(%esp),%ecx
+ addl 36(%esp,%ebx,4),%edx
+ adcl 40(%esp,%ebx,4),%eax
+ leal 4(%ecx),%ecx
+ movl %edx,32(%esp,%ebx,4)
+ cmpl 28(%esp),%ecx
+ movl %eax,36(%esp,%ebx,4)
+ je L005common_tail
+ movl (%ecx),%edi
+ movl 8(%esp),%esi
+ movl %ecx,12(%esp)
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ movl (%esi),%eax
+ jmp L0041stmadd
+.align 4,0x90
+L001bn_sqr_mont:
+ movl %ebx,(%esp)
+ movl %ecx,12(%esp)
+ movl %edi,%eax
+ mull %edi
+ movl %eax,32(%esp)
+ movl %edx,%ebx
+ shrl $1,%edx
+ andl $1,%ebx
+ incl %ecx
+.align 4,0x90
+L006sqr:
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ leal 1(%ecx),%ecx
+ adcl $0,%edx
+ leal (%ebx,%eax,2),%ebp
+ shrl $31,%eax
+ cmpl (%esp),%ecx
+ movl %eax,%ebx
+ movl %ebp,28(%esp,%ecx,4)
+ jl L006sqr
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ movl 20(%esp),%edi
+ adcl $0,%edx
+ movl 16(%esp),%esi
+ leal (%ebx,%eax,2),%ebp
+ imull 32(%esp),%edi
+ shrl $31,%eax
+ movl %ebp,32(%esp,%ecx,4)
+ leal (%eax,%edx,2),%ebp
+ movl (%esi),%eax
+ shrl $31,%edx
+ movl %ebp,36(%esp,%ecx,4)
+ movl %edx,40(%esp,%ecx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ movl %ecx,%ebx
+ adcl $0,%edx
+ movl 4(%esi),%eax
+ movl $1,%ecx
+.align 4,0x90
+L0073rdmadd:
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ecx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl 4(%esi,%ecx,4),%eax
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ecx,4)
+ movl %edx,%ebp
+ mull %edi
+ addl 36(%esp,%ecx,4),%ebp
+ leal 2(%ecx),%ecx
+ adcl $0,%edx
+ addl %eax,%ebp
+ movl (%esi,%ecx,4),%eax
+ adcl $0,%edx
+ cmpl %ebx,%ecx
+ movl %ebp,24(%esp,%ecx,4)
+ jl L0073rdmadd
+ movl %edx,%ebp
+ mull %edi
+ addl 32(%esp,%ebx,4),%ebp
+ adcl $0,%edx
+ addl %eax,%ebp
+ adcl $0,%edx
+ movl %ebp,28(%esp,%ebx,4)
+ movl 12(%esp),%ecx
+ xorl %eax,%eax
+ movl 8(%esp),%esi
+ addl 36(%esp,%ebx,4),%edx
+ adcl 40(%esp,%ebx,4),%eax
+ movl %edx,32(%esp,%ebx,4)
+ cmpl %ebx,%ecx
+ movl %eax,36(%esp,%ebx,4)
+ je L005common_tail
+ movl 4(%esi,%ecx,4),%edi
+ leal 1(%ecx),%ecx
+ movl %edi,%eax
+ movl %ecx,12(%esp)
+ mull %edi
+ addl 32(%esp,%ecx,4),%eax
+ adcl $0,%edx
+ movl %eax,32(%esp,%ecx,4)
+ xorl %ebp,%ebp
+ cmpl %ebx,%ecx
+ leal 1(%ecx),%ecx
+ je L008sqrlast
+ movl %edx,%ebx
+ shrl $1,%edx
+ andl $1,%ebx
+.align 4,0x90
+L009sqradd:
+ movl (%esi,%ecx,4),%eax
+ movl %edx,%ebp
+ mull %edi
+ addl %ebp,%eax
+ leal (%eax,%eax,1),%ebp
+ adcl $0,%edx
+ shrl $31,%eax
+ addl 32(%esp,%ecx,4),%ebp
+ leal 1(%ecx),%ecx
+ adcl $0,%eax
+ addl %ebx,%ebp
+ adcl $0,%eax
+ cmpl (%esp),%ecx
+ movl %ebp,28(%esp,%ecx,4)
+ movl %eax,%ebx
+ jle L009sqradd
+ movl %edx,%ebp
+ addl %edx,%edx
+ shrl $31,%ebp
+ addl %ebx,%edx
+ adcl $0,%ebp
+L008sqrlast:
+ movl 20(%esp),%edi
+ movl 16(%esp),%esi
+ imull 32(%esp),%edi
+ addl 32(%esp,%ecx,4),%edx
+ movl (%esi),%eax
+ adcl $0,%ebp
+ movl %edx,32(%esp,%ecx,4)
+ movl %ebp,36(%esp,%ecx,4)
+ mull %edi
+ addl 32(%esp),%eax
+ leal -1(%ecx),%ebx
+ adcl $0,%edx
+ movl $1,%ecx
+ movl 4(%esi),%eax
+ jmp L0073rdmadd
+.align 4,0x90
+L005common_tail:
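+	# common_tail: final conditional subtraction. L010sub computes
+	# rp = tp - np; the sbbl/andl/notl/orl sequence then turns the final
+	# borrow into a mask that picks tp or rp without a branch, and
+	# L011copy writes the winner to rp while clobbering the temporary
+	# vector.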
+ movl 16(%esp),%ebp
+ movl 4(%esp),%edi
+ leal 32(%esp),%esi
+ movl (%esi),%eax
+ movl %ebx,%ecx
+ xorl %edx,%edx
+.align 4,0x90
+L010sub:
+ sbbl (%ebp,%edx,4),%eax
+ movl %eax,(%edi,%edx,4)
+ decl %ecx
+ movl 4(%esi,%edx,4),%eax
+ leal 1(%edx),%edx
+ jge L010sub
+ sbbl $0,%eax
+ andl %eax,%esi
+ notl %eax
+ movl %edi,%ebp
+ andl %eax,%ebp
+ orl %ebp,%esi
+.align 4,0x90
+L011copy:
+ movl (%esi,%ebx,4),%eax
+ movl %eax,(%edi,%ebx,4)
+ movl %ecx,32(%esp,%ebx,4)
+ decl %ebx
+ jge L011copy
+ movl 24(%esp),%esp
+ movl $1,%eax
+L000just_leave:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+.byte 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+.byte 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
+.byte 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
+.byte 111,114,103,62,0
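+	# The .byte sequence above spells the CRYPTOGAMS banner string:
+	# "Montgomery Multiplication for x86, CRYPTOGAMS by <appro@openssl.org>"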
diff --git a/deps/openssl/asm/x86-macosx-gas/bn/x86.s b/deps/openssl/asm/x86-macosx-gas/bn/x86.s
new file mode 100644
index 0000000000..eb975d247b
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/bn/x86.s
@@ -0,0 +1,2385 @@
+.file "../openssl/crypto/bn/asm/x86.s"
+.text
+.globl _bn_mul_add_words
+.align 4
+_bn_mul_add_words:
+L_bn_mul_add_words_begin:
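+	# bn_mul_add_words(rp, ap, num, w), per OpenSSL's C prototype:
+	# rp[i] += ap[i]*w for i < num, returning the final carry in %eax.
+	# The main loop is unrolled eight words deep; the "Round" comments
+	# below give byte offsets into the operands.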
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ xorl %esi,%esi
+ movl 20(%esp),%edi
+ movl 28(%esp),%ecx
+ movl 24(%esp),%ebx
+ andl $4294967288,%ecx
+ movl 32(%esp),%ebp
+ pushl %ecx
+ jz L000maw_finish
+L001maw_loop:
+ movl %ecx,(%esp)
+ # Round 0
+
+ movl (%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl (%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ # Round 4
+
+ movl 4(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 4(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ # Round 8
+
+ movl 8(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 8(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ # Round 12
+
+ movl 12(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 12(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ # Round 16
+
+ movl 16(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 16(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ # Round 20
+
+ movl 20(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 20(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ # Round 24
+
+ movl 24(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 24(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+ # Round 28
+
+ movl 28(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 28(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,28(%edi)
+ movl %edx,%esi
+
+ movl (%esp),%ecx
+ addl $32,%ebx
+ addl $32,%edi
+ subl $8,%ecx
+ jnz L001maw_loop
+L000maw_finish:
+ movl 32(%esp),%ecx
+ andl $7,%ecx
+ jnz L002maw_finish2
+ jmp L003maw_end
+L002maw_finish2:
+ # Tail Round 0
+
+ movl (%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl (%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 1
+
+ movl 4(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 4(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 2
+
+ movl 8(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 8(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 3
+
+ movl 12(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 12(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 4
+
+ movl 16(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 16(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 5
+
+ movl 20(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 20(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ decl %ecx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ jz L003maw_end
+ # Tail Round 6
+
+ movl 24(%ebx),%eax
+ mull %ebp
+ addl %esi,%eax
+ movl 24(%edi),%esi
+ adcl $0,%edx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+L003maw_end:
+ movl %esi,%eax
+ popl %ecx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_mul_words
+.align 4
+_bn_mul_words:
+L_bn_mul_words_begin:
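+	# bn_mul_words(rp, ap, num, w): rp[i] = ap[i]*w + carry, returning
+	# the final carry in %eax; same eight-way unrolling as above.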
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ xorl %esi,%esi
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ebp
+ movl 32(%esp),%ecx
+ andl $4294967288,%ebp
+ jz L004mw_finish
+L005mw_loop:
+ # Round 0
+
+ movl (%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ # Round 4
+
+ movl 4(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ # Round 8
+
+ movl 8(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ # Round 12
+
+ movl 12(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ # Round 16
+
+ movl 16(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ # Round 20
+
+ movl 20(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ # Round 24
+
+ movl 24(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+ # Round 28
+
+ movl 28(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,28(%edi)
+ movl %edx,%esi
+
+ addl $32,%ebx
+ addl $32,%edi
+ subl $8,%ebp
+ jz L004mw_finish
+ jmp L005mw_loop
+L004mw_finish:
+ movl 28(%esp),%ebp
+ andl $7,%ebp
+ jnz L006mw_finish2
+ jmp L007mw_end
+L006mw_finish2:
+ # Tail Round 0
+
+ movl (%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 1
+
+ movl 4(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,4(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 2
+
+ movl 8(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,8(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 3
+
+ movl 12(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,12(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 4
+
+ movl 16(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,16(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 5
+
+ movl 20(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,20(%edi)
+ movl %edx,%esi
+ decl %ebp
+ jz L007mw_end
+ # Tail Round 6
+
+ movl 24(%ebx),%eax
+ mull %ecx
+ addl %esi,%eax
+ adcl $0,%edx
+ movl %eax,24(%edi)
+ movl %edx,%esi
+L007mw_end:
+ movl %esi,%eax
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_sqr_words
+.align 4
+_bn_sqr_words:
+L_bn_sqr_words_begin:
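+	# bn_sqr_words(r, a, n): r[2i] and r[2i+1] receive the low and high
+	# halves of a[i]^2 for i < n.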
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%ebx
+ andl $4294967288,%ebx
+ jz L008sw_finish
+L009sw_loop:
+ # Round 0
+
+ movl (%edi),%eax
+ mull %eax
+ movl %eax,(%esi)
+ movl %edx,4(%esi)
+ # Round 4
+
+ movl 4(%edi),%eax
+ mull %eax
+ movl %eax,8(%esi)
+ movl %edx,12(%esi)
+ # Round 8
+
+ movl 8(%edi),%eax
+ mull %eax
+ movl %eax,16(%esi)
+ movl %edx,20(%esi)
+ # Round 12
+
+ movl 12(%edi),%eax
+ mull %eax
+ movl %eax,24(%esi)
+ movl %edx,28(%esi)
+ # Round 16
+
+ movl 16(%edi),%eax
+ mull %eax
+ movl %eax,32(%esi)
+ movl %edx,36(%esi)
+ # Round 20
+
+ movl 20(%edi),%eax
+ mull %eax
+ movl %eax,40(%esi)
+ movl %edx,44(%esi)
+ # Round 24
+
+ movl 24(%edi),%eax
+ mull %eax
+ movl %eax,48(%esi)
+ movl %edx,52(%esi)
+ # Round 28
+
+ movl 28(%edi),%eax
+ mull %eax
+ movl %eax,56(%esi)
+ movl %edx,60(%esi)
+
+ addl $32,%edi
+ addl $64,%esi
+ subl $8,%ebx
+ jnz L009sw_loop
+L008sw_finish:
+ movl 28(%esp),%ebx
+ andl $7,%ebx
+ jz L010sw_end
+ # Tail Round 0
+
+ movl (%edi),%eax
+ mull %eax
+ movl %eax,(%esi)
+ decl %ebx
+ movl %edx,4(%esi)
+ jz L010sw_end
+ # Tail Round 1
+
+ movl 4(%edi),%eax
+ mull %eax
+ movl %eax,8(%esi)
+ decl %ebx
+ movl %edx,12(%esi)
+ jz L010sw_end
+ # Tail Round 2
+
+ movl 8(%edi),%eax
+ mull %eax
+ movl %eax,16(%esi)
+ decl %ebx
+ movl %edx,20(%esi)
+ jz L010sw_end
+ # Tail Round 3
+
+ movl 12(%edi),%eax
+ mull %eax
+ movl %eax,24(%esi)
+ decl %ebx
+ movl %edx,28(%esi)
+ jz L010sw_end
+ # Tail Round 4
+
+ movl 16(%edi),%eax
+ mull %eax
+ movl %eax,32(%esi)
+ decl %ebx
+ movl %edx,36(%esi)
+ jz L010sw_end
+ # Tail Round 5
+
+ movl 20(%edi),%eax
+ mull %eax
+ movl %eax,40(%esi)
+ decl %ebx
+ movl %edx,44(%esi)
+ jz L010sw_end
+ # Tail Round 6
+
+ movl 24(%edi),%eax
+ mull %eax
+ movl %eax,48(%esi)
+ movl %edx,52(%esi)
+L010sw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_div_words
+.align 4
+_bn_div_words:
+L_bn_div_words_begin:
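+	# bn_div_words(h, l, d): returns (h<<32 | l) / d; a single divl with
+	# the dividend in %edx:%eax leaves the quotient in %eax.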
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edx
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ divl %ebx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_add_words
+.align 4
+_bn_add_words:
+L_bn_add_words_begin:
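+	# bn_add_words(r, a, b, n): r[i] = a[i] + b[i] with the carry
+	# propagated through the whole array; final carry returned in %eax.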
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%ebx
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ andl $4294967288,%ebp
+ jz L011aw_finish
+L012aw_loop:
+ # Round 0
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,(%ebx)
+ # Round 1
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,4(%ebx)
+ # Round 2
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,8(%ebx)
+ # Round 3
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,12(%ebx)
+ # Round 4
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,16(%ebx)
+ # Round 5
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,20(%ebx)
+ # Round 6
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+ # Round 7
+
+ movl 28(%esi),%ecx
+ movl 28(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,28(%ebx)
+
+ addl $32,%esi
+ addl $32,%edi
+ addl $32,%ebx
+ subl $8,%ebp
+ jnz L012aw_loop
+L011aw_finish:
+ movl 32(%esp),%ebp
+ andl $7,%ebp
+ jz L013aw_end
+ # Tail Round 0
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,(%ebx)
+ jz L013aw_end
+ # Tail Round 1
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,4(%ebx)
+ jz L013aw_end
+ # Tail Round 2
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,8(%ebx)
+ jz L013aw_end
+ # Tail Round 3
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,12(%ebx)
+ jz L013aw_end
+ # Tail Round 4
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,16(%ebx)
+ jz L013aw_end
+ # Tail Round 5
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,20(%ebx)
+ jz L013aw_end
+ # Tail Round 6
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ addl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ addl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+L013aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_sub_words
+.align 4
+_bn_sub_words:
+L_bn_sub_words_begin:
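+	# bn_sub_words(r, a, b, n): r[i] = a[i] - b[i] with the borrow
+	# propagated throughout; final borrow returned in %eax.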
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ movl 20(%esp),%ebx
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ andl $4294967288,%ebp
+ jz L014aw_finish
+L015aw_loop:
+ # Round 0
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,(%ebx)
+ # Round 1
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,4(%ebx)
+ # Round 2
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,8(%ebx)
+ # Round 3
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,12(%ebx)
+ # Round 4
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,16(%ebx)
+ # Round 5
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,20(%ebx)
+ # Round 6
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+ # Round 7
+
+ movl 28(%esi),%ecx
+ movl 28(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,28(%ebx)
+
+ addl $32,%esi
+ addl $32,%edi
+ addl $32,%ebx
+ subl $8,%ebp
+ jnz L015aw_loop
+L014aw_finish:
+ movl 32(%esp),%ebp
+ andl $7,%ebp
+ jz L016aw_end
+ # Tail Round 0
+
+ movl (%esi),%ecx
+ movl (%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,(%ebx)
+ jz L016aw_end
+ # Tail Round 1
+
+ movl 4(%esi),%ecx
+ movl 4(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,4(%ebx)
+ jz L016aw_end
+ # Tail Round 2
+
+ movl 8(%esi),%ecx
+ movl 8(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,8(%ebx)
+ jz L016aw_end
+ # Tail Round 3
+
+ movl 12(%esi),%ecx
+ movl 12(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,12(%ebx)
+ jz L016aw_end
+ # Tail Round 4
+
+ movl 16(%esi),%ecx
+ movl 16(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,16(%ebx)
+ jz L016aw_end
+ # Tail Round 5
+
+ movl 20(%esi),%ecx
+ movl 20(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ decl %ebp
+ movl %ecx,20(%ebx)
+ jz L016aw_end
+ # Tail Round 6
+
+ movl 24(%esi),%ecx
+ movl 24(%edi),%edx
+ subl %eax,%ecx
+ movl $0,%eax
+ adcl %eax,%eax
+ subl %edx,%ecx
+ adcl $0,%eax
+ movl %ecx,24(%ebx)
+L016aw_end:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _bn_mul_comba8
+.align 4
+_bn_mul_comba8:
+L_bn_mul_comba8_begin:
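+	# bn_mul_comba8(r, a, b): r[0..15] = a[0..7] * b[0..7] by Comba's
+	# column-wise method: each "Calculate word k" block accumulates
+	# every a[i]*b[j] with i+j == k into a three-register carry-save
+	# accumulator before storing r[k].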
+ pushl %esi
+ movl 12(%esp),%esi
+ pushl %edi
+ movl 20(%esp),%edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx,%ebx
+ movl (%esi),%eax
+ xorl %ecx,%ecx
+ movl (%edi),%edx
+ # ################## Calculate word 0
+
+ xorl %ebp,%ebp
+ # mul a[0]*b[0]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%eax)
+ movl 4(%esi),%eax
+ # saved r[0]
+
+ # ################## Calculate word 1
+
+ xorl %ebx,%ebx
+ # mul a[1]*b[0]
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ # mul a[0]*b[1]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,4(%eax)
+ movl 8(%esi),%eax
+ # saved r[1]
+
+ # ################## Calculate word 2
+
+ xorl %ecx,%ecx
+ # mul a[2]*b[0]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+ # mul a[1]*b[1]
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ # mul a[0]*b[2]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%eax)
+ movl 12(%esi),%eax
+ # saved r[2]
+
+ # ################## Calculate word 3
+
+ xorl %ebp,%ebp
+ # mul a[3]*b[0]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+ # mul a[2]*b[1]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+ # mul a[1]*b[2]
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ # mul a[0]*b[3]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,12(%eax)
+ movl 16(%esi),%eax
+ # saved r[3]
+
+ # ################## Calculate word 4
+
+ xorl %ebx,%ebx
+ # mul a[4]*b[0]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ # mul a[3]*b[1]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+ # mul a[2]*b[2]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+ # mul a[1]*b[3]
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+ # mul a[0]*b[4]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%eax)
+ movl 20(%esi),%eax
+ # saved r[4]
+
+ # ################## Calculate word 5
+
+ xorl %ecx,%ecx
+ # mul a[5]*b[0]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+ # mul a[4]*b[1]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 12(%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ # mul a[3]*b[2]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+ # mul a[2]*b[3]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 16(%edi),%edx
+ adcl $0,%ecx
+ # mul a[1]*b[4]
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+ # mul a[0]*b[5]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,20(%eax)
+ movl 24(%esi),%eax
+ # saved r[5]
+
+ # ################## Calculate word 6
+
+ xorl %ebp,%ebp
+ # mul a[6]*b[0]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+ # mul a[5]*b[1]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 16(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+ # mul a[4]*b[2]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 12(%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ # mul a[3]*b[3]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 16(%edi),%edx
+ adcl $0,%ebp
+ # mul a[2]*b[4]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 20(%edi),%edx
+ adcl $0,%ebp
+ # mul a[1]*b[5]
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+ # mul a[0]*b[6]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,24(%eax)
+ movl 28(%esi),%eax
+ # saved r[6]
+
+ # ################## Calculate word 7
+
+ xorl %ebx,%ebx
+ # mul a[7]*b[0]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ # mul a[6]*b[1]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+ # mul a[5]*b[2]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 16(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+ # mul a[4]*b[3]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+ # mul a[3]*b[4]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 20(%edi),%edx
+ adcl $0,%ebx
+ # mul a[2]*b[5]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 24(%edi),%edx
+ adcl $0,%ebx
+ # mul a[1]*b[6]
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+ # mul a[0]*b[7]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,28(%eax)
+ movl 28(%esi),%eax
+ # saved r[7]
+
+ # ################## Calculate word 8
+
+ xorl %ecx,%ecx
+ # mul a[7]*b[1]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 24(%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ # mul a[6]*b[2]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+ # mul a[5]*b[3]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 16(%edi),%edx
+ adcl $0,%ecx
+ # mul a[4]*b[4]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 12(%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+ # mul a[3]*b[5]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 24(%edi),%edx
+ adcl $0,%ecx
+ # mul a[2]*b[6]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 28(%edi),%edx
+ adcl $0,%ecx
+ # mul a[1]*b[7]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,32(%eax)
+ movl 28(%esi),%eax
+ # saved r[8]
+
+ # ################## Calculate word 9
+
+ xorl %ebp,%ebp
+ # mul a[7]*b[2]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 24(%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ # mul a[6]*b[3]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 16(%edi),%edx
+ adcl $0,%ebp
+ # mul a[5]*b[4]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 16(%esi),%eax
+ adcl %edx,%ecx
+ movl 20(%edi),%edx
+ adcl $0,%ebp
+ # mul a[4]*b[5]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 12(%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+ # mul a[3]*b[6]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 28(%edi),%edx
+ adcl $0,%ebp
+ # mul a[2]*b[7]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,36(%eax)
+ movl 28(%esi),%eax
+ # saved r[9]
+
+ # ################## Calculate word 10
+
+ xorl %ebx,%ebx
+ # mul a[7]*b[3]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+ # mul a[6]*b[4]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esi),%eax
+ adcl %edx,%ebp
+ movl 20(%edi),%edx
+ adcl $0,%ebx
+ # mul a[5]*b[5]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 16(%esi),%eax
+ adcl %edx,%ebp
+ movl 24(%edi),%edx
+ adcl $0,%ebx
+ # mul a[4]*b[6]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 12(%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+ # mul a[3]*b[7]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 16(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,40(%eax)
+ movl 28(%esi),%eax
+ # saved r[10]
+
+ # ################## Calculate word 11
+
+ xorl %ecx,%ecx
+ # mul a[7]*b[4]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 24(%esi),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+ # mul a[6]*b[5]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esi),%eax
+ adcl %edx,%ebx
+ movl 24(%edi),%edx
+ adcl $0,%ecx
+ # mul a[5]*b[6]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 16(%esi),%eax
+ adcl %edx,%ebx
+ movl 28(%edi),%edx
+ adcl $0,%ecx
+ # mul a[4]*b[7]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 20(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,44(%eax)
+ movl 28(%esi),%eax
+ # saved r[11]
+
+ # ################## Calculate word 12
+
+ xorl %ebp,%ebp
+ # mul a[7]*b[5]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 24(%esi),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+ # mul a[6]*b[6]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esi),%eax
+ adcl %edx,%ecx
+ movl 28(%edi),%edx
+ adcl $0,%ebp
+ # mul a[5]*b[7]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 24(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,48(%eax)
+ movl 28(%esi),%eax
+ # saved r[12]
+
+ # ################## Calculate word 13
+
+ xorl %ebx,%ebx
+ # mul a[7]*b[6]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 24(%esi),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+ # mul a[6]*b[7]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 28(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,52(%eax)
+ movl 28(%esi),%eax
+ # saved r[13]
+
+ # ################## Calculate word 14
+
+ xorl %ecx,%ecx
+ # mul a[7]*b[7]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ adcl $0,%ecx
+ movl %ebp,56(%eax)
+ # saved r[14]
+
+ # save r[15]
+
+ movl %ebx,60(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.globl _bn_mul_comba4
+.align 4
+_bn_mul_comba4:
+L_bn_mul_comba4_begin:
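+	# bn_mul_comba4(r, a, b): four-word variant of the Comba multiply
+	# above, r[0..7] = a[0..3] * b[0..3].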
+ pushl %esi
+ movl 12(%esp),%esi
+ pushl %edi
+ movl 20(%esp),%edi
+ pushl %ebp
+ pushl %ebx
+ xorl %ebx,%ebx
+ movl (%esi),%eax
+ xorl %ecx,%ecx
+ movl (%edi),%edx
+ # ################## Calculate word 0
+
+ xorl %ebp,%ebp
+ # mul a[0]*b[0]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl (%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%eax)
+ movl 4(%esi),%eax
+ # saved r[0]
+
+ # ################## Calculate word 1
+
+ xorl %ebx,%ebx
+ # mul a[1]*b[0]
+
+ mull %edx
+ addl %eax,%ecx
+ movl (%esi),%eax
+ adcl %edx,%ebp
+ movl 4(%edi),%edx
+ adcl $0,%ebx
+ # mul a[0]*b[1]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl (%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,4(%eax)
+ movl 8(%esi),%eax
+ # saved r[1]
+
+ # ################## Calculate word 2
+
+ xorl %ecx,%ecx
+ # mul a[2]*b[0]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 4(%esi),%eax
+ adcl %edx,%ebx
+ movl 4(%edi),%edx
+ adcl $0,%ecx
+ # mul a[1]*b[1]
+
+ mull %edx
+ addl %eax,%ebp
+ movl (%esi),%eax
+ adcl %edx,%ebx
+ movl 8(%edi),%edx
+ adcl $0,%ecx
+ # mul a[0]*b[2]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl (%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%eax)
+ movl 12(%esi),%eax
+ # saved r[2]
+
+ # ################## Calculate word 3
+
+ xorl %ebp,%ebp
+ # mul a[3]*b[0]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 8(%esi),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+ # mul a[2]*b[1]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 4(%esi),%eax
+ adcl %edx,%ecx
+ movl 8(%edi),%edx
+ adcl $0,%ebp
+ # mul a[1]*b[2]
+
+ mull %edx
+ addl %eax,%ebx
+ movl (%esi),%eax
+ adcl %edx,%ecx
+ movl 12(%edi),%edx
+ adcl $0,%ebp
+ # mul a[0]*b[3]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ movl 4(%edi),%edx
+ adcl $0,%ebp
+ movl %ebx,12(%eax)
+ movl 12(%esi),%eax
+ # saved r[3]
+
+ # ################## Calculate word 4
+
+ xorl %ebx,%ebx
+ # mul a[3]*b[1]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 8(%esi),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+ # mul a[2]*b[2]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 4(%esi),%eax
+ adcl %edx,%ebp
+ movl 12(%edi),%edx
+ adcl $0,%ebx
+ # mul a[1]*b[3]
+
+ mull %edx
+ addl %eax,%ecx
+ movl 20(%esp),%eax
+ adcl %edx,%ebp
+ movl 8(%edi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%eax)
+ movl 12(%esi),%eax
+ # saved r[4]
+
+ # ################## Calculate word 5
+
+ xorl %ecx,%ecx
+ # mul a[3]*b[2]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 8(%esi),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+ # mul a[2]*b[3]
+
+ mull %edx
+ addl %eax,%ebp
+ movl 20(%esp),%eax
+ adcl %edx,%ebx
+ movl 12(%edi),%edx
+ adcl $0,%ecx
+ movl %ebp,20(%eax)
+ movl 12(%esi),%eax
+ # saved r[5]
+
+ # ################## Calculate word 6
+
+ xorl %ebp,%ebp
+ # mul a[3]*b[3]
+
+ mull %edx
+ addl %eax,%ebx
+ movl 20(%esp),%eax
+ adcl %edx,%ecx
+ adcl $0,%ebp
+ movl %ebx,24(%eax)
+ # saved r[6]
+
+ # save r[7]
+
+ movl %ecx,28(%eax)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.globl _bn_sqr_comba8
+.align 4
+_bn_sqr_comba8:
+L_bn_sqr_comba8_begin:
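+	# bn_sqr_comba8(r, a): Comba squaring, r[0..15] = a^2. Off-diagonal
+	# terms a[i]*a[j] (i < j) are computed once and doubled in-register
+	# (the addl %eax,%eax / adcl %edx,%edx pairs); diagonal squares are
+	# added once.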
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp),%edi
+ movl 24(%esp),%esi
+ xorl %ebx,%ebx
+ xorl %ecx,%ecx
+ movl (%esi),%eax
+ # ############### Calculate word 0
+
+ xorl %ebp,%ebp
+ # sqr a[0]*a[0]
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%edi)
+ movl 4(%esi),%eax
+ # saved r[0]
+
+ # ############### Calculate word 1
+
+ xorl %ebx,%ebx
+ # sqr a[1]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,4(%edi)
+ movl (%esi),%edx
+ # saved r[1]
+
+ # ############### Calculate word 2
+
+ xorl %ecx,%ecx
+ # sqr a[2]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 4(%esi),%eax
+ adcl $0,%ecx
+ # sqr a[1]*a[1]
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl (%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%edi)
+ movl 12(%esi),%eax
+ # saved r[2]
+
+ # ############### Calculate word 3
+
+ xorl %ebp,%ebp
+ # sqr a[3]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 8(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+ # sqr a[2]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 16(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,12(%edi)
+ movl (%esi),%edx
+ # saved r[3]
+
+ # ############### Calculate word 4
+
+ xorl %ebx,%ebx
+ # sqr a[4]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 12(%esi),%eax
+ adcl $0,%ebx
+ movl 4(%esi),%edx
+ # sqr a[3]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ # sqr a[2]*a[2]
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl (%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%edi)
+ movl 20(%esi),%eax
+ # saved r[4]
+
+ # ############### Calculate word 5
+
+ xorl %ecx,%ecx
+ # sqr a[5]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 16(%esi),%eax
+ adcl $0,%ecx
+ movl 4(%esi),%edx
+ # sqr a[4]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 12(%esi),%eax
+ adcl $0,%ecx
+ movl 8(%esi),%edx
+ # sqr a[3]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,20(%edi)
+ movl (%esi),%edx
+ # saved r[5]
+
+ # ############### Calculate word 6
+
+ xorl %ebp,%ebp
+ # sqr a[6]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 20(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+ # sqr a[5]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 16(%esi),%eax
+ adcl $0,%ebp
+ movl 8(%esi),%edx
+ # sqr a[4]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 12(%esi),%eax
+ adcl $0,%ebp
+ # sqr a[3]*a[3]
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,24(%edi)
+ movl 28(%esi),%eax
+ # saved r[6]
+
+ # ############### Calculate word 7
+
+ xorl %ebx,%ebx
+ # sqr a[7]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 24(%esi),%eax
+ adcl $0,%ebx
+ movl 4(%esi),%edx
+ # sqr a[6]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 20(%esi),%eax
+ adcl $0,%ebx
+ movl 8(%esi),%edx
+ # sqr a[5]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 16(%esi),%eax
+ adcl $0,%ebx
+ movl 12(%esi),%edx
+ # sqr a[4]*a[3]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 28(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,28(%edi)
+ movl 4(%esi),%edx
+ # saved r[7]
+
+ # ############### Calculate word 8
+
+ xorl %ecx,%ecx
+ # sqr a[7]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl 8(%esi),%edx
+ # sqr a[6]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 20(%esi),%eax
+ adcl $0,%ecx
+ movl 12(%esi),%edx
+ # sqr a[5]*a[3]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 16(%esi),%eax
+ adcl $0,%ecx
+ # sqr a[4]*a[4]
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 8(%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,32(%edi)
+ movl 28(%esi),%eax
+ # saved r[8]
+
+ # ############### Calculate word 9
+
+ xorl %ebp,%ebp
+ # sqr a[7]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%eax
+ adcl $0,%ebp
+ movl 12(%esi),%edx
+ # sqr a[6]*a[3]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 20(%esi),%eax
+ adcl $0,%ebp
+ movl 16(%esi),%edx
+ # sqr a[5]*a[4]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 28(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,36(%edi)
+ movl 12(%esi),%edx
+ # saved r[9]
+
+ # ############### Calculate word 10
+
+ xorl %ebx,%ebx
+ # sqr a[7]*a[3]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 24(%esi),%eax
+ adcl $0,%ebx
+ movl 16(%esi),%edx
+ # sqr a[6]*a[4]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 20(%esi),%eax
+ adcl $0,%ebx
+ # sqr a[5]*a[5]
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 16(%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,40(%edi)
+ movl 28(%esi),%eax
+ # saved r[10]
+
+ # ############### Calculate word 11
+
+ xorl %ecx,%ecx
+ # sqr a[7]*a[4]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 24(%esi),%eax
+ adcl $0,%ecx
+ movl 20(%esi),%edx
+ # sqr a[6]*a[5]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 28(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,44(%edi)
+ movl 20(%esi),%edx
+ # saved r[11]
+
+ # ############### Calculate word 12
+
+ xorl %ebp,%ebp
+ # sqr a[7]*a[5]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%eax
+ adcl $0,%ebp
+ # sqr a[6]*a[6]
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 24(%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,48(%edi)
+ movl 28(%esi),%eax
+ # saved r[12]
+
+ # ############### Calculate word 13
+
+ xorl %ebx,%ebx
+ # sqr a[7]*a[6]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 28(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,52(%edi)
+ # saved r[13]
+
+ # ############### Calculate word 14
+
+ xorl %ecx,%ecx
+ # sqr a[7]*a[7]
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ adcl $0,%ecx
+ movl %ebp,56(%edi)
+ # saved r[14]
+
+ movl %ebx,60(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.globl _bn_sqr_comba4
+.align 4
+_bn_sqr_comba4:
+L_bn_sqr_comba4_begin:
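+	# bn_sqr_comba4(r, a): four-word variant, r[0..7] = a^2.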
+ pushl %esi
+ pushl %edi
+ pushl %ebp
+ pushl %ebx
+ movl 20(%esp),%edi
+ movl 24(%esp),%esi
+ xorl %ebx,%ebx
+ xorl %ecx,%ecx
+ movl (%esi),%eax
+ # ############### Calculate word 0
+
+ xorl %ebp,%ebp
+ # sqr a[0]*a[0]
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl (%esi),%edx
+ adcl $0,%ebp
+ movl %ebx,(%edi)
+ movl 4(%esi),%eax
+ # saved r[0]
+
+ # ############### Calculate word 1
+
+ xorl %ebx,%ebx
+ # sqr a[1]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ movl %ecx,4(%edi)
+ movl (%esi),%edx
+ # saved r[1]
+
+ # ############### Calculate word 2
+
+ xorl %ecx,%ecx
+ # sqr a[2]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 4(%esi),%eax
+ adcl $0,%ecx
+ # sqr a[1]*a[1]
+
+ mull %eax
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl (%esi),%edx
+ adcl $0,%ecx
+ movl %ebp,8(%edi)
+ movl 12(%esi),%eax
+ # saved r[2]
+
+ # ############### Calculate word 3
+
+ xorl %ebp,%ebp
+ # sqr a[3]*a[0]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 8(%esi),%eax
+ adcl $0,%ebp
+ movl 4(%esi),%edx
+ # sqr a[2]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebp
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ movl 12(%esi),%eax
+ adcl $0,%ebp
+ movl %ebx,12(%edi)
+ movl 4(%esi),%edx
+ # saved r[3]
+
+ # ############### Calculate word 4
+
+ xorl %ebx,%ebx
+ # sqr a[3]*a[1]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ebx
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%eax
+ adcl $0,%ebx
+ # sqr a[2]*a[2]
+
+ mull %eax
+ addl %eax,%ecx
+ adcl %edx,%ebp
+ movl 8(%esi),%edx
+ adcl $0,%ebx
+ movl %ecx,16(%edi)
+ movl 12(%esi),%eax
+ # saved r[4]
+
+ # ############### Calculate word 5
+
+ xorl %ecx,%ecx
+ # sqr a[3]*a[2]
+
+ mull %edx
+ addl %eax,%eax
+ adcl %edx,%edx
+ adcl $0,%ecx
+ addl %eax,%ebp
+ adcl %edx,%ebx
+ movl 12(%esi),%eax
+ adcl $0,%ecx
+ movl %ebp,20(%edi)
+ # saved r[5]
+
+ # ############### Calculate word 6
+
+ xorl %ebp,%ebp
+ # sqr a[3]*a[3]
+
+ mull %eax
+ addl %eax,%ebx
+ adcl %edx,%ecx
+ adcl $0,%ebp
+ movl %ebx,24(%edi)
+ # saved r[6]
+
+ movl %ecx,28(%edi)
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
diff --git a/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s b/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
new file mode 100644
index 0000000000..4d61caa680
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
@@ -0,0 +1,2353 @@
+.file "cmll-586.s"
+.text
+.globl _Camellia_EncryptBlock_Rounds
+.align 4
+_Camellia_EncryptBlock_Rounds:
+L_Camellia_EncryptBlock_Rounds_begin:
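+	# Camellia_EncryptBlock_Rounds(grandRounds, plaintext, keyTable, out):
+	# encrypts one 16-byte block. 16(%esp) is set to the end of the key
+	# schedule (keyTable + 64*grandRounds) and the caller's %esp is kept
+	# at 20(%esp) for the epilogue.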
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%eax
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ leal (%edi,%eax,1),%eax
+ movl %ebx,20(%esp)
+ movl %eax,16(%esp)
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L000pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_encrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 32(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _Camellia_EncryptBlock
+.align 4
+_Camellia_EncryptBlock:
+L_Camellia_EncryptBlock_begin:
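+	# Camellia_EncryptBlock(keyBitLength, plaintext, keyTable, out):
+	# rewrites its first argument into a grand-round count (the carry of
+	# 128 - keyBitLength turns 3 into 4 for 192/256-bit keys), then
+	# jumps to the _Rounds entry.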
+ movl $128,%eax
+ subl 4(%esp),%eax
+ movl $3,%eax
+ adcl $0,%eax
+ movl %eax,4(%esp)
+ jmp L_Camellia_EncryptBlock_Rounds_begin
+.globl _Camellia_encrypt
+.align 4
+_Camellia_encrypt:
+L_Camellia_encrypt_begin:
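+	# Camellia_encrypt(in, out, keyTable): like EncryptBlock_Rounds, but
+	# the round count is read from the key structure at 272(%edi).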
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ movl 272(%edi),%eax
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ leal (%edi,%eax,1),%eax
+ movl %ebx,20(%esp)
+ movl %eax,16(%esp)
+ call L001pic_point
+L001pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L001pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_encrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 24(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4
+__x86_Camellia_encrypt:
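+	# Inner encryption: %eax..%edx carry the 128-bit state as big-endian
+	# words, %edi walks the key schedule upward, %ebp points at
+	# LCamellia_SBOX. Each L002loop pass runs six Feistel rounds; the
+	# andl/roll/orl block before the jmp back applies the FL/FL^-1 layer
+	# between six-round passes.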
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl 16(%edi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+.align 4,0x90
+L002loop:
+ xorl %esi,%eax
+ xorl 20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 24(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 32(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl 36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 40(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 48(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl 52(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 56(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl 60(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 64(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ addl $64,%edi
+ cmpl 20(%esp),%edi
+ je L003done
+ andl %eax,%esi
+ movl 16(%esp),%edx
+ roll $1,%esi
+ movl %edx,%ecx
+ xorl %esi,%ebx
+ orl 12(%edi),%ecx
+ movl %ebx,8(%esp)
+ xorl 12(%esp),%ecx
+ movl 4(%edi),%esi
+ movl %ecx,12(%esp)
+ orl %ebx,%esi
+ andl 8(%edi),%ecx
+ xorl %esi,%eax
+ roll $1,%ecx
+ movl %eax,4(%esp)
+ xorl %ecx,%edx
+ movl 16(%edi),%esi
+ movl %edx,16(%esp)
+ jmp L002loop
+.align 3,0x90
+L003done:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ xorl %esi,%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ ret
+.globl _Camellia_DecryptBlock_Rounds
+.align 4
+_Camellia_DecryptBlock_Rounds:
+L_Camellia_DecryptBlock_Rounds_begin:
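+	# Camellia_DecryptBlock_Rounds: mirror of the encrypt entry above,
+	# except 16(%esp) holds the start of the key schedule, since
+	# decryption walks it backwards.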
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%eax
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ movl %edi,16(%esp)
+ leal (%edi,%eax,1),%edi
+ movl %ebx,20(%esp)
+ call L004pic_point
+L004pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L004pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_decrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 32(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _Camellia_DecryptBlock
+.align 4
+_Camellia_DecryptBlock:
+L_Camellia_DecryptBlock_begin:
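+	# Camellia_DecryptBlock: same 3-or-4 grand-round computation as the
+	# encrypt side, then jumps to the _Rounds decrypt entry.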
+ movl $128,%eax
+ subl 4(%esp),%eax
+ movl $3,%eax
+ adcl $0,%eax
+ movl %eax,4(%esp)
+ jmp L_Camellia_DecryptBlock_Rounds_begin
+.globl _Camellia_decrypt
+.align 4
+_Camellia_decrypt:
+L_Camellia_decrypt_begin:
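+	# Camellia_decrypt(in, out, keyTable): round count taken from
+	# 272(%edi); otherwise identical setup to Camellia_encrypt.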
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 28(%esp),%edi
+ movl %esp,%ebx
+ subl $28,%esp
+ andl $-64,%esp
+ movl 272(%edi),%eax
+ leal -127(%edi),%ecx
+ subl %esp,%ecx
+ negl %ecx
+ andl $960,%ecx
+ subl %ecx,%esp
+ addl $4,%esp
+ shll $6,%eax
+ movl %edi,16(%esp)
+ leal (%edi,%eax,1),%edi
+ movl %ebx,20(%esp)
+ call L005pic_point
+L005pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L005pic_point(%ebp),%ebp
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_decrypt
+ movl 20(%esp),%esp
+ bswap %eax
+ movl 24(%esp),%esi
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 4
+__x86_Camellia_decrypt:
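+	# Inner decryption: same round function as __x86_Camellia_encrypt,
+	# but the key schedule is consumed downward (negative displacements
+	# off %edi, subl $64,%edi per pass).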
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl -8(%edi),%esi
+ movl %eax,4(%esp)
+ movl %ebx,8(%esp)
+ movl %ecx,12(%esp)
+ movl %edx,16(%esp)
+.align 4,0x90
+L006loop:
+ xorl %esi,%eax
+ xorl -4(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -16(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -12(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -24(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl -20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -32(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -40(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ xorl %esi,%eax
+ xorl -36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 16(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 12(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl -48(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,16(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,12(%esp)
+ xorl %esi,%ecx
+ xorl -44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 8(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl 4(%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl -56(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,8(%esp)
+ xorl %edx,%eax
+ movl %eax,4(%esp)
+ subl $64,%edi
+ cmpl 20(%esp),%edi
+ je L007done
+ andl %eax,%esi
+ movl 16(%esp),%edx
+ roll $1,%esi
+ movl %edx,%ecx
+ xorl %esi,%ebx
+ orl 4(%edi),%ecx
+ movl %ebx,8(%esp)
+ xorl 12(%esp),%ecx
+ movl 12(%edi),%esi
+ movl %ecx,12(%esp)
+ orl %ebx,%esi
+ andl (%edi),%ecx
+ xorl %esi,%eax
+ roll $1,%ecx
+ movl %eax,4(%esp)
+ xorl %ecx,%edx
+ movl -8(%edi),%esi
+ movl %edx,16(%esp)
+ jmp L006loop
+.align 3,0x90
+L007done:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ xorl %esi,%ecx
+ xorl 12(%edi),%edx
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ ret
+.globl _Camellia_Ekeygen
+.align 4
+_Camellia_Ekeygen:
+L_Camellia_Ekeygen_begin:
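+	# Camellia_Ekeygen(keyBitLength, rawKey, keyTable): expands rawKey
+	# into the key schedule and returns the grand-round count (3 for
+	# 128-bit keys, 4 for 192/256-bit). For 192-bit keys the missing
+	# right half of KR is the complement of its left half (the notl
+	# pair at L0091st192), as the Camellia spec prescribes.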
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ subl $16,%esp
+ movl 36(%esp),%ebp
+ movl 40(%esp),%esi
+ movl 44(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ cmpl $128,%ebp
+ je L0081st128
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ cmpl $192,%ebp
+ je L0091st192
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ jmp L0101st256
+.align 2,0x90
+L0091st192:
+ movl %eax,%ecx
+ movl %ebx,%edx
+ notl %ecx
+ notl %edx
+.align 2,0x90
+L0101st256:
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%edi)
+ movl %ebx,36(%edi)
+ movl %ecx,40(%edi)
+ movl %edx,44(%edi)
+ xorl (%edi),%eax
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+.align 2,0x90
+L0081st128:
+ call L011pic_point
+L011pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L011pic_point(%ebp),%ebp
+ leal LCamellia_SIGMA-LCamellia_SBOX(%ebp),%edi
+ movl (%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 4(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 8(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 12(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 16(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 44(%esp),%esi
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ xorl 12(%esi),%edx
+ movl 16(%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 20(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 24(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 28(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 32(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 36(%esp),%esi
+ cmpl $128,%esi
+ jne L0122nd256
+ movl 44(%esp),%edi
+ leal 128(%edi),%edi
+ movl %eax,-112(%edi)
+ movl %ebx,-108(%edi)
+ movl %ecx,-104(%edi)
+ movl %edx,-100(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-80(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-76(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-72(%edi)
+ movl %edx,-68(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-64(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-60(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-56(%edi)
+ movl %edx,-52(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-32(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-28(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,-16(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,-12(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-8(%edi)
+ movl %edx,-4(%edi)
+ movl %ebx,%ebp
+ shll $2,%ebx
+ movl %ecx,%esi
+ shrl $30,%esi
+ shll $2,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $2,%edx
+ movl %ebx,32(%edi)
+ shrl $30,%esi
+ orl %esi,%ecx
+ shrl $30,%ebp
+ movl %eax,%esi
+ shrl $30,%esi
+ movl %ecx,36(%edi)
+ shll $2,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,40(%edi)
+ movl %eax,44(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,64(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,68(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,72(%edi)
+ movl %eax,76(%edi)
+ movl -128(%edi),%ebx
+ movl -124(%edi),%ecx
+ movl -120(%edi),%edx
+ movl -116(%edi),%eax
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-96(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-92(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-88(%edi)
+ movl %eax,-84(%edi)
+ movl %ebx,%ebp
+ shll $30,%ebx
+ movl %ecx,%esi
+ shrl $2,%esi
+ shll $30,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $30,%edx
+ movl %ebx,-48(%edi)
+ shrl $2,%esi
+ orl %esi,%ecx
+ shrl $2,%ebp
+ movl %eax,%esi
+ shrl $2,%esi
+ movl %ecx,-44(%edi)
+ shll $30,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-40(%edi)
+ movl %eax,-36(%edi)
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-24(%edi)
+ movl %eax,-20(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,4(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,8(%edi)
+ movl %eax,12(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,16(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,20(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,24(%edi)
+ movl %eax,28(%edi)
+ movl %ebx,%ebp
+ shll $17,%ebx
+ movl %ecx,%esi
+ shrl $15,%esi
+ shll $17,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $17,%edx
+ movl %ebx,48(%edi)
+ shrl $15,%esi
+ orl %esi,%ecx
+ shrl $15,%ebp
+ movl %eax,%esi
+ shrl $15,%esi
+ movl %ecx,52(%edi)
+ shll $17,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,56(%edi)
+ movl %eax,60(%edi)
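+	# return 3 "grand rounds" for a 128-bit schedule (18 Feistel rounds)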
+ movl $3,%eax
+ jmp L013done
+.align 4,0x90
+L0122nd256:
+ movl 44(%esp),%esi
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ xorl 32(%esi),%eax
+ xorl 36(%esi),%ebx
+ xorl 40(%esi),%ecx
+ xorl 44(%esi),%edx
+ movl 32(%edi),%esi
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ xorl %esi,%eax
+ xorl 36(%edi),%ebx
+ movzbl %ah,%esi
+ movl 2052(%ebp,%esi,8),%edx
+ movzbl %al,%esi
+ xorl 4(%ebp,%esi,8),%edx
+ shrl $16,%eax
+ movzbl %bl,%esi
+ movl (%ebp,%esi,8),%ecx
+ movzbl %ah,%esi
+ xorl (%ebp,%esi,8),%edx
+ movzbl %bh,%esi
+ xorl 4(%ebp,%esi,8),%ecx
+ shrl $16,%ebx
+ movzbl %al,%eax
+ xorl 2048(%ebp,%eax,8),%edx
+ movzbl %bh,%esi
+ movl 12(%esp),%eax
+ xorl %edx,%ecx
+ rorl $8,%edx
+ xorl 2048(%ebp,%esi,8),%ecx
+ movzbl %bl,%esi
+ movl 8(%esp),%ebx
+ xorl %eax,%edx
+ xorl 2052(%ebp,%esi,8),%ecx
+ movl 40(%edi),%esi
+ xorl %ecx,%edx
+ movl %edx,12(%esp)
+ xorl %ebx,%ecx
+ movl %ecx,8(%esp)
+ xorl %esi,%ecx
+ xorl 44(%edi),%edx
+ movzbl %ch,%esi
+ movl 2052(%ebp,%esi,8),%ebx
+ movzbl %cl,%esi
+ xorl 4(%ebp,%esi,8),%ebx
+ shrl $16,%ecx
+ movzbl %dl,%esi
+ movl (%ebp,%esi,8),%eax
+ movzbl %ch,%esi
+ xorl (%ebp,%esi,8),%ebx
+ movzbl %dh,%esi
+ xorl 4(%ebp,%esi,8),%eax
+ shrl $16,%edx
+ movzbl %cl,%ecx
+ xorl 2048(%ebp,%ecx,8),%ebx
+ movzbl %dh,%esi
+ movl 4(%esp),%ecx
+ xorl %ebx,%eax
+ rorl $8,%ebx
+ xorl 2048(%ebp,%esi,8),%eax
+ movzbl %dl,%esi
+ movl (%esp),%edx
+ xorl %ecx,%ebx
+ xorl 2052(%ebp,%esi,8),%eax
+ movl 48(%edi),%esi
+ xorl %eax,%ebx
+ movl %ebx,4(%esp)
+ xorl %edx,%eax
+ movl %eax,(%esp)
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 44(%esp),%edi
+ leal 128(%edi),%edi
+ movl %eax,-112(%edi)
+ movl %ebx,-108(%edi)
+ movl %ecx,-104(%edi)
+ movl %edx,-100(%edi)
+ movl %eax,%ebp
+ shll $30,%eax
+ movl %ebx,%esi
+ shrl $2,%esi
+ shll $30,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $30,%ecx
+ movl %eax,-48(%edi)
+ shrl $2,%esi
+ orl %esi,%ebx
+ shrl $2,%ebp
+ movl %edx,%esi
+ shrl $2,%esi
+ movl %ebx,-44(%edi)
+ shll $30,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-40(%edi)
+ movl %edx,-36(%edi)
+ movl %eax,%ebp
+ shll $30,%eax
+ movl %ebx,%esi
+ shrl $2,%esi
+ shll $30,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $30,%ecx
+ movl %eax,32(%edi)
+ shrl $2,%esi
+ orl %esi,%ebx
+ shrl $2,%ebp
+ movl %edx,%esi
+ shrl $2,%esi
+ movl %ebx,36(%edi)
+ shll $30,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,40(%edi)
+ movl %edx,44(%edi)
+ movl %ebx,%ebp
+ shll $19,%ebx
+ movl %ecx,%esi
+ shrl $13,%esi
+ shll $19,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $19,%edx
+ movl %ebx,128(%edi)
+ shrl $13,%esi
+ orl %esi,%ecx
+ shrl $13,%ebp
+ movl %eax,%esi
+ shrl $13,%esi
+ movl %ecx,132(%edi)
+ shll $19,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,136(%edi)
+ movl %eax,140(%edi)
+ movl -96(%edi),%ebx
+ movl -92(%edi),%ecx
+ movl -88(%edi),%edx
+ movl -84(%edi),%eax
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-96(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-92(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-88(%edi)
+ movl %eax,-84(%edi)
+ movl %ebx,%ebp
+ shll $15,%ebx
+ movl %ecx,%esi
+ shrl $17,%esi
+ shll $15,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $15,%edx
+ movl %ebx,-64(%edi)
+ shrl $17,%esi
+ orl %esi,%ecx
+ shrl $17,%ebp
+ movl %eax,%esi
+ shrl $17,%esi
+ movl %ecx,-60(%edi)
+ shll $15,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,-56(%edi)
+ movl %eax,-52(%edi)
+ movl %ebx,%ebp
+ shll $30,%ebx
+ movl %ecx,%esi
+ shrl $2,%esi
+ shll $30,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $30,%edx
+ movl %ebx,16(%edi)
+ shrl $2,%esi
+ orl %esi,%ecx
+ shrl $2,%ebp
+ movl %eax,%esi
+ shrl $2,%esi
+ movl %ecx,20(%edi)
+ shll $30,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,24(%edi)
+ movl %eax,28(%edi)
+ movl %ecx,%ebp
+ shll $2,%ecx
+ movl %edx,%esi
+ shrl $30,%esi
+ shll $2,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $2,%eax
+ movl %ecx,80(%edi)
+ shrl $30,%esi
+ orl %esi,%edx
+ shrl $30,%ebp
+ movl %ebx,%esi
+ shrl $30,%esi
+ movl %edx,84(%edi)
+ shll $2,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,88(%edi)
+ movl %ebx,92(%edi)
+ movl -80(%edi),%ecx
+ movl -76(%edi),%edx
+ movl -72(%edi),%eax
+ movl -68(%edi),%ebx
+ movl %ecx,%ebp
+ shll $15,%ecx
+ movl %edx,%esi
+ shrl $17,%esi
+ shll $15,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $15,%eax
+ movl %ecx,-80(%edi)
+ shrl $17,%esi
+ orl %esi,%edx
+ shrl $17,%ebp
+ movl %ebx,%esi
+ shrl $17,%esi
+ movl %edx,-76(%edi)
+ shll $15,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,-72(%edi)
+ movl %ebx,-68(%edi)
+ movl %ecx,%ebp
+ shll $30,%ecx
+ movl %edx,%esi
+ shrl $2,%esi
+ shll $30,%edx
+ orl %esi,%ecx
+ movl %eax,%esi
+ shll $30,%eax
+ movl %ecx,-16(%edi)
+ shrl $2,%esi
+ orl %esi,%edx
+ shrl $2,%ebp
+ movl %ebx,%esi
+ shrl $2,%esi
+ movl %edx,-12(%edi)
+ shll $30,%ebx
+ orl %esi,%eax
+ orl %ebp,%ebx
+ movl %eax,-8(%edi)
+ movl %ebx,-4(%edi)
+ movl %edx,64(%edi)
+ movl %eax,68(%edi)
+ movl %ebx,72(%edi)
+ movl %ecx,76(%edi)
+ movl %edx,%ebp
+ shll $17,%edx
+ movl %eax,%esi
+ shrl $15,%esi
+ shll $17,%eax
+ orl %esi,%edx
+ movl %ebx,%esi
+ shll $17,%ebx
+ movl %edx,96(%edi)
+ shrl $15,%esi
+ orl %esi,%eax
+ shrl $15,%ebp
+ movl %ecx,%esi
+ shrl $15,%esi
+ movl %eax,100(%edi)
+ shll $17,%ecx
+ orl %esi,%ebx
+ orl %ebp,%ecx
+ movl %ebx,104(%edi)
+ movl %ecx,108(%edi)
+ movl -128(%edi),%edx
+ movl -124(%edi),%eax
+ movl -120(%edi),%ebx
+ movl -116(%edi),%ecx
+ movl %eax,%ebp
+ shll $13,%eax
+ movl %ebx,%esi
+ shrl $19,%esi
+ shll $13,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $13,%ecx
+ movl %eax,-32(%edi)
+ shrl $19,%esi
+ orl %esi,%ebx
+ shrl $19,%ebp
+ movl %edx,%esi
+ shrl $19,%esi
+ movl %ebx,-28(%edi)
+ shll $13,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,-24(%edi)
+ movl %edx,-20(%edi)
+ movl %eax,%ebp
+ shll $15,%eax
+ movl %ebx,%esi
+ shrl $17,%esi
+ shll $15,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $15,%ecx
+ movl %eax,(%edi)
+ shrl $17,%esi
+ orl %esi,%ebx
+ shrl $17,%ebp
+ movl %edx,%esi
+ shrl $17,%esi
+ movl %ebx,4(%edi)
+ shll $15,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl %eax,%ebp
+ shll $17,%eax
+ movl %ebx,%esi
+ shrl $15,%esi
+ shll $17,%ebx
+ orl %esi,%eax
+ movl %ecx,%esi
+ shll $17,%ecx
+ movl %eax,48(%edi)
+ shrl $15,%esi
+ orl %esi,%ebx
+ shrl $15,%ebp
+ movl %edx,%esi
+ shrl $15,%esi
+ movl %ebx,52(%edi)
+ shll $17,%edx
+ orl %esi,%ecx
+ orl %ebp,%edx
+ movl %ecx,56(%edi)
+ movl %edx,60(%edi)
+ movl %ebx,%ebp
+ shll $2,%ebx
+ movl %ecx,%esi
+ shrl $30,%esi
+ shll $2,%ecx
+ orl %esi,%ebx
+ movl %edx,%esi
+ shll $2,%edx
+ movl %ebx,112(%edi)
+ shrl $30,%esi
+ orl %esi,%ecx
+ shrl $30,%ebp
+ movl %eax,%esi
+ shrl $30,%esi
+ movl %ecx,116(%edi)
+ shll $2,%eax
+ orl %esi,%edx
+ orl %ebp,%eax
+ movl %edx,120(%edi)
+ movl %eax,124(%edi)
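+	# return 4 "grand rounds" for a 192/256-bit schedule (24 Feistel rounds)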
+ movl $4,%eax
+L013done:
+ leal 144(%edi),%edx
+ addl $16,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _Camellia_set_key
+.align 4
+_Camellia_set_key:
+L_Camellia_set_key_begin:
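+	# Camellia_set_key(userKey,bits,key): returns -1 for NULL pointers,
+	# -2 for a bit length other than 128/192/256, 0 on success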
+ pushl %ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%edx
+ movl $-1,%eax
+ testl %ecx,%ecx
+ jz L014done
+ testl %edx,%edx
+ jz L014done
+ movl $-2,%eax
+ cmpl $256,%ebx
+ je L015arg_ok
+ cmpl $192,%ebx
+ je L015arg_ok
+ cmpl $128,%ebx
+ jne L014done
+.align 2,0x90
+L015arg_ok:
+ pushl %edx
+ pushl %ecx
+ pushl %ebx
+ call L_Camellia_Ekeygen_begin
+ addl $12,%esp
+ movl %eax,(%edx)
+ xorl %eax,%eax
+.align 2,0x90
+L014done:
+ popl %ebx
+ ret
+.align 6,0x90
+LCamellia_SIGMA:
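+# the six 64-bit Camellia key-schedule constants SIGMA1..SIGMA6
+# (SIGMA1 = 0xA09E667F3BCC908B), stored as 32-bit word pairs and
+# zero-padded to 64 bytes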
+.long 2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
+.align 6,0x90
+LCamellia_SBOX:
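+# 4KB of S-box data, apparently pre-combined with the P diffusion layer:
+# each 2KB half holds 256 pairs of 32-bit words, which the F-function
+# above indexes at offsets 0/4 and 2048/2052 with byte values scaled by 8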
+.long 1886416896,1886388336
+.long 2189591040,741081132
+.long 741092352,3014852787
+.long 3974949888,3233808576
+.long 3014898432,3840147684
+.long 656877312,1465319511
+.long 3233857536,3941204202
+.long 3857048832,2930639022
+.long 3840205824,589496355
+.long 2240120064,1802174571
+.long 1465341696,1162149957
+.long 892679424,2779054245
+.long 3941263872,3991732461
+.long 202116096,1330577487
+.long 2930683392,488439837
+.long 1094795520,2459041938
+.long 589505280,2256928902
+.long 4025478912,2947481775
+.long 1802201856,2088501372
+.long 2475922176,522125343
+.long 1162167552,1044250686
+.long 421075200,3705405660
+.long 2779096320,1583218782
+.long 555819264,185270283
+.long 3991792896,2795896998
+.long 235802112,960036921
+.long 1330597632,3587506389
+.long 1313754624,1566376029
+.long 488447232,3654877401
+.long 1701143808,1515847770
+.long 2459079168,1364262993
+.long 3183328512,1819017324
+.long 2256963072,2341142667
+.long 3099113472,2593783962
+.long 2947526400,4227531003
+.long 2408550144,2964324528
+.long 2088532992,1953759348
+.long 3958106880,724238379
+.long 522133248,4042260720
+.long 3469659648,2223243396
+.long 1044266496,3755933919
+.long 808464384,3419078859
+.long 3705461760,875823156
+.long 1600085760,1987444854
+.long 1583242752,1835860077
+.long 3318072576,2846425257
+.long 185273088,3520135377
+.long 437918208,67371012
+.long 2795939328,336855060
+.long 3789676800,976879674
+.long 960051456,3739091166
+.long 3402287616,286326801
+.long 3587560704,842137650
+.long 1195853568,2627469468
+.long 1566399744,1397948499
+.long 1027423488,4075946226
+.long 3654932736,4278059262
+.long 16843008,3486449871
+.long 1515870720,3284336835
+.long 3604403712,2054815866
+.long 1364283648,606339108
+.long 1448498688,3907518696
+.long 1819044864,1616904288
+.long 1296911616,1768489065
+.long 2341178112,2863268010
+.long 218959104,2694840480
+.long 2593823232,2711683233
+.long 1717986816,1650589794
+.long 4227595008,1414791252
+.long 3435973632,505282590
+.long 2964369408,3772776672
+.long 757935360,1684275300
+.long 1953788928,269484048
+.long 303174144,0
+.long 724249344,2745368739
+.long 538976256,1970602101
+.long 4042321920,2324299914
+.long 2981212416,3873833190
+.long 2223277056,151584777
+.long 2576980224,3722248413
+.long 3755990784,2273771655
+.long 1280068608,2206400643
+.long 3419130624,3452764365
+.long 3267543552,2425356432
+.long 875836416,1936916595
+.long 2122219008,4143317238
+.long 1987474944,2644312221
+.long 84215040,3216965823
+.long 1835887872,1381105746
+.long 3082270464,3638034648
+.long 2846468352,3368550600
+.long 825307392,3334865094
+.long 3520188672,2172715137
+.long 387389184,1869545583
+.long 67372032,320012307
+.long 3621246720,1667432547
+.long 336860160,3924361449
+.long 1482184704,2812739751
+.long 976894464,2677997727
+.long 1633771776,3166437564
+.long 3739147776,690552873
+.long 454761216,4193845497
+.long 286331136,791609391
+.long 471604224,3031695540
+.long 842150400,2021130360
+.long 252645120,101056518
+.long 2627509248,3890675943
+.long 370546176,1903231089
+.long 1397969664,3570663636
+.long 404232192,2880110763
+.long 4076007936,2290614408
+.long 572662272,2374828173
+.long 4278124032,1920073842
+.long 1145324544,3115909305
+.long 3486502656,4177002744
+.long 2998055424,2896953516
+.long 3284386560,909508662
+.long 3048584448,707395626
+.long 2054846976,1010565180
+.long 2442236160,4059103473
+.long 606348288,1077936192
+.long 134744064,3553820883
+.long 3907577856,3149594811
+.long 2829625344,1128464451
+.long 1616928768,353697813
+.long 4244438016,2913796269
+.long 1768515840,2004287607
+.long 1347440640,2155872384
+.long 2863311360,2189557890
+.long 3503345664,3974889708
+.long 2694881280,656867367
+.long 2105376000,3856990437
+.long 2711724288,2240086149
+.long 2307492096,892665909
+.long 1650614784,202113036
+.long 2543294208,1094778945
+.long 1414812672,4025417967
+.long 1532713728,2475884691
+.long 505290240,421068825
+.long 2509608192,555810849
+.long 3772833792,235798542
+.long 4294967040,1313734734
+.long 1684300800,1701118053
+.long 3537031680,3183280317
+.long 269488128,3099066552
+.long 3301229568,2408513679
+.long 0,3958046955
+.long 1212696576,3469607118
+.long 2745410304,808452144
+.long 4160222976,1600061535
+.long 1970631936,3318022341
+.long 3688618752,437911578
+.long 2324335104,3789619425
+.long 50529024,3402236106
+.long 3873891840,1195835463
+.long 3671775744,1027407933
+.long 151587072,16842753
+.long 1061109504,3604349142
+.long 3722304768,1448476758
+.long 2492765184,1296891981
+.long 2273806080,218955789
+.long 1549556736,1717960806
+.long 2206434048,3435921612
+.long 33686016,757923885
+.long 3452816640,303169554
+.long 1246382592,538968096
+.long 2425393152,2981167281
+.long 858993408,2576941209
+.long 1936945920,1280049228
+.long 1734829824,3267494082
+.long 4143379968,2122186878
+.long 4092850944,84213765
+.long 2644352256,3082223799
+.long 2139062016,825294897
+.long 3217014528,387383319
+.long 3806519808,3621191895
+.long 1381126656,1482162264
+.long 2610666240,1633747041
+.long 3638089728,454754331
+.long 640034304,471597084
+.long 3368601600,252641295
+.long 926365440,370540566
+.long 3334915584,404226072
+.long 993737472,572653602
+.long 2172748032,1145307204
+.long 2526451200,2998010034
+.long 1869573888,3048538293
+.long 1263225600,2442199185
+.long 320017152,134742024
+.long 3200171520,2829582504
+.long 1667457792,4244373756
+.long 774778368,1347420240
+.long 3924420864,3503292624
+.long 2038003968,2105344125
+.long 2812782336,2307457161
+.long 2358021120,2543255703
+.long 2678038272,1532690523
+.long 1852730880,2509570197
+.long 3166485504,4294902015
+.long 2391707136,3536978130
+.long 690563328,3301179588
+.long 4126536960,1212678216
+.long 4193908992,4160159991
+.long 3065427456,3688562907
+.long 791621376,50528259
+.long 4261281024,3671720154
+.long 3031741440,1061093439
+.long 1499027712,2492727444
+.long 2021160960,1549533276
+.long 2560137216,33685506
+.long 101058048,1246363722
+.long 1785358848,858980403
+.long 3890734848,1734803559
+.long 1179010560,4092788979
+.long 1903259904,2139029631
+.long 3132799488,3806462178
+.long 3570717696,2610626715
+.long 623191296,640024614
+.long 2880154368,926351415
+.long 1111638528,993722427
+.long 2290649088,2526412950
+.long 2728567296,1263206475
+.long 2374864128,3200123070
+.long 4210752000,774766638
+.long 1920102912,2037973113
+.long 117901056,2357985420
+.long 3115956480,1852702830
+.long 1431655680,2391670926
+.long 4177065984,4126474485
+.long 4008635904,3065381046
+.long 2896997376,4261216509
+.long 168430080,1499005017
+.long 909522432,2560098456
+.long 1229539584,1785331818
+.long 707406336,1178992710
+.long 1751672832,3132752058
+.long 1010580480,623181861
+.long 943208448,1111621698
+.long 4059164928,2728525986
+.long 2762253312,4210688250
+.long 1077952512,117899271
+.long 673720320,1431634005
+.long 3553874688,4008575214
+.long 2071689984,168427530
+.long 3149642496,1229520969
+.long 3385444608,1751646312
+.long 1128481536,943194168
+.long 3250700544,2762211492
+.long 353703168,673710120
+.long 3823362816,2071658619
+.long 2913840384,3385393353
+.long 4109693952,3250651329
+.long 2004317952,3823304931
+.long 3351758592,4109631732
+.long 2155905024,3351707847
+.long 2661195264,2661154974
+.long 14737632,939538488
+.long 328965,1090535745
+.long 5789784,369104406
+.long 14277081,1979741814
+.long 6776679,3640711641
+.long 5131854,2466288531
+.long 8487297,1610637408
+.long 13355979,4060148466
+.long 13224393,1912631922
+.long 723723,3254829762
+.long 11447982,2868947883
+.long 6974058,2583730842
+.long 14013909,1962964341
+.long 1579032,100664838
+.long 6118749,1459640151
+.long 8553090,2684395680
+.long 4605510,2432733585
+.long 14671839,4144035831
+.long 14079702,3036722613
+.long 2565927,3372272073
+.long 9079434,2717950626
+.long 3289650,2348846220
+.long 4934475,3523269330
+.long 4342338,2415956112
+.long 14408667,4127258358
+.long 1842204,117442311
+.long 10395294,2801837991
+.long 10263708,654321447
+.long 3815994,2382401166
+.long 13290186,2986390194
+.long 2434341,1224755529
+.long 8092539,3724599006
+.long 855309,1124090691
+.long 7434609,1543527516
+.long 6250335,3607156695
+.long 2039583,3338717127
+.long 16316664,1040203326
+.long 14145495,4110480885
+.long 4079166,2399178639
+.long 10329501,1728079719
+.long 8158332,520101663
+.long 6316128,402659352
+.long 12171705,1845522030
+.long 12500670,2936057775
+.long 12369084,788541231
+.long 9145227,3791708898
+.long 1447446,2231403909
+.long 3421236,218107149
+.long 5066061,1392530259
+.long 12829635,4026593520
+.long 7500402,2617285788
+.long 9803157,1694524773
+.long 11250603,3925928682
+.long 9342606,2734728099
+.long 12237498,2919280302
+.long 8026746,2650840734
+.long 11776947,3959483628
+.long 131586,2147516544
+.long 11842740,754986285
+.long 11382189,1795189611
+.long 10658466,2818615464
+.long 11316396,721431339
+.long 14211288,905983542
+.long 10132122,2785060518
+.long 1513239,3305162181
+.long 1710618,2248181382
+.long 3487029,1291865421
+.long 13421772,855651123
+.long 16250871,4244700669
+.long 10066329,1711302246
+.long 6381921,1476417624
+.long 5921370,2516620950
+.long 15263976,973093434
+.long 2368548,150997257
+.long 5658198,2499843477
+.long 4210752,268439568
+.long 14803425,2013296760
+.long 6513507,3623934168
+.long 592137,1107313218
+.long 3355443,3422604492
+.long 12566463,4009816047
+.long 10000536,637543974
+.long 9934743,3842041317
+.long 8750469,1627414881
+.long 6842472,436214298
+.long 16579836,1056980799
+.long 15527148,989870907
+.long 657930,2181071490
+.long 14342874,3053500086
+.long 7303023,3674266587
+.long 5460819,3556824276
+.long 6447714,2550175896
+.long 10724259,3892373736
+.long 3026478,2332068747
+.long 526344,33554946
+.long 11513775,3942706155
+.long 2631720,167774730
+.long 11579568,738208812
+.long 7631988,486546717
+.long 12763842,2952835248
+.long 12434877,1862299503
+.long 3552822,2365623693
+.long 2236962,2281736328
+.long 3684408,234884622
+.long 6579300,419436825
+.long 1973790,2264958855
+.long 3750201,1308642894
+.long 2894892,184552203
+.long 10921638,2835392937
+.long 3158064,201329676
+.long 15066597,2030074233
+.long 4473924,285217041
+.long 16645629,2130739071
+.long 8947848,570434082
+.long 10461087,3875596263
+.long 6645093,1493195097
+.long 8882055,3774931425
+.long 7039851,3657489114
+.long 16053492,1023425853
+.long 2302755,3355494600
+.long 4737096,301994514
+.long 1052688,67109892
+.long 13750737,1946186868
+.long 5329233,1409307732
+.long 12632256,805318704
+.long 16382457,2113961598
+.long 13816530,3019945140
+.long 10526880,671098920
+.long 5592405,1426085205
+.long 10592673,1744857192
+.long 4276545,1342197840
+.long 16448250,3187719870
+.long 4408131,3489714384
+.long 1250067,3288384708
+.long 12895428,822096177
+.long 3092271,3405827019
+.long 11053224,704653866
+.long 11974326,2902502829
+.long 3947580,251662095
+.long 2829099,3389049546
+.long 12698049,1879076976
+.long 16777215,4278255615
+.long 13158600,838873650
+.long 10855845,1761634665
+.long 2105376,134219784
+.long 9013641,1644192354
+.long 0,0
+.long 9474192,603989028
+.long 4671303,3506491857
+.long 15724527,4211145723
+.long 15395562,3120609978
+.long 12040119,3976261101
+.long 1381653,1157645637
+.long 394758,2164294017
+.long 13487565,1929409395
+.long 11908533,1828744557
+.long 1184274,2214626436
+.long 8289918,2667618207
+.long 12303291,3993038574
+.long 2697513,1241533002
+.long 986895,3271607235
+.long 12105912,771763758
+.long 460551,3238052289
+.long 263172,16777473
+.long 10197915,3858818790
+.long 9737364,620766501
+.long 2171169,1207978056
+.long 6710886,2566953369
+.long 15132390,3103832505
+.long 13553358,3003167667
+.long 15592941,2063629179
+.long 15198183,4177590777
+.long 3881787,3456159438
+.long 16711422,3204497343
+.long 8355711,3741376479
+.long 12961221,1895854449
+.long 10790052,687876393
+.long 3618615,3439381965
+.long 11645361,1811967084
+.long 5000268,318771987
+.long 9539985,1677747300
+.long 7237230,2600508315
+.long 9276813,1660969827
+.long 7763574,2634063261
+.long 197379,3221274816
+.long 2960685,1258310475
+.long 14606046,3070277559
+.long 9868950,2768283045
+.long 2500134,2298513801
+.long 8224125,1593859935
+.long 13027014,2969612721
+.long 6052956,385881879
+.long 13882323,4093703412
+.long 15921906,3154164924
+.long 5197647,3540046803
+.long 1644825,1174423110
+.long 4144959,3472936911
+.long 14474460,922761015
+.long 7960953,1577082462
+.long 1907997,1191200583
+.long 5395026,2483066004
+.long 15461355,4194368250
+.long 15987699,4227923196
+.long 7171437,1526750043
+.long 6184542,2533398423
+.long 16514043,4261478142
+.long 6908265,1509972570
+.long 11711154,2885725356
+.long 15790320,1006648380
+.long 3223857,1275087948
+.long 789516,50332419
+.long 13948116,889206069
+.long 13619151,4076925939
+.long 9211020,587211555
+.long 14869218,3087055032
+.long 7697781,1560304989
+.long 11119017,1778412138
+.long 4868682,2449511058
+.long 5723991,3573601749
+.long 8684676,553656609
+.long 1118481,1140868164
+.long 4539717,1358975313
+.long 1776411,3321939654
+.long 16119285,2097184125
+.long 15000804,956315961
+.long 921102,2197848963
+.long 7566195,3691044060
+.long 11184810,2852170410
+.long 15856113,2080406652
+.long 14540253,1996519287
+.long 5855577,1442862678
+.long 1315860,83887365
+.long 7105644,452991771
+.long 9605778,2751505572
+.long 5526612,352326933
+.long 13684944,872428596
+.long 7895160,503324190
+.long 7368816,469769244
+.long 14935011,4160813304
+.long 4802889,1375752786
+.long 8421504,536879136
+.long 5263440,335549460
+.long 10987431,3909151209
+.long 16185078,3170942397
+.long 7829367,3707821533
+.long 9671571,3825263844
+.long 8816262,2701173153
+.long 8618883,3758153952
+.long 2763306,2315291274
+.long 13092807,4043370993
+.long 5987163,3590379222
+.long 15329769,2046851706
+.long 15658734,3137387451
+.long 9408399,3808486371
+.long 65793,1073758272
+.long 4013373,1325420367
+.globl _Camellia_cbc_encrypt
+.align 4
+_Camellia_cbc_encrypt:
+L_Camellia_cbc_encrypt_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ecx
+ cmpl $0,%ecx
+ je L016enc_out
+ pushfl
+ cld
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ movl 36(%esp),%edx
+ movl 40(%esp),%ebp
+ leal -64(%esp),%esi
+ andl $-64,%esi
+ leal -127(%edx),%edi
+ subl %esi,%edi
+ negl %edi
+ andl $960,%edi
+ subl %edi,%esi
+ movl 44(%esp),%edi
+ xchgl %esi,%esp
+ addl $4,%esp
+ movl %esi,20(%esp)
+ movl %eax,24(%esp)
+ movl %ebx,28(%esp)
+ movl %ecx,32(%esp)
+ movl %edx,36(%esp)
+ movl %ebp,40(%esp)
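+	# call/pop trick: pick up EIP in ebp so LCamellia_SBOX can be
+	# addressed position-independently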
+ call L017pic_point
+L017pic_point:
+ popl %ebp
+ leal LCamellia_SBOX-L017pic_point(%ebp),%ebp
+ movl $32,%esi
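+	# walk all 4KB of the S-box (32 steps of 128 bytes) to pull it
+	# into cache before processing data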
+.align 2,0x90
+L018prefetch_sbox:
+ movl (%ebp),%eax
+ movl 32(%ebp),%ebx
+ movl 64(%ebp),%ecx
+ movl 96(%ebp),%edx
+ leal 128(%ebp),%ebp
+ decl %esi
+ jnz L018prefetch_sbox
+ movl 36(%esp),%eax
+ subl $4096,%ebp
+ movl 24(%esp),%esi
+ movl 272(%eax),%edx
+ cmpl $0,%edi
+ je L019DECRYPT
+ movl 32(%esp),%ecx
+ movl 40(%esp),%edi
+ shll $6,%edx
+ leal (%eax,%edx,1),%edx
+ movl %edx,16(%esp)
+ testl $4294967280,%ecx
+ jz L020enc_tail
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+.align 2,0x90
+L021enc_loop:
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ xorl (%esi),%eax
+ xorl 4(%esi),%ebx
+ xorl 8(%esi),%ecx
+ bswap %eax
+ xorl 12(%esi),%edx
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_encrypt
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ movl %eax,(%edi)
+ bswap %edx
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 32(%esp),%ecx
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ leal 16(%edi),%edx
+ movl %edx,28(%esp)
+ subl $16,%ecx
+ testl $4294967280,%ecx
+ movl %ecx,32(%esp)
+ jnz L021enc_loop
+ testl $15,%ecx
+ jnz L020enc_tail
+ movl 40(%esp),%esi
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 20(%esp),%esp
+ popfl
+L016enc_out:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+ pushfl
+.align 2,0x90
+L020enc_tail:
+ movl %edi,%eax
+ movl 28(%esp),%edi
+ pushl %eax
+ movl $16,%ebx
+ subl %ecx,%ebx
+ cmpl %esi,%edi
+ je L022enc_in_place
+.align 2,0x90
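+	# 0xA4F3F689 decodes to "mov %esi,%esi; rep movsb": pre-encoded
+	# instruction bytes that copy the partial input block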
+.long 2767451785
+ jmp L023enc_skip_in_place
+L022enc_in_place:
+ leal (%edi,%ecx,1),%edi
+L023enc_skip_in_place:
+ movl %ebx,%ecx
+ xorl %eax,%eax
+.align 2,0x90
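+	# 0xAAF3F689 decodes to "mov %esi,%esi; rep stosb": zero-pads the
+	# rest of the block (eax is 0)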
+.long 2868115081
+ popl %edi
+ movl 28(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl $16,32(%esp)
+ jmp L021enc_loop
+.align 4,0x90
+L019DECRYPT:
+ shll $6,%edx
+ leal (%eax,%edx,1),%edx
+ movl %eax,16(%esp)
+ movl %edx,36(%esp)
+ cmpl 28(%esp),%esi
+ je L024dec_in_place
+ movl 40(%esp),%edi
+ movl %edi,44(%esp)
+.align 2,0x90
+L025dec_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ bswap %eax
+ movl 12(%esi),%edx
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_decrypt
+ movl 44(%esp),%edi
+ movl 32(%esp),%esi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ xorl (%edi),%eax
+ bswap %edx
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ subl $16,%esi
+ jc L026dec_partial
+ movl %esi,32(%esp)
+ movl 24(%esp),%esi
+ movl 28(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl %esi,44(%esp)
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ leal 16(%edi),%edi
+ movl %edi,28(%esp)
+ jnz L025dec_loop
+ movl 44(%esp),%edi
+L027dec_end:
+ movl 40(%esp),%esi
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ jmp L028dec_out
+.align 2,0x90
+L026dec_partial:
+ leal 44(%esp),%edi
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ leal 16(%esi),%ecx
+ movl %edi,%esi
+ movl 28(%esp),%edi
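+	# pre-encoded "rep movsb" again: copy the partial plaintext block
+	# from the stack buffer to the output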
+.long 2767451785
+ movl 24(%esp),%edi
+ jmp L027dec_end
+.align 2,0x90
+L024dec_in_place:
+L029dec_in_place_loop:
+ leal 44(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ bswap %eax
+ movl %edx,12(%edi)
+ bswap %ebx
+ movl 36(%esp),%edi
+ bswap %ecx
+ bswap %edx
+ call __x86_Camellia_decrypt
+ movl 40(%esp),%edi
+ movl 28(%esp),%esi
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ xorl (%edi),%eax
+ bswap %edx
+ xorl 4(%edi),%ebx
+ xorl 8(%edi),%ecx
+ xorl 12(%edi),%edx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ leal 16(%esi),%esi
+ movl %esi,28(%esp)
+ leal 44(%esp),%esi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ movl %ecx,8(%edi)
+ movl %edx,12(%edi)
+ movl 24(%esp),%esi
+ leal 16(%esi),%esi
+ movl %esi,24(%esp)
+ movl 32(%esp),%ecx
+ subl $16,%ecx
+ jc L030dec_in_place_partial
+ movl %ecx,32(%esp)
+ jnz L029dec_in_place_loop
+ jmp L028dec_out
+.align 2,0x90
+L030dec_in_place_partial:
+ movl 28(%esp),%edi
+ leal 44(%esp),%esi
+ leal (%edi,%ecx,1),%edi
+ leal 16(%esi,%ecx,1),%esi
+ negl %ecx
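+	# pre-encoded "rep movsb": move the trailing ecx bytes of
+	# plaintext into place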
+.long 2767451785
+.align 2,0x90
+L028dec_out:
+ movl 20(%esp),%esp
+ popfl
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+.byte 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+.byte 115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x86-macosx-gas/cast/cast-586.s b/deps/openssl/asm/x86-macosx-gas/cast/cast-586.s
new file mode 100644
index 0000000000..9314dff21d
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/cast/cast-586.s
@@ -0,0 +1,967 @@
+.file "cast-586.s"
+.text
+.globl _CAST_encrypt
+.align 4
+_CAST_encrypt:
+L_CAST_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ # Get short key flag
+
+ movl 128(%ebp),%eax
+ pushl %eax
+ xorl %eax,%eax
+ # round 0
+
+ movl (%ebp),%edx
+ movl 4(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 1
+
+ movl 8(%ebp),%edx
+ movl 12(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 2
+
+ movl 16(%ebp),%edx
+ movl 20(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 3
+
+ movl 24(%ebp),%edx
+ movl 28(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 4
+
+ movl 32(%ebp),%edx
+ movl 36(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 5
+
+ movl 40(%ebp),%edx
+ movl 44(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 6
+
+ movl 48(%ebp),%edx
+ movl 52(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 7
+
+ movl 56(%ebp),%edx
+ movl 60(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 8
+
+ movl 64(%ebp),%edx
+ movl 68(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 9
+
+ movl 72(%ebp),%edx
+ movl 76(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 10
+
+ movl 80(%ebp),%edx
+ movl 84(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 11
+
+ movl 88(%ebp),%edx
+ movl 92(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+ # test short key flag
+
+ popl %edx
+ orl %edx,%edx
+ jnz L000cast_enc_done
+ # round 12
+
+ movl 96(%ebp),%edx
+ movl 100(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 13
+
+ movl 104(%ebp),%edx
+ movl 108(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 14
+
+ movl 112(%ebp),%edx
+ movl 116(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 15
+
+ movl 120(%ebp),%edx
+ movl 124(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+L000cast_enc_done:
+ nop
+ movl 20(%esp),%eax
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _CAST_decrypt
+.align 4
+_CAST_decrypt:
+L_CAST_decrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ebp
+ pushl %esi
+ pushl %edi
+ # Load the 2 words
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ # Get short key flag
+
+ movl 128(%ebp),%eax
+ orl %eax,%eax
+ jnz L001cast_dec_skip
+ xorl %eax,%eax
+ # round 15
+
+ movl 120(%ebp),%edx
+ movl 124(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 14
+
+ movl 112(%ebp),%edx
+ movl 116(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 13
+
+ movl 104(%ebp),%edx
+ movl 108(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 12
+
+ movl 96(%ebp),%edx
+ movl 100(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+L001cast_dec_skip:
+ # round 11
+
+ movl 88(%ebp),%edx
+ movl 92(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 10
+
+ movl 80(%ebp),%edx
+ movl 84(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 9
+
+ movl 72(%ebp),%edx
+ movl 76(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 8
+
+ movl 64(%ebp),%edx
+ movl 68(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 7
+
+ movl 56(%ebp),%edx
+ movl 60(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 6
+
+ movl 48(%ebp),%edx
+ movl 52(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 5
+
+ movl 40(%ebp),%edx
+ movl 44(%ebp),%ecx
+ subl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 4
+
+ movl 32(%ebp),%edx
+ movl 36(%ebp),%ecx
+ xorl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 3
+
+ movl 24(%ebp),%edx
+ movl 28(%ebp),%ecx
+ addl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 2
+
+ movl 16(%ebp),%edx
+ movl 20(%ebp),%ecx
+ subl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ subl %ebx,%ecx
+ xorl %ecx,%esi
+ # round 1
+
+ movl 8(%ebp),%edx
+ movl 12(%ebp),%ecx
+ xorl %esi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ addl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx,%ecx
+ xorl %ecx,%edi
+ # round 0
+
+ movl (%ebp),%edx
+ movl 4(%ebp),%ecx
+ addl %edi,%edx
+ roll %cl,%edx
+ movl %edx,%ebx
+ xorl %ecx,%ecx
+ movb %dh,%cl
+ andl $255,%ebx
+ shrl $16,%edx
+ xorl %eax,%eax
+ movb %dh,%al
+ andl $255,%edx
+ movl _CAST_S_table0(,%ecx,4),%ecx
+ movl _CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx,%ecx
+ movl _CAST_S_table2(,%eax,4),%ebx
+ subl %ebx,%ecx
+ movl _CAST_S_table3(,%edx,4),%ebx
+ addl %ebx,%ecx
+ xorl %ecx,%esi
+ nop
+ movl 20(%esp),%eax
+ movl %edi,4(%eax)
+ movl %esi,(%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _CAST_cbc_encrypt
+.align 4
+_CAST_cbc_encrypt:
+L_CAST_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+ # getting iv ptr from parameter 4
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+ # getting encrypt flag from parameter 5
+
+ movl 56(%esp),%ecx
+ # get and push parameter 3
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz L002decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz L003encrypt_finish
+L004encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_CAST_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L004encrypt_loop
+L003encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L005finish
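+	# 1..7 leftover bytes: dispatch through the PIC-relative jump
+	# table at L007cbc_enc_jmp_table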
+ call L006PIC_point
+L006PIC_point:
+ popl %edx
+ leal L007cbc_enc_jmp_table-L006PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+L008ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+L009ej6:
+ movb 5(%esi),%dh
+L010ej5:
+ movb 4(%esi),%dl
+L011ej4:
+ movl (%esi),%ecx
+ jmp L012ejend
+L013ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+L014ej2:
+ movb 1(%esi),%ch
+L015ej1:
+ movb (%esi),%cl
+L012ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_CAST_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp L005finish
+L002decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz L016decrypt_finish
+L017decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_CAST_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L017decrypt_loop
+L016decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L005finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ bswap %eax
+ bswap %ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_CAST_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ bswap %eax
+ bswap %ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+L018dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+L019dj6:
+ movb %dh,5(%edi)
+L020dj5:
+ movb %dl,4(%edi)
+L021dj4:
+ movl %ecx,(%edi)
+ jmp L022djend
+L023dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+L024dj2:
+	movb	%ch,1(%edi)
+L025dj1:
+	movb	%cl,(%edi)
+L022djend:
+ jmp L005finish
+L005finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L007cbc_enc_jmp_table:
+.long 0
+.long L015ej1-L006PIC_point
+.long L014ej2-L006PIC_point
+.long L013ej3-L006PIC_point
+.long L011ej4-L006PIC_point
+.long L010ej5-L006PIC_point
+.long L009ej6-L006PIC_point
+.long L008ej7-L006PIC_point
+.align 6,0x90
diff --git a/deps/openssl/asm/x86-macosx-gas/des/crypt586.s b/deps/openssl/asm/x86-macosx-gas/des/crypt586.s
new file mode 100644
index 0000000000..edb1bb3915
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/des/crypt586.s
@@ -0,0 +1,891 @@
+.file "crypt586.s"
+.text
+.globl _fcrypt_body
+.align 4
+_fcrypt_body:
+L_fcrypt_body_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+
+ xorl %edi,%edi
+ xorl %esi,%esi
+ leal _DES_SPtrans,%edx
+ pushl %edx
+ movl 28(%esp),%ebp
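+	# traditional crypt(3) applies DES 25 times; push the iteration counter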
+ pushl $25
+L000start:
+
+ # Round 0
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl (%ebp),%ebx
+ xorl %ebx,%eax
+ movl 4(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 1
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 8(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 12(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 2
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 16(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 20(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 3
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 24(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 28(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 4
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 32(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 36(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 5
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 40(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 44(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 6
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 48(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 52(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 7
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 56(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 60(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 8
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 64(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 68(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 9
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 72(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 76(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 10
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 80(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 84(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 11
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 88(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 92(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 12
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 96(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 100(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 13
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 104(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 108(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
+
+ # Round 14
+
+ movl 36(%esp),%eax
+ movl %esi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %esi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 112(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 116(%ebp),%ecx
+ xorl %esi,%eax
+ xorl %esi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%edi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%edi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%edi
+ movl 32(%esp),%ebp
+
+ # Round 15
+
+ movl 36(%esp),%eax
+ movl %edi,%edx
+ shrl $16,%edx
+ movl 40(%esp),%ecx
+ xorl %edi,%edx
+ andl %edx,%eax
+ andl %ecx,%edx
+ movl %eax,%ebx
+ shll $16,%ebx
+ movl %edx,%ecx
+ shll $16,%ecx
+ xorl %ebx,%eax
+ xorl %ecx,%edx
+ movl 120(%ebp),%ebx
+ xorl %ebx,%eax
+ movl 124(%ebp),%ecx
+ xorl %edi,%eax
+ xorl %edi,%edx
+ xorl %ecx,%edx
+ andl $0xfcfcfcfc,%eax
+ xorl %ebx,%ebx
+ andl $0xcfcfcfcf,%edx
+ xorl %ecx,%ecx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ movl 4(%esp),%ebp
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ movl 0x600(%ebp,%ebx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x700(%ebp,%ecx,1),%ebx
+ xorl %ebx,%esi
+ movl 0x400(%ebp,%eax,1),%ebx
+ xorl %ebx,%esi
+ movl 0x500(%ebp,%edx,1),%ebx
+ xorl %ebx,%esi
+ movl 32(%esp),%ebp
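+
+	# swap the two halves and iterate: the loop counter kept at (%esp)
+	# is decremented until it reaches zero
+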
+ movl (%esp),%ebx
+ movl %edi,%eax
+ decl %ebx
+ movl %esi,%edi
+ movl %eax,%esi
+ movl %ebx,(%esp)
+ jnz L000start
+
+ # FP
+
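+	# the final permutation is done as five masked-swap steps
+	# (t = (a ^ b) & mask; a ^= t; b ^= t), with rotates lining up
+	# each group of bits before its swap
+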
+ movl 28(%esp),%edx
+ rorl $1,%edi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0xaaaaaaaa,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $23,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $10,%esi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0x33333333,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $18,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xfff0000f,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ roll $12,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xf0f0f0f0,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%edx)
+ movl %edi,4(%edx)
+ addl $8,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
diff --git a/deps/openssl/asm/x86-macosx-gas/des/des-586.s b/deps/openssl/asm/x86-macosx-gas/des/des-586.s
new file mode 100644
index 0000000000..f9e0ad3337
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/des/des-586.s
@@ -0,0 +1,1873 @@
+.file "des-586.s"
+.text
+.globl _DES_SPtrans
+.align 4
+__x86_DES_encrypt:
+ pushl %ecx
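+	# round helper: %ecx points at the key schedule (saved at (%esp)
+	# for per-round reloads), %ebp at the DES_SPtrans tables, and the
+	# data halves live in %esi/%edi; each round xors a subkey into one
+	# half, pulls out eight 6-bit indices (the 0xfcfcfcfc/0xcfcfcfcf
+	# masks keep them pre-scaled for 4-byte entries) and xors the eight
+	# S/P table words, spaced 0x100 bytes apart, into the other half
+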
+ # Round 0
+
+ movl (%ecx),%eax
+ xorl %ebx,%ebx
+ movl 4(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 1
+
+ movl 8(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 12(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 2
+
+ movl 16(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 20(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 3
+
+ movl 24(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 28(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 4
+
+ movl 32(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 36(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 5
+
+ movl 40(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 44(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 6
+
+ movl 48(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 52(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 7
+
+ movl 56(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 60(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 8
+
+ movl 64(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 68(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 9
+
+ movl 72(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 76(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 10
+
+ movl 80(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 84(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 11
+
+ movl 88(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 92(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 12
+
+ movl 96(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 100(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 13
+
+ movl 104(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 108(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 14
+
+ movl 112(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 116(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 15
+
+ movl 120(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 124(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ addl $4,%esp
+ ret
+.align 4
+__x86_DES_decrypt:
+ pushl %ecx
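+	# same round function as __x86_DES_encrypt, but with the subkeys
+	# applied in reverse order (round 15 down to round 0)
+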
+ # Round 15
+
+ movl 120(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 124(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 14
+
+ movl 112(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 116(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 13
+
+ movl 104(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 108(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 12
+
+ movl 96(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 100(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 11
+
+ movl 88(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 92(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 10
+
+ movl 80(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 84(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 9
+
+ movl 72(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 76(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 8
+
+ movl 64(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 68(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 7
+
+ movl 56(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 60(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 6
+
+ movl 48(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 52(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 5
+
+ movl 40(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 44(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 4
+
+ movl 32(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 36(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 3
+
+ movl 24(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 28(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 2
+
+ movl 16(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 20(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ # Round 1
+
+ movl 8(%ecx),%eax
+ xorl %ebx,%ebx
+ movl 12(%ecx),%edx
+ xorl %esi,%eax
+ xorl %ecx,%ecx
+ xorl %esi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%edi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%edi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%edi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%edi
+ xorl 0x700(%ebp,%ecx,1),%edi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%edi
+ xorl 0x500(%ebp,%edx,1),%edi
+ # Round 0
+
+ movl (%ecx),%eax
+ xorl %ebx,%ebx
+ movl 4(%ecx),%edx
+ xorl %edi,%eax
+ xorl %ecx,%ecx
+ xorl %edi,%edx
+ andl $0xfcfcfcfc,%eax
+ andl $0xcfcfcfcf,%edx
+ movb %al,%bl
+ movb %ah,%cl
+ rorl $4,%edx
+ xorl (%ebp,%ebx,1),%esi
+ movb %dl,%bl
+ xorl 0x200(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ shrl $16,%eax
+ xorl 0x100(%ebp,%ebx,1),%esi
+ movb %ah,%bl
+ shrl $16,%edx
+ xorl 0x300(%ebp,%ecx,1),%esi
+ movb %dh,%cl
+ andl $0xff,%eax
+ andl $0xff,%edx
+ xorl 0x600(%ebp,%ebx,1),%esi
+ xorl 0x700(%ebp,%ecx,1),%esi
+ movl (%esp),%ecx
+ xorl 0x400(%ebp,%eax,1),%esi
+ xorl 0x500(%ebp,%edx,1),%esi
+ addl $4,%esp
+ ret
+.globl _DES_encrypt1
+.align 4
+_DES_encrypt1:
+L_DES_encrypt1_begin:
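+	# void DES_encrypt1(DES_LONG *data, DES_key_schedule *ks, int enc):
+	# data[0..1] hold the block; nonzero enc encrypts, zero decrypts
+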
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+
+ movl 12(%esp),%esi
+ xorl %ecx,%ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%esi),%eax
+ movl 28(%esp),%ebx
+ movl 4(%esi),%edi
+
+ # IP
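+	# (five masked-swap PERM_OP steps, mirroring the final permutation)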
+
+ roll $4,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0xf0f0f0f0,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $20,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xfff0000f,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $14,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x33333333,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $22,%esi
+ movl %esi,%eax
+ xorl %edi,%esi
+ andl $0x03fc03fc,%esi
+ xorl %esi,%eax
+ xorl %esi,%edi
+
+ roll $9,%eax
+ movl %eax,%esi
+ xorl %edi,%eax
+ andl $0xaaaaaaaa,%eax
+ xorl %eax,%esi
+ xorl %eax,%edi
+
+ roll $1,%edi
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal _DES_SPtrans-L000pic_point(%ebp),%ebp
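+
+	# the call/pop pair above is the usual 32-bit Mach-O PIC idiom: it
+	# recovers the address of L000pic_point so the lea can form the
+	# absolute address of _DES_SPtrans
+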
+ movl 24(%esp),%ecx
+ cmpl $0,%ebx
+ je L001decrypt
+ call __x86_DES_encrypt
+ jmp L002done
+L001decrypt:
+ call __x86_DES_decrypt
+L002done:
+
+ # FP
+
+ movl 20(%esp),%edx
+ rorl $1,%esi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%edx)
+ movl %esi,4(%edx)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.globl _DES_encrypt2
+.align 4
+_DES_encrypt2:
+L_DES_encrypt2_begin:
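+	# like DES_encrypt1 but without IP/FP (the halves are only rotated
+	# by 3); this is the inner primitive used by the 3DES entry points
+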
+ pushl %esi
+ pushl %edi
+
+ # Load the 2 words
+
+ movl 12(%esp),%eax
+ xorl %ecx,%ecx
+ pushl %ebx
+ pushl %ebp
+ movl (%eax),%esi
+ movl 28(%esp),%ebx
+ roll $3,%esi
+ movl 4(%eax),%edi
+ roll $3,%edi
+ call L003pic_point
+L003pic_point:
+ popl %ebp
+ leal _DES_SPtrans-L003pic_point(%ebp),%ebp
+ movl 24(%esp),%ecx
+ cmpl $0,%ebx
+ je L004decrypt
+ call __x86_DES_encrypt
+ jmp L005done
+L004decrypt:
+ call __x86_DES_decrypt
+L005done:
+
+ # Fixup
+
+ rorl $3,%edi
+ movl 20(%esp),%eax
+ rorl $3,%esi
+ movl %edi,(%eax)
+ movl %esi,4(%eax)
+ popl %ebp
+ popl %ebx
+ popl %edi
+ popl %esi
+ ret
+.globl _DES_encrypt3
+.align 4
+_DES_encrypt3:
+L_DES_encrypt3_begin:
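+	# EDE3: encrypt with ks1, decrypt with ks2, encrypt with ks3 -- see
+	# the 1/0/1 flags pushed before the three DES_encrypt2 calls below
+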
+ pushl %ebx
+ movl 8(%esp),%ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+ # Load the data words
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ subl $12,%esp
+
+ # IP
+
+ roll $4,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ roll $20,%esi
+ movl %esi,%edi
+ xorl %edx,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%edx
+
+ roll $14,%edi
+ movl %edi,%esi
+ xorl %edx,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%esi
+ xorl %edi,%edx
+
+ roll $22,%edx
+ movl %edx,%edi
+ xorl %esi,%edx
+ andl $0x03fc03fc,%edx
+ xorl %edx,%edi
+ xorl %edx,%esi
+
+ roll $9,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ rorl $3,%edx
+ rorl $2,%esi
+ movl %esi,4(%ebx)
+ movl 36(%esp),%eax
+ movl %edx,(%ebx)
+ movl 40(%esp),%edi
+ movl 44(%esp),%esi
+ movl $1,8(%esp)
+ movl %eax,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ movl $0,8(%esp)
+ movl %edi,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ movl $1,8(%esp)
+ movl %esi,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ addl $12,%esp
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+ # FP
+
+ roll $2,%esi
+ roll $3,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%ebx)
+ movl %esi,4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.globl _DES_decrypt3
+.align 4
+_DES_decrypt3:
+L_DES_decrypt3_begin:
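+	# inverse of DES_encrypt3: decrypt with ks3, encrypt with ks2,
+	# decrypt with ks1 (0/1/0 flags below)
+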
+ pushl %ebx
+ movl 8(%esp),%ebx
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+
+ # Load the data words
+
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+ subl $12,%esp
+
+ # IP
+
+ roll $4,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ roll $20,%esi
+ movl %esi,%edi
+ xorl %edx,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%edx
+
+ roll $14,%edi
+ movl %edi,%esi
+ xorl %edx,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%esi
+ xorl %edi,%edx
+
+ roll $22,%edx
+ movl %edx,%edi
+ xorl %esi,%edx
+ andl $0x03fc03fc,%edx
+ xorl %edx,%edi
+ xorl %edx,%esi
+
+ roll $9,%edi
+ movl %edi,%edx
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%edx
+ xorl %edi,%esi
+
+ rorl $3,%edx
+ rorl $2,%esi
+ movl %esi,4(%ebx)
+ movl 36(%esp),%esi
+ movl %edx,(%ebx)
+ movl 40(%esp),%edi
+ movl 44(%esp),%eax
+ movl $0,8(%esp)
+ movl %eax,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ movl $1,8(%esp)
+ movl %edi,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ movl $0,8(%esp)
+ movl %esi,4(%esp)
+ movl %ebx,(%esp)
+ call L_DES_encrypt2_begin
+ addl $12,%esp
+ movl (%ebx),%edi
+ movl 4(%ebx),%esi
+
+ # FP
+
+ roll $2,%esi
+ roll $3,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0xaaaaaaaa,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $23,%eax
+ movl %eax,%edi
+ xorl %esi,%eax
+ andl $0x03fc03fc,%eax
+ xorl %eax,%edi
+ xorl %eax,%esi
+
+ roll $10,%edi
+ movl %edi,%eax
+ xorl %esi,%edi
+ andl $0x33333333,%edi
+ xorl %edi,%eax
+ xorl %edi,%esi
+
+ roll $18,%esi
+ movl %esi,%edi
+ xorl %eax,%esi
+ andl $0xfff0000f,%esi
+ xorl %esi,%edi
+ xorl %esi,%eax
+
+ roll $12,%edi
+ movl %edi,%esi
+ xorl %eax,%edi
+ andl $0xf0f0f0f0,%edi
+ xorl %edi,%esi
+ xorl %edi,%eax
+
+ rorl $4,%eax
+ movl %eax,(%ebx)
+ movl %esi,4(%ebx)
+ popl %edi
+ popl %esi
+ popl %ebp
+ popl %ebx
+ ret
+.globl _DES_ncbc_encrypt
+.align 4
+_DES_ncbc_encrypt:
+L_DES_ncbc_encrypt_begin:
+
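+	# CBC: a working copy of the IV is kept on the stack; when
+	# encrypting, each plaintext block is xored with the previous
+	# ciphertext before DES_encrypt1, and when decrypting the xor
+	# happens after the call
+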
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+ # getting iv ptr from parameter 4
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+ # getting encrypt flag from parameter 5
+
+ movl 56(%esp),%ecx
+ # get and push parameter 5
+
+ pushl %ecx
+ # get and push parameter 3
+
+ movl 52(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz L006decrypt
+ andl $4294967288,%ebp
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ jz L007encrypt_finish
+L008encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L008encrypt_loop
+L007encrypt_finish:
+ movl 56(%esp),%ebp
+ andl $7,%ebp
+ jz L009finish
+ call L010PIC_point
+L010PIC_point:
+ popl %edx
+ leal L011cbc_enc_jmp_table-L010PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
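+	# dispatch on the 1..7 leftover input bytes: each ejN entry loads
+	# exactly N bytes into %ecx:%edx before the final block is encrypted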
+L012ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+L013ej6:
+ movb 5(%esi),%dh
+L014ej5:
+ movb 4(%esi),%dl
+L015ej4:
+ movl (%esi),%ecx
+ jmp L016ejend
+L017ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+L018ej2:
+ movb 1(%esi),%ch
+L019ej1:
+ movb (%esi),%cl
+L016ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp L009finish
+L006decrypt:
+ andl $4294967288,%ebp
+ movl 20(%esp),%eax
+ movl 24(%esp),%ebx
+ jz L020decrypt_finish
+L021decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl 20(%esp),%ecx
+ movl 24(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,20(%esp)
+ movl %ebx,24(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L021decrypt_loop
+L020decrypt_finish:
+ movl 56(%esp),%ebp
+ andl $7,%ebp
+ jz L009finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,12(%esp)
+ movl %ebx,16(%esp)
+ call L_DES_encrypt1_begin
+ movl 12(%esp),%eax
+ movl 16(%esp),%ebx
+ movl 20(%esp),%ecx
+ movl 24(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+L022dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+L023dj6:
+ movb %dh,5(%edi)
+L024dj5:
+ movb %dl,4(%edi)
+L025dj4:
+ movl %ecx,(%edi)
+ jmp L026djend
+L027dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+L028dj2:
+	movb	%ch,1(%edi)
+L029dj1:
+	movb	%cl,(%edi)
+L026djend:
+ jmp L009finish
+L009finish:
+ movl 64(%esp),%ecx
+ addl $28,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
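+	# table entries are offsets from L010PIC_point, turned into
+	# absolute addresses at run time (addl %edx,%ebp above)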
+L011cbc_enc_jmp_table:
+.long 0
+.long L019ej1-L010PIC_point
+.long L018ej2-L010PIC_point
+.long L017ej3-L010PIC_point
+.long L015ej4-L010PIC_point
+.long L014ej5-L010PIC_point
+.long L013ej6-L010PIC_point
+.long L012ej7-L010PIC_point
+.align 6,0x90
+.globl _DES_ede3_cbc_encrypt
+.align 4
+_DES_ede3_cbc_encrypt:
+L_DES_ede3_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+ # getting iv ptr from parameter 6
+
+ movl 44(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+ # getting encrypt flag from parameter 7
+
+ movl 64(%esp),%ecx
+ # get and push parameter 5
+
+ movl 56(%esp),%eax
+ pushl %eax
+ # get and push parameter 4
+
+ movl 56(%esp),%eax
+ pushl %eax
+ # get and push parameter 3
+
+ movl 56(%esp),%eax
+ pushl %eax
+ pushl %ebx
+ cmpl $0,%ecx
+ jz L030decrypt
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz L031encrypt_finish
+L032encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call L_DES_encrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L032encrypt_loop
+L031encrypt_finish:
+ movl 60(%esp),%ebp
+ andl $7,%ebp
+ jz L033finish
+ call L034PIC_point
+L034PIC_point:
+ popl %edx
+ leal L035cbc_enc_jmp_table-L034PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+L036ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+L037ej6:
+ movb 5(%esi),%dh
+L038ej5:
+ movb 4(%esi),%dl
+L039ej4:
+ movl (%esi),%ecx
+ jmp L040ejend
+L041ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+L042ej2:
+ movb 1(%esi),%ch
+L043ej1:
+ movb (%esi),%cl
+L040ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call L_DES_encrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp L033finish
+L030decrypt:
+ andl $4294967288,%ebp
+ movl 24(%esp),%eax
+ movl 28(%esp),%ebx
+ jz L044decrypt_finish
+L045decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call L_DES_decrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl 24(%esp),%ecx
+ movl 28(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,24(%esp)
+ movl %ebx,28(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L045decrypt_loop
+L044decrypt_finish:
+ movl 60(%esp),%ebp
+ andl $7,%ebp
+ jz L033finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ call L_DES_decrypt3_begin
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ movl 24(%esp),%ecx
+ movl 28(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+L046dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+L047dj6:
+ movb %dh,5(%edi)
+L048dj5:
+ movb %dl,4(%edi)
+L049dj4:
+ movl %ecx,(%edi)
+ jmp L050djend
+L051dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shll $16,%ecx
+L052dj2:
+	movb	%ch,1(%edi)
+L053dj1:
+	movb	%cl,(%edi)
+L050djend:
+ jmp L033finish
+L033finish:
+ movl 76(%esp),%ecx
+ addl $32,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L035cbc_enc_jmp_table:
+.long 0
+.long L043ej1-L034PIC_point
+.long L042ej2-L034PIC_point
+.long L041ej3-L034PIC_point
+.long L039ej4-L034PIC_point
+.long L038ej5-L034PIC_point
+.long L037ej6-L034PIC_point
+.long L036ej7-L034PIC_point
+.align 6,0x90
+.align 6,0x90
+_DES_SPtrans:
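+	# eight 64-entry tables of 32-bit words, 0x100 bytes apart (matching
+	# the 0x000-0x700 offsets used above); each table is one DES S-box
+	# with the P permutation pre-applied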
+.long 34080768,524288,33554434,34080770
+.long 33554432,526338,524290,33554434
+.long 526338,34080768,34078720,2050
+.long 33556482,33554432,0,524290
+.long 524288,2,33556480,526336
+.long 34080770,34078720,2050,33556480
+.long 2,2048,526336,34078722
+.long 2048,33556482,34078722,0
+.long 0,34080770,33556480,524290
+.long 34080768,524288,2050,33556480
+.long 34078722,2048,526336,33554434
+.long 526338,2,33554434,34078720
+.long 34080770,526336,34078720,33556482
+.long 33554432,2050,524290,0
+.long 524288,33554432,33556482,34080768
+.long 2,34078722,2048,526338
+.long 1074823184,0,1081344,1074790400
+.long 1073741840,32784,1073774592,1081344
+.long 32768,1074790416,16,1073774592
+.long 1048592,1074823168,1074790400,16
+.long 1048576,1073774608,1074790416,32768
+.long 1081360,1073741824,0,1048592
+.long 1073774608,1081360,1074823168,1073741840
+.long 1073741824,1048576,32784,1074823184
+.long 1048592,1074823168,1073774592,1081360
+.long 1074823184,1048592,1073741840,0
+.long 1073741824,32784,1048576,1074790416
+.long 32768,1073741824,1081360,1073774608
+.long 1074823168,32768,0,1073741840
+.long 16,1074823184,1081344,1074790400
+.long 1074790416,1048576,32784,1073774592
+.long 1073774608,16,1074790400,1081344
+.long 67108865,67371264,256,67109121
+.long 262145,67108864,67109121,262400
+.long 67109120,262144,67371008,1
+.long 67371265,257,1,67371009
+.long 0,262145,67371264,256
+.long 257,67371265,262144,67108865
+.long 67371009,67109120,262401,67371008
+.long 262400,0,67108864,262401
+.long 67371264,256,1,262144
+.long 257,262145,67371008,67109121
+.long 0,67371264,262400,67371009
+.long 262145,67108864,67371265,1
+.long 262401,67108865,67108864,67371265
+.long 262144,67109120,67109121,262400
+.long 67109120,0,67371009,257
+.long 67108865,262401,256,67371008
+.long 4198408,268439552,8,272633864
+.long 0,272629760,268439560,4194312
+.long 272633856,268435464,268435456,4104
+.long 268435464,4198408,4194304,268435456
+.long 272629768,4198400,4096,8
+.long 4198400,268439560,272629760,4096
+.long 4104,0,4194312,272633856
+.long 268439552,272629768,272633864,4194304
+.long 272629768,4104,4194304,268435464
+.long 4198400,268439552,8,272629760
+.long 268439560,0,4096,4194312
+.long 0,272629768,272633856,4096
+.long 268435456,272633864,4198408,4194304
+.long 272633864,8,268439552,4198408
+.long 4194312,4198400,272629760,268439560
+.long 4104,268435456,268435464,272633856
+.long 134217728,65536,1024,134284320
+.long 134283296,134218752,66592,134283264
+.long 65536,32,134217760,66560
+.long 134218784,134283296,134284288,0
+.long 66560,134217728,65568,1056
+.long 134218752,66592,0,134217760
+.long 32,134218784,134284320,65568
+.long 134283264,1024,1056,134284288
+.long 134284288,134218784,65568,134283264
+.long 65536,32,134217760,134218752
+.long 134217728,66560,134284320,0
+.long 66592,134217728,1024,65568
+.long 134218784,1024,0,134284320
+.long 134283296,134284288,1056,65536
+.long 66560,134283296,134218752,1056
+.long 32,66592,134283264,134217760
+.long 2147483712,2097216,0,2149588992
+.long 2097216,8192,2147491904,2097152
+.long 8256,2149589056,2105344,2147483648
+.long 2147491840,2147483712,2149580800,2105408
+.long 2097152,2147491904,2149580864,0
+.long 8192,64,2149588992,2149580864
+.long 2149589056,2149580800,2147483648,8256
+.long 64,2105344,2105408,2147491840
+.long 8256,2147483648,2147491840,2105408
+.long 2149588992,2097216,0,2147491840
+.long 2147483648,8192,2149580864,2097152
+.long 2097216,2149589056,2105344,64
+.long 2149589056,2105344,2097152,2147491904
+.long 2147483712,2149580800,2105408,0
+.long 8192,2147483712,2147491904,2149588992
+.long 2149580800,8256,64,2149580864
+.long 16384,512,16777728,16777220
+.long 16794116,16388,16896,0
+.long 16777216,16777732,516,16793600
+.long 4,16794112,16793600,516
+.long 16777732,16384,16388,16794116
+.long 0,16777728,16777220,16896
+.long 16793604,16900,16794112,4
+.long 16900,16793604,512,16777216
+.long 16900,16793600,16793604,516
+.long 16384,512,16777216,16793604
+.long 16777732,16900,16896,0
+.long 512,16777220,4,16777728
+.long 0,16777732,16777728,16896
+.long 516,16384,16794116,16777216
+.long 16794112,4,16388,16794116
+.long 16777220,16794112,16793600,16388
+.long 545259648,545390592,131200,0
+.long 537001984,8388736,545259520,545390720
+.long 128,536870912,8519680,131200
+.long 8519808,537002112,536871040,545259520
+.long 131072,8519808,8388736,537001984
+.long 545390720,536871040,0,8519680
+.long 536870912,8388608,537002112,545259648
+.long 8388608,131072,545390592,128
+.long 8388608,131072,536871040,545390720
+.long 131200,536870912,0,8519680
+.long 545259648,537002112,537001984,8388736
+.long 545390592,128,8388736,537001984
+.long 545390720,8388608,545259520,536871040
+.long 8519680,131200,537002112,545259520
+.long 128,545390592,8519808,0
+.long 536870912,545259648,131072,8519808
diff --git a/deps/openssl/asm/x86-macosx-gas/md5/md5-586.s b/deps/openssl/asm/x86-macosx-gas/md5/md5-586.s
new file mode 100644
index 0000000000..5336574c86
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/md5/md5-586.s
@@ -0,0 +1,745 @@
+.file "../openssl/crypto/md5/asm/md5-586.s"
+.text
+.globl _md5_block_asm_data_order
+.align 4
+_md5_block_asm_data_order:
+L_md5_block_asm_data_order_begin:
+ pushl %esi
+ pushl %edi
+ movl 12(%esp),%edi
+ movl 16(%esp),%esi
+ movl 20(%esp),%ecx
+ pushl %ebp
+ shll $6,%ecx
+ pushl %ebx
+ addl %esi,%ecx
+ subl $64,%ecx
+ movl (%edi),%eax
+ pushl %ecx
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+L000start:
+
+ # R0 section
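+	# (F(b,c,d) = (b & c) | (~b & d), computed as ((c ^ d) & b) ^ d;
+	# each step is a = b + rol(a + F(b,c,d) + X[k] + T[i], s))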
+
+ movl %ecx,%edi
+ movl (%esi),%ebp
+ # R0 0
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 3614090360(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 4(%esi),%ebp
+ addl %ebx,%eax
+ # R0 1
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 3905402710(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 8(%esi),%ebp
+ addl %eax,%edx
+ # R0 2
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 606105819(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 12(%esi),%ebp
+ addl %edx,%ecx
+ # R0 3
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 3250441966(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 16(%esi),%ebp
+ addl %ecx,%ebx
+ # R0 4
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 4118548399(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 20(%esi),%ebp
+ addl %ebx,%eax
+ # R0 5
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 1200080426(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 24(%esi),%ebp
+ addl %eax,%edx
+ # R0 6
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 2821735955(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 28(%esi),%ebp
+ addl %edx,%ecx
+ # R0 7
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 4249261313(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 32(%esi),%ebp
+ addl %ecx,%ebx
+ # R0 8
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 1770035416(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 36(%esi),%ebp
+ addl %ebx,%eax
+ # R0 9
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 2336552879(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 40(%esi),%ebp
+ addl %eax,%edx
+ # R0 10
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 4294925233(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 44(%esi),%ebp
+ addl %edx,%ecx
+ # R0 11
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 2304563134(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 48(%esi),%ebp
+ addl %ecx,%ebx
+ # R0 12
+
+ xorl %edx,%edi
+ andl %ebx,%edi
+ leal 1804603682(%eax,%ebp,1),%eax
+ xorl %edx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $7,%eax
+ movl 52(%esi),%ebp
+ addl %ebx,%eax
+ # R0 13
+
+ xorl %ecx,%edi
+ andl %eax,%edi
+ leal 4254626195(%edx,%ebp,1),%edx
+ xorl %ecx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $12,%edx
+ movl 56(%esi),%ebp
+ addl %eax,%edx
+ # R0 14
+
+ xorl %ebx,%edi
+ andl %edx,%edi
+ leal 2792965006(%ecx,%ebp,1),%ecx
+ xorl %ebx,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $17,%ecx
+ movl 60(%esi),%ebp
+ addl %edx,%ecx
+ # R0 15
+
+ xorl %eax,%edi
+ andl %ecx,%edi
+ leal 1236535329(%ebx,%ebp,1),%ebx
+ xorl %eax,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $22,%ebx
+ movl 4(%esi),%ebp
+ addl %ecx,%ebx
+
+ # R1 section
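+	# (G(b,c,d) = (b & d) | (c & ~d), computed as ((b ^ c) & d) ^ c)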
+
+ # R1 16
+
+ leal 4129170786(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 24(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+ # R1 17
+
+ leal 3225465664(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 44(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+ # R1 18
+
+ leal 643717713(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl (%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+ # R1 19
+
+ leal 3921069994(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 20(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+ # R1 20
+
+ leal 3593408605(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 40(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+ # R1 21
+
+ leal 38016083(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 60(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+ # R1 22
+
+ leal 3634488961(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 16(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+ # R1 23
+
+ leal 3889429448(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 36(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+ # R1 24
+
+ leal 568446438(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 56(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+ # R1 25
+
+ leal 3275163606(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 12(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+ # R1 26
+
+ leal 4107603335(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 32(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+ # R1 27
+
+ leal 1163531501(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 52(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+ # R1 28
+
+ leal 2850285829(%eax,%ebp,1),%eax
+ xorl %ebx,%edi
+ andl %edx,%edi
+ movl 8(%esi),%ebp
+ xorl %ecx,%edi
+ addl %edi,%eax
+ movl %ebx,%edi
+ roll $5,%eax
+ addl %ebx,%eax
+ # R1 29
+
+ leal 4243563512(%edx,%ebp,1),%edx
+ xorl %eax,%edi
+ andl %ecx,%edi
+ movl 28(%esi),%ebp
+ xorl %ebx,%edi
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $9,%edx
+ addl %eax,%edx
+ # R1 30
+
+ leal 1735328473(%ecx,%ebp,1),%ecx
+ xorl %edx,%edi
+ andl %ebx,%edi
+ movl 48(%esi),%ebp
+ xorl %eax,%edi
+ addl %edi,%ecx
+ movl %edx,%edi
+ roll $14,%ecx
+ addl %edx,%ecx
+ # R1 31
+
+ leal 2368359562(%ebx,%ebp,1),%ebx
+ xorl %ecx,%edi
+ andl %eax,%edi
+ movl 20(%esi),%ebp
+ xorl %edx,%edi
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $20,%ebx
+ addl %ecx,%ebx
+
+ # R2 section
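+	# (H(b,c,d) = b ^ c ^ d)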
+
+ # R2 32
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 4294588738(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 32(%esi),%ebp
+ movl %ebx,%edi
+ # R2 33
+
+ leal 2272392833(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 44(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+ # R2 34
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 1839030562(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 56(%esi),%ebp
+ movl %edx,%edi
+ # R2 35
+
+ leal 4259657740(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 4(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+ # R2 36
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 2763975236(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 16(%esi),%ebp
+ movl %ebx,%edi
+ # R2 37
+
+ leal 1272893353(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 28(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+ # R2 38
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 4139469664(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 40(%esi),%ebp
+ movl %edx,%edi
+ # R2 39
+
+ leal 3200236656(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 52(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+ # R2 40
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 681279174(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl (%esi),%ebp
+ movl %ebx,%edi
+ # R2 41
+
+ leal 3936430074(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 12(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+ # R2 42
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 3572445317(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 24(%esi),%ebp
+ movl %edx,%edi
+ # R2 43
+
+ leal 76029189(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl 36(%esi),%ebp
+ addl %edi,%ebx
+ movl %ecx,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+ # R2 44
+
+ xorl %edx,%edi
+ xorl %ebx,%edi
+ leal 3654602809(%eax,%ebp,1),%eax
+ addl %edi,%eax
+ roll $4,%eax
+ movl 48(%esi),%ebp
+ movl %ebx,%edi
+ # R2 45
+
+ leal 3873151461(%edx,%ebp,1),%edx
+ addl %ebx,%eax
+ xorl %ecx,%edi
+ xorl %eax,%edi
+ movl 60(%esi),%ebp
+ addl %edi,%edx
+ movl %eax,%edi
+ roll $11,%edx
+ addl %eax,%edx
+ # R2 46
+
+ xorl %ebx,%edi
+ xorl %edx,%edi
+ leal 530742520(%ecx,%ebp,1),%ecx
+ addl %edi,%ecx
+ roll $16,%ecx
+ movl 8(%esi),%ebp
+ movl %edx,%edi
+ # R2 47
+
+ leal 3299628645(%ebx,%ebp,1),%ebx
+ addl %edx,%ecx
+ xorl %eax,%edi
+ xorl %ecx,%edi
+ movl (%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $23,%ebx
+ addl %ecx,%ebx
+
+ # R3 section
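+	# (I(b,c,d) = c ^ (b | ~d); %edi is preloaded with -1 so that
+	# xor/or/xor can form it)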
+
+ # R3 48
+
+ xorl %edx,%edi
+ orl %ebx,%edi
+ leal 4096336452(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 28(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+ # R3 49
+
+ orl %eax,%edi
+ leal 1126891415(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 56(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+ # R3 50
+
+ orl %edx,%edi
+ leal 2878612391(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 20(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+ # R3 51
+
+ orl %ecx,%edi
+ leal 4237533241(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 48(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+ # R3 52
+
+ orl %ebx,%edi
+ leal 1700485571(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 12(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+ # R3 53
+
+ orl %eax,%edi
+ leal 2399980690(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 40(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+ # R3 54
+
+ orl %edx,%edi
+ leal 4293915773(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 4(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+ # R3 55
+
+ orl %ecx,%edi
+ leal 2240044497(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 32(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+ # R3 56
+
+ orl %ebx,%edi
+ leal 1873313359(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 60(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+ # R3 57
+
+ orl %eax,%edi
+ leal 4264355552(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 24(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+ # R3 58
+
+ orl %edx,%edi
+ leal 2734768916(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 52(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+ # R3 59
+
+ orl %ecx,%edi
+ leal 1309151649(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 16(%esi),%ebp
+ addl %edi,%ebx
+ movl $-1,%edi
+ roll $21,%ebx
+ xorl %edx,%edi
+ addl %ecx,%ebx
+ # R3 60
+
+ orl %ebx,%edi
+ leal 4149444226(%eax,%ebp,1),%eax
+ xorl %ecx,%edi
+ movl 44(%esi),%ebp
+ addl %edi,%eax
+ movl $-1,%edi
+ roll $6,%eax
+ xorl %ecx,%edi
+ addl %ebx,%eax
+ # R3 61
+
+ orl %eax,%edi
+ leal 3174756917(%edx,%ebp,1),%edx
+ xorl %ebx,%edi
+ movl 8(%esi),%ebp
+ addl %edi,%edx
+ movl $-1,%edi
+ roll $10,%edx
+ xorl %ebx,%edi
+ addl %eax,%edx
+ # R3 62
+
+ orl %edx,%edi
+ leal 718787259(%ecx,%ebp,1),%ecx
+ xorl %eax,%edi
+ movl 36(%esi),%ebp
+ addl %edi,%ecx
+ movl $-1,%edi
+ roll $15,%ecx
+ xorl %eax,%edi
+ addl %edx,%ecx
+ # R3 63
+
+ orl %ecx,%edi
+ leal 3951481745(%ebx,%ebp,1),%ebx
+ xorl %edx,%edi
+ movl 24(%esp),%ebp
+ addl %edi,%ebx
+ addl $64,%esi
+ roll $21,%ebx
+ movl (%ebp),%edi
+ addl %ecx,%ebx
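+	# all 64 steps done: add the saved chaining values (context
+	# pointer at 24(%esp)) back into a,b,c,d, store them, and loop
+	# while %esi is still within the bound kept at (%esp)
+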
+ addl %edi,%eax
+ movl 4(%ebp),%edi
+ addl %edi,%ebx
+ movl 8(%ebp),%edi
+ addl %edi,%ecx
+ movl 12(%ebp),%edi
+ addl %edi,%edx
+ movl %eax,(%ebp)
+ movl %ebx,4(%ebp)
+ movl (%esp),%edi
+ movl %ecx,8(%ebp)
+ movl %edx,12(%ebp)
+ cmpl %esi,%edi
+ jae L000start
+ popl %eax
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
diff --git a/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s b/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
new file mode 100644
index 0000000000..a821dc9503
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
@@ -0,0 +1,224 @@
+.file "rc4-586.s"
+.text
+.globl _RC4
+.align 4
+_RC4:
+L_RC4_begin:
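+	# RC4(key,len,inp,out): %edi = key schedule, %edx = len,
+	# %esi = inp, %ebp = out.  The schedule holds one 32-bit word
+	# per S[] entry unless RC4_set_key stored -1 at offset 256
+	# past the indices, which flags the byte-per-entry layout
+	# handled at L001RC4_CHAR.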
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edi
+ movl 24(%esp),%edx
+ movl 28(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %eax,%eax
+ xorl %ebx,%ebx
+ cmpl $0,%edx
+ je L000abort
+ movb (%edi),%al
+ movb 4(%edi),%bl
+ addl $8,%edi
+ leal (%esi,%edx,1),%ecx
+ subl %esi,%ebp
+ movl %ecx,24(%esp)
+ incb %al
+ cmpl $-1,256(%edi)
+ je L001RC4_CHAR
+ movl (%edi,%eax,4),%ecx
+ andl $-4,%edx
+ jz L002loop1
+ leal -4(%esi,%edx,1),%edx
+ movl %edx,28(%esp)
+ movl %ebp,32(%esp)
+.align 4,0x90
+L003loop4:
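+	# 4x unrolled: four RC4 steps per pass, packing the keystream
+	# bytes into %ebp via rorl $8/orl, then one 32-bit xor with the
+	# input; out-in is parked at 32(%esp) so (%ecx,%esi,1) hits the
+	# output while %esi walks the input.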
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ movl (%edi,%eax,4),%ecx
+ movl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl (%edi,%eax,4),%ecx
+ orl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl (%edi,%eax,4),%ecx
+ orl (%edi,%edx,4),%ebp
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ rorl $8,%ebp
+ movl 32(%esp),%ecx
+ orl (%edi,%edx,4),%ebp
+ rorl $8,%ebp
+ xorl (%esi),%ebp
+ cmpl 28(%esp),%esi
+ movl %ebp,(%ecx,%esi,1)
+ leal 4(%esi),%esi
+ movl (%edi,%eax,4),%ecx
+ jb L003loop4
+ cmpl 24(%esp),%esi
+ je L004done
+ movl 32(%esp),%ebp
+.align 4,0x90
+L002loop1:
+ addb %cl,%bl
+ movl (%edi,%ebx,4),%edx
+ movl %ecx,(%edi,%ebx,4)
+ movl %edx,(%edi,%eax,4)
+ addl %ecx,%edx
+ incb %al
+ andl $255,%edx
+ movl (%edi,%edx,4),%edx
+ xorb (%esi),%dl
+ leal 1(%esi),%esi
+ movl (%edi,%eax,4),%ecx
+ cmpl 24(%esp),%esi
+ movb %dl,-1(%ebp,%esi,1)
+ jb L002loop1
+ jmp L004done
+.align 4,0x90
+L001RC4_CHAR:
+ movzbl (%edi,%eax,1),%ecx
+L005cloop1:
+ addb %cl,%bl
+ movzbl (%edi,%ebx,1),%edx
+ movb %cl,(%edi,%ebx,1)
+ movb %dl,(%edi,%eax,1)
+ addb %cl,%dl
+ movzbl (%edi,%edx,1),%edx
+ addb $1,%al
+ xorb (%esi),%dl
+ leal 1(%esi),%esi
+ movzbl (%edi,%eax,1),%ecx
+ cmpl 24(%esp),%esi
+ movb %dl,-1(%ebp,%esi,1)
+ jb L005cloop1
+L004done:
+ decb %al
+ movb %bl,-4(%edi)
+ movb %al,-8(%edi)
+L000abort:
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _RC4_set_key
+.align 4
+_RC4_set_key:
+L_RC4_set_key_begin:
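+	# standard KSA: S[i] = i, then j = (j + S[i] + key[i mod len])
+	# and swap S[i],S[j]; the key wraps via the negative counter at
+	# -4(%edi).  Bit 20 of _OPENSSL_ia32cap_P selects the
+	# byte-per-entry table (L006c1stloop) over 32-bit words.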
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%edi
+ movl 24(%esp),%ebp
+ movl 28(%esp),%esi
+ leal _OPENSSL_ia32cap_P,%edx
+ leal 8(%edi),%edi
+ leal (%esi,%ebp,1),%esi
+ negl %ebp
+ xorl %eax,%eax
+ movl %ebp,-4(%edi)
+ btl $20,(%edx)
+ jc L006c1stloop
+.align 4,0x90
+L007w1stloop:
+ movl %eax,(%edi,%eax,4)
+ addb $1,%al
+ jnc L007w1stloop
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+.align 4,0x90
+L008w2ndloop:
+ movl (%edi,%ecx,4),%eax
+ addb (%esi,%ebp,1),%dl
+ addb %al,%dl
+ addl $1,%ebp
+ movl (%edi,%edx,4),%ebx
+ jnz L009wnowrap
+ movl -4(%edi),%ebp
+L009wnowrap:
+ movl %eax,(%edi,%edx,4)
+ movl %ebx,(%edi,%ecx,4)
+ addb $1,%cl
+ jnc L008w2ndloop
+ jmp L010exit
+.align 4,0x90
+L006c1stloop:
+ movb %al,(%edi,%eax,1)
+ addb $1,%al
+ jnc L006c1stloop
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ xorl %ebx,%ebx
+.align 4,0x90
+L011c2ndloop:
+ movb (%edi,%ecx,1),%al
+ addb (%esi,%ebp,1),%dl
+ addb %al,%dl
+ addl $1,%ebp
+ movb (%edi,%edx,1),%bl
+ jnz L012cnowrap
+ movl -4(%edi),%ebp
+L012cnowrap:
+ movb %al,(%edi,%edx,1)
+ movb %bl,(%edi,%ecx,1)
+ addb $1,%cl
+ jnc L011c2ndloop
+ movl $-1,256(%edi)
+L010exit:
+ xorl %eax,%eax
+ movl %eax,-8(%edi)
+ movl %eax,-4(%edi)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _RC4_options
+.align 4
+_RC4_options:
+L_RC4_options_begin:
+ call L013pic_point
+L013pic_point:
+ popl %eax
+ leal L014opts-L013pic_point(%eax),%eax
+ leal _OPENSSL_ia32cap_P,%edx
+ btl $20,(%edx)
+ jnc L015skip
+ addl $12,%eax
+L015skip:
+ ret
+.align 6,0x90
+L014opts:
+.byte 114,99,52,40,52,120,44,105,110,116,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
+.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
+.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 6,0x90
+.comm _OPENSSL_ia32cap_P,4
diff --git a/deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s b/deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s
new file mode 100644
index 0000000000..ed7f7dc762
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/rc5/rc5-586.s
@@ -0,0 +1,563 @@
+.file "rc5-586.s"
+.text
+.globl _RC5_32_encrypt
+.align 4
+_RC5_32_encrypt:
+L_RC5_32_encrypt_begin:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp),%edx
+ movl 20(%esp),%ebp
+ # Load the 2 words
+
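+	# %ebx = round count, the first word of the schedule.  After
+	# A += S[0], B += S[1], each half-round computes
+	# A = ((A ^ B) <<< B) + S[2i] (and likewise for B); unrolled
+	# for 16 rounds with early exits after 8 and 12 rounds.
+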
+ movl (%edx),%edi
+ movl 4(%edx),%esi
+ pushl %ebx
+ movl (%ebp),%ebx
+ addl 4(%ebp),%edi
+ addl 8(%ebp),%esi
+ xorl %esi,%edi
+ movl 12(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 16(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 20(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 24(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 28(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 32(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 36(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 40(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 44(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 48(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 52(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 56(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 60(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 64(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 68(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 72(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ cmpl $8,%ebx
+ je L000rc5_exit
+ xorl %esi,%edi
+ movl 76(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 80(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 84(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 88(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 92(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 96(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 100(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 104(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ cmpl $12,%ebx
+ je L000rc5_exit
+ xorl %esi,%edi
+ movl 108(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 112(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 116(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 120(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 124(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 128(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+ xorl %esi,%edi
+ movl 132(%ebp),%eax
+ movl %esi,%ecx
+ roll %cl,%edi
+ addl %eax,%edi
+ xorl %edi,%esi
+ movl 136(%ebp),%eax
+ movl %edi,%ecx
+ roll %cl,%esi
+ addl %eax,%esi
+L000rc5_exit:
+ movl %edi,(%edx)
+ movl %esi,4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.globl _RC5_32_decrypt
+.align 4
+_RC5_32_decrypt:
+L_RC5_32_decrypt_begin:
+
+ pushl %ebp
+ pushl %esi
+ pushl %edi
+ movl 16(%esp),%edx
+ movl 20(%esp),%ebp
+ # Load the 2 words
+
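+	# inverse half-rounds, schedule walked backwards:
+	# B = ((B - S[2i+1]) >>> A) ^ A.  16-round keys start here;
+	# 12- and 8-round keys enter at L001rc5_dec_12/L002rc5_dec_8.
+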
+ movl (%edx),%edi
+ movl 4(%edx),%esi
+ pushl %ebx
+ movl (%ebp),%ebx
+ cmpl $12,%ebx
+ je L001rc5_dec_12
+ cmpl $8,%ebx
+ je L002rc5_dec_8
+ movl 136(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 132(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 128(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 124(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 120(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 116(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 112(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 108(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+L001rc5_dec_12:
+ movl 104(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 100(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 96(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 92(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 88(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 84(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 80(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 76(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+L002rc5_dec_8:
+ movl 72(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 68(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 64(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 60(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 56(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 52(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 48(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 44(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 40(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 36(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 32(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 28(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 24(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 20(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ movl 16(%ebp),%eax
+ subl %eax,%esi
+ movl %edi,%ecx
+ rorl %cl,%esi
+ xorl %edi,%esi
+ movl 12(%ebp),%eax
+ subl %eax,%edi
+ movl %esi,%ecx
+ rorl %cl,%edi
+ xorl %esi,%edi
+ subl 8(%ebp),%esi
+ subl 4(%ebp),%edi
+L003rc5_exit:
+ movl %edi,(%edx)
+ movl %esi,4(%edx)
+ popl %ebx
+ popl %edi
+ popl %esi
+ popl %ebp
+ ret
+.globl _RC5_32_cbc_encrypt
+.align 4
+_RC5_32_cbc_encrypt:
+L_RC5_32_cbc_encrypt_begin:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp),%ebp
+ # getting iv ptr from parameter 4
+
+ movl 36(%esp),%ebx
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp,%ebx
+ movl 36(%esp),%esi
+ movl 40(%esp),%edi
+ # getting encrypt flag from parameter 5
+
+ movl 56(%esp),%ecx
+ # get and push parameter 3
+
+ movl 48(%esp),%eax
+ pushl %eax
+ pushl %ebx
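+	# the iv now sits twice on the stack as a scratch block, and
+	# (&scratch, key) stay pushed as the argument frame for every
+	# L_RC5_32_encrypt_begin/L_RC5_32_decrypt_begin call below; an
+	# encrypt tail of 1..7 bytes goes through the PIC jump table
+	# L009cbc_enc_jmp_table
+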
+ cmpl $0,%ecx
+ jz L004decrypt
+ andl $4294967288,%ebp
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ jz L005encrypt_finish
+L006encrypt_loop:
+ movl (%esi),%ecx
+ movl 4(%esi),%edx
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_RC5_32_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L006encrypt_loop
+L005encrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L007finish
+ call L008PIC_point
+L008PIC_point:
+ popl %edx
+ leal L009cbc_enc_jmp_table-L008PIC_point(%edx),%ecx
+ movl (%ecx,%ebp,4),%ebp
+ addl %edx,%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ jmp *%ebp
+L010ej7:
+ movb 6(%esi),%dh
+ shll $8,%edx
+L011ej6:
+ movb 5(%esi),%dh
+L012ej5:
+ movb 4(%esi),%dl
+L013ej4:
+ movl (%esi),%ecx
+ jmp L014ejend
+L015ej3:
+ movb 2(%esi),%ch
+ shll $8,%ecx
+L016ej2:
+ movb 1(%esi),%ch
+L017ej1:
+ movb (%esi),%cl
+L014ejend:
+ xorl %ecx,%eax
+ xorl %edx,%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_RC5_32_encrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl %eax,(%edi)
+ movl %ebx,4(%edi)
+ jmp L007finish
+L004decrypt:
+ andl $4294967288,%ebp
+ movl 16(%esp),%eax
+ movl 20(%esp),%ebx
+ jz L018decrypt_finish
+L019decrypt_loop:
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_RC5_32_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %ecx,(%edi)
+ movl %edx,4(%edi)
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ addl $8,%esi
+ addl $8,%edi
+ subl $8,%ebp
+ jnz L019decrypt_loop
+L018decrypt_finish:
+ movl 52(%esp),%ebp
+ andl $7,%ebp
+ jz L007finish
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+ call L_RC5_32_decrypt_begin
+ movl 8(%esp),%eax
+ movl 12(%esp),%ebx
+ movl 16(%esp),%ecx
+ movl 20(%esp),%edx
+ xorl %eax,%ecx
+ xorl %ebx,%edx
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+L020dj7:
+ rorl $16,%edx
+ movb %dl,6(%edi)
+ shrl $16,%edx
+L021dj6:
+ movb %dh,5(%edi)
+L022dj5:
+ movb %dl,4(%edi)
+L023dj4:
+ movl %ecx,(%edi)
+ jmp L024djend
+L025dj3:
+ rorl $16,%ecx
+ movb %cl,2(%edi)
+ shrl $16,%ecx
+L026dj2:
+ movb %ch,1(%edi)
+L027dj1:
+ movb %cl,(%edi)
+L024djend:
+ jmp L007finish
+L007finish:
+ movl 60(%esp),%ecx
+ addl $24,%esp
+ movl %eax,(%ecx)
+ movl %ebx,4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
+L009cbc_enc_jmp_table:
+.long 0
+.long L017ej1-L008PIC_point
+.long L016ej2-L008PIC_point
+.long L015ej3-L008PIC_point
+.long L013ej4-L008PIC_point
+.long L012ej5-L008PIC_point
+.long L011ej6-L008PIC_point
+.long L010ej7-L008PIC_point
+.align 6,0x90
diff --git a/deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s b/deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s
new file mode 100644
index 0000000000..7d9d0e4d68
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/ripemd/rmd-586.s
@@ -0,0 +1,2123 @@
+.file "../openssl/crypto/ripemd/asm/rmd-586.s"
+.text
+.globl _ripemd160_block_asm_data_order
+.align 4
+_ripemd160_block_asm_data_order:
+L_ripemd160_block_asm_data_order_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%eax
+ pushl %esi
+ movl (%edx),%ecx
+ pushl %edi
+ movl 4(%edx),%esi
+ pushl %ebp
+ movl 8(%edx),%edi
+ pushl %ebx
+ subl $108,%esp
+L000start:
+
+ movl (%eax),%ebx
+ movl 4(%eax),%ebp
+ movl %ebx,(%esp)
+ movl %ebp,4(%esp)
+ movl 8(%eax),%ebx
+ movl 12(%eax),%ebp
+ movl %ebx,8(%esp)
+ movl %ebp,12(%esp)
+ movl 16(%eax),%ebx
+ movl 20(%eax),%ebp
+ movl %ebx,16(%esp)
+ movl %ebp,20(%esp)
+ movl 24(%eax),%ebx
+ movl 28(%eax),%ebp
+ movl %ebx,24(%esp)
+ movl %ebp,28(%esp)
+ movl 32(%eax),%ebx
+ movl 36(%eax),%ebp
+ movl %ebx,32(%esp)
+ movl %ebp,36(%esp)
+ movl 40(%eax),%ebx
+ movl 44(%eax),%ebp
+ movl %ebx,40(%esp)
+ movl %ebp,44(%esp)
+ movl 48(%eax),%ebx
+ movl 52(%eax),%ebp
+ movl %ebx,48(%esp)
+ movl %ebp,52(%esp)
+ movl 56(%eax),%ebx
+ movl 60(%eax),%ebp
+ movl %ebx,56(%esp)
+ movl %ebp,60(%esp)
+ movl %edi,%eax
+ movl 12(%edx),%ebx
+ movl 16(%edx),%ebp
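+	# left line, steps 0..15: f(x,y,z) = x ^ y ^ z, no constant.
+	# Each step computes a = ((a + f(b,c,d) + X[r]) <<< s) + e and
+	# c = c <<< 10, the five words rotating through
+	# %ecx,%esi,%edi,%ebx,%ebp.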
+ # 0
+
+ xorl %ebx,%eax
+ movl (%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $11,%ecx
+ addl %ebp,%ecx
+ # 1
+
+ xorl %edi,%eax
+ movl 4(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $14,%ebp
+ addl %ebx,%ebp
+ # 2
+
+ movl 8(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $15,%ebx
+ addl %edi,%ebx
+ # 3
+
+ xorl %ecx,%eax
+ movl 12(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $12,%edi
+ addl %esi,%edi
+ # 4
+
+ movl 16(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+ # 5
+
+ xorl %ebx,%eax
+ movl 20(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 6
+
+ movl 24(%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+ # 7
+
+ xorl %esi,%eax
+ movl 28(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 8
+
+ movl 32(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $11,%edi
+ addl %esi,%edi
+ # 9
+
+ xorl %ebp,%eax
+ movl 36(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $10,%ebx
+ addl %edx,%esi
+ xorl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+ # 10
+
+ movl 40(%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 11
+
+ xorl %edi,%eax
+ movl 44(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+ # 12
+
+ movl 48(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $6,%ebx
+ addl %edi,%ebx
+ # 13
+
+ xorl %ecx,%eax
+ movl 52(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $7,%edi
+ addl %esi,%edi
+ # 14
+
+ movl 56(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+ # 15
+
+ xorl %ebx,%eax
+ movl 60(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl $-1,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ movl 28(%esp),%edx
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 16
+
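+	# steps 16..31: f = (x & y) | (~x & z), K = 0x5A827999
+	# (1518500249); ~x is formed as -1 - x by the subl on the
+	# register preloaded with $-1
+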
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $7,%ebp
+ addl %ebx,%ebp
+ # 17
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 52(%esp),%edx
+ roll $10,%ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $6,%ebx
+ addl %edi,%ebx
+ # 18
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $8,%edi
+ addl %esi,%edi
+ # 19
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 40(%esp),%edx
+ roll $10,%ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+ # 20
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $11,%ecx
+ addl %ebp,%ecx
+ # 21
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 60(%esp),%edx
+ roll $10,%esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+ # 22
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ecx
+ leal 1518500249(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $7,%ebx
+ addl %edi,%ebx
+ # 23
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %edx,%eax
+ movl 48(%esp),%edx
+ roll $10,%ebp
+ leal 1518500249(%edi,%eax,1),%edi
+ movl $-1,%eax
+ roll $15,%edi
+ addl %esi,%edi
+ # 24
+
+ addl %edx,%esi
+ movl %ebx,%edx
+ subl %edi,%eax
+ andl %edi,%edx
+ andl %ebp,%eax
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ebx
+ leal 1518500249(%esi,%edx,1),%esi
+ movl $-1,%edx
+ roll $7,%esi
+ addl %ecx,%esi
+ # 25
+
+ addl %eax,%ecx
+ movl %edi,%eax
+ subl %esi,%edx
+ andl %esi,%eax
+ andl %ebx,%edx
+ orl %edx,%eax
+ movl 36(%esp),%edx
+ roll $10,%edi
+ leal 1518500249(%ecx,%eax,1),%ecx
+ movl $-1,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+ # 26
+
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%esi
+ leal 1518500249(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $15,%ebp
+ addl %ebx,%ebp
+ # 27
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 8(%esp),%edx
+ roll $10,%ecx
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 28
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $11,%edi
+ addl %esi,%edi
+ # 29
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 44(%esp),%edx
+ roll $10,%ebx
+ leal 1518500249(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+ # 30
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%edi
+ leal 1518500249(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $13,%ecx
+ addl %ebp,%ecx
+ # 31
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1518500249(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $12,%ebp
+ addl %ebx,%ebp
+ # 32
+
+ movl 12(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $11,%ebx
+ addl %edi,%ebx
+ # 33
+
+ movl 40(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $13,%edi
+ addl %esi,%edi
+ # 34
+
+ movl 56(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+ # 35
+
+ movl 16(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $7,%ecx
+ addl %ebp,%ecx
+ # 36
+
+ movl 36(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $14,%ebp
+ addl %ebx,%ebp
+ # 37
+
+ movl 60(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ subl %ebp,%edx
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 38
+
+ movl 32(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%edi
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ subl %ebx,%eax
+ roll $13,%edi
+ addl %esi,%edi
+ # 39
+
+ movl 4(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%esi
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1859775393(%esi,%eax,1),%esi
+ subl %edi,%edx
+ roll $15,%esi
+ addl %ecx,%esi
+ # 40
+
+ movl 8(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ecx
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1859775393(%ecx,%edx,1),%ecx
+ subl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 41
+
+ movl 28(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebp
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1859775393(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $8,%ebp
+ addl %ebx,%ebp
+ # 42
+
+ movl (%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1859775393(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+ # 43
+
+ movl 24(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1859775393(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $6,%edi
+ addl %esi,%edi
+ # 44
+
+ movl 52(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1859775393(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+ # 45
+
+ movl 44(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1859775393(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $12,%ecx
+ addl %ebp,%ecx
+ # 46
+
+ movl 20(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1859775393(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+ # 47
+
+ movl 48(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 48
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $11,%edi
+ addl %esi,%edi
+ # 49
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 36(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $12,%esi
+ addl %ecx,%esi
+ # 50
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 51
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 40(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+ # 52
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+ # 53
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $15,%edi
+ addl %esi,%edi
+ # 54
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+ # 55
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 56
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+ # 57
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+ # 58
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $5,%edi
+ addl %esi,%edi
+ # 59
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 60(%esp),%eax
+ roll $10,%ebx
+ leal 2400959708(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+ # 60
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%edi
+ leal 2400959708(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 61
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%esi
+ leal 2400959708(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $6,%ebp
+ addl %ebx,%ebp
+ # 62
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%ecx
+ leal 2400959708(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 63
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 8(%esp),%eax
+ roll $10,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ subl %ebp,%edx
+ roll $12,%edi
+ addl %esi,%edi
+ # 64
+
+ movl 16(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+ # 65
+
+ movl (%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $15,%ecx
+ addl %ebp,%ecx
+ # 66
+
+ movl 20(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $5,%ebp
+ addl %ebx,%ebp
+ # 67
+
+ movl 36(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $11,%ebx
+ addl %edi,%ebx
+ # 68
+
+ movl 28(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $6,%edi
+ addl %esi,%edi
+ # 69
+
+ movl 48(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ subl %ebx,%edx
+ roll $8,%esi
+ addl %ecx,%esi
+ # 70
+
+ movl 8(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 2840853838(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $13,%ecx
+ addl %ebp,%ecx
+ # 71
+
+ movl 40(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 2840853838(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $12,%ebp
+ addl %ebx,%ebp
+ # 72
+
+ movl 56(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 2840853838(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 73
+
+ movl 4(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 2840853838(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $12,%edi
+ addl %esi,%edi
+ # 74
+
+ movl 12(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 2840853838(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+ # 75
+
+ movl 32(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 2840853838(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 76
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 2840853838(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $11,%ebp
+ addl %ebx,%ebp
+ # 77
+
+ movl 24(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 2840853838(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $8,%ebx
+ addl %edi,%ebx
+ # 78
+
+ movl 60(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 2840853838(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $5,%edi
+ addl %esi,%edi
+ # 79
+
+ movl 52(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl 128(%esp),%edx
+ roll $10,%ebx
+ leal 2840853838(%esi,%eax,1),%esi
+ movl %ecx,64(%esp)
+ roll $6,%esi
+ addl %ecx,%esi
+ movl (%edx),%ecx
+ movl %esi,68(%esp)
+ movl %edi,72(%esp)
+ movl 4(%edx),%esi
+ movl %ebx,76(%esp)
+ movl 8(%edx),%edi
+ movl %ebp,80(%esp)
+ movl 12(%edx),%ebx
+ movl 16(%edx),%ebp
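+	# left line done: its a..e are parked at 64..80(%esp) and the
+	# chaining words reloaded from the context (pointer at
+	# 128(%esp)) for the right line, steps 80..159, which applies
+	# the f functions in reverse order under its own constants,
+	# starting with 0x50A28BE6 (1352829926).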
+ # 80
+
+ movl $-1,%edx
+ subl %ebx,%edx
+ movl 20(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 81
+
+ movl 56(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $9,%ebp
+ addl %ebx,%ebp
+ # 82
+
+ movl 28(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 83
+
+ movl (%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $11,%edi
+ addl %esi,%edi
+ # 84
+
+ movl 36(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+ # 85
+
+ movl 8(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ subl %edi,%edx
+ roll $15,%ecx
+ addl %ebp,%ecx
+ # 86
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ebp
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1352829926(%ebp,%edx,1),%ebp
+ subl %esi,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+ # 87
+
+ movl 16(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebx
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1352829926(%ebx,%eax,1),%ebx
+ subl %ecx,%edx
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 88
+
+ movl 52(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%edi
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1352829926(%edi,%edx,1),%edi
+ subl %ebp,%eax
+ roll $7,%edi
+ addl %esi,%edi
+ # 89
+
+ movl 24(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%esi
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1352829926(%esi,%eax,1),%esi
+ subl %ebx,%edx
+ roll $7,%esi
+ addl %ecx,%esi
+ # 90
+
+ movl 60(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%ecx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1352829926(%ecx,%edx,1),%ecx
+ subl %edi,%eax
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 91
+
+ movl 32(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ebp
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1352829926(%ebp,%eax,1),%ebp
+ subl %esi,%edx
+ roll $11,%ebp
+ addl %ebx,%ebp
+ # 92
+
+ movl 4(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebx
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1352829926(%ebx,%edx,1),%ebx
+ subl %ecx,%eax
+ roll $14,%ebx
+ addl %edi,%ebx
+ # 93
+
+ movl 40(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%edi
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1352829926(%edi,%eax,1),%edi
+ subl %ebp,%edx
+ roll $14,%edi
+ addl %esi,%edi
+ # 94
+
+ movl 12(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%esi
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1352829926(%esi,%edx,1),%esi
+ subl %ebx,%eax
+ roll $12,%esi
+ addl %ecx,%esi
+ # 95
+
+ movl 48(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%ecx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1352829926(%ecx,%eax,1),%ecx
+ movl %edi,%eax
+ roll $6,%ecx
+ addl %ebp,%ecx
+ # 96
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $9,%ebp
+ addl %ebx,%ebp
+ # 97
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+ # 98
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 12(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $15,%edi
+ addl %esi,%edi
+ # 99
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+ # 100
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+ # 101
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $8,%ebp
+ addl %ebx,%ebp
+ # 102
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 20(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 103
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 40(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $11,%edi
+ addl %esi,%edi
+ # 104
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $7,%esi
+ addl %ecx,%esi
+ # 105
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 60(%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $7,%ecx
+ addl %ebp,%ecx
+ # 106
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 32(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ movl %esi,%eax
+ roll $12,%ebp
+ addl %ebx,%ebp
+ # 107
+
+ subl %esi,%edx
+ andl %ebp,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%ecx
+ leal 1548603684(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ addl %eax,%ebx
+ movl %ecx,%eax
+ roll $7,%ebx
+ addl %edi,%ebx
+ # 108
+
+ subl %ecx,%edx
+ andl %ebx,%eax
+ andl %ebp,%edx
+ orl %eax,%edx
+ movl 16(%esp),%eax
+ roll $10,%ebp
+ leal 1548603684(%edi,%edx,1),%edi
+ movl $-1,%edx
+ addl %eax,%edi
+ movl %ebp,%eax
+ roll $6,%edi
+ addl %esi,%edi
+ # 109
+
+ subl %ebp,%edx
+ andl %edi,%eax
+ andl %ebx,%edx
+ orl %eax,%edx
+ movl 36(%esp),%eax
+ roll $10,%ebx
+ leal 1548603684(%esi,%edx,1),%esi
+ movl $-1,%edx
+ addl %eax,%esi
+ movl %ebx,%eax
+ roll $15,%esi
+ addl %ecx,%esi
+ # 110
+
+ subl %ebx,%edx
+ andl %esi,%eax
+ andl %edi,%edx
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%edi
+ leal 1548603684(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ addl %eax,%ecx
+ movl %edi,%eax
+ roll $13,%ecx
+ addl %ebp,%ecx
+ # 111
+
+ subl %edi,%edx
+ andl %ecx,%eax
+ andl %esi,%edx
+ orl %eax,%edx
+ movl 8(%esp),%eax
+ roll $10,%esi
+ leal 1548603684(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ addl %eax,%ebp
+ subl %ecx,%edx
+ roll $11,%ebp
+ addl %ebx,%ebp
+ # 112
+
+ movl 60(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 113
+
+ movl 20(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $7,%edi
+ addl %esi,%edi
+ # 114
+
+ movl 4(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $15,%esi
+ addl %ecx,%esi
+ # 115
+
+ movl 12(%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $11,%ecx
+ addl %ebp,%ecx
+ # 116
+
+ movl 28(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $8,%ebp
+ addl %ebx,%ebp
+ # 117
+
+ movl 56(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl $-1,%edx
+ roll $10,%ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ subl %ebp,%edx
+ roll $6,%ebx
+ addl %edi,%ebx
+ # 118
+
+ movl 24(%esp),%eax
+ orl %ebx,%edx
+ addl %eax,%edi
+ xorl %ecx,%edx
+ movl $-1,%eax
+ roll $10,%ebp
+ leal 1836072691(%edi,%edx,1),%edi
+ subl %ebx,%eax
+ roll $6,%edi
+ addl %esi,%edi
+ # 119
+
+ movl 36(%esp),%edx
+ orl %edi,%eax
+ addl %edx,%esi
+ xorl %ebp,%eax
+ movl $-1,%edx
+ roll $10,%ebx
+ leal 1836072691(%esi,%eax,1),%esi
+ subl %edi,%edx
+ roll $14,%esi
+ addl %ecx,%esi
+ # 120
+
+ movl 44(%esp),%eax
+ orl %esi,%edx
+ addl %eax,%ecx
+ xorl %ebx,%edx
+ movl $-1,%eax
+ roll $10,%edi
+ leal 1836072691(%ecx,%edx,1),%ecx
+ subl %esi,%eax
+ roll $12,%ecx
+ addl %ebp,%ecx
+ # 121
+
+ movl 32(%esp),%edx
+ orl %ecx,%eax
+ addl %edx,%ebp
+ xorl %edi,%eax
+ movl $-1,%edx
+ roll $10,%esi
+ leal 1836072691(%ebp,%eax,1),%ebp
+ subl %ecx,%edx
+ roll $13,%ebp
+ addl %ebx,%ebp
+ # 122
+
+ movl 48(%esp),%eax
+ orl %ebp,%edx
+ addl %eax,%ebx
+ xorl %esi,%edx
+ movl $-1,%eax
+ roll $10,%ecx
+ leal 1836072691(%ebx,%edx,1),%ebx
+ subl %ebp,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 123
+
+ movl 8(%esp),%edx
+ orl %ebx,%eax
+ addl %edx,%edi
+ xorl %ecx,%eax
+ movl $-1,%edx
+ roll $10,%ebp
+ leal 1836072691(%edi,%eax,1),%edi
+ subl %ebx,%edx
+ roll $14,%edi
+ addl %esi,%edi
+ # 124
+
+ movl 40(%esp),%eax
+ orl %edi,%edx
+ addl %eax,%esi
+ xorl %ebp,%edx
+ movl $-1,%eax
+ roll $10,%ebx
+ leal 1836072691(%esi,%edx,1),%esi
+ subl %edi,%eax
+ roll $13,%esi
+ addl %ecx,%esi
+ # 125
+
+ movl (%esp),%edx
+ orl %esi,%eax
+ addl %edx,%ecx
+ xorl %ebx,%eax
+ movl $-1,%edx
+ roll $10,%edi
+ leal 1836072691(%ecx,%eax,1),%ecx
+ subl %esi,%edx
+ roll $13,%ecx
+ addl %ebp,%ecx
+ # 126
+
+ movl 16(%esp),%eax
+ orl %ecx,%edx
+ addl %eax,%ebp
+ xorl %edi,%edx
+ movl $-1,%eax
+ roll $10,%esi
+ leal 1836072691(%ebp,%edx,1),%ebp
+ subl %ecx,%eax
+ roll $7,%ebp
+ addl %ebx,%ebp
+ # 127
+
+ movl 52(%esp),%edx
+ orl %ebp,%eax
+ addl %edx,%ebx
+ xorl %esi,%eax
+ movl 32(%esp),%edx
+ roll $10,%ecx
+ leal 1836072691(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $5,%ebx
+ addl %edi,%ebx
+ # 128
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 24(%esp),%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $15,%edi
+ addl %esi,%edi
+ # 129
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 16(%esp),%edx
+ roll $10,%ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+ # 130
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 4(%esp),%eax
+ roll $10,%edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $8,%ecx
+ addl %ebp,%ecx
+ # 131
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 12(%esp),%edx
+ roll $10,%esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $11,%ebp
+ addl %ebx,%ebp
+ # 132
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 44(%esp),%eax
+ roll $10,%ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $14,%ebx
+ addl %edi,%ebx
+ # 133
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %edx,%eax
+ movl 60(%esp),%edx
+ roll $10,%ebp
+ leal 2053994217(%edi,%eax,1),%edi
+ movl $-1,%eax
+ roll $14,%edi
+ addl %esi,%edi
+ # 134
+
+ addl %edx,%esi
+ movl %ebx,%edx
+ subl %edi,%eax
+ andl %edi,%edx
+ andl %ebp,%eax
+ orl %eax,%edx
+ movl (%esp),%eax
+ roll $10,%ebx
+ leal 2053994217(%esi,%edx,1),%esi
+ movl $-1,%edx
+ roll $6,%esi
+ addl %ecx,%esi
+ # 135
+
+ addl %eax,%ecx
+ movl %edi,%eax
+ subl %esi,%edx
+ andl %esi,%eax
+ andl %ebx,%edx
+ orl %edx,%eax
+ movl 20(%esp),%edx
+ roll $10,%edi
+ leal 2053994217(%ecx,%eax,1),%ecx
+ movl $-1,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 136
+
+ addl %edx,%ebp
+ movl %esi,%edx
+ subl %ecx,%eax
+ andl %ecx,%edx
+ andl %edi,%eax
+ orl %eax,%edx
+ movl 48(%esp),%eax
+ roll $10,%esi
+ leal 2053994217(%ebp,%edx,1),%ebp
+ movl $-1,%edx
+ roll $6,%ebp
+ addl %ebx,%ebp
+ # 137
+
+ addl %eax,%ebx
+ movl %ecx,%eax
+ subl %ebp,%edx
+ andl %ebp,%eax
+ andl %esi,%edx
+ orl %edx,%eax
+ movl 8(%esp),%edx
+ roll $10,%ecx
+ leal 2053994217(%ebx,%eax,1),%ebx
+ movl $-1,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 138
+
+ addl %edx,%edi
+ movl %ebp,%edx
+ subl %ebx,%eax
+ andl %ebx,%edx
+ andl %ecx,%eax
+ orl %eax,%edx
+ movl 52(%esp),%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ movl $-1,%edx
+ roll $12,%edi
+ addl %esi,%edi
+ # 139
+
+ addl %eax,%esi
+ movl %ebx,%eax
+ subl %edi,%edx
+ andl %edi,%eax
+ andl %ebp,%edx
+ orl %edx,%eax
+ movl 36(%esp),%edx
+ roll $10,%ebx
+ leal 2053994217(%esi,%eax,1),%esi
+ movl $-1,%eax
+ roll $9,%esi
+ addl %ecx,%esi
+ # 140
+
+ addl %edx,%ecx
+ movl %edi,%edx
+ subl %esi,%eax
+ andl %esi,%edx
+ andl %ebx,%eax
+ orl %eax,%edx
+ movl 28(%esp),%eax
+ roll $10,%edi
+ leal 2053994217(%ecx,%edx,1),%ecx
+ movl $-1,%edx
+ roll $12,%ecx
+ addl %ebp,%ecx
+ # 141
+
+ addl %eax,%ebp
+ movl %esi,%eax
+ subl %ecx,%edx
+ andl %ecx,%eax
+ andl %edi,%edx
+ orl %edx,%eax
+ movl 40(%esp),%edx
+ roll $10,%esi
+ leal 2053994217(%ebp,%eax,1),%ebp
+ movl $-1,%eax
+ roll $5,%ebp
+ addl %ebx,%ebp
+ # 142
+
+ addl %edx,%ebx
+ movl %ecx,%edx
+ subl %ebp,%eax
+ andl %ebp,%edx
+ andl %esi,%eax
+ orl %eax,%edx
+ movl 56(%esp),%eax
+ roll $10,%ecx
+ leal 2053994217(%ebx,%edx,1),%ebx
+ movl $-1,%edx
+ roll $15,%ebx
+ addl %edi,%ebx
+ # 143
+
+ addl %eax,%edi
+ movl %ebp,%eax
+ subl %ebx,%edx
+ andl %ebx,%eax
+ andl %ecx,%edx
+ orl %eax,%edx
+ movl %ebx,%eax
+ roll $10,%ebp
+ leal 2053994217(%edi,%edx,1),%edi
+ xorl %ebp,%eax
+ roll $8,%edi
+ addl %esi,%edi
+ # 144
+
+ movl 48(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $8,%esi
+ addl %ecx,%esi
+ # 145
+
+ xorl %ebx,%eax
+ movl 60(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $5,%ecx
+ addl %ebp,%ecx
+ # 146
+
+ movl 40(%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $12,%ebp
+ addl %ebx,%ebp
+ # 147
+
+ xorl %esi,%eax
+ movl 16(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $9,%ebx
+ addl %edi,%ebx
+ # 148
+
+ movl 4(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $12,%edi
+ addl %esi,%edi
+ # 149
+
+ xorl %ebp,%eax
+ movl 20(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $10,%ebx
+ addl %edx,%esi
+ xorl %ebx,%eax
+ roll $5,%esi
+ addl %ecx,%esi
+ # 150
+
+ movl 32(%esp),%edx
+ xorl %esi,%eax
+ addl %edx,%ecx
+ roll $10,%edi
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $14,%ecx
+ addl %ebp,%ecx
+ # 151
+
+ xorl %edi,%eax
+ movl 28(%esp),%edx
+ xorl %ecx,%eax
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $10,%esi
+ addl %edx,%ebp
+ xorl %esi,%eax
+ roll $6,%ebp
+ addl %ebx,%ebp
+ # 152
+
+ movl 24(%esp),%edx
+ xorl %ebp,%eax
+ addl %edx,%ebx
+ roll $10,%ecx
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $8,%ebx
+ addl %edi,%ebx
+ # 153
+
+ xorl %ecx,%eax
+ movl 8(%esp),%edx
+ xorl %ebx,%eax
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $10,%ebp
+ addl %edx,%edi
+ xorl %ebp,%eax
+ roll $13,%edi
+ addl %esi,%edi
+ # 154
+
+ movl 52(%esp),%edx
+ xorl %edi,%eax
+ addl %edx,%esi
+ roll $10,%ebx
+ addl %eax,%esi
+ movl %edi,%eax
+ roll $6,%esi
+ addl %ecx,%esi
+ # 155
+
+ xorl %ebx,%eax
+ movl 56(%esp),%edx
+ xorl %esi,%eax
+ addl %eax,%ecx
+ movl %esi,%eax
+ roll $10,%edi
+ addl %edx,%ecx
+ xorl %edi,%eax
+ roll $5,%ecx
+ addl %ebp,%ecx
+ # 156
+
+ movl (%esp),%edx
+ xorl %ecx,%eax
+ addl %edx,%ebp
+ roll $10,%esi
+ addl %eax,%ebp
+ movl %ecx,%eax
+ roll $15,%ebp
+ addl %ebx,%ebp
+ # 157
+
+ xorl %esi,%eax
+ movl 12(%esp),%edx
+ xorl %ebp,%eax
+ addl %eax,%ebx
+ movl %ebp,%eax
+ roll $10,%ecx
+ addl %edx,%ebx
+ xorl %ecx,%eax
+ roll $13,%ebx
+ addl %edi,%ebx
+ # 158
+
+ movl 36(%esp),%edx
+ xorl %ebx,%eax
+ addl %edx,%edi
+ roll $10,%ebp
+ addl %eax,%edi
+ movl %ebx,%eax
+ roll $11,%edi
+ addl %esi,%edi
+ # 159
+
+ xorl %ebp,%eax
+ movl 44(%esp),%edx
+ xorl %edi,%eax
+ addl %eax,%esi
+ roll $10,%ebx
+ addl %edx,%esi
+ movl 128(%esp),%edx
+ roll $11,%esi
+ addl %ecx,%esi
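+	# both lines done: recombine as h[i] = h[i+1] + (left word)
+	# + (right word), the usual RIPEMD-160 cross-wise update (left
+	# words from 64..80(%esp)), then bump the data pointer at
+	# 132(%esp) and loop on the block count at 136(%esp)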
+ movl 4(%edx),%eax
+ addl %eax,%ebx
+ movl 72(%esp),%eax
+ addl %eax,%ebx
+ movl 8(%edx),%eax
+ addl %eax,%ebp
+ movl 76(%esp),%eax
+ addl %eax,%ebp
+ movl 12(%edx),%eax
+ addl %eax,%ecx
+ movl 80(%esp),%eax
+ addl %eax,%ecx
+ movl 16(%edx),%eax
+ addl %eax,%esi
+ movl 64(%esp),%eax
+ addl %eax,%esi
+ movl (%edx),%eax
+ addl %eax,%edi
+ movl 68(%esp),%eax
+ addl %eax,%edi
+ movl 136(%esp),%eax
+ movl %ebx,(%edx)
+ movl %ebp,4(%edx)
+ movl %ecx,8(%edx)
+ subl $1,%eax
+ movl %esi,12(%edx)
+ movl %edi,16(%edx)
+ jle L001get_out
+ movl %eax,136(%esp)
+ movl %ecx,%edi
+ movl 132(%esp),%eax
+ movl %ebx,%ecx
+ addl $64,%eax
+ movl %ebp,%esi
+ movl %eax,132(%esp)
+ jmp L000start
+L001get_out:
+ addl $108,%esp
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
diff --git a/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s b/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
new file mode 100644
index 0000000000..4f356fe70f
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
@@ -0,0 +1,1520 @@
+.file "sha1-586.s"
+.text
+.globl _sha1_block_data_order
+.align 4
+_sha1_block_data_order:
+L_sha1_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%ebp
+ movl 24(%esp),%esi
+ movl 28(%esp),%eax
+ subl $64,%esp
+ shll $6,%eax
+ addl %esi,%eax
+ movl %eax,92(%esp)
+ movl 16(%ebp),%edi
+.align 4,0x90
+L000loop:
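+	# copy the 64-byte block to the stack, byte-swapping each
+	# word: SHA-1 reads the message as big-endian words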
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edx,12(%esp)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,16(%esp)
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edx,28(%esp)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,40(%esp)
+ movl %edx,44(%esp)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ movl %eax,48(%esp)
+ movl %ebx,52(%esp)
+ movl %ecx,56(%esp)
+ movl %edx,60(%esp)
+ movl %esi,88(%esp)
+ movl (%ebp),%eax
+ movl 4(%ebp),%ebx
+ movl 8(%ebp),%ecx
+ movl 12(%ebp),%edx
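+	# rounds 0..15: F = (b & c) | (~b & d), computed branch-free
+	# as ((c ^ d) & b) ^ d, with K = 0x5A827999 (1518500249);
+	# each round does e += (a <<< 5) + F + W[t] + K and
+	# b = b <<< 30 (the rorl $2).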
+ # 00_15 0
+
+ movl %ecx,%esi
+ movl %eax,%ebp
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl (%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 1
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 4(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 2
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 8(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 3
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 12(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 4
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ andl %edx,%ebx
+ movl 16(%esp),%eax
+ xorl %esi,%ebx
+ rorl $2,%edx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 5
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ andl %ecx,%eax
+ movl 20(%esp),%esi
+ xorl %edi,%eax
+ rorl $2,%ecx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 6
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl 24(%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 7
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 28(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 8
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 32(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 9
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 36(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ecx,%ebp
+ # 00_15 10
+
+ movl %edi,%ebx
+ movl %ebp,%ecx
+ roll $5,%ebp
+ xorl %esi,%ebx
+ addl %eax,%ebp
+ andl %edx,%ebx
+ movl 40(%esp),%eax
+ xorl %esi,%ebx
+ rorl $2,%edx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ addl %ebx,%ebp
+ # 00_15 11
+
+ movl %edx,%eax
+ movl %ebp,%ebx
+ roll $5,%ebp
+ xorl %edi,%eax
+ addl %esi,%ebp
+ andl %ecx,%eax
+ movl 44(%esp),%esi
+ xorl %edi,%eax
+ rorl $2,%ecx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %eax,%ebp
+ # 00_15 12
+
+ movl %ecx,%esi
+ movl %ebp,%eax
+ roll $5,%ebp
+ xorl %edx,%esi
+ addl %edi,%ebp
+ andl %ebx,%esi
+ movl 48(%esp),%edi
+ xorl %edx,%esi
+ rorl $2,%ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ addl %esi,%ebp
+ # 00_15 13
+
+ movl %ebx,%edi
+ movl %ebp,%esi
+ roll $5,%ebp
+ xorl %ecx,%edi
+ addl %edx,%ebp
+ andl %eax,%edi
+ movl 52(%esp),%edx
+ xorl %ecx,%edi
+ rorl $2,%eax
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %edi,%ebp
+ # 00_15 14
+
+ movl %eax,%edx
+ movl %ebp,%edi
+ roll $5,%ebp
+ xorl %ebx,%edx
+ addl %ecx,%ebp
+ andl %esi,%edx
+ movl 56(%esp),%ecx
+ xorl %ebx,%edx
+ rorl $2,%esi
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ addl %edx,%ebp
+ # 00_15 15
+
+ movl %esi,%ecx
+ movl %ebp,%edx
+ roll $5,%ebp
+ xorl %eax,%ecx
+ addl %ebx,%ebp
+ andl %edi,%ecx
+ movl 60(%esp),%ebx
+ xorl %eax,%ecx
+ rorl $2,%edi
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ebp,%ecx
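+ # Rounds 16-19: same f and K, but the message schedule kicks in:
+ # W[i] = rol(W[i-3] ^ W[i-8] ^ W[i-14] ^ W[i-16], 1), kept in (%esp)..60(%esp)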
+ # 16_19 16
+
+ movl (%esp),%ebx
+ movl %edi,%ebp
+ xorl 8(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 32(%esp),%ebx
+ andl %edx,%ebp
+ rorl $2,%edx
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ xorl %esi,%ebp
+ movl %ebx,(%esp)
+ leal 1518500249(%ebx,%eax,1),%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+ # 16_19 17
+
+ movl 4(%esp),%eax
+ movl %edx,%ebp
+ xorl 12(%esp),%eax
+ xorl %edi,%ebp
+ xorl 36(%esp),%eax
+ andl %ecx,%ebp
+ rorl $2,%ecx
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ xorl %edi,%ebp
+ movl %eax,4(%esp)
+ leal 1518500249(%eax,%esi,1),%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+ # 16_19 18
+
+ movl 8(%esp),%esi
+ movl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 40(%esp),%esi
+ andl %ebx,%ebp
+ rorl $2,%ebx
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ xorl %edx,%ebp
+ movl %esi,8(%esp)
+ leal 1518500249(%esi,%edi,1),%esi
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+ # 16_19 19
+
+ movl 12(%esp),%edi
+ movl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 44(%esp),%edi
+ andl %eax,%ebp
+ rorl $2,%eax
+ xorl (%esp),%edi
+ roll $1,%edi
+ xorl %ecx,%ebp
+ movl %edi,12(%esp)
+ leal 1518500249(%edi,%edx,1),%edi
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
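+ # Rounds 20-39: f(b,c,d) = b XOR c XOR d; K = 1859775393 (0x6ed9eba1)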
+ # 20_39 20
+
+ movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,16(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 21
+
+ movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,20(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 22
+
+ movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,24(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 23
+
+ movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,28(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 24
+
+ movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,32(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 25
+
+ movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,36(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 26
+
+ movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,40(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 27
+
+ movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,44(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 28
+
+ movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,48(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 29
+
+ movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 30
+
+ movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 31
+
+ movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 32
+
+ movl %esi,%ebp
+ movl (%esp),%edx
+ rorl $2,%esi
+ xorl 8(%esp),%edx
+ xorl %eax,%ebp
+ xorl 32(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 33
+
+ movl %edi,%ebp
+ movl 4(%esp),%ecx
+ rorl $2,%edi
+ xorl 12(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 56(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,4(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 34
+
+ movl %edx,%ebp
+ movl 8(%esp),%ebx
+ rorl $2,%edx
+ xorl 16(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 40(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,8(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 35
+
+ movl %ecx,%ebp
+ movl 12(%esp),%eax
+ rorl $2,%ecx
+ xorl 20(%esp),%eax
+ xorl %edx,%ebp
+ xorl 44(%esp),%eax
+ xorl %edi,%ebp
+ xorl (%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,12(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 36
+
+ movl %ebx,%ebp
+ movl 16(%esp),%esi
+ rorl $2,%ebx
+ xorl 24(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%esi
+ xorl %edx,%ebp
+ xorl 4(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,16(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 37
+
+ movl %eax,%ebp
+ movl 20(%esp),%edi
+ rorl $2,%eax
+ xorl 28(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 52(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 8(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,20(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 38
+
+ movl %esi,%ebp
+ movl 24(%esp),%edx
+ rorl $2,%esi
+ xorl 32(%esp),%edx
+ xorl %eax,%ebp
+ xorl 56(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 12(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,24(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 39
+
+ movl %edi,%ebp
+ movl 28(%esp),%ecx
+ rorl $2,%edi
+ xorl 36(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 60(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 16(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,28(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
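+ # Rounds 40-59: f(b,c,d) = Maj(b,c,d), computed here as
+ # ((b OR c) AND d) OR (b AND c); K = 2400959708 (0x8f1bbcdc)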
+ # 40_59 40
+
+ movl 32(%esp),%ebx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,32(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+ # 40_59 41
+
+ movl 36(%esp),%eax
+ movl 44(%esp),%ebp
+ xorl %ebp,%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 24(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,36(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+ # 40_59 42
+
+ movl 40(%esp),%esi
+ movl 48(%esp),%ebp
+ xorl %ebp,%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 28(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,40(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+ # 40_59 43
+
+ movl 44(%esp),%edi
+ movl 52(%esp),%ebp
+ xorl %ebp,%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 32(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,44(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+ # 40_59 44
+
+ movl 48(%esp),%edx
+ movl 56(%esp),%ebp
+ xorl %ebp,%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 36(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,48(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+ # 40_59 45
+
+ movl 52(%esp),%ecx
+ movl 60(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,52(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+ # 40_59 46
+
+ movl 56(%esp),%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,56(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+ # 40_59 47
+
+ movl 60(%esp),%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 48(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,60(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+ # 40_59 48
+
+ movl (%esp),%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 52(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+ # 40_59 49
+
+ movl 4(%esp),%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 56(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,4(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+ # 40_59 50
+
+ movl 8(%esp),%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl 60(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,8(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+ # 40_59 51
+
+ movl 12(%esp),%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl (%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,12(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+ # 40_59 52
+
+ movl 16(%esp),%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,16(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+ # 40_59 53
+
+ movl 20(%esp),%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 8(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,20(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
+ # 40_59 54
+
+ movl 24(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 56(%esp),%ebp
+ xorl %ebp,%esi
+ movl 12(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
+ roll $1,%esi
+ orl %ecx,%ebp
+ movl %esi,24(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
+ rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
+ movl %eax,%edi
+ roll $5,%edi
+ addl %ebp,%esi
+ addl %edi,%esi
+ # 40_59 55
+
+ movl 28(%esp),%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 60(%esp),%ebp
+ xorl %ebp,%edi
+ movl 16(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
+ roll $1,%edi
+ orl %ebx,%ebp
+ movl %edi,28(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
+ rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
+ movl %esi,%edx
+ roll $5,%edx
+ addl %ebp,%edi
+ addl %edx,%edi
+ # 40_59 56
+
+ movl 32(%esp),%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl (%esp),%ebp
+ xorl %ebp,%edx
+ movl 20(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
+ roll $1,%edx
+ orl %eax,%ebp
+ movl %edx,32(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
+ rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
+ movl %edi,%ecx
+ roll $5,%ecx
+ addl %ebp,%edx
+ addl %ecx,%edx
+ # 40_59 57
+
+ movl 36(%esp),%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
+ roll $1,%ecx
+ orl %esi,%ebp
+ movl %ecx,36(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
+ rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
+ movl %edx,%ebx
+ roll $5,%ebx
+ addl %ebp,%ecx
+ addl %ebx,%ecx
+ # 40_59 58
+
+ movl 40(%esp),%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 8(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 28(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
+ roll $1,%ebx
+ orl %edi,%ebp
+ movl %ebx,40(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
+ rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
+ movl %ecx,%eax
+ roll $5,%eax
+ addl %ebp,%ebx
+ addl %eax,%ebx
+ # 40_59 59
+
+ movl 44(%esp),%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 12(%esp),%ebp
+ xorl %ebp,%eax
+ movl 32(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
+ roll $1,%eax
+ orl %edx,%ebp
+ movl %eax,44(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
+ rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
+ movl %ebx,%esi
+ roll $5,%esi
+ addl %ebp,%eax
+ addl %esi,%eax
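+ # Rounds 60-79 reuse the 20_39 body: f(b,c,d) = b XOR c XOR d,
+ # with K = 3395469782 (0xca62c1d6)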
+ # 20_39 60
+
+ movl %ebx,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ebx
+ xorl 56(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 16(%esp),%esi
+ xorl %edx,%ebp
+ xorl 36(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,48(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 61
+
+ movl %eax,%ebp
+ movl 52(%esp),%edi
+ rorl $2,%eax
+ xorl 60(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 20(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,52(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 62
+
+ movl %esi,%ebp
+ movl 56(%esp),%edx
+ rorl $2,%esi
+ xorl (%esp),%edx
+ xorl %eax,%ebp
+ xorl 24(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,56(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 63
+
+ movl %edi,%ebp
+ movl 60(%esp),%ecx
+ rorl $2,%edi
+ xorl 4(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 28(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 48(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,60(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 64
+
+ movl %edx,%ebp
+ movl (%esp),%ebx
+ rorl $2,%edx
+ xorl 8(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 32(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 65
+
+ movl %ecx,%ebp
+ movl 4(%esp),%eax
+ rorl $2,%ecx
+ xorl 12(%esp),%eax
+ xorl %edx,%ebp
+ xorl 36(%esp),%eax
+ xorl %edi,%ebp
+ xorl 56(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,4(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 66
+
+ movl %ebx,%ebp
+ movl 8(%esp),%esi
+ rorl $2,%ebx
+ xorl 16(%esp),%esi
+ xorl %ecx,%ebp
+ xorl 40(%esp),%esi
+ xorl %edx,%ebp
+ xorl 60(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,8(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 67
+
+ movl %eax,%ebp
+ movl 12(%esp),%edi
+ rorl $2,%eax
+ xorl 20(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 44(%esp),%edi
+ xorl %ecx,%ebp
+ xorl (%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,12(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 68
+
+ movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
+ xorl 24(%esp),%edx
+ xorl %eax,%ebp
+ xorl 48(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,16(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 69
+
+ movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
+ xorl 28(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 52(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 8(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,20(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 70
+
+ movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
+ xorl 32(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 56(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,24(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 71
+
+ movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
+ xorl 36(%esp),%eax
+ xorl %edx,%ebp
+ xorl 60(%esp),%eax
+ xorl %edi,%ebp
+ xorl 16(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,28(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 72
+
+ movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
+ xorl 40(%esp),%esi
+ xorl %ecx,%ebp
+ xorl (%esp),%esi
+ xorl %edx,%ebp
+ xorl 20(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,32(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 73
+
+ movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
+ xorl 44(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 4(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,36(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ # 20_39 74
+
+ movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
+ xorl 48(%esp),%edx
+ xorl %eax,%ebp
+ xorl 8(%esp),%edx
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edx
+ roll $1,%edx
+ addl %ecx,%ebp
+ movl %edx,40(%esp)
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
+ # 20_39 75
+
+ movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
+ xorl 52(%esp),%ecx
+ xorl %esi,%ebp
+ xorl 12(%esp),%ecx
+ xorl %eax,%ebp
+ xorl 32(%esp),%ecx
+ roll $1,%ecx
+ addl %ebx,%ebp
+ movl %ecx,44(%esp)
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
+ # 20_39 76
+
+ movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
+ xorl 56(%esp),%ebx
+ xorl %edi,%ebp
+ xorl 16(%esp),%ebx
+ xorl %esi,%ebp
+ xorl 36(%esp),%ebx
+ roll $1,%ebx
+ addl %eax,%ebp
+ movl %ebx,48(%esp)
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
+ # 20_39 77
+
+ movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
+ xorl 60(%esp),%eax
+ xorl %edx,%ebp
+ xorl 20(%esp),%eax
+ xorl %edi,%ebp
+ xorl 40(%esp),%eax
+ roll $1,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
+ # 20_39 78
+
+ movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
+ xorl (%esp),%esi
+ xorl %ecx,%ebp
+ xorl 24(%esp),%esi
+ xorl %edx,%ebp
+ xorl 44(%esp),%esi
+ roll $1,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
+ # 20_39 79
+
+ movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
+ xorl 4(%esp),%edi
+ xorl %ebx,%ebp
+ xorl 28(%esp),%edi
+ xorl %ecx,%ebp
+ xorl 48(%esp),%edi
+ roll $1,%edi
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ movl 84(%esp),%ebp
+ movl 88(%esp),%edx
+ addl (%ebp),%edi
+ addl 4(%ebp),%esi
+ addl 8(%ebp),%eax
+ addl 12(%ebp),%ebx
+ addl 16(%ebp),%ecx
+ movl %edi,(%ebp)
+ addl $64,%edx
+ movl %esi,4(%ebp)
+ cmpl 92(%esp),%edx
+ movl %eax,8(%ebp)
+ movl %ecx,%edi
+ movl %ebx,12(%ebp)
+ movl %edx,%esi
+ movl %ecx,16(%ebp)
+ jb L000loop
+ addl $64,%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+.byte 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+.byte 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+.byte 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s b/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
new file mode 100644
index 0000000000..1190be7503
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
@@ -0,0 +1,259 @@
+.file "sha512-586.s"
+.text
+.globl _sha256_block_data_order
+.align 4
+_sha256_block_data_order:
+L_sha256_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal L001K256-L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $6,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 4,0x90
+L002loop:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $64,%edi
+ subl $32,%esp
+ movl %edi,100(%esp)
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edi
+ movl %ebx,4(%esp)
+ movl %ecx,8(%esp)
+ movl %edi,12(%esp)
+ movl 16(%esi),%edx
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edi
+ movl %ebx,20(%esp)
+ movl %ecx,24(%esp)
+ movl %edi,28(%esp)
+.align 4,0x90
+L00300_15:
+ movl 92(%esp),%ebx
+ movl %edx,%ecx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ xorl %edi,%ecx
+ movl 24(%esp),%edi
+ addl %ecx,%ebx
+ movl %edx,16(%esp)
+ xorl %edi,%esi
+ movl %eax,%ecx
+ andl %edx,%esi
+ movl 12(%esp),%edx
+ xorl %edi,%esi
+ movl %eax,%edi
+ addl %esi,%ebx
+ rorl $2,%ecx
+ addl 28(%esp),%ebx
+ rorl $13,%edi
+ movl 4(%esp),%esi
+ xorl %edi,%ecx
+ rorl $9,%edi
+ addl %ebx,%edx
+ xorl %edi,%ecx
+ movl 8(%esp),%edi
+ addl %ecx,%ebx
+ movl %eax,(%esp)
+ movl %eax,%ecx
+ subl $4,%esp
+ orl %esi,%eax
+ andl %esi,%ecx
+ andl %edi,%eax
+ movl (%ebp),%esi
+ orl %ecx,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ addl %esi,%edx
+ addl %esi,%eax
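+ # 3248222580 = 0xc19bf174 = K256[15]: leave the 00_15 loop after 16 rounds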
+ cmpl $3248222580,%esi
+ jne L00300_15
+ movl 152(%esp),%ebx
+.align 4,0x90
+L00416_63:
+ movl %ebx,%esi
+ movl 100(%esp),%ecx
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %esi,%ebx
+ rorl $11,%esi
+ movl %ecx,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ movl 156(%esp),%esi
+ rorl $17,%edi
+ xorl %edi,%ecx
+ rorl $2,%edi
+ addl %esi,%ebx
+ xorl %ecx,%edi
+ addl %edi,%ebx
+ movl %edx,%ecx
+ addl 120(%esp),%ebx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ movl %ebx,92(%esp)
+ xorl %edi,%ecx
+ movl 24(%esp),%edi
+ addl %ecx,%ebx
+ movl %edx,16(%esp)
+ xorl %edi,%esi
+ movl %eax,%ecx
+ andl %edx,%esi
+ movl 12(%esp),%edx
+ xorl %edi,%esi
+ movl %eax,%edi
+ addl %esi,%ebx
+ rorl $2,%ecx
+ addl 28(%esp),%ebx
+ rorl $13,%edi
+ movl 4(%esp),%esi
+ xorl %edi,%ecx
+ rorl $9,%edi
+ addl %ebx,%edx
+ xorl %edi,%ecx
+ movl 8(%esp),%edi
+ addl %ecx,%ebx
+ movl %eax,(%esp)
+ movl %eax,%ecx
+ subl $4,%esp
+ orl %esi,%eax
+ andl %esi,%ecx
+ andl %edi,%eax
+ movl (%ebp),%esi
+ orl %ecx,%eax
+ addl $4,%ebp
+ addl %ebx,%eax
+ movl 152(%esp),%ebx
+ addl %esi,%edx
+ addl %esi,%eax
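+ # 3329325298 = 0xc67178f2 = K256[63]: leave the 16_63 loop after round 63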
+ cmpl $3329325298,%esi
+ jne L00416_63
+ movl 352(%esp),%esi
+ movl 4(%esp),%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edi
+ addl (%esi),%eax
+ addl 4(%esi),%ebx
+ addl 8(%esi),%ecx
+ addl 12(%esi),%edi
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ movl %ecx,8(%esi)
+ movl %edi,12(%esi)
+ movl 20(%esp),%eax
+ movl 24(%esp),%ebx
+ movl 28(%esp),%ecx
+ movl 356(%esp),%edi
+ addl 16(%esi),%edx
+ addl 20(%esi),%eax
+ addl 24(%esi),%ebx
+ addl 28(%esi),%ecx
+ movl %edx,16(%esi)
+ movl %eax,20(%esi)
+ movl %ebx,24(%esi)
+ movl %ecx,28(%esi)
+ addl $352,%esp
+ subl $256,%ebp
+ cmpl 8(%esp),%edi
+ jb L002loop
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
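+ # The 64 SHA-256 round constants K256[0..63] (first 32 bits of the
+ # fractional parts of the cube roots of the first 64 primes)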
+L001K256:
+.long 1116352408,1899447441,3049323471,3921009573
+.long 961987163,1508970993,2453635748,2870763221
+.long 3624381080,310598401,607225278,1426881987
+.long 1925078388,2162078206,2614888103,3248222580
+.long 3835390401,4022224774,264347078,604807628
+.long 770255983,1249150122,1555081692,1996064986
+.long 2554220882,2821834349,2952996808,3210313671
+.long 3336571891,3584528711,113926993,338241895
+.long 666307205,773529912,1294757372,1396182291
+.long 1695183700,1986661051,2177026350,2456956037
+.long 2730485921,2820302411,3259730800,3345764771
+.long 3516065817,3600352804,4094571909,275423344
+.long 430227734,506948616,659060556,883997877
+.long 958139571,1322822218,1537002063,1747873779
+.long 1955562222,2024104815,2227730452,2361852424
+.long 2428436474,2756734187,3204031479,3329325298
+.byte 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
diff --git a/deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s b/deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s
new file mode 100644
index 0000000000..2c9975305a
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/sha/sha512-586.s
@@ -0,0 +1,561 @@
+.file "sha512-586.s"
+.text
+.globl _sha512_block_data_order
+.align 4
+_sha512_block_data_order:
+L_sha512_block_data_order_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%eax
+ movl %esp,%ebx
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal L001K512-L000pic_point(%ebp),%ebp
+ subl $16,%esp
+ andl $-64,%esp
+ shll $7,%eax
+ addl %edi,%eax
+ movl %esi,(%esp)
+ movl %edi,4(%esp)
+ movl %eax,8(%esp)
+ movl %ebx,12(%esp)
+.align 4,0x90
+L002loop_x86:
+ movl (%edi),%eax
+ movl 4(%edi),%ebx
+ movl 8(%edi),%ecx
+ movl 12(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 16(%edi),%eax
+ movl 20(%edi),%ebx
+ movl 24(%edi),%ecx
+ movl 28(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 32(%edi),%eax
+ movl 36(%edi),%ebx
+ movl 40(%edi),%ecx
+ movl 44(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 48(%edi),%eax
+ movl 52(%edi),%ebx
+ movl 56(%edi),%ecx
+ movl 60(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 64(%edi),%eax
+ movl 68(%edi),%ebx
+ movl 72(%edi),%ecx
+ movl 76(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 80(%edi),%eax
+ movl 84(%edi),%ebx
+ movl 88(%edi),%ecx
+ movl 92(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 96(%edi),%eax
+ movl 100(%edi),%ebx
+ movl 104(%edi),%ecx
+ movl 108(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ movl 112(%edi),%eax
+ movl 116(%edi),%ebx
+ movl 120(%edi),%ecx
+ movl 124(%edi),%edx
+ bswap %eax
+ bswap %ebx
+ bswap %ecx
+ bswap %edx
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ addl $128,%edi
+ subl $72,%esp
+ movl %edi,204(%esp)
+ leal 8(%esp),%edi
+ movl $16,%ecx
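+ # 0xa5f3f689 encodes "movl %esi,%esi; rep movsl": copy the eight
+ # 64-bit hash words from the context to the stack frame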
+.long 2784229001
+.align 4,0x90
+L00300_15_x86:
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
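+ # the low byte of K512[15] is 0x94 (148): leave the 00_15 loop after 16 rounds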
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $148,%dl
+ jne L00300_15_x86
+.align 4,0x90
+L00416_79_x86:
+ movl 312(%esp),%ecx
+ movl 316(%esp),%edx
+ movl %ecx,%esi
+ shrl $1,%ecx
+ movl %edx,%edi
+ shrl $1,%edx
+ movl %ecx,%eax
+ shll $24,%esi
+ movl %edx,%ebx
+ shll $24,%edi
+ xorl %esi,%ebx
+ shrl $6,%ecx
+ xorl %edi,%eax
+ shrl $6,%edx
+ xorl %ecx,%eax
+ shll $7,%esi
+ xorl %edx,%ebx
+ shll $1,%edi
+ xorl %esi,%ebx
+ shrl $1,%ecx
+ xorl %edi,%eax
+ shrl $1,%edx
+ xorl %ecx,%eax
+ shll $6,%edi
+ xorl %edx,%ebx
+ xorl %edi,%eax
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ movl 208(%esp),%ecx
+ movl 212(%esp),%edx
+ movl %ecx,%esi
+ shrl $6,%ecx
+ movl %edx,%edi
+ shrl $6,%edx
+ movl %ecx,%eax
+ shll $3,%esi
+ movl %edx,%ebx
+ shll $3,%edi
+ xorl %esi,%eax
+ shrl $13,%ecx
+ xorl %edi,%ebx
+ shrl $13,%edx
+ xorl %ecx,%eax
+ shll $10,%esi
+ xorl %edx,%ebx
+ shll $10,%edi
+ xorl %esi,%ebx
+ shrl $10,%ecx
+ xorl %edi,%eax
+ shrl $10,%edx
+ xorl %ecx,%ebx
+ shll $13,%edi
+ xorl %edx,%eax
+ xorl %edi,%eax
+ movl 320(%esp),%ecx
+ movl 324(%esp),%edx
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ movl 248(%esp),%esi
+ movl 252(%esp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,192(%esp)
+ movl %ebx,196(%esp)
+ movl 40(%esp),%ecx
+ movl 44(%esp),%edx
+ movl %ecx,%esi
+ shrl $9,%ecx
+ movl %edx,%edi
+ shrl $9,%edx
+ movl %ecx,%ebx
+ shll $14,%esi
+ movl %edx,%eax
+ shll $14,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%eax
+ shll $4,%esi
+ xorl %edx,%ebx
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $4,%ecx
+ xorl %edi,%eax
+ shrl $4,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 48(%esp),%ecx
+ movl 52(%esp),%edx
+ movl 56(%esp),%esi
+ movl 60(%esp),%edi
+ addl 64(%esp),%eax
+ adcl 68(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ andl 40(%esp),%ecx
+ andl 44(%esp),%edx
+ addl 192(%esp),%eax
+ adcl 196(%esp),%ebx
+ xorl %esi,%ecx
+ xorl %edi,%edx
+ movl (%ebp),%esi
+ movl 4(%ebp),%edi
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 32(%esp),%ecx
+ movl 36(%esp),%edx
+ addl %esi,%eax
+ adcl %edi,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl %eax,32(%esp)
+ movl %ebx,36(%esp)
+ movl %ecx,%esi
+ shrl $2,%ecx
+ movl %edx,%edi
+ shrl $2,%edx
+ movl %ecx,%ebx
+ shll $4,%esi
+ movl %edx,%eax
+ shll $4,%edi
+ xorl %esi,%ebx
+ shrl $5,%ecx
+ xorl %edi,%eax
+ shrl $5,%edx
+ xorl %ecx,%ebx
+ shll $21,%esi
+ xorl %edx,%eax
+ shll $21,%edi
+ xorl %esi,%eax
+ shrl $21,%ecx
+ xorl %edi,%ebx
+ shrl $21,%edx
+ xorl %ecx,%eax
+ shll $5,%esi
+ xorl %edx,%ebx
+ shll $5,%edi
+ xorl %esi,%eax
+ xorl %edi,%ebx
+ movl 8(%esp),%ecx
+ movl 12(%esp),%edx
+ movl 16(%esp),%esi
+ movl 20(%esp),%edi
+ addl (%esp),%eax
+ adcl 4(%esp),%ebx
+ orl %esi,%ecx
+ orl %edi,%edx
+ andl 24(%esp),%ecx
+ andl 28(%esp),%edx
+ andl 8(%esp),%esi
+ andl 12(%esp),%edi
+ orl %esi,%ecx
+ orl %edi,%edx
+ addl %ecx,%eax
+ adcl %edx,%ebx
+ movl %eax,(%esp)
+ movl %ebx,4(%esp)
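+ # the low byte of K512[79] is 0x17 (23): leave the 16_79 loop after round 79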
+ movb (%ebp),%dl
+ subl $8,%esp
+ leal 8(%ebp),%ebp
+ cmpb $23,%dl
+ jne L00416_79_x86
+ movl 840(%esp),%esi
+ movl 844(%esp),%edi
+ movl (%esi),%eax
+ movl 4(%esi),%ebx
+ movl 8(%esi),%ecx
+ movl 12(%esi),%edx
+ addl 8(%esp),%eax
+ adcl 12(%esp),%ebx
+ movl %eax,(%esi)
+ movl %ebx,4(%esi)
+ addl 16(%esp),%ecx
+ adcl 20(%esp),%edx
+ movl %ecx,8(%esi)
+ movl %edx,12(%esi)
+ movl 16(%esi),%eax
+ movl 20(%esi),%ebx
+ movl 24(%esi),%ecx
+ movl 28(%esi),%edx
+ addl 24(%esp),%eax
+ adcl 28(%esp),%ebx
+ movl %eax,16(%esi)
+ movl %ebx,20(%esi)
+ addl 32(%esp),%ecx
+ adcl 36(%esp),%edx
+ movl %ecx,24(%esi)
+ movl %edx,28(%esi)
+ movl 32(%esi),%eax
+ movl 36(%esi),%ebx
+ movl 40(%esi),%ecx
+ movl 44(%esi),%edx
+ addl 40(%esp),%eax
+ adcl 44(%esp),%ebx
+ movl %eax,32(%esi)
+ movl %ebx,36(%esi)
+ addl 48(%esp),%ecx
+ adcl 52(%esp),%edx
+ movl %ecx,40(%esi)
+ movl %edx,44(%esi)
+ movl 48(%esi),%eax
+ movl 52(%esi),%ebx
+ movl 56(%esi),%ecx
+ movl 60(%esi),%edx
+ addl 56(%esp),%eax
+ adcl 60(%esp),%ebx
+ movl %eax,48(%esi)
+ movl %ebx,52(%esi)
+ addl 64(%esp),%ecx
+ adcl 68(%esp),%edx
+ movl %ecx,56(%esi)
+ movl %edx,60(%esi)
+ addl $840,%esp
+ subl $640,%ebp
+ cmpl 8(%esp),%edi
+ jb L002loop_x86
+ movl 12(%esp),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
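+ # The 80 SHA-512 round constants K512[0..79], each stored as
+ # (low 32 bits, high 32 bits)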
+L001K512:
+.long 3609767458,1116352408
+.long 602891725,1899447441
+.long 3964484399,3049323471
+.long 2173295548,3921009573
+.long 4081628472,961987163
+.long 3053834265,1508970993
+.long 2937671579,2453635748
+.long 3664609560,2870763221
+.long 2734883394,3624381080
+.long 1164996542,310598401
+.long 1323610764,607225278
+.long 3590304994,1426881987
+.long 4068182383,1925078388
+.long 991336113,2162078206
+.long 633803317,2614888103
+.long 3479774868,3248222580
+.long 2666613458,3835390401
+.long 944711139,4022224774
+.long 2341262773,264347078
+.long 2007800933,604807628
+.long 1495990901,770255983
+.long 1856431235,1249150122
+.long 3175218132,1555081692
+.long 2198950837,1996064986
+.long 3999719339,2554220882
+.long 766784016,2821834349
+.long 2566594879,2952996808
+.long 3203337956,3210313671
+.long 1034457026,3336571891
+.long 2466948901,3584528711
+.long 3758326383,113926993
+.long 168717936,338241895
+.long 1188179964,666307205
+.long 1546045734,773529912
+.long 1522805485,1294757372
+.long 2643833823,1396182291
+.long 2343527390,1695183700
+.long 1014477480,1986661051
+.long 1206759142,2177026350
+.long 344077627,2456956037
+.long 1290863460,2730485921
+.long 3158454273,2820302411
+.long 3505952657,3259730800
+.long 106217008,3345764771
+.long 3606008344,3516065817
+.long 1432725776,3600352804
+.long 1467031594,4094571909
+.long 851169720,275423344
+.long 3100823752,430227734
+.long 1363258195,506948616
+.long 3750685593,659060556
+.long 3785050280,883997877
+.long 3318307427,958139571
+.long 3812723403,1322822218
+.long 2003034995,1537002063
+.long 3602036899,1747873779
+.long 1575990012,1955562222
+.long 1125592928,2024104815
+.long 2716904306,2227730452
+.long 442776044,2361852424
+.long 593698344,2428436474
+.long 3733110249,2756734187
+.long 2999351573,3204031479
+.long 3815920427,3329325298
+.long 3928383900,3391569614
+.long 566280711,3515267271
+.long 3454069534,3940187606
+.long 4000239992,4118630271
+.long 1914138554,116418474
+.long 2731055270,174292421
+.long 3203993006,289380356
+.long 320620315,460393269
+.long 587496836,685471733
+.long 1086792851,852142971
+.long 365543100,1017036298
+.long 2618297676,1126000580
+.long 3409855158,1288033470
+.long 4234509866,1501505948
+.long 987167468,1607167915
+.long 1246189591,1816402316
+.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+.byte 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+.byte 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+.byte 62,0
diff --git a/deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s b/deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s
new file mode 100644
index 0000000000..5d612e0f75
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/whrlpool/wp-mmx.s
@@ -0,0 +1,1103 @@
+.file "wp-mmx.s"
+.text
+.globl _whirlpool_block_mmx
+.align 4
+_whirlpool_block_mmx:
+L_whirlpool_block_mmx_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 20(%esp),%esi
+ movl 24(%esp),%edi
+ movl 28(%esp),%ebp
+ movl %esp,%eax
+ subl $148,%esp
+ andl $-64,%esp
+ leal 128(%esp),%ebx
+ movl %esi,(%ebx)
+ movl %edi,4(%ebx)
+ movl %ebp,8(%ebx)
+ movl %eax,16(%ebx)
+ call L000pic_point
+L000pic_point:
+ popl %ebp
+ leal L001table-L000pic_point(%ebp),%ebp
+ xorl %ecx,%ecx
+ xorl %edx,%edx
+ movq (%esi),%mm0
+ movq 8(%esi),%mm1
+ movq 16(%esi),%mm2
+ movq 24(%esi),%mm3
+ movq 32(%esi),%mm4
+ movq 40(%esi),%mm5
+ movq 48(%esi),%mm6
+ movq 56(%esi),%mm7
+L002outerloop:
+ movq %mm0,(%esp)
+ movq %mm1,8(%esp)
+ movq %mm2,16(%esp)
+ movq %mm3,24(%esp)
+ movq %mm4,32(%esp)
+ movq %mm5,40(%esp)
+ movq %mm6,48(%esp)
+ movq %mm7,56(%esp)
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm1
+ pxor 16(%edi),%mm2
+ pxor 24(%edi),%mm3
+ pxor 32(%edi),%mm4
+ pxor 40(%edi),%mm5
+ pxor 48(%edi),%mm6
+ pxor 56(%edi),%mm7
+ movq %mm0,64(%esp)
+ movq %mm1,72(%esp)
+ movq %mm2,80(%esp)
+ movq %mm3,88(%esp)
+ movq %mm4,96(%esp)
+ movq %mm5,104(%esp)
+ movq %mm6,112(%esp)
+ movq %mm7,120(%esp)
+ xorl %esi,%esi
+ movl %esi,12(%ebx)
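+ # ten rounds per block; 12(%ebx) holds the round counter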
+.align 4,0x90
+L003round:
+ movq 4096(%ebp,%esi,8),%mm0
+ movl (%esp),%eax
+ movl 4(%esp),%ebx
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm0
+ movq 7(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ movl 8(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ movq 6(%ebp,%esi,8),%mm2
+ movq 5(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ movq 4(%ebp,%esi,8),%mm4
+ movq 3(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 12(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ movq 2(%ebp,%esi,8),%mm6
+ movq 1(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm1
+ pxor 7(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ movl 16(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm3
+ pxor 5(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm5
+ pxor 3(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 20(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm7
+ pxor 1(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm2
+ pxor 7(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ movl 24(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm4
+ pxor 5(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm6
+ pxor 3(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 28(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm0
+ pxor 1(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm3
+ pxor 7(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ movl 32(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm5
+ pxor 5(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm7
+ pxor 3(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 36(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm1
+ pxor 1(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm4
+ pxor 7(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ movl 40(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm6
+ pxor 5(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm0
+ pxor 3(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 44(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm2
+ pxor 1(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm5
+ pxor 7(%ebp,%edi,8),%mm6
+ movb %al,%cl
+ movb %ah,%dl
+ movl 48(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm7
+ pxor 5(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm1
+ pxor 3(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 52(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm3
+ pxor 1(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm6
+ pxor 7(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ movl 56(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm0
+ pxor 5(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm2
+ pxor 3(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 60(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm4
+ pxor 1(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm7
+ pxor 7(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ movl 64(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm1
+ pxor 5(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm3
+ pxor 3(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 68(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm5
+ pxor 1(%ebp,%edi,8),%mm6
+ movq %mm0,(%esp)
+ movq %mm1,8(%esp)
+ movq %mm2,16(%esp)
+ movq %mm3,24(%esp)
+ movq %mm4,32(%esp)
+ movq %mm5,40(%esp)
+ movq %mm6,48(%esp)
+ movq %mm7,56(%esp)
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm0
+ pxor 7(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ movl 72(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm2
+ pxor 5(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm4
+ pxor 3(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 76(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm6
+ pxor 1(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm1
+ pxor 7(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ movl 80(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm3
+ pxor 5(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm5
+ pxor 3(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 84(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm7
+ pxor 1(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm2
+ pxor 7(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ movl 88(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm4
+ pxor 5(%ebp,%edi,8),%mm5
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm6
+ pxor 3(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 92(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm0
+ pxor 1(%ebp,%edi,8),%mm1
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm3
+ pxor 7(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ movl 96(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm5
+ pxor 5(%ebp,%edi,8),%mm6
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm7
+ pxor 3(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 100(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm1
+ pxor 1(%ebp,%edi,8),%mm2
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm4
+ pxor 7(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ movl 104(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm6
+ pxor 5(%ebp,%edi,8),%mm7
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm0
+ pxor 3(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 108(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm2
+ pxor 1(%ebp,%edi,8),%mm3
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm5
+ pxor 7(%ebp,%edi,8),%mm6
+ movb %al,%cl
+ movb %ah,%dl
+ movl 112(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm7
+ pxor 5(%ebp,%edi,8),%mm0
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm1
+ pxor 3(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 116(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm3
+ pxor 1(%ebp,%edi,8),%mm4
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm6
+ pxor 7(%ebp,%edi,8),%mm7
+ movb %al,%cl
+ movb %ah,%dl
+ movl 120(%esp),%eax
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm0
+ pxor 5(%ebp,%edi,8),%mm1
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm2
+ pxor 3(%ebp,%edi,8),%mm3
+ movb %bl,%cl
+ movb %bh,%dl
+ movl 124(%esp),%ebx
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm4
+ pxor 1(%ebp,%edi,8),%mm5
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%eax
+ pxor (%ebp,%esi,8),%mm7
+ pxor 7(%ebp,%edi,8),%mm0
+ movb %al,%cl
+ movb %ah,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 6(%ebp,%esi,8),%mm1
+ pxor 5(%ebp,%edi,8),%mm2
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ shrl $16,%ebx
+ pxor 4(%ebp,%esi,8),%mm3
+ pxor 3(%ebp,%edi,8),%mm4
+ movb %bl,%cl
+ movb %bh,%dl
+ leal (%ecx,%ecx,1),%esi
+ leal (%edx,%edx,1),%edi
+ pxor 2(%ebp,%esi,8),%mm5
+ pxor 1(%ebp,%edi,8),%mm6
+ leal 128(%esp),%ebx
+ movl 12(%ebx),%esi
+ addl $1,%esi
+ cmpl $10,%esi
+ je L004roundsdone
+ movl %esi,12(%ebx)
+ movq %mm0,64(%esp)
+ movq %mm1,72(%esp)
+ movq %mm2,80(%esp)
+ movq %mm3,88(%esp)
+ movq %mm4,96(%esp)
+ movq %mm5,104(%esp)
+ movq %mm6,112(%esp)
+ movq %mm7,120(%esp)
+ jmp L003round
+.align 4,0x90
+L004roundsdone:
+ movl (%ebx),%esi
+ movl 4(%ebx),%edi
+ movl 8(%ebx),%eax
+ pxor (%edi),%mm0
+ pxor 8(%edi),%mm1
+ pxor 16(%edi),%mm2
+ pxor 24(%edi),%mm3
+ pxor 32(%edi),%mm4
+ pxor 40(%edi),%mm5
+ pxor 48(%edi),%mm6
+ pxor 56(%edi),%mm7
+ pxor (%esi),%mm0
+ pxor 8(%esi),%mm1
+ pxor 16(%esi),%mm2
+ pxor 24(%esi),%mm3
+ pxor 32(%esi),%mm4
+ pxor 40(%esi),%mm5
+ pxor 48(%esi),%mm6
+ pxor 56(%esi),%mm7
+ movq %mm0,(%esi)
+ movq %mm1,8(%esi)
+ movq %mm2,16(%esi)
+ movq %mm3,24(%esi)
+ movq %mm4,32(%esi)
+ movq %mm5,40(%esi)
+ movq %mm6,48(%esi)
+ movq %mm7,56(%esi)
+ leal 64(%edi),%edi
+ subl $1,%eax
+ jz L005alldone
+ movl %edi,4(%ebx)
+ movl %eax,8(%ebx)
+ jmp L002outerloop
+L005alldone:
+ emms
+ movl 16(%ebx),%esp
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align 6,0x90
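+ # Whirlpool lookup table: 256 64-bit entries, each stored twice in a row
+ # so that the misaligned movq loads at byte offsets 1..7 above pick up the
+ # seven rotated columns; the round constants follow at offset 4096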
+L001table:
+.byte 24,24,96,24,192,120,48,216
+.byte 24,24,96,24,192,120,48,216
+.byte 35,35,140,35,5,175,70,38
+.byte 35,35,140,35,5,175,70,38
+.byte 198,198,63,198,126,249,145,184
+.byte 198,198,63,198,126,249,145,184
+.byte 232,232,135,232,19,111,205,251
+.byte 232,232,135,232,19,111,205,251
+.byte 135,135,38,135,76,161,19,203
+.byte 135,135,38,135,76,161,19,203
+.byte 184,184,218,184,169,98,109,17
+.byte 184,184,218,184,169,98,109,17
+.byte 1,1,4,1,8,5,2,9
+.byte 1,1,4,1,8,5,2,9
+.byte 79,79,33,79,66,110,158,13
+.byte 79,79,33,79,66,110,158,13
+.byte 54,54,216,54,173,238,108,155
+.byte 54,54,216,54,173,238,108,155
+.byte 166,166,162,166,89,4,81,255
+.byte 166,166,162,166,89,4,81,255
+.byte 210,210,111,210,222,189,185,12
+.byte 210,210,111,210,222,189,185,12
+.byte 245,245,243,245,251,6,247,14
+.byte 245,245,243,245,251,6,247,14
+.byte 121,121,249,121,239,128,242,150
+.byte 121,121,249,121,239,128,242,150
+.byte 111,111,161,111,95,206,222,48
+.byte 111,111,161,111,95,206,222,48
+.byte 145,145,126,145,252,239,63,109
+.byte 145,145,126,145,252,239,63,109
+.byte 82,82,85,82,170,7,164,248
+.byte 82,82,85,82,170,7,164,248
+.byte 96,96,157,96,39,253,192,71
+.byte 96,96,157,96,39,253,192,71
+.byte 188,188,202,188,137,118,101,53
+.byte 188,188,202,188,137,118,101,53
+.byte 155,155,86,155,172,205,43,55
+.byte 155,155,86,155,172,205,43,55
+.byte 142,142,2,142,4,140,1,138
+.byte 142,142,2,142,4,140,1,138
+.byte 163,163,182,163,113,21,91,210
+.byte 163,163,182,163,113,21,91,210
+.byte 12,12,48,12,96,60,24,108
+.byte 12,12,48,12,96,60,24,108
+.byte 123,123,241,123,255,138,246,132
+.byte 123,123,241,123,255,138,246,132
+.byte 53,53,212,53,181,225,106,128
+.byte 53,53,212,53,181,225,106,128
+.byte 29,29,116,29,232,105,58,245
+.byte 29,29,116,29,232,105,58,245
+.byte 224,224,167,224,83,71,221,179
+.byte 224,224,167,224,83,71,221,179
+.byte 215,215,123,215,246,172,179,33
+.byte 215,215,123,215,246,172,179,33
+.byte 194,194,47,194,94,237,153,156
+.byte 194,194,47,194,94,237,153,156
+.byte 46,46,184,46,109,150,92,67
+.byte 46,46,184,46,109,150,92,67
+.byte 75,75,49,75,98,122,150,41
+.byte 75,75,49,75,98,122,150,41
+.byte 254,254,223,254,163,33,225,93
+.byte 254,254,223,254,163,33,225,93
+.byte 87,87,65,87,130,22,174,213
+.byte 87,87,65,87,130,22,174,213
+.byte 21,21,84,21,168,65,42,189
+.byte 21,21,84,21,168,65,42,189
+.byte 119,119,193,119,159,182,238,232
+.byte 119,119,193,119,159,182,238,232
+.byte 55,55,220,55,165,235,110,146
+.byte 55,55,220,55,165,235,110,146
+.byte 229,229,179,229,123,86,215,158
+.byte 229,229,179,229,123,86,215,158
+.byte 159,159,70,159,140,217,35,19
+.byte 159,159,70,159,140,217,35,19
+.byte 240,240,231,240,211,23,253,35
+.byte 240,240,231,240,211,23,253,35
+.byte 74,74,53,74,106,127,148,32
+.byte 74,74,53,74,106,127,148,32
+.byte 218,218,79,218,158,149,169,68
+.byte 218,218,79,218,158,149,169,68
+.byte 88,88,125,88,250,37,176,162
+.byte 88,88,125,88,250,37,176,162
+.byte 201,201,3,201,6,202,143,207
+.byte 201,201,3,201,6,202,143,207
+.byte 41,41,164,41,85,141,82,124
+.byte 41,41,164,41,85,141,82,124
+.byte 10,10,40,10,80,34,20,90
+.byte 10,10,40,10,80,34,20,90
+.byte 177,177,254,177,225,79,127,80
+.byte 177,177,254,177,225,79,127,80
+.byte 160,160,186,160,105,26,93,201
+.byte 160,160,186,160,105,26,93,201
+.byte 107,107,177,107,127,218,214,20
+.byte 107,107,177,107,127,218,214,20
+.byte 133,133,46,133,92,171,23,217
+.byte 133,133,46,133,92,171,23,217
+.byte 189,189,206,189,129,115,103,60
+.byte 189,189,206,189,129,115,103,60
+.byte 93,93,105,93,210,52,186,143
+.byte 93,93,105,93,210,52,186,143
+.byte 16,16,64,16,128,80,32,144
+.byte 16,16,64,16,128,80,32,144
+.byte 244,244,247,244,243,3,245,7
+.byte 244,244,247,244,243,3,245,7
+.byte 203,203,11,203,22,192,139,221
+.byte 203,203,11,203,22,192,139,221
+.byte 62,62,248,62,237,198,124,211
+.byte 62,62,248,62,237,198,124,211
+.byte 5,5,20,5,40,17,10,45
+.byte 5,5,20,5,40,17,10,45
+.byte 103,103,129,103,31,230,206,120
+.byte 103,103,129,103,31,230,206,120
+.byte 228,228,183,228,115,83,213,151
+.byte 228,228,183,228,115,83,213,151
+.byte 39,39,156,39,37,187,78,2
+.byte 39,39,156,39,37,187,78,2
+.byte 65,65,25,65,50,88,130,115
+.byte 65,65,25,65,50,88,130,115
+.byte 139,139,22,139,44,157,11,167
+.byte 139,139,22,139,44,157,11,167
+.byte 167,167,166,167,81,1,83,246
+.byte 167,167,166,167,81,1,83,246
+.byte 125,125,233,125,207,148,250,178
+.byte 125,125,233,125,207,148,250,178
+.byte 149,149,110,149,220,251,55,73
+.byte 149,149,110,149,220,251,55,73
+.byte 216,216,71,216,142,159,173,86
+.byte 216,216,71,216,142,159,173,86
+.byte 251,251,203,251,139,48,235,112
+.byte 251,251,203,251,139,48,235,112
+.byte 238,238,159,238,35,113,193,205
+.byte 238,238,159,238,35,113,193,205
+.byte 124,124,237,124,199,145,248,187
+.byte 124,124,237,124,199,145,248,187
+.byte 102,102,133,102,23,227,204,113
+.byte 102,102,133,102,23,227,204,113
+.byte 221,221,83,221,166,142,167,123
+.byte 221,221,83,221,166,142,167,123
+.byte 23,23,92,23,184,75,46,175
+.byte 23,23,92,23,184,75,46,175
+.byte 71,71,1,71,2,70,142,69
+.byte 71,71,1,71,2,70,142,69
+.byte 158,158,66,158,132,220,33,26
+.byte 158,158,66,158,132,220,33,26
+.byte 202,202,15,202,30,197,137,212
+.byte 202,202,15,202,30,197,137,212
+.byte 45,45,180,45,117,153,90,88
+.byte 45,45,180,45,117,153,90,88
+.byte 191,191,198,191,145,121,99,46
+.byte 191,191,198,191,145,121,99,46
+.byte 7,7,28,7,56,27,14,63
+.byte 7,7,28,7,56,27,14,63
+.byte 173,173,142,173,1,35,71,172
+.byte 173,173,142,173,1,35,71,172
+.byte 90,90,117,90,234,47,180,176
+.byte 90,90,117,90,234,47,180,176
+.byte 131,131,54,131,108,181,27,239
+.byte 131,131,54,131,108,181,27,239
+.byte 51,51,204,51,133,255,102,182
+.byte 51,51,204,51,133,255,102,182
+.byte 99,99,145,99,63,242,198,92
+.byte 99,99,145,99,63,242,198,92
+.byte 2,2,8,2,16,10,4,18
+.byte 2,2,8,2,16,10,4,18
+.byte 170,170,146,170,57,56,73,147
+.byte 170,170,146,170,57,56,73,147
+.byte 113,113,217,113,175,168,226,222
+.byte 113,113,217,113,175,168,226,222
+.byte 200,200,7,200,14,207,141,198
+.byte 200,200,7,200,14,207,141,198
+.byte 25,25,100,25,200,125,50,209
+.byte 25,25,100,25,200,125,50,209
+.byte 73,73,57,73,114,112,146,59
+.byte 73,73,57,73,114,112,146,59
+.byte 217,217,67,217,134,154,175,95
+.byte 217,217,67,217,134,154,175,95
+.byte 242,242,239,242,195,29,249,49
+.byte 242,242,239,242,195,29,249,49
+.byte 227,227,171,227,75,72,219,168
+.byte 227,227,171,227,75,72,219,168
+.byte 91,91,113,91,226,42,182,185
+.byte 91,91,113,91,226,42,182,185
+.byte 136,136,26,136,52,146,13,188
+.byte 136,136,26,136,52,146,13,188
+.byte 154,154,82,154,164,200,41,62
+.byte 154,154,82,154,164,200,41,62
+.byte 38,38,152,38,45,190,76,11
+.byte 38,38,152,38,45,190,76,11
+.byte 50,50,200,50,141,250,100,191
+.byte 50,50,200,50,141,250,100,191
+.byte 176,176,250,176,233,74,125,89
+.byte 176,176,250,176,233,74,125,89
+.byte 233,233,131,233,27,106,207,242
+.byte 233,233,131,233,27,106,207,242
+.byte 15,15,60,15,120,51,30,119
+.byte 15,15,60,15,120,51,30,119
+.byte 213,213,115,213,230,166,183,51
+.byte 213,213,115,213,230,166,183,51
+.byte 128,128,58,128,116,186,29,244
+.byte 128,128,58,128,116,186,29,244
+.byte 190,190,194,190,153,124,97,39
+.byte 190,190,194,190,153,124,97,39
+.byte 205,205,19,205,38,222,135,235
+.byte 205,205,19,205,38,222,135,235
+.byte 52,52,208,52,189,228,104,137
+.byte 52,52,208,52,189,228,104,137
+.byte 72,72,61,72,122,117,144,50
+.byte 72,72,61,72,122,117,144,50
+.byte 255,255,219,255,171,36,227,84
+.byte 255,255,219,255,171,36,227,84
+.byte 122,122,245,122,247,143,244,141
+.byte 122,122,245,122,247,143,244,141
+.byte 144,144,122,144,244,234,61,100
+.byte 144,144,122,144,244,234,61,100
+.byte 95,95,97,95,194,62,190,157
+.byte 95,95,97,95,194,62,190,157
+.byte 32,32,128,32,29,160,64,61
+.byte 32,32,128,32,29,160,64,61
+.byte 104,104,189,104,103,213,208,15
+.byte 104,104,189,104,103,213,208,15
+.byte 26,26,104,26,208,114,52,202
+.byte 26,26,104,26,208,114,52,202
+.byte 174,174,130,174,25,44,65,183
+.byte 174,174,130,174,25,44,65,183
+.byte 180,180,234,180,201,94,117,125
+.byte 180,180,234,180,201,94,117,125
+.byte 84,84,77,84,154,25,168,206
+.byte 84,84,77,84,154,25,168,206
+.byte 147,147,118,147,236,229,59,127
+.byte 147,147,118,147,236,229,59,127
+.byte 34,34,136,34,13,170,68,47
+.byte 34,34,136,34,13,170,68,47
+.byte 100,100,141,100,7,233,200,99
+.byte 100,100,141,100,7,233,200,99
+.byte 241,241,227,241,219,18,255,42
+.byte 241,241,227,241,219,18,255,42
+.byte 115,115,209,115,191,162,230,204
+.byte 115,115,209,115,191,162,230,204
+.byte 18,18,72,18,144,90,36,130
+.byte 18,18,72,18,144,90,36,130
+.byte 64,64,29,64,58,93,128,122
+.byte 64,64,29,64,58,93,128,122
+.byte 8,8,32,8,64,40,16,72
+.byte 8,8,32,8,64,40,16,72
+.byte 195,195,43,195,86,232,155,149
+.byte 195,195,43,195,86,232,155,149
+.byte 236,236,151,236,51,123,197,223
+.byte 236,236,151,236,51,123,197,223
+.byte 219,219,75,219,150,144,171,77
+.byte 219,219,75,219,150,144,171,77
+.byte 161,161,190,161,97,31,95,192
+.byte 161,161,190,161,97,31,95,192
+.byte 141,141,14,141,28,131,7,145
+.byte 141,141,14,141,28,131,7,145
+.byte 61,61,244,61,245,201,122,200
+.byte 61,61,244,61,245,201,122,200
+.byte 151,151,102,151,204,241,51,91
+.byte 151,151,102,151,204,241,51,91
+.byte 0,0,0,0,0,0,0,0
+.byte 0,0,0,0,0,0,0,0
+.byte 207,207,27,207,54,212,131,249
+.byte 207,207,27,207,54,212,131,249
+.byte 43,43,172,43,69,135,86,110
+.byte 43,43,172,43,69,135,86,110
+.byte 118,118,197,118,151,179,236,225
+.byte 118,118,197,118,151,179,236,225
+.byte 130,130,50,130,100,176,25,230
+.byte 130,130,50,130,100,176,25,230
+.byte 214,214,127,214,254,169,177,40
+.byte 214,214,127,214,254,169,177,40
+.byte 27,27,108,27,216,119,54,195
+.byte 27,27,108,27,216,119,54,195
+.byte 181,181,238,181,193,91,119,116
+.byte 181,181,238,181,193,91,119,116
+.byte 175,175,134,175,17,41,67,190
+.byte 175,175,134,175,17,41,67,190
+.byte 106,106,181,106,119,223,212,29
+.byte 106,106,181,106,119,223,212,29
+.byte 80,80,93,80,186,13,160,234
+.byte 80,80,93,80,186,13,160,234
+.byte 69,69,9,69,18,76,138,87
+.byte 69,69,9,69,18,76,138,87
+.byte 243,243,235,243,203,24,251,56
+.byte 243,243,235,243,203,24,251,56
+.byte 48,48,192,48,157,240,96,173
+.byte 48,48,192,48,157,240,96,173
+.byte 239,239,155,239,43,116,195,196
+.byte 239,239,155,239,43,116,195,196
+.byte 63,63,252,63,229,195,126,218
+.byte 63,63,252,63,229,195,126,218
+.byte 85,85,73,85,146,28,170,199
+.byte 85,85,73,85,146,28,170,199
+.byte 162,162,178,162,121,16,89,219
+.byte 162,162,178,162,121,16,89,219
+.byte 234,234,143,234,3,101,201,233
+.byte 234,234,143,234,3,101,201,233
+.byte 101,101,137,101,15,236,202,106
+.byte 101,101,137,101,15,236,202,106
+.byte 186,186,210,186,185,104,105,3
+.byte 186,186,210,186,185,104,105,3
+.byte 47,47,188,47,101,147,94,74
+.byte 47,47,188,47,101,147,94,74
+.byte 192,192,39,192,78,231,157,142
+.byte 192,192,39,192,78,231,157,142
+.byte 222,222,95,222,190,129,161,96
+.byte 222,222,95,222,190,129,161,96
+.byte 28,28,112,28,224,108,56,252
+.byte 28,28,112,28,224,108,56,252
+.byte 253,253,211,253,187,46,231,70
+.byte 253,253,211,253,187,46,231,70
+.byte 77,77,41,77,82,100,154,31
+.byte 77,77,41,77,82,100,154,31
+.byte 146,146,114,146,228,224,57,118
+.byte 146,146,114,146,228,224,57,118
+.byte 117,117,201,117,143,188,234,250
+.byte 117,117,201,117,143,188,234,250
+.byte 6,6,24,6,48,30,12,54
+.byte 6,6,24,6,48,30,12,54
+.byte 138,138,18,138,36,152,9,174
+.byte 138,138,18,138,36,152,9,174
+.byte 178,178,242,178,249,64,121,75
+.byte 178,178,242,178,249,64,121,75
+.byte 230,230,191,230,99,89,209,133
+.byte 230,230,191,230,99,89,209,133
+.byte 14,14,56,14,112,54,28,126
+.byte 14,14,56,14,112,54,28,126
+.byte 31,31,124,31,248,99,62,231
+.byte 31,31,124,31,248,99,62,231
+.byte 98,98,149,98,55,247,196,85
+.byte 98,98,149,98,55,247,196,85
+.byte 212,212,119,212,238,163,181,58
+.byte 212,212,119,212,238,163,181,58
+.byte 168,168,154,168,41,50,77,129
+.byte 168,168,154,168,41,50,77,129
+.byte 150,150,98,150,196,244,49,82
+.byte 150,150,98,150,196,244,49,82
+.byte 249,249,195,249,155,58,239,98
+.byte 249,249,195,249,155,58,239,98
+.byte 197,197,51,197,102,246,151,163
+.byte 197,197,51,197,102,246,151,163
+.byte 37,37,148,37,53,177,74,16
+.byte 37,37,148,37,53,177,74,16
+.byte 89,89,121,89,242,32,178,171
+.byte 89,89,121,89,242,32,178,171
+.byte 132,132,42,132,84,174,21,208
+.byte 132,132,42,132,84,174,21,208
+.byte 114,114,213,114,183,167,228,197
+.byte 114,114,213,114,183,167,228,197
+.byte 57,57,228,57,213,221,114,236
+.byte 57,57,228,57,213,221,114,236
+.byte 76,76,45,76,90,97,152,22
+.byte 76,76,45,76,90,97,152,22
+.byte 94,94,101,94,202,59,188,148
+.byte 94,94,101,94,202,59,188,148
+.byte 120,120,253,120,231,133,240,159
+.byte 120,120,253,120,231,133,240,159
+.byte 56,56,224,56,221,216,112,229
+.byte 56,56,224,56,221,216,112,229
+.byte 140,140,10,140,20,134,5,152
+.byte 140,140,10,140,20,134,5,152
+.byte 209,209,99,209,198,178,191,23
+.byte 209,209,99,209,198,178,191,23
+.byte 165,165,174,165,65,11,87,228
+.byte 165,165,174,165,65,11,87,228
+.byte 226,226,175,226,67,77,217,161
+.byte 226,226,175,226,67,77,217,161
+.byte 97,97,153,97,47,248,194,78
+.byte 97,97,153,97,47,248,194,78
+.byte 179,179,246,179,241,69,123,66
+.byte 179,179,246,179,241,69,123,66
+.byte 33,33,132,33,21,165,66,52
+.byte 33,33,132,33,21,165,66,52
+.byte 156,156,74,156,148,214,37,8
+.byte 156,156,74,156,148,214,37,8
+.byte 30,30,120,30,240,102,60,238
+.byte 30,30,120,30,240,102,60,238
+.byte 67,67,17,67,34,82,134,97
+.byte 67,67,17,67,34,82,134,97
+.byte 199,199,59,199,118,252,147,177
+.byte 199,199,59,199,118,252,147,177
+.byte 252,252,215,252,179,43,229,79
+.byte 252,252,215,252,179,43,229,79
+.byte 4,4,16,4,32,20,8,36
+.byte 4,4,16,4,32,20,8,36
+.byte 81,81,89,81,178,8,162,227
+.byte 81,81,89,81,178,8,162,227
+.byte 153,153,94,153,188,199,47,37
+.byte 153,153,94,153,188,199,47,37
+.byte 109,109,169,109,79,196,218,34
+.byte 109,109,169,109,79,196,218,34
+.byte 13,13,52,13,104,57,26,101
+.byte 13,13,52,13,104,57,26,101
+.byte 250,250,207,250,131,53,233,121
+.byte 250,250,207,250,131,53,233,121
+.byte 223,223,91,223,182,132,163,105
+.byte 223,223,91,223,182,132,163,105
+.byte 126,126,229,126,215,155,252,169
+.byte 126,126,229,126,215,155,252,169
+.byte 36,36,144,36,61,180,72,25
+.byte 36,36,144,36,61,180,72,25
+.byte 59,59,236,59,197,215,118,254
+.byte 59,59,236,59,197,215,118,254
+.byte 171,171,150,171,49,61,75,154
+.byte 171,171,150,171,49,61,75,154
+.byte 206,206,31,206,62,209,129,240
+.byte 206,206,31,206,62,209,129,240
+.byte 17,17,68,17,136,85,34,153
+.byte 17,17,68,17,136,85,34,153
+.byte 143,143,6,143,12,137,3,131
+.byte 143,143,6,143,12,137,3,131
+.byte 78,78,37,78,74,107,156,4
+.byte 78,78,37,78,74,107,156,4
+.byte 183,183,230,183,209,81,115,102
+.byte 183,183,230,183,209,81,115,102
+.byte 235,235,139,235,11,96,203,224
+.byte 235,235,139,235,11,96,203,224
+.byte 60,60,240,60,253,204,120,193
+.byte 60,60,240,60,253,204,120,193
+.byte 129,129,62,129,124,191,31,253
+.byte 129,129,62,129,124,191,31,253
+.byte 148,148,106,148,212,254,53,64
+.byte 148,148,106,148,212,254,53,64
+.byte 247,247,251,247,235,12,243,28
+.byte 247,247,251,247,235,12,243,28
+.byte 185,185,222,185,161,103,111,24
+.byte 185,185,222,185,161,103,111,24
+.byte 19,19,76,19,152,95,38,139
+.byte 19,19,76,19,152,95,38,139
+.byte 44,44,176,44,125,156,88,81
+.byte 44,44,176,44,125,156,88,81
+.byte 211,211,107,211,214,184,187,5
+.byte 211,211,107,211,214,184,187,5
+.byte 231,231,187,231,107,92,211,140
+.byte 231,231,187,231,107,92,211,140
+.byte 110,110,165,110,87,203,220,57
+.byte 110,110,165,110,87,203,220,57
+.byte 196,196,55,196,110,243,149,170
+.byte 196,196,55,196,110,243,149,170
+.byte 3,3,12,3,24,15,6,27
+.byte 3,3,12,3,24,15,6,27
+.byte 86,86,69,86,138,19,172,220
+.byte 86,86,69,86,138,19,172,220
+.byte 68,68,13,68,26,73,136,94
+.byte 68,68,13,68,26,73,136,94
+.byte 127,127,225,127,223,158,254,160
+.byte 127,127,225,127,223,158,254,160
+.byte 169,169,158,169,33,55,79,136
+.byte 169,169,158,169,33,55,79,136
+.byte 42,42,168,42,77,130,84,103
+.byte 42,42,168,42,77,130,84,103
+.byte 187,187,214,187,177,109,107,10
+.byte 187,187,214,187,177,109,107,10
+.byte 193,193,35,193,70,226,159,135
+.byte 193,193,35,193,70,226,159,135
+.byte 83,83,81,83,162,2,166,241
+.byte 83,83,81,83,162,2,166,241
+.byte 220,220,87,220,174,139,165,114
+.byte 220,220,87,220,174,139,165,114
+.byte 11,11,44,11,88,39,22,83
+.byte 11,11,44,11,88,39,22,83
+.byte 157,157,78,157,156,211,39,1
+.byte 157,157,78,157,156,211,39,1
+.byte 108,108,173,108,71,193,216,43
+.byte 108,108,173,108,71,193,216,43
+.byte 49,49,196,49,149,245,98,164
+.byte 49,49,196,49,149,245,98,164
+.byte 116,116,205,116,135,185,232,243
+.byte 116,116,205,116,135,185,232,243
+.byte 246,246,255,246,227,9,241,21
+.byte 246,246,255,246,227,9,241,21
+.byte 70,70,5,70,10,67,140,76
+.byte 70,70,5,70,10,67,140,76
+.byte 172,172,138,172,9,38,69,165
+.byte 172,172,138,172,9,38,69,165
+.byte 137,137,30,137,60,151,15,181
+.byte 137,137,30,137,60,151,15,181
+.byte 20,20,80,20,160,68,40,180
+.byte 20,20,80,20,160,68,40,180
+.byte 225,225,163,225,91,66,223,186
+.byte 225,225,163,225,91,66,223,186
+.byte 22,22,88,22,176,78,44,166
+.byte 22,22,88,22,176,78,44,166
+.byte 58,58,232,58,205,210,116,247
+.byte 58,58,232,58,205,210,116,247
+.byte 105,105,185,105,111,208,210,6
+.byte 105,105,185,105,111,208,210,6
+.byte 9,9,36,9,72,45,18,65
+.byte 9,9,36,9,72,45,18,65
+.byte 112,112,221,112,167,173,224,215
+.byte 112,112,221,112,167,173,224,215
+.byte 182,182,226,182,217,84,113,111
+.byte 182,182,226,182,217,84,113,111
+.byte 208,208,103,208,206,183,189,30
+.byte 208,208,103,208,206,183,189,30
+.byte 237,237,147,237,59,126,199,214
+.byte 237,237,147,237,59,126,199,214
+.byte 204,204,23,204,46,219,133,226
+.byte 204,204,23,204,46,219,133,226
+.byte 66,66,21,66,42,87,132,104
+.byte 66,66,21,66,42,87,132,104
+.byte 152,152,90,152,180,194,45,44
+.byte 152,152,90,152,180,194,45,44
+.byte 164,164,170,164,73,14,85,237
+.byte 164,164,170,164,73,14,85,237
+.byte 40,40,160,40,93,136,80,117
+.byte 40,40,160,40,93,136,80,117
+.byte 92,92,109,92,218,49,184,134
+.byte 92,92,109,92,218,49,184,134
+.byte 248,248,199,248,147,63,237,107
+.byte 248,248,199,248,147,63,237,107
+.byte 134,134,34,134,68,164,17,194
+.byte 134,134,34,134,68,164,17,194
+.byte 24,35,198,232,135,184,1,79
+.byte 54,166,210,245,121,111,145,82
+.byte 96,188,155,142,163,12,123,53
+.byte 29,224,215,194,46,75,254,87
+.byte 21,119,55,229,159,240,74,218
+.byte 88,201,41,10,177,160,107,133
+.byte 189,93,16,244,203,62,5,103
+.byte 228,39,65,139,167,125,149,216
+.byte 251,238,124,102,221,23,71,158
+.byte 202,45,191,7,173,90,131,51
diff --git a/deps/openssl/asm/x86-macosx-gas/x86cpuid.s b/deps/openssl/asm/x86-macosx-gas/x86cpuid.s
new file mode 100644
index 0000000000..b5e80f83a3
--- /dev/null
+++ b/deps/openssl/asm/x86-macosx-gas/x86cpuid.s
@@ -0,0 +1,261 @@
+.file "x86cpuid.s"
+.text
+.globl _OPENSSL_ia32_cpuid
+.align 4
+_OPENSSL_ia32_cpuid:
+L_OPENSSL_ia32_cpuid_begin:
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ xorl %edx,%edx
+ pushfl
+ popl %eax
+ movl %eax,%ecx
+ xorl $2097152,%eax
+ pushl %eax
+ popfl
+ pushfl
+ popl %eax
+ xorl %eax,%ecx
+ btl $21,%ecx
+ jnc L000done
+ xorl %eax,%eax
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ xorl %eax,%eax
+ cmpl $1970169159,%ebx
+ setne %al
+ movl %eax,%ebp
+ cmpl $1231384169,%edx
+ setne %al
+ orl %eax,%ebp
+ cmpl $1818588270,%ecx
+ setne %al
+ orl %eax,%ebp
+ jz L001intel
+ cmpl $1752462657,%ebx
+ setne %al
+ movl %eax,%esi
+ cmpl $1769238117,%edx
+ setne %al
+ orl %eax,%esi
+ cmpl $1145913699,%ecx
+ setne %al
+ orl %eax,%esi
+ jnz L001intel
+ movl $2147483648,%eax
+ .byte 0x0f,0xa2
+ cmpl $2147483656,%eax
+ jb L001intel
+ movl $2147483656,%eax
+ .byte 0x0f,0xa2
+ movzbl %cl,%esi
+ incl %esi
+ movl $1,%eax
+ .byte 0x0f,0xa2
+ btl $28,%edx
+ jnc L000done
+ shrl $16,%ebx
+ andl $255,%ebx
+ cmpl %esi,%ebx
+ ja L000done
+ andl $4026531839,%edx
+ jmp L000done
+L001intel:
+ cmpl $4,%edi
+ movl $-1,%edi
+ jb L002nocacheinfo
+ movl $4,%eax
+ movl $0,%ecx
+ .byte 0x0f,0xa2
+ movl %eax,%edi
+ shrl $14,%edi
+ andl $4095,%edi
+L002nocacheinfo:
+ movl $1,%eax
+ .byte 0x0f,0xa2
+ cmpl $0,%ebp
+ jne L003notP4
+ andb $15,%ah
+ cmpb $15,%ah
+ jne L003notP4
+ orl $1048576,%edx
+L003notP4:
+ btl $28,%edx
+ jnc L000done
+ andl $4026531839,%edx
+ cmpl $0,%edi
+ je L000done
+ orl $268435456,%edx
+ shrl $16,%ebx
+ cmpb $1,%bl
+ ja L000done
+ andl $4026531839,%edx
+L000done:
+ movl %edx,%eax
+ movl %ecx,%edx
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.globl _OPENSSL_rdtsc
+.align 4
+_OPENSSL_rdtsc:
+L_OPENSSL_rdtsc_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal _OPENSSL_ia32cap_P,%ecx
+ btl $4,(%ecx)
+ jnc L004notsc
+ .byte 0x0f,0x31
+L004notsc:
+ ret
+.globl _OPENSSL_instrument_halt
+.align 4
+_OPENSSL_instrument_halt:
+L_OPENSSL_instrument_halt_begin:
+ leal _OPENSSL_ia32cap_P,%ecx
+ btl $4,(%ecx)
+ jnc L005nohalt
+.long 2421723150
+ andl $3,%eax
+ jnz L005nohalt
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc L005nohalt
+ .byte 0x0f,0x31
+ pushl %edx
+ pushl %eax
+ hlt
+ .byte 0x0f,0x31
+ subl (%esp),%eax
+ sbbl 4(%esp),%edx
+ addl $8,%esp
+ ret
+L005nohalt:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_far_spin
+.align 4
+_OPENSSL_far_spin:
+L_OPENSSL_far_spin_begin:
+ pushfl
+ popl %eax
+ btl $9,%eax
+ jnc L006nospin
+ movl 4(%esp),%eax
+ movl 8(%esp),%ecx
+.long 2430111262
+ xorl %eax,%eax
+ movl (%ecx),%edx
+ jmp L007spin
+.align 4,0x90
+L007spin:
+ incl %eax
+ cmpl (%ecx),%edx
+ je L007spin
+.long 529567888
+ ret
+L006nospin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ ret
+.globl _OPENSSL_wipe_cpu
+.align 4
+_OPENSSL_wipe_cpu:
+L_OPENSSL_wipe_cpu_begin:
+ xorl %eax,%eax
+ xorl %edx,%edx
+ leal _OPENSSL_ia32cap_P,%ecx
+ movl (%ecx),%ecx
+ btl $1,(%ecx)
+ jnc L008no_x87
+.long 4007259865,4007259865,4007259865,4007259865,2430851995
+L008no_x87:
+ leal 4(%esp),%eax
+ ret
+.globl _OPENSSL_atomic_add
+.align 4
+_OPENSSL_atomic_add:
+L_OPENSSL_atomic_add_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ pushl %ebx
+ nop
+ movl (%edx),%eax
+L009spin:
+ leal (%eax,%ecx,1),%ebx
+ nop
+.long 447811568
+ jne L009spin
+ movl %ebx,%eax
+ popl %ebx
+ ret
+.globl _OPENSSL_indirect_call
+.align 4
+_OPENSSL_indirect_call:
+L_OPENSSL_indirect_call_begin:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $28,%esp
+ movl 12(%ebp),%ecx
+ movl %ecx,(%esp)
+ movl 16(%ebp),%edx
+ movl %edx,4(%esp)
+ movl 20(%ebp),%eax
+ movl %eax,8(%esp)
+ movl 24(%ebp),%eax
+ movl %eax,12(%esp)
+ movl 28(%ebp),%eax
+ movl %eax,16(%esp)
+ movl 32(%ebp),%eax
+ movl %eax,20(%esp)
+ movl 36(%ebp),%eax
+ movl %eax,24(%esp)
+ call *8(%ebp)
+ movl %ebp,%esp
+ popl %ebp
+ ret
+.globl _OPENSSL_cleanse
+.align 4
+_OPENSSL_cleanse:
+L_OPENSSL_cleanse_begin:
+ movl 4(%esp),%edx
+ movl 8(%esp),%ecx
+ xorl %eax,%eax
+ cmpl $7,%ecx
+ jae L010lot
+ cmpl $0,%ecx
+ je L011ret
+L012little:
+ movb %al,(%edx)
+ subl $1,%ecx
+ leal 1(%edx),%edx
+ jnz L012little
+L011ret:
+ ret
+.align 4,0x90
+L010lot:
+ testl $3,%edx
+ jz L013aligned
+ movb %al,(%edx)
+ leal -1(%ecx),%ecx
+ leal 1(%edx),%edx
+ jmp L010lot
+L013aligned:
+ movl %eax,(%edx)
+ leal -4(%ecx),%ecx
+ testl $-4,%ecx
+ leal 4(%edx),%edx
+ jnz L013aligned
+ cmpl $0,%ecx
+ jne L012little
+ ret
+.comm _OPENSSL_ia32cap_P,4
+.mod_init_func
+.align 2
+.long _OPENSSL_cpuid_setup
diff --git a/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm b/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
new file mode 100644
index 0000000000..22dd21fbcd
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
@@ -0,0 +1,3222 @@
+TITLE aes-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.686
+.XMM
+IF @Version LT 800
+XMMWORD STRUCT 16
+DQ 2 dup (?)
+XMMWORD ENDS
+ENDIF
+
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+__x86_AES_encrypt_compact PROC PRIVATE
+ mov DWORD PTR 20[esp],edi
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ mov edi,DWORD PTR [ebp-128]
+ mov esi,DWORD PTR [ebp-96]
+ mov edi,DWORD PTR [ebp-64]
+ mov esi,DWORD PTR [ebp-32]
+ mov edi,DWORD PTR [ebp]
+ mov esi,DWORD PTR 32[ebp]
+ mov edi,DWORD PTR 64[ebp]
+ mov esi,DWORD PTR 96[ebp]
+ALIGN 16
+$L000loop:
+ mov esi,eax
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ shr ebx,16
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,ch
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,eax
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ shr ecx,24
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,dh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edx,255
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ and edx,255
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ movzx eax,ah
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,8
+ xor edx,eax
+ mov eax,DWORD PTR 4[esp]
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ shl ebx,16
+ xor edx,ebx
+ mov ebx,DWORD PTR 8[esp]
+ movzx ecx,BYTE PTR [ecx*1+ebp-128]
+ shl ecx,24
+ xor edx,ecx
+ mov ecx,esi
+ mov esi,ecx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edi,DWORD PTR [ecx*1+ecx]
+ sub esi,ebp
+ and edi,4278124286
+ and esi,454761243
+ mov ebp,ecx
+ xor esi,edi
+ xor ecx,esi
+ rol ecx,24
+ xor ecx,esi
+ ror ebp,16
+ xor ecx,ebp
+ ror ebp,8
+ xor ecx,ebp
+ mov esi,edx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edi,DWORD PTR [edx*1+edx]
+ sub esi,ebp
+ and edi,4278124286
+ and esi,454761243
+ mov ebp,edx
+ xor esi,edi
+ xor edx,esi
+ rol edx,24
+ xor edx,esi
+ ror ebp,16
+ xor edx,ebp
+ ror ebp,8
+ xor edx,ebp
+ mov esi,eax
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edi,DWORD PTR [eax*1+eax]
+ sub esi,ebp
+ and edi,4278124286
+ and esi,454761243
+ mov ebp,eax
+ xor esi,edi
+ xor eax,esi
+ rol eax,24
+ xor eax,esi
+ ror ebp,16
+ xor eax,ebp
+ ror ebp,8
+ xor eax,ebp
+ mov esi,ebx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edi,DWORD PTR [ebx*1+ebx]
+ sub esi,ebp
+ and edi,4278124286
+ and esi,454761243
+ mov ebp,ebx
+ xor esi,edi
+ xor ebx,esi
+ rol ebx,24
+ xor ebx,esi
+ ror ebp,16
+ xor ebx,ebp
+ ror ebp,8
+ xor ebx,ebp
+ mov edi,DWORD PTR 20[esp]
+ mov ebp,DWORD PTR 28[esp]
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ cmp edi,DWORD PTR 24[esp]
+ mov DWORD PTR 20[esp],edi
+ jb $L000loop
+ mov esi,eax
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ shr ebx,16
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,ch
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,eax
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ shr ecx,24
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,dh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edx,255
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov edi,DWORD PTR 20[esp]
+ and edx,255
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ movzx eax,ah
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,8
+ xor edx,eax
+ mov eax,DWORD PTR 4[esp]
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ shl ebx,16
+ xor edx,ebx
+ mov ebx,DWORD PTR 8[esp]
+ movzx ecx,BYTE PTR [ecx*1+ebp-128]
+ shl ecx,24
+ xor edx,ecx
+ mov ecx,esi
+ xor eax,DWORD PTR 16[edi]
+ xor ebx,DWORD PTR 20[edi]
+ xor ecx,DWORD PTR 24[edi]
+ xor edx,DWORD PTR 28[edi]
+ ret
+__x86_AES_encrypt_compact ENDP
+ALIGN 16
+__sse_AES_encrypt_compact PROC PRIVATE
+ pxor mm0,QWORD PTR [edi]
+ pxor mm4,QWORD PTR 8[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ mov eax,454761243
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],eax
+ mov eax,DWORD PTR [ebp-128]
+ mov ebx,DWORD PTR [ebp-96]
+ mov ecx,DWORD PTR [ebp-64]
+ mov edx,DWORD PTR [ebp-32]
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 64[ebp]
+ mov edx,DWORD PTR 96[ebp]
+ALIGN 16
+$L001loop:
+ pshufw mm1,mm0,8
+ pshufw mm5,mm4,13
+ movd eax,mm1
+ movd ebx,mm5
+ movzx esi,al
+ movzx ecx,BYTE PTR [esi*1+ebp-128]
+ pshufw mm2,mm0,13
+ movzx edx,ah
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ shl edx,8
+ shr eax,16
+ movzx esi,bl
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or ecx,esi
+ pshufw mm6,mm4,8
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or edx,esi
+ shr ebx,16
+ movzx esi,ah
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,8
+ or ecx,esi
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or ecx,esi
+ movd mm0,ecx
+ movzx esi,al
+ movzx ecx,BYTE PTR [esi*1+ebp-128]
+ movd eax,mm2
+ movzx esi,bl
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or ecx,esi
+ movd ebx,mm6
+ movzx esi,ah
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or ecx,esi
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,8
+ or ecx,esi
+ movd mm1,ecx
+ movzx esi,bl
+ movzx ecx,BYTE PTR [esi*1+ebp-128]
+ shr ebx,16
+ movzx esi,al
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or ecx,esi
+ shr eax,16
+ punpckldq mm0,mm1
+ movzx esi,ah
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or ecx,esi
+ and eax,255
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,16
+ or edx,eax
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,8
+ or ecx,esi
+ movd mm4,ecx
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ or edx,ebx
+ movd mm5,edx
+ punpckldq mm4,mm5
+ add edi,16
+ cmp edi,DWORD PTR 24[esp]
+ ja $L002out
+ movq mm2,QWORD PTR 8[esp]
+ pxor mm3,mm3
+ pxor mm7,mm7
+ movq mm1,mm0
+ movq mm5,mm4
+ pcmpgtb mm3,mm0
+ pcmpgtb mm7,mm4
+ pand mm3,mm2
+ pand mm7,mm2
+ pshufw mm2,mm0,177
+ pshufw mm6,mm4,177
+ paddb mm0,mm0
+ paddb mm4,mm4
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pshufw mm3,mm2,177
+ pshufw mm7,mm6,177
+ pxor mm1,mm0
+ pxor mm5,mm4
+ pxor mm0,mm2
+ pxor mm4,mm6
+ movq mm2,mm3
+ movq mm6,mm7
+ pslld mm3,8
+ pslld mm7,8
+ psrld mm2,24
+ psrld mm6,24
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pxor mm0,mm2
+ pxor mm4,mm6
+ movq mm3,mm1
+ movq mm7,mm5
+ movq mm2,QWORD PTR [edi]
+ movq mm6,QWORD PTR 8[edi]
+ psrld mm1,8
+ psrld mm5,8
+ mov eax,DWORD PTR [ebp-128]
+ pslld mm3,24
+ pslld mm7,24
+ mov ebx,DWORD PTR [ebp-64]
+ pxor mm0,mm1
+ pxor mm4,mm5
+ mov ecx,DWORD PTR [ebp]
+ pxor mm0,mm3
+ pxor mm4,mm7
+ mov edx,DWORD PTR 64[ebp]
+ pxor mm0,mm2
+ pxor mm4,mm6
+ jmp $L001loop
+ALIGN 16
+$L002out:
+ pxor mm0,QWORD PTR [edi]
+ pxor mm4,QWORD PTR 8[edi]
+ ret
+__sse_AES_encrypt_compact ENDP
+ALIGN 16
+__x86_AES_encrypt PROC PRIVATE
+ mov DWORD PTR 20[esp],edi
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ALIGN 16
+$L003loop:
+ mov esi,eax
+ and esi,255
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,bh
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ mov edi,edx
+ shr edi,24
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ shr ebx,16
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,ch
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ mov edi,eax
+ shr edi,24
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ shr ecx,24
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,dh
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,eax
+ shr edi,16
+ and edx,255
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ movzx edi,bh
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov edi,DWORD PTR 20[esp]
+ mov edx,DWORD PTR [edx*8+ebp]
+ movzx eax,ah
+ xor edx,DWORD PTR 3[eax*8+ebp]
+ mov eax,DWORD PTR 4[esp]
+ and ebx,255
+ xor edx,DWORD PTR 2[ebx*8+ebp]
+ mov ebx,DWORD PTR 8[esp]
+ xor edx,DWORD PTR 1[ecx*8+ebp]
+ mov ecx,esi
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ cmp edi,DWORD PTR 24[esp]
+ mov DWORD PTR 20[esp],edi
+ jb $L003loop
+ mov esi,eax
+ and esi,255
+ mov esi,DWORD PTR 2[esi*8+ebp]
+ and esi,255
+ movzx edi,bh
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,65280
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,16711680
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ mov edi,DWORD PTR 2[edi*8+ebp]
+ and edi,4278190080
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ shr ebx,16
+ mov esi,DWORD PTR 2[esi*8+ebp]
+ and esi,255
+ movzx edi,ch
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,65280
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,16711680
+ xor esi,edi
+ mov edi,eax
+ shr edi,24
+ mov edi,DWORD PTR 2[edi*8+ebp]
+ and edi,4278190080
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ shr ecx,24
+ mov esi,DWORD PTR 2[esi*8+ebp]
+ and esi,255
+ movzx edi,dh
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,65280
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edx,255
+ and edi,255
+ mov edi,DWORD PTR [edi*8+ebp]
+ and edi,16711680
+ xor esi,edi
+ movzx edi,bh
+ mov edi,DWORD PTR 2[edi*8+ebp]
+ and edi,4278190080
+ xor esi,edi
+ mov edi,DWORD PTR 20[esp]
+ and edx,255
+ mov edx,DWORD PTR 2[edx*8+ebp]
+ and edx,255
+ movzx eax,ah
+ mov eax,DWORD PTR [eax*8+ebp]
+ and eax,65280
+ xor edx,eax
+ mov eax,DWORD PTR 4[esp]
+ and ebx,255
+ mov ebx,DWORD PTR [ebx*8+ebp]
+ and ebx,16711680
+ xor edx,ebx
+ mov ebx,DWORD PTR 8[esp]
+ mov ecx,DWORD PTR 2[ecx*8+ebp]
+ and ecx,4278190080
+ xor edx,ecx
+ mov ecx,esi
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ ret
+ALIGN 64
+$LAES_Te::
+DD 2774754246,2774754246
+DD 2222750968,2222750968
+DD 2574743534,2574743534
+DD 2373680118,2373680118
+DD 234025727,234025727
+DD 3177933782,3177933782
+DD 2976870366,2976870366
+DD 1422247313,1422247313
+DD 1345335392,1345335392
+DD 50397442,50397442
+DD 2842126286,2842126286
+DD 2099981142,2099981142
+DD 436141799,436141799
+DD 1658312629,1658312629
+DD 3870010189,3870010189
+DD 2591454956,2591454956
+DD 1170918031,1170918031
+DD 2642575903,2642575903
+DD 1086966153,1086966153
+DD 2273148410,2273148410
+DD 368769775,368769775
+DD 3948501426,3948501426
+DD 3376891790,3376891790
+DD 200339707,200339707
+DD 3970805057,3970805057
+DD 1742001331,1742001331
+DD 4255294047,4255294047
+DD 3937382213,3937382213
+DD 3214711843,3214711843
+DD 4154762323,4154762323
+DD 2524082916,2524082916
+DD 1539358875,1539358875
+DD 3266819957,3266819957
+DD 486407649,486407649
+DD 2928907069,2928907069
+DD 1780885068,1780885068
+DD 1513502316,1513502316
+DD 1094664062,1094664062
+DD 49805301,49805301
+DD 1338821763,1338821763
+DD 1546925160,1546925160
+DD 4104496465,4104496465
+DD 887481809,887481809
+DD 150073849,150073849
+DD 2473685474,2473685474
+DD 1943591083,1943591083
+DD 1395732834,1395732834
+DD 1058346282,1058346282
+DD 201589768,201589768
+DD 1388824469,1388824469
+DD 1696801606,1696801606
+DD 1589887901,1589887901
+DD 672667696,672667696
+DD 2711000631,2711000631
+DD 251987210,251987210
+DD 3046808111,3046808111
+DD 151455502,151455502
+DD 907153956,907153956
+DD 2608889883,2608889883
+DD 1038279391,1038279391
+DD 652995533,652995533
+DD 1764173646,1764173646
+DD 3451040383,3451040383
+DD 2675275242,2675275242
+DD 453576978,453576978
+DD 2659418909,2659418909
+DD 1949051992,1949051992
+DD 773462580,773462580
+DD 756751158,756751158
+DD 2993581788,2993581788
+DD 3998898868,3998898868
+DD 4221608027,4221608027
+DD 4132590244,4132590244
+DD 1295727478,1295727478
+DD 1641469623,1641469623
+DD 3467883389,3467883389
+DD 2066295122,2066295122
+DD 1055122397,1055122397
+DD 1898917726,1898917726
+DD 2542044179,2542044179
+DD 4115878822,4115878822
+DD 1758581177,1758581177
+DD 0,0
+DD 753790401,753790401
+DD 1612718144,1612718144
+DD 536673507,536673507
+DD 3367088505,3367088505
+DD 3982187446,3982187446
+DD 3194645204,3194645204
+DD 1187761037,1187761037
+DD 3653156455,3653156455
+DD 1262041458,1262041458
+DD 3729410708,3729410708
+DD 3561770136,3561770136
+DD 3898103984,3898103984
+DD 1255133061,1255133061
+DD 1808847035,1808847035
+DD 720367557,720367557
+DD 3853167183,3853167183
+DD 385612781,385612781
+DD 3309519750,3309519750
+DD 3612167578,3612167578
+DD 1429418854,1429418854
+DD 2491778321,2491778321
+DD 3477423498,3477423498
+DD 284817897,284817897
+DD 100794884,100794884
+DD 2172616702,2172616702
+DD 4031795360,4031795360
+DD 1144798328,1144798328
+DD 3131023141,3131023141
+DD 3819481163,3819481163
+DD 4082192802,4082192802
+DD 4272137053,4272137053
+DD 3225436288,3225436288
+DD 2324664069,2324664069
+DD 2912064063,2912064063
+DD 3164445985,3164445985
+DD 1211644016,1211644016
+DD 83228145,83228145
+DD 3753688163,3753688163
+DD 3249976951,3249976951
+DD 1977277103,1977277103
+DD 1663115586,1663115586
+DD 806359072,806359072
+DD 452984805,452984805
+DD 250868733,250868733
+DD 1842533055,1842533055
+DD 1288555905,1288555905
+DD 336333848,336333848
+DD 890442534,890442534
+DD 804056259,804056259
+DD 3781124030,3781124030
+DD 2727843637,2727843637
+DD 3427026056,3427026056
+DD 957814574,957814574
+DD 1472513171,1472513171
+DD 4071073621,4071073621
+DD 2189328124,2189328124
+DD 1195195770,1195195770
+DD 2892260552,2892260552
+DD 3881655738,3881655738
+DD 723065138,723065138
+DD 2507371494,2507371494
+DD 2690670784,2690670784
+DD 2558624025,2558624025
+DD 3511635870,3511635870
+DD 2145180835,2145180835
+DD 1713513028,1713513028
+DD 2116692564,2116692564
+DD 2878378043,2878378043
+DD 2206763019,2206763019
+DD 3393603212,3393603212
+DD 703524551,703524551
+DD 3552098411,3552098411
+DD 1007948840,1007948840
+DD 2044649127,2044649127
+DD 3797835452,3797835452
+DD 487262998,487262998
+DD 1994120109,1994120109
+DD 1004593371,1004593371
+DD 1446130276,1446130276
+DD 1312438900,1312438900
+DD 503974420,503974420
+DD 3679013266,3679013266
+DD 168166924,168166924
+DD 1814307912,1814307912
+DD 3831258296,3831258296
+DD 1573044895,1573044895
+DD 1859376061,1859376061
+DD 4021070915,4021070915
+DD 2791465668,2791465668
+DD 2828112185,2828112185
+DD 2761266481,2761266481
+DD 937747667,937747667
+DD 2339994098,2339994098
+DD 854058965,854058965
+DD 1137232011,1137232011
+DD 1496790894,1496790894
+DD 3077402074,3077402074
+DD 2358086913,2358086913
+DD 1691735473,1691735473
+DD 3528347292,3528347292
+DD 3769215305,3769215305
+DD 3027004632,3027004632
+DD 4199962284,4199962284
+DD 133494003,133494003
+DD 636152527,636152527
+DD 2942657994,2942657994
+DD 2390391540,2390391540
+DD 3920539207,3920539207
+DD 403179536,403179536
+DD 3585784431,3585784431
+DD 2289596656,2289596656
+DD 1864705354,1864705354
+DD 1915629148,1915629148
+DD 605822008,605822008
+DD 4054230615,4054230615
+DD 3350508659,3350508659
+DD 1371981463,1371981463
+DD 602466507,602466507
+DD 2094914977,2094914977
+DD 2624877800,2624877800
+DD 555687742,555687742
+DD 3712699286,3712699286
+DD 3703422305,3703422305
+DD 2257292045,2257292045
+DD 2240449039,2240449039
+DD 2423288032,2423288032
+DD 1111375484,1111375484
+DD 3300242801,3300242801
+DD 2858837708,2858837708
+DD 3628615824,3628615824
+DD 84083462,84083462
+DD 32962295,32962295
+DD 302911004,302911004
+DD 2741068226,2741068226
+DD 1597322602,1597322602
+DD 4183250862,4183250862
+DD 3501832553,3501832553
+DD 2441512471,2441512471
+DD 1489093017,1489093017
+DD 656219450,656219450
+DD 3114180135,3114180135
+DD 954327513,954327513
+DD 335083755,335083755
+DD 3013122091,3013122091
+DD 856756514,856756514
+DD 3144247762,3144247762
+DD 1893325225,1893325225
+DD 2307821063,2307821063
+DD 2811532339,2811532339
+DD 3063651117,3063651117
+DD 572399164,572399164
+DD 2458355477,2458355477
+DD 552200649,552200649
+DD 1238290055,1238290055
+DD 4283782570,4283782570
+DD 2015897680,2015897680
+DD 2061492133,2061492133
+DD 2408352771,2408352771
+DD 4171342169,4171342169
+DD 2156497161,2156497161
+DD 386731290,386731290
+DD 3669999461,3669999461
+DD 837215959,837215959
+DD 3326231172,3326231172
+DD 3093850320,3093850320
+DD 3275833730,3275833730
+DD 2962856233,2962856233
+DD 1999449434,1999449434
+DD 286199582,286199582
+DD 3417354363,3417354363
+DD 4233385128,4233385128
+DD 3602627437,3602627437
+DD 974525996,974525996
+DB 99,124,119,123,242,107,111,197
+DB 48,1,103,43,254,215,171,118
+DB 202,130,201,125,250,89,71,240
+DB 173,212,162,175,156,164,114,192
+DB 183,253,147,38,54,63,247,204
+DB 52,165,229,241,113,216,49,21
+DB 4,199,35,195,24,150,5,154
+DB 7,18,128,226,235,39,178,117
+DB 9,131,44,26,27,110,90,160
+DB 82,59,214,179,41,227,47,132
+DB 83,209,0,237,32,252,177,91
+DB 106,203,190,57,74,76,88,207
+DB 208,239,170,251,67,77,51,133
+DB 69,249,2,127,80,60,159,168
+DB 81,163,64,143,146,157,56,245
+DB 188,182,218,33,16,255,243,210
+DB 205,12,19,236,95,151,68,23
+DB 196,167,126,61,100,93,25,115
+DB 96,129,79,220,34,42,144,136
+DB 70,238,184,20,222,94,11,219
+DB 224,50,58,10,73,6,36,92
+DB 194,211,172,98,145,149,228,121
+DB 231,200,55,109,141,213,78,169
+DB 108,86,244,234,101,122,174,8
+DB 186,120,37,46,28,166,180,198
+DB 232,221,116,31,75,189,139,138
+DB 112,62,181,102,72,3,246,14
+DB 97,53,87,185,134,193,29,158
+DB 225,248,152,17,105,217,142,148
+DB 155,30,135,233,206,85,40,223
+DB 140,161,137,13,191,230,66,104
+DB 65,153,45,15,176,84,187,22
+DB 99,124,119,123,242,107,111,197
+DB 48,1,103,43,254,215,171,118
+DB 202,130,201,125,250,89,71,240
+DB 173,212,162,175,156,164,114,192
+DB 183,253,147,38,54,63,247,204
+DB 52,165,229,241,113,216,49,21
+DB 4,199,35,195,24,150,5,154
+DB 7,18,128,226,235,39,178,117
+DB 9,131,44,26,27,110,90,160
+DB 82,59,214,179,41,227,47,132
+DB 83,209,0,237,32,252,177,91
+DB 106,203,190,57,74,76,88,207
+DB 208,239,170,251,67,77,51,133
+DB 69,249,2,127,80,60,159,168
+DB 81,163,64,143,146,157,56,245
+DB 188,182,218,33,16,255,243,210
+DB 205,12,19,236,95,151,68,23
+DB 196,167,126,61,100,93,25,115
+DB 96,129,79,220,34,42,144,136
+DB 70,238,184,20,222,94,11,219
+DB 224,50,58,10,73,6,36,92
+DB 194,211,172,98,145,149,228,121
+DB 231,200,55,109,141,213,78,169
+DB 108,86,244,234,101,122,174,8
+DB 186,120,37,46,28,166,180,198
+DB 232,221,116,31,75,189,139,138
+DB 112,62,181,102,72,3,246,14
+DB 97,53,87,185,134,193,29,158
+DB 225,248,152,17,105,217,142,148
+DB 155,30,135,233,206,85,40,223
+DB 140,161,137,13,191,230,66,104
+DB 65,153,45,15,176,84,187,22
+DB 99,124,119,123,242,107,111,197
+DB 48,1,103,43,254,215,171,118
+DB 202,130,201,125,250,89,71,240
+DB 173,212,162,175,156,164,114,192
+DB 183,253,147,38,54,63,247,204
+DB 52,165,229,241,113,216,49,21
+DB 4,199,35,195,24,150,5,154
+DB 7,18,128,226,235,39,178,117
+DB 9,131,44,26,27,110,90,160
+DB 82,59,214,179,41,227,47,132
+DB 83,209,0,237,32,252,177,91
+DB 106,203,190,57,74,76,88,207
+DB 208,239,170,251,67,77,51,133
+DB 69,249,2,127,80,60,159,168
+DB 81,163,64,143,146,157,56,245
+DB 188,182,218,33,16,255,243,210
+DB 205,12,19,236,95,151,68,23
+DB 196,167,126,61,100,93,25,115
+DB 96,129,79,220,34,42,144,136
+DB 70,238,184,20,222,94,11,219
+DB 224,50,58,10,73,6,36,92
+DB 194,211,172,98,145,149,228,121
+DB 231,200,55,109,141,213,78,169
+DB 108,86,244,234,101,122,174,8
+DB 186,120,37,46,28,166,180,198
+DB 232,221,116,31,75,189,139,138
+DB 112,62,181,102,72,3,246,14
+DB 97,53,87,185,134,193,29,158
+DB 225,248,152,17,105,217,142,148
+DB 155,30,135,233,206,85,40,223
+DB 140,161,137,13,191,230,66,104
+DB 65,153,45,15,176,84,187,22
+DB 99,124,119,123,242,107,111,197
+DB 48,1,103,43,254,215,171,118
+DB 202,130,201,125,250,89,71,240
+DB 173,212,162,175,156,164,114,192
+DB 183,253,147,38,54,63,247,204
+DB 52,165,229,241,113,216,49,21
+DB 4,199,35,195,24,150,5,154
+DB 7,18,128,226,235,39,178,117
+DB 9,131,44,26,27,110,90,160
+DB 82,59,214,179,41,227,47,132
+DB 83,209,0,237,32,252,177,91
+DB 106,203,190,57,74,76,88,207
+DB 208,239,170,251,67,77,51,133
+DB 69,249,2,127,80,60,159,168
+DB 81,163,64,143,146,157,56,245
+DB 188,182,218,33,16,255,243,210
+DB 205,12,19,236,95,151,68,23
+DB 196,167,126,61,100,93,25,115
+DB 96,129,79,220,34,42,144,136
+DB 70,238,184,20,222,94,11,219
+DB 224,50,58,10,73,6,36,92
+DB 194,211,172,98,145,149,228,121
+DB 231,200,55,109,141,213,78,169
+DB 108,86,244,234,101,122,174,8
+DB 186,120,37,46,28,166,180,198
+DB 232,221,116,31,75,189,139,138
+DB 112,62,181,102,72,3,246,14
+DB 97,53,87,185,134,193,29,158
+DB 225,248,152,17,105,217,142,148
+DB 155,30,135,233,206,85,40,223
+DB 140,161,137,13,191,230,66,104
+DB 65,153,45,15,176,84,187,22
+DD 1,2,4,8
+DD 16,32,64,128
+DD 27,54,0,0
+DD 0,0,0,0
+__x86_AES_encrypt ENDP
+ALIGN 16
+_AES_encrypt PROC PUBLIC
+$L_AES_encrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov eax,esp
+ sub esp,36
+ and esp,-64
+ lea ebx,DWORD PTR [edi-127]
+ sub ebx,esp
+ neg ebx
+ and ebx,960
+ sub esp,ebx
+ add esp,4
+ mov DWORD PTR 28[esp],eax
+ call $L004pic_point
+$L004pic_point:
+ pop ebp
+ lea eax,DWORD PTR _OPENSSL_ia32cap_P
+ lea ebp,DWORD PTR ($LAES_Te-$L004pic_point)[ebp]
+ lea ebx,DWORD PTR 764[esp]
+ sub ebx,ebp
+ and ebx,768
+ lea ebp,DWORD PTR 2176[ebx*1+ebp]
+ bt DWORD PTR [eax],25
+ jnc $L005x86
+ movq mm0,QWORD PTR [esi]
+ movq mm4,QWORD PTR 8[esi]
+ call __sse_AES_encrypt_compact
+ mov esp,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 24[esp]
+ movq QWORD PTR [esi],mm0
+ movq QWORD PTR 8[esi],mm4
+ emms
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 16
+$L005x86:
+ mov DWORD PTR 24[esp],ebp
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ call __x86_AES_encrypt_compact
+ mov esp,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_AES_encrypt ENDP
+ALIGN 16
+__x86_AES_decrypt_compact PROC PRIVATE
+ mov DWORD PTR 20[esp],edi
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ mov edi,DWORD PTR [ebp-128]
+ mov esi,DWORD PTR [ebp-96]
+ mov edi,DWORD PTR [ebp-64]
+ mov esi,DWORD PTR [ebp-32]
+ mov edi,DWORD PTR [ebp]
+ mov esi,DWORD PTR 32[ebp]
+ mov edi,DWORD PTR 64[ebp]
+ mov esi,DWORD PTR 96[ebp]
+ALIGN 16
+$L006loop:
+ mov esi,eax
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,dh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,ebx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,ah
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,ecx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ and edx,255
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ movzx ecx,ch
+ movzx ecx,BYTE PTR [ecx*1+ebp-128]
+ shl ecx,8
+ xor edx,ecx
+ mov ecx,esi
+ shr ebx,16
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ shl ebx,16
+ xor edx,ebx
+ shr eax,24
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,24
+ xor edx,eax
+ mov esi,ecx
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea eax,DWORD PTR [ecx*1+ecx]
+ sub esi,edi
+ and eax,4278124286
+ and esi,454761243
+ xor esi,eax
+ mov eax,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebx,DWORD PTR [eax*1+eax]
+ sub esi,edi
+ and ebx,4278124286
+ and esi,454761243
+ xor eax,ecx
+ xor esi,ebx
+ mov ebx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebp,DWORD PTR [ebx*1+ebx]
+ sub esi,edi
+ and ebp,4278124286
+ and esi,454761243
+ xor ebx,ecx
+ rol ecx,8
+ xor ebp,esi
+ xor ecx,eax
+ xor eax,ebp
+ rol eax,24
+ xor ecx,ebx
+ xor ebx,ebp
+ rol ebx,16
+ xor ecx,ebp
+ rol ebp,8
+ xor ecx,eax
+ xor ecx,ebx
+ mov eax,DWORD PTR 4[esp]
+ xor ecx,ebp
+ mov DWORD PTR 12[esp],ecx
+ mov esi,edx
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebx,DWORD PTR [edx*1+edx]
+ sub esi,edi
+ and ebx,4278124286
+ and esi,454761243
+ xor esi,ebx
+ mov ebx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ecx,DWORD PTR [ebx*1+ebx]
+ sub esi,edi
+ and ecx,4278124286
+ and esi,454761243
+ xor ebx,edx
+ xor esi,ecx
+ mov ecx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebp,DWORD PTR [ecx*1+ecx]
+ sub esi,edi
+ and ebp,4278124286
+ and esi,454761243
+ xor ecx,edx
+ rol edx,8
+ xor ebp,esi
+ xor edx,ebx
+ xor ebx,ebp
+ rol ebx,24
+ xor edx,ecx
+ xor ecx,ebp
+ rol ecx,16
+ xor edx,ebp
+ rol ebp,8
+ xor edx,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 8[esp]
+ xor edx,ebp
+ mov DWORD PTR 16[esp],edx
+ mov esi,eax
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ecx,DWORD PTR [eax*1+eax]
+ sub esi,edi
+ and ecx,4278124286
+ and esi,454761243
+ xor esi,ecx
+ mov ecx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea edx,DWORD PTR [ecx*1+ecx]
+ sub esi,edi
+ and edx,4278124286
+ and esi,454761243
+ xor ecx,eax
+ xor esi,edx
+ mov edx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebp,DWORD PTR [edx*1+edx]
+ sub esi,edi
+ and ebp,4278124286
+ and esi,454761243
+ xor edx,eax
+ rol eax,8
+ xor ebp,esi
+ xor eax,ecx
+ xor ecx,ebp
+ rol ecx,24
+ xor eax,edx
+ xor edx,ebp
+ rol edx,16
+ xor eax,ebp
+ rol ebp,8
+ xor eax,ecx
+ xor eax,edx
+ xor eax,ebp
+ mov esi,ebx
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ecx,DWORD PTR [ebx*1+ebx]
+ sub esi,edi
+ and ecx,4278124286
+ and esi,454761243
+ xor esi,ecx
+ mov ecx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea edx,DWORD PTR [ecx*1+ecx]
+ sub esi,edi
+ and edx,4278124286
+ and esi,454761243
+ xor ecx,ebx
+ xor esi,edx
+ mov edx,esi
+ and esi,2155905152
+ mov edi,esi
+ shr edi,7
+ lea ebp,DWORD PTR [edx*1+edx]
+ sub esi,edi
+ and ebp,4278124286
+ and esi,454761243
+ xor edx,ebx
+ rol ebx,8
+ xor ebp,esi
+ xor ebx,ecx
+ xor ecx,ebp
+ rol ecx,24
+ xor ebx,edx
+ xor edx,ebp
+ rol edx,16
+ xor ebx,ebp
+ rol ebp,8
+ xor ebx,ecx
+ xor ebx,edx
+ mov ecx,DWORD PTR 12[esp]
+ xor ebx,ebp
+ mov edx,DWORD PTR 16[esp]
+ mov edi,DWORD PTR 20[esp]
+ mov ebp,DWORD PTR 28[esp]
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ cmp edi,DWORD PTR 24[esp]
+ mov DWORD PTR 20[esp],edi
+ jb $L006loop
+ mov esi,eax
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,dh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,ebx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,ah
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,ecx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,8
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,16
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp-128]
+ shl edi,24
+ xor esi,edi
+ mov edi,DWORD PTR 20[esp]
+ and edx,255
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ movzx ecx,ch
+ movzx ecx,BYTE PTR [ecx*1+ebp-128]
+ shl ecx,8
+ xor edx,ecx
+ mov ecx,esi
+ shr ebx,16
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ shl ebx,16
+ xor edx,ebx
+ mov ebx,DWORD PTR 8[esp]
+ shr eax,24
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,24
+ xor edx,eax
+ mov eax,DWORD PTR 4[esp]
+ xor eax,DWORD PTR 16[edi]
+ xor ebx,DWORD PTR 20[edi]
+ xor ecx,DWORD PTR 24[edi]
+ xor edx,DWORD PTR 28[edi]
+ ret
+__x86_AES_decrypt_compact ENDP
+ALIGN 16
+__sse_AES_decrypt_compact PROC PRIVATE
+ pxor mm0,QWORD PTR [edi]
+ pxor mm4,QWORD PTR 8[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ mov eax,454761243
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],eax
+ mov eax,DWORD PTR [ebp-128]
+ mov ebx,DWORD PTR [ebp-96]
+ mov ecx,DWORD PTR [ebp-64]
+ mov edx,DWORD PTR [ebp-32]
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 64[ebp]
+ mov edx,DWORD PTR 96[ebp]
+ALIGN 16
+$L007loop:
+ pshufw mm1,mm0,12
+ movd eax,mm1
+ pshufw mm5,mm4,9
+ movzx esi,al
+ movzx ecx,BYTE PTR [esi*1+ebp-128]
+ movd ebx,mm5
+ movzx edx,ah
+ movzx edx,BYTE PTR [edx*1+ebp-128]
+ shl edx,8
+ pshufw mm2,mm0,6
+ movzx esi,bl
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or ecx,esi
+ shr eax,16
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or edx,esi
+ shr ebx,16
+ pshufw mm6,mm4,3
+ movzx esi,ah
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or ecx,esi
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,8
+ or ecx,esi
+ movd mm0,ecx
+ movzx esi,al
+ movd eax,mm2
+ movzx ecx,BYTE PTR [esi*1+ebp-128]
+ shl ecx,16
+ movzx esi,bl
+ movd ebx,mm6
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ or ecx,esi
+ movzx esi,al
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ or edx,esi
+ movzx esi,bl
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or edx,esi
+ movd mm1,edx
+ movzx esi,ah
+ movzx edx,BYTE PTR [esi*1+ebp-128]
+ shl edx,8
+ movzx esi,bh
+ shr eax,16
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,24
+ or edx,esi
+ shr ebx,16
+ punpckldq mm0,mm1
+ movzx esi,bh
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,8
+ or ecx,esi
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp-128]
+ or edx,ebx
+ movzx esi,al
+ movzx esi,BYTE PTR [esi*1+ebp-128]
+ shl esi,16
+ or edx,esi
+ movd mm4,edx
+ movzx eax,ah
+ movzx eax,BYTE PTR [eax*1+ebp-128]
+ shl eax,24
+ or ecx,eax
+ movd mm5,ecx
+ punpckldq mm4,mm5
+ add edi,16
+ cmp edi,DWORD PTR 24[esp]
+ ja $L008out
+ movq mm3,mm0
+ movq mm7,mm4
+ pshufw mm2,mm0,228
+ pshufw mm6,mm4,228
+ movq mm1,mm0
+ movq mm5,mm4
+ pshufw mm0,mm0,177
+ pshufw mm4,mm4,177
+ pslld mm2,8
+ pslld mm6,8
+ psrld mm3,8
+ psrld mm7,8
+ pxor mm0,mm2
+ pxor mm4,mm6
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pslld mm2,16
+ pslld mm6,16
+ psrld mm3,16
+ psrld mm7,16
+ pxor mm0,mm2
+ pxor mm4,mm6
+ pxor mm0,mm3
+ pxor mm4,mm7
+ movq mm3,QWORD PTR 8[esp]
+ pxor mm2,mm2
+ pxor mm6,mm6
+ pcmpgtb mm2,mm1
+ pcmpgtb mm6,mm5
+ pand mm2,mm3
+ pand mm6,mm3
+ paddb mm1,mm1
+ paddb mm5,mm5
+ pxor mm1,mm2
+ pxor mm5,mm6
+ movq mm3,mm1
+ movq mm7,mm5
+ movq mm2,mm1
+ movq mm6,mm5
+ pxor mm0,mm1
+ pxor mm4,mm5
+ pslld mm3,24
+ pslld mm7,24
+ psrld mm2,8
+ psrld mm6,8
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pxor mm0,mm2
+ pxor mm4,mm6
+ movq mm2,QWORD PTR 8[esp]
+ pxor mm3,mm3
+ pxor mm7,mm7
+ pcmpgtb mm3,mm1
+ pcmpgtb mm7,mm5
+ pand mm3,mm2
+ pand mm7,mm2
+ paddb mm1,mm1
+ paddb mm5,mm5
+ pxor mm1,mm3
+ pxor mm5,mm7
+ pshufw mm3,mm1,177
+ pshufw mm7,mm5,177
+ pxor mm0,mm1
+ pxor mm4,mm5
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pxor mm3,mm3
+ pxor mm7,mm7
+ pcmpgtb mm3,mm1
+ pcmpgtb mm7,mm5
+ pand mm3,mm2
+ pand mm7,mm2
+ paddb mm1,mm1
+ paddb mm5,mm5
+ pxor mm1,mm3
+ pxor mm5,mm7
+ pxor mm0,mm1
+ pxor mm4,mm5
+ movq mm3,mm1
+ movq mm7,mm5
+ pshufw mm2,mm1,177
+ pshufw mm6,mm5,177
+ pxor mm0,mm2
+ pxor mm4,mm6
+ pslld mm1,8
+ pslld mm5,8
+ psrld mm3,8
+ psrld mm7,8
+ movq mm2,QWORD PTR [edi]
+ movq mm6,QWORD PTR 8[edi]
+ pxor mm0,mm1
+ pxor mm4,mm5
+ pxor mm0,mm3
+ pxor mm4,mm7
+ mov eax,DWORD PTR [ebp-128]
+ pslld mm1,16
+ pslld mm5,16
+ mov ebx,DWORD PTR [ebp-64]
+ psrld mm3,16
+ psrld mm7,16
+ mov ecx,DWORD PTR [ebp]
+ pxor mm0,mm1
+ pxor mm4,mm5
+ mov edx,DWORD PTR 64[ebp]
+ pxor mm0,mm3
+ pxor mm4,mm7
+ pxor mm0,mm2
+ pxor mm4,mm6
+ jmp $L007loop
+ALIGN 16
+$L008out:
+ pxor mm0,QWORD PTR [edi]
+ pxor mm4,QWORD PTR 8[edi]
+ ret
+__sse_AES_decrypt_compact ENDP
+ALIGN 16
+__x86_AES_decrypt PROC PRIVATE
+ mov DWORD PTR 20[esp],edi
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 24[esp],esi
+ALIGN 16
+$L009loop:
+ mov esi,eax
+ and esi,255
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,dh
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ mov edi,ebx
+ shr edi,24
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,ah
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ mov edi,ecx
+ shr edi,24
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ mov esi,DWORD PTR [esi*8+ebp]
+ movzx edi,bh
+ xor esi,DWORD PTR 3[edi*8+ebp]
+ mov edi,eax
+ shr edi,16
+ and edi,255
+ xor esi,DWORD PTR 2[edi*8+ebp]
+ mov edi,edx
+ shr edi,24
+ xor esi,DWORD PTR 1[edi*8+ebp]
+ mov edi,DWORD PTR 20[esp]
+ and edx,255
+ mov edx,DWORD PTR [edx*8+ebp]
+ movzx ecx,ch
+ xor edx,DWORD PTR 3[ecx*8+ebp]
+ mov ecx,esi
+ shr ebx,16
+ and ebx,255
+ xor edx,DWORD PTR 2[ebx*8+ebp]
+ mov ebx,DWORD PTR 8[esp]
+ shr eax,24
+ xor edx,DWORD PTR 1[eax*8+ebp]
+ mov eax,DWORD PTR 4[esp]
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ cmp edi,DWORD PTR 24[esp]
+ mov DWORD PTR 20[esp],edi
+ jb $L009loop
+ lea ebp,DWORD PTR 2176[ebp]
+ mov edi,DWORD PTR [ebp-128]
+ mov esi,DWORD PTR [ebp-96]
+ mov edi,DWORD PTR [ebp-64]
+ mov esi,DWORD PTR [ebp-32]
+ mov edi,DWORD PTR [ebp]
+ mov esi,DWORD PTR 32[ebp]
+ mov edi,DWORD PTR 64[ebp]
+ mov esi,DWORD PTR 96[ebp]
+ lea ebp,DWORD PTR [ebp-128]
+ mov esi,eax
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp]
+ movzx edi,dh
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,8
+ xor esi,edi
+ mov edi,ecx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,16
+ xor esi,edi
+ mov edi,ebx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 4[esp],esi
+ mov esi,ebx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp]
+ movzx edi,ah
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,8
+ xor esi,edi
+ mov edi,edx
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,16
+ xor esi,edi
+ mov edi,ecx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,24
+ xor esi,edi
+ mov DWORD PTR 8[esp],esi
+ mov esi,ecx
+ and esi,255
+ movzx esi,BYTE PTR [esi*1+ebp]
+ movzx edi,bh
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,8
+ xor esi,edi
+ mov edi,eax
+ shr edi,16
+ and edi,255
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,16
+ xor esi,edi
+ mov edi,edx
+ shr edi,24
+ movzx edi,BYTE PTR [edi*1+ebp]
+ shl edi,24
+ xor esi,edi
+ mov edi,DWORD PTR 20[esp]
+ and edx,255
+ movzx edx,BYTE PTR [edx*1+ebp]
+ movzx ecx,ch
+ movzx ecx,BYTE PTR [ecx*1+ebp]
+ shl ecx,8
+ xor edx,ecx
+ mov ecx,esi
+ shr ebx,16
+ and ebx,255
+ movzx ebx,BYTE PTR [ebx*1+ebp]
+ shl ebx,16
+ xor edx,ebx
+ mov ebx,DWORD PTR 8[esp]
+ shr eax,24
+ movzx eax,BYTE PTR [eax*1+ebp]
+ shl eax,24
+ xor edx,eax
+ mov eax,DWORD PTR 4[esp]
+ lea ebp,DWORD PTR [ebp-2048]
+ add edi,16
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ ret
+ALIGN 64
+$LAES_Td::
+DD 1353184337,1353184337
+DD 1399144830,1399144830
+DD 3282310938,3282310938
+DD 2522752826,2522752826
+DD 3412831035,3412831035
+DD 4047871263,4047871263
+DD 2874735276,2874735276
+DD 2466505547,2466505547
+DD 1442459680,1442459680
+DD 4134368941,4134368941
+DD 2440481928,2440481928
+DD 625738485,625738485
+DD 4242007375,4242007375
+DD 3620416197,3620416197
+DD 2151953702,2151953702
+DD 2409849525,2409849525
+DD 1230680542,1230680542
+DD 1729870373,1729870373
+DD 2551114309,2551114309
+DD 3787521629,3787521629
+DD 41234371,41234371
+DD 317738113,317738113
+DD 2744600205,2744600205
+DD 3338261355,3338261355
+DD 3881799427,3881799427
+DD 2510066197,2510066197
+DD 3950669247,3950669247
+DD 3663286933,3663286933
+DD 763608788,763608788
+DD 3542185048,3542185048
+DD 694804553,694804553
+DD 1154009486,1154009486
+DD 1787413109,1787413109
+DD 2021232372,2021232372
+DD 1799248025,1799248025
+DD 3715217703,3715217703
+DD 3058688446,3058688446
+DD 397248752,397248752
+DD 1722556617,1722556617
+DD 3023752829,3023752829
+DD 407560035,407560035
+DD 2184256229,2184256229
+DD 1613975959,1613975959
+DD 1165972322,1165972322
+DD 3765920945,3765920945
+DD 2226023355,2226023355
+DD 480281086,480281086
+DD 2485848313,2485848313
+DD 1483229296,1483229296
+DD 436028815,436028815
+DD 2272059028,2272059028
+DD 3086515026,3086515026
+DD 601060267,601060267
+DD 3791801202,3791801202
+DD 1468997603,1468997603
+DD 715871590,715871590
+DD 120122290,120122290
+DD 63092015,63092015
+DD 2591802758,2591802758
+DD 2768779219,2768779219
+DD 4068943920,4068943920
+DD 2997206819,2997206819
+DD 3127509762,3127509762
+DD 1552029421,1552029421
+DD 723308426,723308426
+DD 2461301159,2461301159
+DD 4042393587,4042393587
+DD 2715969870,2715969870
+DD 3455375973,3455375973
+DD 3586000134,3586000134
+DD 526529745,526529745
+DD 2331944644,2331944644
+DD 2639474228,2639474228
+DD 2689987490,2689987490
+DD 853641733,853641733
+DD 1978398372,1978398372
+DD 971801355,971801355
+DD 2867814464,2867814464
+DD 111112542,111112542
+DD 1360031421,1360031421
+DD 4186579262,4186579262
+DD 1023860118,1023860118
+DD 2919579357,2919579357
+DD 1186850381,1186850381
+DD 3045938321,3045938321
+DD 90031217,90031217
+DD 1876166148,1876166148
+DD 4279586912,4279586912
+DD 620468249,620468249
+DD 2548678102,2548678102
+DD 3426959497,3426959497
+DD 2006899047,2006899047
+DD 3175278768,3175278768
+DD 2290845959,2290845959
+DD 945494503,945494503
+DD 3689859193,3689859193
+DD 1191869601,1191869601
+DD 3910091388,3910091388
+DD 3374220536,3374220536
+DD 0,0
+DD 2206629897,2206629897
+DD 1223502642,1223502642
+DD 2893025566,2893025566
+DD 1316117100,1316117100
+DD 4227796733,4227796733
+DD 1446544655,1446544655
+DD 517320253,517320253
+DD 658058550,658058550
+DD 1691946762,1691946762
+DD 564550760,564550760
+DD 3511966619,3511966619
+DD 976107044,976107044
+DD 2976320012,2976320012
+DD 266819475,266819475
+DD 3533106868,3533106868
+DD 2660342555,2660342555
+DD 1338359936,1338359936
+DD 2720062561,2720062561
+DD 1766553434,1766553434
+DD 370807324,370807324
+DD 179999714,179999714
+DD 3844776128,3844776128
+DD 1138762300,1138762300
+DD 488053522,488053522
+DD 185403662,185403662
+DD 2915535858,2915535858
+DD 3114841645,3114841645
+DD 3366526484,3366526484
+DD 2233069911,2233069911
+DD 1275557295,1275557295
+DD 3151862254,3151862254
+DD 4250959779,4250959779
+DD 2670068215,2670068215
+DD 3170202204,3170202204
+DD 3309004356,3309004356
+DD 880737115,880737115
+DD 1982415755,1982415755
+DD 3703972811,3703972811
+DD 1761406390,1761406390
+DD 1676797112,1676797112
+DD 3403428311,3403428311
+DD 277177154,277177154
+DD 1076008723,1076008723
+DD 538035844,538035844
+DD 2099530373,2099530373
+DD 4164795346,4164795346
+DD 288553390,288553390
+DD 1839278535,1839278535
+DD 1261411869,1261411869
+DD 4080055004,4080055004
+DD 3964831245,3964831245
+DD 3504587127,3504587127
+DD 1813426987,1813426987
+DD 2579067049,2579067049
+DD 4199060497,4199060497
+DD 577038663,577038663
+DD 3297574056,3297574056
+DD 440397984,440397984
+DD 3626794326,3626794326
+DD 4019204898,4019204898
+DD 3343796615,3343796615
+DD 3251714265,3251714265
+DD 4272081548,4272081548
+DD 906744984,906744984
+DD 3481400742,3481400742
+DD 685669029,685669029
+DD 646887386,646887386
+DD 2764025151,2764025151
+DD 3835509292,3835509292
+DD 227702864,227702864
+DD 2613862250,2613862250
+DD 1648787028,1648787028
+DD 3256061430,3256061430
+DD 3904428176,3904428176
+DD 1593260334,1593260334
+DD 4121936770,4121936770
+DD 3196083615,3196083615
+DD 2090061929,2090061929
+DD 2838353263,2838353263
+DD 3004310991,3004310991
+DD 999926984,999926984
+DD 2809993232,2809993232
+DD 1852021992,1852021992
+DD 2075868123,2075868123
+DD 158869197,158869197
+DD 4095236462,4095236462
+DD 28809964,28809964
+DD 2828685187,2828685187
+DD 1701746150,1701746150
+DD 2129067946,2129067946
+DD 147831841,147831841
+DD 3873969647,3873969647
+DD 3650873274,3650873274
+DD 3459673930,3459673930
+DD 3557400554,3557400554
+DD 3598495785,3598495785
+DD 2947720241,2947720241
+DD 824393514,824393514
+DD 815048134,815048134
+DD 3227951669,3227951669
+DD 935087732,935087732
+DD 2798289660,2798289660
+DD 2966458592,2966458592
+DD 366520115,366520115
+DD 1251476721,1251476721
+DD 4158319681,4158319681
+DD 240176511,240176511
+DD 804688151,804688151
+DD 2379631990,2379631990
+DD 1303441219,1303441219
+DD 1414376140,1414376140
+DD 3741619940,3741619940
+DD 3820343710,3820343710
+DD 461924940,461924940
+DD 3089050817,3089050817
+DD 2136040774,2136040774
+DD 82468509,82468509
+DD 1563790337,1563790337
+DD 1937016826,1937016826
+DD 776014843,776014843
+DD 1511876531,1511876531
+DD 1389550482,1389550482
+DD 861278441,861278441
+DD 323475053,323475053
+DD 2355222426,2355222426
+DD 2047648055,2047648055
+DD 2383738969,2383738969
+DD 2302415851,2302415851
+DD 3995576782,3995576782
+DD 902390199,902390199
+DD 3991215329,3991215329
+DD 1018251130,1018251130
+DD 1507840668,1507840668
+DD 1064563285,1064563285
+DD 2043548696,2043548696
+DD 3208103795,3208103795
+DD 3939366739,3939366739
+DD 1537932639,1537932639
+DD 342834655,342834655
+DD 2262516856,2262516856
+DD 2180231114,2180231114
+DD 1053059257,1053059257
+DD 741614648,741614648
+DD 1598071746,1598071746
+DD 1925389590,1925389590
+DD 203809468,203809468
+DD 2336832552,2336832552
+DD 1100287487,1100287487
+DD 1895934009,1895934009
+DD 3736275976,3736275976
+DD 2632234200,2632234200
+DD 2428589668,2428589668
+DD 1636092795,1636092795
+DD 1890988757,1890988757
+DD 1952214088,1952214088
+DD 1113045200,1113045200
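+ ; four identical 256-byte copies of the inverse S-box (Td4) follow; the
+ ; caller selects one copy at run time (the "and ebx,768" in _AES_decrypt)
+ ; to keep the S-box from aliasing the stack frame in cache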
+DB 82,9,106,213,48,54,165,56
+DB 191,64,163,158,129,243,215,251
+DB 124,227,57,130,155,47,255,135
+DB 52,142,67,68,196,222,233,203
+DB 84,123,148,50,166,194,35,61
+DB 238,76,149,11,66,250,195,78
+DB 8,46,161,102,40,217,36,178
+DB 118,91,162,73,109,139,209,37
+DB 114,248,246,100,134,104,152,22
+DB 212,164,92,204,93,101,182,146
+DB 108,112,72,80,253,237,185,218
+DB 94,21,70,87,167,141,157,132
+DB 144,216,171,0,140,188,211,10
+DB 247,228,88,5,184,179,69,6
+DB 208,44,30,143,202,63,15,2
+DB 193,175,189,3,1,19,138,107
+DB 58,145,17,65,79,103,220,234
+DB 151,242,207,206,240,180,230,115
+DB 150,172,116,34,231,173,53,133
+DB 226,249,55,232,28,117,223,110
+DB 71,241,26,113,29,41,197,137
+DB 111,183,98,14,170,24,190,27
+DB 252,86,62,75,198,210,121,32
+DB 154,219,192,254,120,205,90,244
+DB 31,221,168,51,136,7,199,49
+DB 177,18,16,89,39,128,236,95
+DB 96,81,127,169,25,181,74,13
+DB 45,229,122,159,147,201,156,239
+DB 160,224,59,77,174,42,245,176
+DB 200,235,187,60,131,83,153,97
+DB 23,43,4,126,186,119,214,38
+DB 225,105,20,99,85,33,12,125
+DB 82,9,106,213,48,54,165,56
+DB 191,64,163,158,129,243,215,251
+DB 124,227,57,130,155,47,255,135
+DB 52,142,67,68,196,222,233,203
+DB 84,123,148,50,166,194,35,61
+DB 238,76,149,11,66,250,195,78
+DB 8,46,161,102,40,217,36,178
+DB 118,91,162,73,109,139,209,37
+DB 114,248,246,100,134,104,152,22
+DB 212,164,92,204,93,101,182,146
+DB 108,112,72,80,253,237,185,218
+DB 94,21,70,87,167,141,157,132
+DB 144,216,171,0,140,188,211,10
+DB 247,228,88,5,184,179,69,6
+DB 208,44,30,143,202,63,15,2
+DB 193,175,189,3,1,19,138,107
+DB 58,145,17,65,79,103,220,234
+DB 151,242,207,206,240,180,230,115
+DB 150,172,116,34,231,173,53,133
+DB 226,249,55,232,28,117,223,110
+DB 71,241,26,113,29,41,197,137
+DB 111,183,98,14,170,24,190,27
+DB 252,86,62,75,198,210,121,32
+DB 154,219,192,254,120,205,90,244
+DB 31,221,168,51,136,7,199,49
+DB 177,18,16,89,39,128,236,95
+DB 96,81,127,169,25,181,74,13
+DB 45,229,122,159,147,201,156,239
+DB 160,224,59,77,174,42,245,176
+DB 200,235,187,60,131,83,153,97
+DB 23,43,4,126,186,119,214,38
+DB 225,105,20,99,85,33,12,125
+DB 82,9,106,213,48,54,165,56
+DB 191,64,163,158,129,243,215,251
+DB 124,227,57,130,155,47,255,135
+DB 52,142,67,68,196,222,233,203
+DB 84,123,148,50,166,194,35,61
+DB 238,76,149,11,66,250,195,78
+DB 8,46,161,102,40,217,36,178
+DB 118,91,162,73,109,139,209,37
+DB 114,248,246,100,134,104,152,22
+DB 212,164,92,204,93,101,182,146
+DB 108,112,72,80,253,237,185,218
+DB 94,21,70,87,167,141,157,132
+DB 144,216,171,0,140,188,211,10
+DB 247,228,88,5,184,179,69,6
+DB 208,44,30,143,202,63,15,2
+DB 193,175,189,3,1,19,138,107
+DB 58,145,17,65,79,103,220,234
+DB 151,242,207,206,240,180,230,115
+DB 150,172,116,34,231,173,53,133
+DB 226,249,55,232,28,117,223,110
+DB 71,241,26,113,29,41,197,137
+DB 111,183,98,14,170,24,190,27
+DB 252,86,62,75,198,210,121,32
+DB 154,219,192,254,120,205,90,244
+DB 31,221,168,51,136,7,199,49
+DB 177,18,16,89,39,128,236,95
+DB 96,81,127,169,25,181,74,13
+DB 45,229,122,159,147,201,156,239
+DB 160,224,59,77,174,42,245,176
+DB 200,235,187,60,131,83,153,97
+DB 23,43,4,126,186,119,214,38
+DB 225,105,20,99,85,33,12,125
+DB 82,9,106,213,48,54,165,56
+DB 191,64,163,158,129,243,215,251
+DB 124,227,57,130,155,47,255,135
+DB 52,142,67,68,196,222,233,203
+DB 84,123,148,50,166,194,35,61
+DB 238,76,149,11,66,250,195,78
+DB 8,46,161,102,40,217,36,178
+DB 118,91,162,73,109,139,209,37
+DB 114,248,246,100,134,104,152,22
+DB 212,164,92,204,93,101,182,146
+DB 108,112,72,80,253,237,185,218
+DB 94,21,70,87,167,141,157,132
+DB 144,216,171,0,140,188,211,10
+DB 247,228,88,5,184,179,69,6
+DB 208,44,30,143,202,63,15,2
+DB 193,175,189,3,1,19,138,107
+DB 58,145,17,65,79,103,220,234
+DB 151,242,207,206,240,180,230,115
+DB 150,172,116,34,231,173,53,133
+DB 226,249,55,232,28,117,223,110
+DB 71,241,26,113,29,41,197,137
+DB 111,183,98,14,170,24,190,27
+DB 252,86,62,75,198,210,121,32
+DB 154,219,192,254,120,205,90,244
+DB 31,221,168,51,136,7,199,49
+DB 177,18,16,89,39,128,236,95
+DB 96,81,127,169,25,181,74,13
+DB 45,229,122,159,147,201,156,239
+DB 160,224,59,77,174,42,245,176
+DB 200,235,187,60,131,83,153,97
+DB 23,43,4,126,186,119,214,38
+DB 225,105,20,99,85,33,12,125
+__x86_AES_decrypt ENDP
+ALIGN 16
+_AES_decrypt PROC PUBLIC
+$L_AES_decrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov eax,esp
+ sub esp,36
+ and esp,-64
+ lea ebx,DWORD PTR [edi-127]
+ sub ebx,esp
+ neg ebx
+ and ebx,960
+ sub esp,ebx
+ add esp,4
+ mov DWORD PTR 28[esp],eax
+ call $L010pic_point
+$L010pic_point:
+ pop ebp
+ lea eax,DWORD PTR _OPENSSL_ia32cap_P
+ lea ebp,DWORD PTR ($LAES_Td-$L010pic_point)[ebp]
+ lea ebx,DWORD PTR 764[esp]
+ sub ebx,ebp
+ and ebx,768
+ lea ebp,DWORD PTR 2176[ebx*1+ebp]
+ bt DWORD PTR [eax],25
+ jnc $L011x86
+ movq mm0,QWORD PTR [esi]
+ movq mm4,QWORD PTR 8[esi]
+ call __sse_AES_decrypt_compact
+ mov esp,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 24[esp]
+ movq QWORD PTR [esi],mm0
+ movq QWORD PTR 8[esi],mm4
+ emms
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 16
+$L011x86:
+ mov DWORD PTR 24[esp],ebp
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ call __x86_AES_decrypt_compact
+ mov esp,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_AES_decrypt ENDP
+ALIGN 16
+_AES_cbc_encrypt PROC PUBLIC
+$L_AES_cbc_encrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ecx,DWORD PTR 28[esp]
+ cmp ecx,0
+ je $L012drop_out
+ call $L013pic_point
+$L013pic_point:
+ pop ebp
+ lea eax,DWORD PTR _OPENSSL_ia32cap_P
+ cmp DWORD PTR 40[esp],0
+ lea ebp,DWORD PTR ($LAES_Te-$L013pic_point)[ebp]
+ jne $L014picked_te
+ lea ebp,DWORD PTR ($LAES_Td-$LAES_Te)[ebp]
+$L014picked_te:
+ pushfd
+ cld
+ cmp ecx,512
+ jb $L015slow_way
+ test ecx,15
+ jnz $L015slow_way
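+ ; fast path: place a 64-byte-aligned scratch frame below esp so that its
+ ; page offset stays clear of the 2304-byte table window (cache-aliasing
+ ; avoidance for the on-stack key schedule copy)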
+ lea esi,DWORD PTR [esp-324]
+ and esi,-64
+ mov eax,ebp
+ lea ebx,DWORD PTR 2304[ebp]
+ mov edx,esi
+ and eax,4095
+ and ebx,4095
+ and edx,4095
+ cmp edx,ebx
+ jb $L016tbl_break_out
+ sub edx,ebx
+ sub esi,edx
+ jmp $L017tbl_ok
+ALIGN 4
+$L016tbl_break_out:
+ sub edx,eax
+ and edx,4095
+ add edx,384
+ sub esi,edx
+ALIGN 4
+$L017tbl_ok:
+ lea edx,DWORD PTR 24[esp]
+ xchg esp,esi
+ add esp,4
+ mov DWORD PTR 24[esp],ebp
+ mov DWORD PTR 28[esp],esi
+ mov eax,DWORD PTR [edx]
+ mov ebx,DWORD PTR 4[edx]
+ mov edi,DWORD PTR 12[edx]
+ mov esi,DWORD PTR 16[edx]
+ mov edx,DWORD PTR 20[edx]
+ mov DWORD PTR 32[esp],eax
+ mov DWORD PTR 36[esp],ebx
+ mov DWORD PTR 40[esp],ecx
+ mov DWORD PTR 44[esp],edi
+ mov DWORD PTR 48[esp],esi
+ mov DWORD PTR 316[esp],0
+ mov ebx,edi
+ mov ecx,61
+ sub ebx,ebp
+ mov esi,edi
+ and ebx,4095
+ lea edi,DWORD PTR 76[esp]
+ cmp ebx,2304
+ jb $L018do_copy
+ cmp ebx,3852
+ jb $L019skip_copy
+ALIGN 4
+$L018do_copy:
+ mov DWORD PTR 44[esp],edi
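+ ; the dword below assembles to "mov esi,esi / rep movsd": copy the
+ ; 61-dword key schedule (ecx=61) onto the stack at 76[esp]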
+DD 2784229001
+$L019skip_copy:
+ mov edi,16
+ALIGN 4
+$L020prefetch_tbl:
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 64[ebp]
+ mov esi,DWORD PTR 96[ebp]
+ lea ebp,DWORD PTR 128[ebp]
+ sub edi,1
+ jnz $L020prefetch_tbl
+ sub ebp,2048
+ mov esi,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 48[esp]
+ cmp edx,0
+ je $L021fast_decrypt
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ALIGN 16
+$L022fast_enc_loop:
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ xor eax,DWORD PTR [esi]
+ xor ebx,DWORD PTR 4[esi]
+ xor ecx,DWORD PTR 8[esi]
+ xor edx,DWORD PTR 12[esi]
+ mov edi,DWORD PTR 44[esp]
+ call __x86_AES_encrypt
+ mov esi,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 36[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ lea esi,DWORD PTR 16[esi]
+ mov ecx,DWORD PTR 40[esp]
+ mov DWORD PTR 32[esp],esi
+ lea edx,DWORD PTR 16[edi]
+ mov DWORD PTR 36[esp],edx
+ sub ecx,16
+ mov DWORD PTR 40[esp],ecx
+ jnz $L022fast_enc_loop
+ mov esi,DWORD PTR 48[esp]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ cmp DWORD PTR 316[esp],0
+ mov edi,DWORD PTR 44[esp]
+ je $L023skip_ezero
+ mov ecx,60
+ xor eax,eax
+ALIGN 4
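+ ; encoded "mov esi,esi / rep stosd": wipe the 60-dword on-stack key
+ ; schedule copy (eax=0)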
+DD 2884892297
+$L023skip_ezero:
+ mov esp,DWORD PTR 28[esp]
+ popfd
+$L012drop_out:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L021fast_decrypt:
+ cmp esi,DWORD PTR 36[esp]
+ je $L024fast_dec_in_place
+ mov DWORD PTR 52[esp],edi
+ALIGN 4
+ALIGN 16
+$L025fast_dec_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov edi,DWORD PTR 44[esp]
+ call __x86_AES_decrypt
+ mov edi,DWORD PTR 52[esp]
+ mov esi,DWORD PTR 40[esp]
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov edi,DWORD PTR 36[esp]
+ mov esi,DWORD PTR 32[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ecx,DWORD PTR 40[esp]
+ mov DWORD PTR 52[esp],esi
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ lea edi,DWORD PTR 16[edi]
+ mov DWORD PTR 36[esp],edi
+ sub ecx,16
+ mov DWORD PTR 40[esp],ecx
+ jnz $L025fast_dec_loop
+ mov edi,DWORD PTR 52[esp]
+ mov esi,DWORD PTR 48[esp]
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ jmp $L026fast_dec_out
+ALIGN 16
+$L024fast_dec_in_place:
+$L027fast_dec_in_place_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ lea edi,DWORD PTR 60[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov edi,DWORD PTR 44[esp]
+ call __x86_AES_decrypt
+ mov edi,DWORD PTR 48[esp]
+ mov esi,DWORD PTR 36[esp]
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 36[esp],esi
+ lea esi,DWORD PTR 60[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov esi,DWORD PTR 32[esp]
+ mov ecx,DWORD PTR 40[esp]
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ sub ecx,16
+ mov DWORD PTR 40[esp],ecx
+ jnz $L027fast_dec_in_place_loop
+ALIGN 4
+$L026fast_dec_out:
+ cmp DWORD PTR 316[esp],0
+ mov edi,DWORD PTR 44[esp]
+ je $L028skip_dzero
+ mov ecx,60
+ xor eax,eax
+ALIGN 4
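+ ; encoded "rep stosd": wipe the 60-dword on-stack key schedule copy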
+DD 2884892297
+$L028skip_dzero:
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L015slow_way:
+ mov eax,DWORD PTR [eax]
+ mov edi,DWORD PTR 36[esp]
+ lea esi,DWORD PTR [esp-80]
+ and esi,-64
+ lea ebx,DWORD PTR [edi-143]
+ sub ebx,esi
+ neg ebx
+ and ebx,960
+ sub esi,ebx
+ lea ebx,DWORD PTR 768[esi]
+ sub ebx,ebp
+ and ebx,768
+ lea ebp,DWORD PTR 2176[ebx*1+ebp]
+ lea edx,DWORD PTR 24[esp]
+ xchg esp,esi
+ add esp,4
+ mov DWORD PTR 24[esp],ebp
+ mov DWORD PTR 28[esp],esi
+ mov DWORD PTR 52[esp],eax
+ mov eax,DWORD PTR [edx]
+ mov ebx,DWORD PTR 4[edx]
+ mov esi,DWORD PTR 16[edx]
+ mov edx,DWORD PTR 20[edx]
+ mov DWORD PTR 32[esp],eax
+ mov DWORD PTR 36[esp],ebx
+ mov DWORD PTR 40[esp],ecx
+ mov DWORD PTR 44[esp],edi
+ mov DWORD PTR 48[esp],esi
+ mov edi,esi
+ mov esi,eax
+ cmp edx,0
+ je $L029slow_decrypt
+ cmp ecx,16
+ mov edx,ebx
+ jb $L030slow_enc_tail
+ bt DWORD PTR 52[esp],25
+ jnc $L031slow_enc_x86
+ movq mm0,QWORD PTR [edi]
+ movq mm4,QWORD PTR 8[edi]
+ALIGN 16
+$L032slow_enc_loop_sse:
+ pxor mm0,QWORD PTR [esi]
+ pxor mm4,QWORD PTR 8[esi]
+ mov edi,DWORD PTR 44[esp]
+ call __sse_AES_encrypt_compact
+ mov esi,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 36[esp]
+ mov ecx,DWORD PTR 40[esp]
+ movq QWORD PTR [edi],mm0
+ movq QWORD PTR 8[edi],mm4
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ lea edx,DWORD PTR 16[edi]
+ mov DWORD PTR 36[esp],edx
+ sub ecx,16
+ cmp ecx,16
+ mov DWORD PTR 40[esp],ecx
+ jae $L032slow_enc_loop_sse
+ test ecx,15
+ jnz $L030slow_enc_tail
+ mov esi,DWORD PTR 48[esp]
+ movq QWORD PTR [esi],mm0
+ movq QWORD PTR 8[esi],mm4
+ emms
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L031slow_enc_x86:
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ALIGN 4
+$L033slow_enc_loop_x86:
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ xor eax,DWORD PTR [esi]
+ xor ebx,DWORD PTR 4[esi]
+ xor ecx,DWORD PTR 8[esi]
+ xor edx,DWORD PTR 12[esi]
+ mov edi,DWORD PTR 44[esp]
+ call __x86_AES_encrypt_compact
+ mov esi,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 36[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ecx,DWORD PTR 40[esp]
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ lea edx,DWORD PTR 16[edi]
+ mov DWORD PTR 36[esp],edx
+ sub ecx,16
+ cmp ecx,16
+ mov DWORD PTR 40[esp],ecx
+ jae $L033slow_enc_loop_x86
+ test ecx,15
+ jnz $L030slow_enc_tail
+ mov esi,DWORD PTR 48[esp]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L030slow_enc_tail:
+ emms
+ mov edi,edx
+ mov ebx,16
+ sub ebx,ecx
+ cmp edi,esi
+ je $L034enc_in_place
+ALIGN 4
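+ ; encoded "rep movsb": copy the ecx-byte partial input block to the output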
+DD 2767451785
+ jmp $L035enc_skip_in_place
+$L034enc_in_place:
+ lea edi,DWORD PTR [ecx*1+edi]
+$L035enc_skip_in_place:
+ mov ecx,ebx
+ xor eax,eax
+ALIGN 4
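+ ; encoded "rep stosb": zero-pad the block out to 16 bytes (eax=0)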
+DD 2868115081
+ mov edi,DWORD PTR 48[esp]
+ mov esi,edx
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov DWORD PTR 40[esp],16
+ jmp $L033slow_enc_loop_x86
+ALIGN 16
+$L029slow_decrypt:
+ bt DWORD PTR 52[esp],25
+ jnc $L036slow_dec_loop_x86
+ALIGN 4
+$L037slow_dec_loop_sse:
+ movq mm0,QWORD PTR [esi]
+ movq mm4,QWORD PTR 8[esi]
+ mov edi,DWORD PTR 44[esp]
+ call __sse_AES_decrypt_compact
+ mov esi,DWORD PTR 32[esp]
+ lea eax,DWORD PTR 60[esp]
+ mov ebx,DWORD PTR 36[esp]
+ mov ecx,DWORD PTR 40[esp]
+ mov edi,DWORD PTR 48[esp]
+ movq mm1,QWORD PTR [esi]
+ movq mm5,QWORD PTR 8[esi]
+ pxor mm0,QWORD PTR [edi]
+ pxor mm4,QWORD PTR 8[edi]
+ movq QWORD PTR [edi],mm1
+ movq QWORD PTR 8[edi],mm5
+ sub ecx,16
+ jc $L038slow_dec_partial_sse
+ movq QWORD PTR [ebx],mm0
+ movq QWORD PTR 8[ebx],mm4
+ lea ebx,DWORD PTR 16[ebx]
+ mov DWORD PTR 36[esp],ebx
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ mov DWORD PTR 40[esp],ecx
+ jnz $L037slow_dec_loop_sse
+ emms
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L038slow_dec_partial_sse:
+ movq QWORD PTR [eax],mm0
+ movq QWORD PTR 8[eax],mm4
+ emms
+ add ecx,16
+ mov edi,ebx
+ mov esi,eax
+ALIGN 4
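+ ; encoded "rep movsb": move the ecx-byte final partial block from the
+ ; temporary buffer to the output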
+DD 2767451785
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L036slow_dec_loop_x86:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ lea edi,DWORD PTR 60[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov edi,DWORD PTR 44[esp]
+ call __x86_AES_decrypt_compact
+ mov edi,DWORD PTR 48[esp]
+ mov esi,DWORD PTR 40[esp]
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ sub esi,16
+ jc $L039slow_dec_partial_x86
+ mov DWORD PTR 40[esp],esi
+ mov esi,DWORD PTR 36[esp]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 36[esp],esi
+ lea esi,DWORD PTR 60[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov esi,DWORD PTR 32[esp]
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 32[esp],esi
+ jnz $L036slow_dec_loop_x86
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
+ALIGN 16
+$L039slow_dec_partial_x86:
+ lea esi,DWORD PTR 60[esp]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ mov esi,DWORD PTR 32[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ecx,DWORD PTR 40[esp]
+ mov edi,DWORD PTR 36[esp]
+ lea esi,DWORD PTR 60[esp]
+ALIGN 4
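+ ; encoded "rep movsb": copy the remaining partial block from the
+ ; temporary buffer at 60[esp] to the output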
+DD 2767451785
+ mov esp,DWORD PTR 28[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_AES_cbc_encrypt ENDP
+ALIGN 16
+__x86_AES_set_encrypt_key PROC PRIVATE
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 32[esp]
+ test esi,-1
+ jz $L040badpointer
+ test edi,-1
+ jz $L040badpointer
+ call $L041pic_point
+$L041pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LAES_Te-$L041pic_point)[ebp]
+ lea ebp,DWORD PTR 2176[ebp]
+ mov eax,DWORD PTR [ebp-128]
+ mov ebx,DWORD PTR [ebp-96]
+ mov ecx,DWORD PTR [ebp-64]
+ mov edx,DWORD PTR [ebp-32]
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 64[ebp]
+ mov edx,DWORD PTR 96[ebp]
+ mov ecx,DWORD PTR 28[esp]
+ cmp ecx,128
+ je $L04210rounds
+ cmp ecx,192
+ je $L04312rounds
+ cmp ecx,256
+ je $L04414rounds
+ mov eax,-2
+ jmp $L045exit
+$L04210rounds:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ xor ecx,ecx
+ jmp $L04610shortcut
+ALIGN 4
+$L04710loop:
+ mov eax,DWORD PTR [edi]
+ mov edx,DWORD PTR 12[edi]
+$L04610shortcut:
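+ ; expansion core (same shape as the 192/256-bit paths below):
+ ; eax = W[i-Nk] ^ SubWord(RotWord(W[i-1])) ^ Rcon[i], using the byte
+ ; S-box at ebp-128 and the Rcon table at 896[ebp]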
+ movzx esi,dl
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shl ebx,16
+ xor eax,ebx
+ xor eax,DWORD PTR 896[ecx*4+ebp]
+ mov DWORD PTR 16[edi],eax
+ xor eax,DWORD PTR 4[edi]
+ mov DWORD PTR 20[edi],eax
+ xor eax,DWORD PTR 8[edi]
+ mov DWORD PTR 24[edi],eax
+ xor eax,DWORD PTR 12[edi]
+ mov DWORD PTR 28[edi],eax
+ inc ecx
+ add edi,16
+ cmp ecx,10
+ jl $L04710loop
+ mov DWORD PTR 80[edi],10
+ xor eax,eax
+ jmp $L045exit
+$L04312rounds:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ecx,DWORD PTR 16[esi]
+ mov edx,DWORD PTR 20[esi]
+ mov DWORD PTR 16[edi],ecx
+ mov DWORD PTR 20[edi],edx
+ xor ecx,ecx
+ jmp $L04812shortcut
+ALIGN 4
+$L04912loop:
+ mov eax,DWORD PTR [edi]
+ mov edx,DWORD PTR 20[edi]
+$L04812shortcut:
+ movzx esi,dl
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shl ebx,16
+ xor eax,ebx
+ xor eax,DWORD PTR 896[ecx*4+ebp]
+ mov DWORD PTR 24[edi],eax
+ xor eax,DWORD PTR 4[edi]
+ mov DWORD PTR 28[edi],eax
+ xor eax,DWORD PTR 8[edi]
+ mov DWORD PTR 32[edi],eax
+ xor eax,DWORD PTR 12[edi]
+ mov DWORD PTR 36[edi],eax
+ cmp ecx,7
+ je $L05012break
+ inc ecx
+ xor eax,DWORD PTR 16[edi]
+ mov DWORD PTR 40[edi],eax
+ xor eax,DWORD PTR 20[edi]
+ mov DWORD PTR 44[edi],eax
+ add edi,24
+ jmp $L04912loop
+$L05012break:
+ mov DWORD PTR 72[edi],12
+ xor eax,eax
+ jmp $L045exit
+$L04414rounds:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov eax,DWORD PTR 16[esi]
+ mov ebx,DWORD PTR 20[esi]
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 28[esi]
+ mov DWORD PTR 16[edi],eax
+ mov DWORD PTR 20[edi],ebx
+ mov DWORD PTR 24[edi],ecx
+ mov DWORD PTR 28[edi],edx
+ xor ecx,ecx
+ jmp $L05114shortcut
+ALIGN 4
+$L05214loop:
+ mov edx,DWORD PTR 28[edi]
+$L05114shortcut:
+ mov eax,DWORD PTR [edi]
+ movzx esi,dl
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,24
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shr edx,16
+ movzx esi,dl
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,8
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shl ebx,16
+ xor eax,ebx
+ xor eax,DWORD PTR 896[ecx*4+ebp]
+ mov DWORD PTR 32[edi],eax
+ xor eax,DWORD PTR 4[edi]
+ mov DWORD PTR 36[edi],eax
+ xor eax,DWORD PTR 8[edi]
+ mov DWORD PTR 40[edi],eax
+ xor eax,DWORD PTR 12[edi]
+ mov DWORD PTR 44[edi],eax
+ cmp ecx,6
+ je $L05314break
+ inc ecx
+ mov edx,eax
+ mov eax,DWORD PTR 16[edi]
+ movzx esi,dl
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shr edx,16
+ shl ebx,8
+ movzx esi,dl
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ movzx esi,dh
+ shl ebx,16
+ xor eax,ebx
+ movzx ebx,BYTE PTR [esi*1+ebp-128]
+ shl ebx,24
+ xor eax,ebx
+ mov DWORD PTR 48[edi],eax
+ xor eax,DWORD PTR 20[edi]
+ mov DWORD PTR 52[edi],eax
+ xor eax,DWORD PTR 24[edi]
+ mov DWORD PTR 56[edi],eax
+ xor eax,DWORD PTR 28[edi]
+ mov DWORD PTR 60[edi],eax
+ add edi,32
+ jmp $L05214loop
+$L05314break:
+ mov DWORD PTR 48[edi],14
+ xor eax,eax
+ jmp $L045exit
+$L040badpointer:
+ mov eax,-1
+$L045exit:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+__x86_AES_set_encrypt_key ENDP
+ALIGN 16
+_AES_set_encrypt_key PROC PUBLIC
+$L_AES_set_encrypt_key_begin::
+ call __x86_AES_set_encrypt_key
+ ret
+_AES_set_encrypt_key ENDP
+ALIGN 16
+_AES_set_decrypt_key PROC PUBLIC
+$L_AES_set_decrypt_key_begin::
+ call __x86_AES_set_encrypt_key
+ cmp eax,0
+ je $L054proceed
+ ret
+$L054proceed:
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 28[esp]
+ mov ecx,DWORD PTR 240[esi]
+ lea ecx,DWORD PTR [ecx*4]
+ lea edi,DWORD PTR [ecx*4+esi]
+ALIGN 4
+$L055invert:
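+ ; swap the round keys end-for-end, since decryption walks the
+ ; schedule backwards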
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR [edi]
+ mov edx,DWORD PTR 4[edi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR [esi],ecx
+ mov DWORD PTR 4[esi],edx
+ mov eax,DWORD PTR 8[esi]
+ mov ebx,DWORD PTR 12[esi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR 8[edi],eax
+ mov DWORD PTR 12[edi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ add esi,16
+ sub edi,16
+ cmp esi,edi
+ jne $L055invert
+ mov edi,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 240[edi]
+ lea esi,DWORD PTR [esi*1+esi-2]
+ lea esi,DWORD PTR [esi*8+edi]
+ mov DWORD PTR 28[esp],esi
+ mov eax,DWORD PTR 16[edi]
+ALIGN 4
+$L056permute:
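+ ; apply InvMixColumns to each round-key word; every and/lea/xor group
+ ; below is a GF(2^8) doubling (xtime) with the masks 2155905152
+ ; (080808080h), 4278124286 (0FEFEFEFEh) and the AES polynomial
+ ; 454761243 (01B1B1B1Bh)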
+ add edi,16
+ mov esi,eax
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ebx,DWORD PTR [eax*1+eax]
+ sub esi,ebp
+ and ebx,4278124286
+ and esi,454761243
+ xor esi,ebx
+ mov ebx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ecx,DWORD PTR [ebx*1+ebx]
+ sub esi,ebp
+ and ecx,4278124286
+ and esi,454761243
+ xor ebx,eax
+ xor esi,ecx
+ mov ecx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edx,DWORD PTR [ecx*1+ecx]
+ xor ecx,eax
+ sub esi,ebp
+ and edx,4278124286
+ and esi,454761243
+ rol eax,8
+ xor edx,esi
+ mov ebp,DWORD PTR 4[edi]
+ xor eax,ebx
+ xor ebx,edx
+ xor eax,ecx
+ rol ebx,24
+ xor ecx,edx
+ xor eax,edx
+ rol ecx,16
+ xor eax,ebx
+ rol edx,8
+ xor eax,ecx
+ mov ebx,ebp
+ xor eax,edx
+ mov DWORD PTR [edi],eax
+ mov esi,ebx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ecx,DWORD PTR [ebx*1+ebx]
+ sub esi,ebp
+ and ecx,4278124286
+ and esi,454761243
+ xor esi,ecx
+ mov ecx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edx,DWORD PTR [ecx*1+ecx]
+ sub esi,ebp
+ and edx,4278124286
+ and esi,454761243
+ xor ecx,ebx
+ xor esi,edx
+ mov edx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea eax,DWORD PTR [edx*1+edx]
+ xor edx,ebx
+ sub esi,ebp
+ and eax,4278124286
+ and esi,454761243
+ rol ebx,8
+ xor eax,esi
+ mov ebp,DWORD PTR 8[edi]
+ xor ebx,ecx
+ xor ecx,eax
+ xor ebx,edx
+ rol ecx,24
+ xor edx,eax
+ xor ebx,eax
+ rol edx,16
+ xor ebx,ecx
+ rol eax,8
+ xor ebx,edx
+ mov ecx,ebp
+ xor ebx,eax
+ mov DWORD PTR 4[edi],ebx
+ mov esi,ecx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea edx,DWORD PTR [ecx*1+ecx]
+ sub esi,ebp
+ and edx,4278124286
+ and esi,454761243
+ xor esi,edx
+ mov edx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea eax,DWORD PTR [edx*1+edx]
+ sub esi,ebp
+ and eax,4278124286
+ and esi,454761243
+ xor edx,ecx
+ xor esi,eax
+ mov eax,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ebx,DWORD PTR [eax*1+eax]
+ xor eax,ecx
+ sub esi,ebp
+ and ebx,4278124286
+ and esi,454761243
+ rol ecx,8
+ xor ebx,esi
+ mov ebp,DWORD PTR 12[edi]
+ xor ecx,edx
+ xor edx,ebx
+ xor ecx,eax
+ rol edx,24
+ xor eax,ebx
+ xor ecx,ebx
+ rol eax,16
+ xor ecx,edx
+ rol ebx,8
+ xor ecx,eax
+ mov edx,ebp
+ xor ecx,ebx
+ mov DWORD PTR 8[edi],ecx
+ mov esi,edx
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea eax,DWORD PTR [edx*1+edx]
+ sub esi,ebp
+ and eax,4278124286
+ and esi,454761243
+ xor esi,eax
+ mov eax,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ebx,DWORD PTR [eax*1+eax]
+ sub esi,ebp
+ and ebx,4278124286
+ and esi,454761243
+ xor eax,edx
+ xor esi,ebx
+ mov ebx,esi
+ and esi,2155905152
+ mov ebp,esi
+ shr ebp,7
+ lea ecx,DWORD PTR [ebx*1+ebx]
+ xor ebx,edx
+ sub esi,ebp
+ and ecx,4278124286
+ and esi,454761243
+ rol edx,8
+ xor ecx,esi
+ mov ebp,DWORD PTR 16[edi]
+ xor edx,eax
+ xor eax,ecx
+ xor edx,ebx
+ rol eax,24
+ xor ebx,ecx
+ xor edx,ecx
+ rol ebx,16
+ xor edx,eax
+ rol ecx,8
+ xor edx,ebx
+ mov eax,ebp
+ xor edx,ecx
+ mov DWORD PTR 12[edi],edx
+ cmp edi,DWORD PTR 28[esp]
+ jb $L056permute
+ xor eax,eax
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_AES_set_decrypt_key ENDP
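+ ; the bytes below spell "AES for x86, CRYPTOGAMS by <appro@openssl.org>"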
+DB 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
+DB 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+DB 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.text$ ENDS
+.bss SEGMENT 'BSS'
+COMM _OPENSSL_ia32cap_P:DWORD
+.bss ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm b/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
new file mode 100644
index 0000000000..a802e7292f
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
@@ -0,0 +1,907 @@
+TITLE bf-686.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_BF_encrypt PROC PUBLIC
+$L_BF_encrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ ; Load the 2 words
+ mov eax,DWORD PTR 20[esp]
+ mov ecx,DWORD PTR [eax]
+ mov edx,DWORD PTR 4[eax]
+ ;
+
+ ; P pointer, s and enc flag
+ mov edi,DWORD PTR 24[esp]
+ xor eax,eax
+ xor ebx,ebx
+ xor ecx,DWORD PTR [edi]
+ ;
+
+ ; Round 0
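+ ; each of the 16 unrolled Feistel rounds computes
+ ; F(x) = ((S0[x>>24] + S1[(x>>16)&255]) ^ S2[(x>>8)&255]) + S3[x&255],
+ ; with the S-boxes at edi+72/1096/2120/3144 after the 18-dword P array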
+ ror ecx,16
+ mov esi,DWORD PTR 4[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 1
+ ror edx,16
+ mov esi,DWORD PTR 8[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 2
+ ror ecx,16
+ mov esi,DWORD PTR 12[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 3
+ ror edx,16
+ mov esi,DWORD PTR 16[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 4
+ ror ecx,16
+ mov esi,DWORD PTR 20[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 5
+ ror edx,16
+ mov esi,DWORD PTR 24[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 6
+ ror ecx,16
+ mov esi,DWORD PTR 28[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 7
+ ror edx,16
+ mov esi,DWORD PTR 32[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 8
+ ror ecx,16
+ mov esi,DWORD PTR 36[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 9
+ ror edx,16
+ mov esi,DWORD PTR 40[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 10
+ ror ecx,16
+ mov esi,DWORD PTR 44[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 11
+ ror edx,16
+ mov esi,DWORD PTR 48[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 12
+ ror ecx,16
+ mov esi,DWORD PTR 52[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 13
+ ror edx,16
+ mov esi,DWORD PTR 56[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 14
+ ror ecx,16
+ mov esi,DWORD PTR 60[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 15
+ ror edx,16
+ mov esi,DWORD PTR 64[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ xor edx,DWORD PTR 68[edi]
+ mov eax,DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_BF_encrypt ENDP
+ALIGN 16
+_BF_decrypt PROC PUBLIC
+$L_BF_decrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ ; Load the 2 words
+ mov eax,DWORD PTR 20[esp]
+ mov ecx,DWORD PTR [eax]
+ mov edx,DWORD PTR 4[eax]
+ ;
+
+ ; P pointer, s and enc flag
+ mov edi,DWORD PTR 24[esp]
+ xor eax,eax
+ xor ebx,ebx
+ xor ecx,DWORD PTR 68[edi]
+ ;
+
+ ; Round 16
+ ror ecx,16
+ mov esi,DWORD PTR 64[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 15
+ ror edx,16
+ mov esi,DWORD PTR 60[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 14
+ ror ecx,16
+ mov esi,DWORD PTR 56[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 13
+ ror edx,16
+ mov esi,DWORD PTR 52[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 12
+ ror ecx,16
+ mov esi,DWORD PTR 48[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 11
+ ror edx,16
+ mov esi,DWORD PTR 44[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 10
+ ror ecx,16
+ mov esi,DWORD PTR 40[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 9
+ ror edx,16
+ mov esi,DWORD PTR 36[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 8
+ ror ecx,16
+ mov esi,DWORD PTR 32[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 7
+ ror edx,16
+ mov esi,DWORD PTR 28[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 6
+ ror ecx,16
+ mov esi,DWORD PTR 24[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 5
+ ror edx,16
+ mov esi,DWORD PTR 20[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 4
+ ror ecx,16
+ mov esi,DWORD PTR 16[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 3
+ ror edx,16
+ mov esi,DWORD PTR 12[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ ;
+
+ ; Round 2
+ ror ecx,16
+ mov esi,DWORD PTR 8[edi]
+ mov al,ch
+ mov bl,cl
+ ror ecx,16
+ xor edx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,ch
+ mov bl,cl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor edx,esi
+ ;
+
+ ; Round 1
+ ror edx,16
+ mov esi,DWORD PTR 4[edi]
+ mov al,dh
+ mov bl,dl
+ ror edx,16
+ xor ecx,esi
+ mov esi,DWORD PTR 72[eax*4+edi]
+ mov ebp,DWORD PTR 1096[ebx*4+edi]
+ mov al,dh
+ mov bl,dl
+ add esi,ebp
+ mov eax,DWORD PTR 2120[eax*4+edi]
+ xor esi,eax
+ mov ebp,DWORD PTR 3144[ebx*4+edi]
+ add esi,ebp
+ xor eax,eax
+ xor ecx,esi
+ xor edx,DWORD PTR [edi]
+ mov eax,DWORD PTR 20[esp]
+ mov DWORD PTR [eax],edx
+ mov DWORD PTR 4[eax],ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_BF_decrypt ENDP
+ALIGN 16
+_BF_cbc_encrypt PROC PUBLIC
+$L_BF_cbc_encrypt_begin::
+ ;
+
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 28[esp]
+ ; getting iv ptr from parameter 4
+ mov ebx,DWORD PTR 36[esp]
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ push edi
+ push esi
+ push edi
+ push esi
+ mov ebx,esp
+ mov esi,DWORD PTR 36[esp]
+ mov edi,DWORD PTR 40[esp]
+ ; getting encrypt flag from parameter 5
+ mov ecx,DWORD PTR 56[esp]
+ ; get and push parameter 3
+ mov eax,DWORD PTR 48[esp]
+ push eax
+ push ebx
+ cmp ecx,0
+ jz $L000decrypt
+ and ebp,4294967288
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ jz $L001encrypt_finish
+$L002encrypt_loop:
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR 4[esi]
+ xor eax,ecx
+ xor ebx,edx
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_BF_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L002encrypt_loop
+$L001encrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L003finish
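+ ; 1-7 leftover bytes: computed jump into $L006ej7..$L013ej1 below loads
+ ; exactly ebp tail bytes (zero-padded) before one final block encryption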
+ call $L004PIC_point
+$L004PIC_point:
+ pop edx
+ lea ecx,DWORD PTR ($L005cbc_enc_jmp_table-$L004PIC_point)[edx]
+ mov ebp,DWORD PTR [ebp*4+ecx]
+ add ebp,edx
+ xor ecx,ecx
+ xor edx,edx
+ jmp ebp
+$L006ej7:
+ mov dh,BYTE PTR 6[esi]
+ shl edx,8
+$L007ej6:
+ mov dh,BYTE PTR 5[esi]
+$L008ej5:
+ mov dl,BYTE PTR 4[esi]
+$L009ej4:
+ mov ecx,DWORD PTR [esi]
+ jmp $L010ejend
+$L011ej3:
+ mov ch,BYTE PTR 2[esi]
+ shl ecx,8
+$L012ej2:
+ mov ch,BYTE PTR 1[esi]
+$L013ej1:
+ mov cl,BYTE PTR [esi]
+$L010ejend:
+ xor eax,ecx
+ xor ebx,edx
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_BF_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ jmp $L003finish
+$L000decrypt:
+ and ebp,4294967288
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ jz $L014decrypt_finish
+$L015decrypt_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_BF_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR [edi],ecx
+ mov DWORD PTR 4[edi],edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L015decrypt_loop
+$L014decrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L003finish
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_BF_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+$L016dj7:
+ ror edx,16
+ mov BYTE PTR 6[edi],dl
+ shr edx,16
+$L017dj6:
+ mov BYTE PTR 5[edi],dh
+$L018dj5:
+ mov BYTE PTR 4[edi],dl
+$L019dj4:
+ mov DWORD PTR [edi],ecx
+ jmp $L020djend
+$L021dj3:
+ ror ecx,16
+ mov BYTE PTR 2[edi],cl
+ shl ecx,16
+$L022dj2:
+ mov BYTE PTR 1[esi],ch
+$L023dj1:
+ mov BYTE PTR [esi],cl
+$L020djend:
+ jmp $L003finish
+$L003finish:
+ mov ecx,DWORD PTR 60[esp]
+ add esp,24
+ mov DWORD PTR [ecx],eax
+ mov DWORD PTR 4[ecx],ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L005cbc_enc_jmp_table:
+DD 0
+DD $L013ej1-$L004PIC_point
+DD $L012ej2-$L004PIC_point
+DD $L011ej3-$L004PIC_point
+DD $L009ej4-$L004PIC_point
+DD $L008ej5-$L004PIC_point
+DD $L007ej6-$L004PIC_point
+DD $L006ej7-$L004PIC_point
+ALIGN 64
+_BF_cbc_encrypt ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm b/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
new file mode 100644
index 0000000000..eaad4a073a
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
@@ -0,0 +1,348 @@
+TITLE ../openssl/crypto/bn/asm/x86-mont.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_bn_mul_mont PROC PUBLIC
+$L_bn_mul_mont_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ xor eax,eax
+ mov edi,DWORD PTR 40[esp]
+ cmp edi,4
+ jl $L000just_leave
+ lea esi,DWORD PTR 20[esp]
+ lea edx,DWORD PTR 24[esp]
+ mov ebp,esp
+ add edi,2
+ neg edi
+ lea esp,DWORD PTR [edi*4+esp-32]
+ neg edi
+ mov eax,esp
+ sub eax,edx
+ and eax,2047
+ sub esp,eax
+ xor edx,esp
+ and edx,2048
+ xor edx,2048
+ sub esp,edx
+ and esp,-64
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov esi,DWORD PTR 16[esi]
+ mov esi,DWORD PTR [esi]
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR 8[esp],ebx
+ mov DWORD PTR 12[esp],ecx
+ mov DWORD PTR 16[esp],edx
+ mov DWORD PTR 20[esp],esi
+ lea ebx,DWORD PTR [edi-3]
+ mov DWORD PTR 24[esp],ebp
+ mov esi,DWORD PTR 8[esp]
+ lea ebp,DWORD PTR 1[ebx]
+ mov edi,DWORD PTR 12[esp]
+ xor ecx,ecx
+ mov edx,esi
+ and ebp,1
+ sub edx,edi
+ lea eax,DWORD PTR 4[ebx*4+edi]
+ or ebp,edx
+ mov edi,DWORD PTR [edi]
+ jz $L001bn_sqr_mont
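+ ; general case: one Montgomery pass per bp word -- $L002mull forms
+ ; tp = ap*bp[0], then m = tp[0]*n0 (the imul on 32[esp]) and
+ ; $L0032ndmadd adds np*m so the low word cancels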
+ mov DWORD PTR 28[esp],eax
+ mov eax,DWORD PTR [esi]
+ xor edx,edx
+ALIGN 16
+$L002mull:
+ mov ebp,edx
+ mul edi
+ add ebp,eax
+ lea ecx,DWORD PTR 1[ecx]
+ adc edx,0
+ mov eax,DWORD PTR [ecx*4+esi]
+ cmp ecx,ebx
+ mov DWORD PTR 28[ecx*4+esp],ebp
+ jl $L002mull
+ mov ebp,edx
+ mul edi
+ mov edi,DWORD PTR 20[esp]
+ add eax,ebp
+ mov esi,DWORD PTR 16[esp]
+ adc edx,0
+ imul edi,DWORD PTR 32[esp]
+ mov DWORD PTR 32[ebx*4+esp],eax
+ xor ecx,ecx
+ mov DWORD PTR 36[ebx*4+esp],edx
+ mov DWORD PTR 40[ebx*4+esp],ecx
+ mov eax,DWORD PTR [esi]
+ mul edi
+ add eax,DWORD PTR 32[esp]
+ mov eax,DWORD PTR 4[esi]
+ adc edx,0
+ inc ecx
+ jmp $L0032ndmadd
+ALIGN 16
+$L0041stmadd:
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 32[ecx*4+esp]
+ lea ecx,DWORD PTR 1[ecx]
+ adc edx,0
+ add ebp,eax
+ mov eax,DWORD PTR [ecx*4+esi]
+ adc edx,0
+ cmp ecx,ebx
+ mov DWORD PTR 28[ecx*4+esp],ebp
+ jl $L0041stmadd
+ mov ebp,edx
+ mul edi
+ add eax,DWORD PTR 32[ebx*4+esp]
+ mov edi,DWORD PTR 20[esp]
+ adc edx,0
+ mov esi,DWORD PTR 16[esp]
+ add ebp,eax
+ adc edx,0
+ imul edi,DWORD PTR 32[esp]
+ xor ecx,ecx
+ add edx,DWORD PTR 36[ebx*4+esp]
+ mov DWORD PTR 32[ebx*4+esp],ebp
+ adc ecx,0
+ mov eax,DWORD PTR [esi]
+ mov DWORD PTR 36[ebx*4+esp],edx
+ mov DWORD PTR 40[ebx*4+esp],ecx
+ mul edi
+ add eax,DWORD PTR 32[esp]
+ mov eax,DWORD PTR 4[esi]
+ adc edx,0
+ mov ecx,1
+ALIGN 16
+$L0032ndmadd:
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 32[ecx*4+esp]
+ lea ecx,DWORD PTR 1[ecx]
+ adc edx,0
+ add ebp,eax
+ mov eax,DWORD PTR [ecx*4+esi]
+ adc edx,0
+ cmp ecx,ebx
+ mov DWORD PTR 24[ecx*4+esp],ebp
+ jl $L0032ndmadd
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 32[ebx*4+esp]
+ adc edx,0
+ add ebp,eax
+ adc edx,0
+ mov DWORD PTR 28[ebx*4+esp],ebp
+ xor eax,eax
+ mov ecx,DWORD PTR 12[esp]
+ add edx,DWORD PTR 36[ebx*4+esp]
+ adc eax,DWORD PTR 40[ebx*4+esp]
+ lea ecx,DWORD PTR 4[ecx]
+ mov DWORD PTR 32[ebx*4+esp],edx
+ cmp ecx,DWORD PTR 28[esp]
+ mov DWORD PTR 36[ebx*4+esp],eax
+ je $L005common_tail
+ mov edi,DWORD PTR [ecx]
+ mov esi,DWORD PTR 8[esp]
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,ecx
+ xor edx,edx
+ mov eax,DWORD PTR [esi]
+ jmp $L0041stmadd
+ALIGN 16
+$L001bn_sqr_mont:
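+ ; dedicated squaring path (ap == bp): each cross product is computed
+ ; once and doubled via "lea ebp,DWORD PTR [eax*2+ebx]" with the carry
+ ; kept in ebx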
+ mov DWORD PTR [esp],ebx
+ mov DWORD PTR 12[esp],ecx
+ mov eax,edi
+ mul edi
+ mov DWORD PTR 32[esp],eax
+ mov ebx,edx
+ shr edx,1
+ and ebx,1
+ inc ecx
+ALIGN 16
+$L006sqr:
+ mov eax,DWORD PTR [ecx*4+esi]
+ mov ebp,edx
+ mul edi
+ add eax,ebp
+ lea ecx,DWORD PTR 1[ecx]
+ adc edx,0
+ lea ebp,DWORD PTR [eax*2+ebx]
+ shr eax,31
+ cmp ecx,DWORD PTR [esp]
+ mov ebx,eax
+ mov DWORD PTR 28[ecx*4+esp],ebp
+ jl $L006sqr
+ mov eax,DWORD PTR [ecx*4+esi]
+ mov ebp,edx
+ mul edi
+ add eax,ebp
+ mov edi,DWORD PTR 20[esp]
+ adc edx,0
+ mov esi,DWORD PTR 16[esp]
+ lea ebp,DWORD PTR [eax*2+ebx]
+ imul edi,DWORD PTR 32[esp]
+ shr eax,31
+ mov DWORD PTR 32[ecx*4+esp],ebp
+ lea ebp,DWORD PTR [edx*2+eax]
+ mov eax,DWORD PTR [esi]
+ shr edx,31
+ mov DWORD PTR 36[ecx*4+esp],ebp
+ mov DWORD PTR 40[ecx*4+esp],edx
+ mul edi
+ add eax,DWORD PTR 32[esp]
+ mov ebx,ecx
+ adc edx,0
+ mov eax,DWORD PTR 4[esi]
+ mov ecx,1
+ALIGN 16
+$L0073rdmadd:
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 32[ecx*4+esp]
+ adc edx,0
+ add ebp,eax
+ mov eax,DWORD PTR 4[ecx*4+esi]
+ adc edx,0
+ mov DWORD PTR 28[ecx*4+esp],ebp
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 36[ecx*4+esp]
+ lea ecx,DWORD PTR 2[ecx]
+ adc edx,0
+ add ebp,eax
+ mov eax,DWORD PTR [ecx*4+esi]
+ adc edx,0
+ cmp ecx,ebx
+ mov DWORD PTR 24[ecx*4+esp],ebp
+ jl $L0073rdmadd
+ mov ebp,edx
+ mul edi
+ add ebp,DWORD PTR 32[ebx*4+esp]
+ adc edx,0
+ add ebp,eax
+ adc edx,0
+ mov DWORD PTR 28[ebx*4+esp],ebp
+ mov ecx,DWORD PTR 12[esp]
+ xor eax,eax
+ mov esi,DWORD PTR 8[esp]
+ add edx,DWORD PTR 36[ebx*4+esp]
+ adc eax,DWORD PTR 40[ebx*4+esp]
+ mov DWORD PTR 32[ebx*4+esp],edx
+ cmp ecx,ebx
+ mov DWORD PTR 36[ebx*4+esp],eax
+ je $L005common_tail
+ mov edi,DWORD PTR 4[ecx*4+esi]
+ lea ecx,DWORD PTR 1[ecx]
+ mov eax,edi
+ mov DWORD PTR 12[esp],ecx
+ mul edi
+ add eax,DWORD PTR 32[ecx*4+esp]
+ adc edx,0
+ mov DWORD PTR 32[ecx*4+esp],eax
+ xor ebp,ebp
+ cmp ecx,ebx
+ lea ecx,DWORD PTR 1[ecx]
+ je $L008sqrlast
+ mov ebx,edx
+ shr edx,1
+ and ebx,1
+ALIGN 16
+$L009sqradd:
+ mov eax,DWORD PTR [ecx*4+esi]
+ mov ebp,edx
+ mul edi
+ add eax,ebp
+ lea ebp,DWORD PTR [eax*1+eax]
+ adc edx,0
+ shr eax,31
+ add ebp,DWORD PTR 32[ecx*4+esp]
+ lea ecx,DWORD PTR 1[ecx]
+ adc eax,0
+ add ebp,ebx
+ adc eax,0
+ cmp ecx,DWORD PTR [esp]
+ mov DWORD PTR 28[ecx*4+esp],ebp
+ mov ebx,eax
+ jle $L009sqradd
+ mov ebp,edx
+ add edx,edx
+ shr ebp,31
+ add edx,ebx
+ adc ebp,0
+$L008sqrlast:
+ mov edi,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 16[esp]
+ imul edi,DWORD PTR 32[esp]
+ add edx,DWORD PTR 32[ecx*4+esp]
+ mov eax,DWORD PTR [esi]
+ adc ebp,0
+ mov DWORD PTR 32[ecx*4+esp],edx
+ mov DWORD PTR 36[ecx*4+esp],ebp
+ mul edi
+ add eax,DWORD PTR 32[esp]
+ lea ebx,DWORD PTR [ecx-1]
+ adc edx,0
+ mov ecx,1
+ mov eax,DWORD PTR 4[esi]
+ jmp $L0073rdmadd
+ALIGN 16
+$L005common_tail:
+ mov ebp,DWORD PTR 16[esp]
+ mov edi,DWORD PTR 4[esp]
+ lea esi,DWORD PTR 32[esp]
+ mov eax,DWORD PTR [esi]
+ mov ecx,ebx
+ xor edx,edx
+ALIGN 16
+$L010sub:
+ sbb eax,DWORD PTR [edx*4+ebp]
+ mov DWORD PTR [edx*4+edi],eax
+ dec ecx
+ mov eax,DWORD PTR 4[edx*4+esi]
+ lea edx,DWORD PTR 1[edx]
+ jge $L010sub
+ sbb eax,0
+ and esi,eax
+ not eax
+ mov ebp,edi
+ and ebp,eax
+ or esi,ebp
+ALIGN 16
+$L011copy:
+ mov eax,DWORD PTR [ebx*4+esi]
+ mov DWORD PTR [ebx*4+edi],eax
+ mov DWORD PTR 32[ebx*4+esp],ecx
+ dec ebx
+ jge $L011copy
+ mov esp,DWORD PTR 24[esp]
+ mov eax,1
+$L000just_leave:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_mul_mont ENDP
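+ ; the bytes below spell "Montgomery Multiplication for x86, CRYPTOGAMS
+ ; by <appro@openssl.org>"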
+DB 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
+DB 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
+DB 54,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121
+DB 32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46
+DB 111,114,103,62,0
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/bn/x86.asm b/deps/openssl/asm/x86-win32-masm/bn/x86.asm
new file mode 100644
index 0000000000..d7051fa4e5
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/bn/x86.asm
@@ -0,0 +1,2116 @@
+TITLE ../openssl/crypto/bn/asm/x86.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_bn_mul_add_words PROC PUBLIC
+$L_bn_mul_add_words_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ xor esi,esi
+ mov edi,DWORD PTR 20[esp]
+ mov ecx,DWORD PTR 28[esp]
+ mov ebx,DWORD PTR 24[esp]
+ and ecx,4294967288
+ mov ebp,DWORD PTR 32[esp]
+ push ecx
+ jz $L000maw_finish
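+ ; 8x-unrolled loop: rp[i] = low word of ap[i]*w + rp[i] + carry
+ ; (w in ebp, running carry in esi)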
+$L001maw_loop:
+ mov DWORD PTR [esp],ecx
+ ; Round 0
+ mov eax,DWORD PTR [ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR [edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR [edi],eax
+ mov esi,edx
+ ; Round 4
+ mov eax,DWORD PTR 4[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 4[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 4[edi],eax
+ mov esi,edx
+ ; Round 8
+ mov eax,DWORD PTR 8[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 8[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 8[edi],eax
+ mov esi,edx
+ ; Round 12
+ mov eax,DWORD PTR 12[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 12[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 12[edi],eax
+ mov esi,edx
+ ; Round 16
+ mov eax,DWORD PTR 16[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 16[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 16[edi],eax
+ mov esi,edx
+ ; Round 20
+ mov eax,DWORD PTR 20[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 20[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 20[edi],eax
+ mov esi,edx
+ ; Round 24
+ mov eax,DWORD PTR 24[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 24[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 24[edi],eax
+ mov esi,edx
+ ; Round 28
+ mov eax,DWORD PTR 28[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 28[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 28[edi],eax
+ mov esi,edx
+ ;
+
+ mov ecx,DWORD PTR [esp]
+ add ebx,32
+ add edi,32
+ sub ecx,8
+ jnz $L001maw_loop
+$L000maw_finish:
+ mov ecx,DWORD PTR 32[esp]
+ and ecx,7
+ jnz $L002maw_finish2
+ jmp $L003maw_end
+$L002maw_finish2:
+ ; Tail Round 0
+ mov eax,DWORD PTR [ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR [edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR [edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 1
+ mov eax,DWORD PTR 4[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 4[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR 4[edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 2
+ mov eax,DWORD PTR 8[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 8[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR 8[edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 3
+ mov eax,DWORD PTR 12[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 12[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR 12[edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 4
+ mov eax,DWORD PTR 16[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 16[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR 16[edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 5
+ mov eax,DWORD PTR 20[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 20[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ dec ecx
+ mov DWORD PTR 20[edi],eax
+ mov esi,edx
+ jz $L003maw_end
+ ; Tail Round 6
+ mov eax,DWORD PTR 24[ebx]
+ mul ebp
+ add eax,esi
+ mov esi,DWORD PTR 24[edi]
+ adc edx,0
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 24[edi],eax
+ mov esi,edx
+$L003maw_end:
+ mov eax,esi
+ pop ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_mul_add_words ENDP
+ALIGN 16
+_bn_mul_words PROC PUBLIC
+$L_bn_mul_words_begin::
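+; bn_mul_words(rp, ap, num, w): like bn_mul_add_words above but without
+; reading rp -- rp[i] = low32(ap[i]*w + carry) -- returning the final carry
+; in eax; same unrolled-by-eight structure.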
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ xor esi,esi
+ mov edi,DWORD PTR 20[esp]
+ mov ebx,DWORD PTR 24[esp]
+ mov ebp,DWORD PTR 28[esp]
+ mov ecx,DWORD PTR 32[esp]
+ and ebp,4294967288
+ jz $L004mw_finish
+$L005mw_loop:
+ ; Round 0
+ mov eax,DWORD PTR [ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR [edi],eax
+ mov esi,edx
+ ; Round 4
+ mov eax,DWORD PTR 4[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 4[edi],eax
+ mov esi,edx
+ ; Round 8
+ mov eax,DWORD PTR 8[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 8[edi],eax
+ mov esi,edx
+ ; Round 12
+ mov eax,DWORD PTR 12[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 12[edi],eax
+ mov esi,edx
+ ; Round 16
+ mov eax,DWORD PTR 16[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 16[edi],eax
+ mov esi,edx
+ ; Round 20
+ mov eax,DWORD PTR 20[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 20[edi],eax
+ mov esi,edx
+ ; Round 24
+ mov eax,DWORD PTR 24[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 24[edi],eax
+ mov esi,edx
+ ; Round 28
+ mov eax,DWORD PTR 28[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 28[edi],eax
+ mov esi,edx
+ ;
+
+ add ebx,32
+ add edi,32
+ sub ebp,8
+ jz $L004mw_finish
+ jmp $L005mw_loop
+$L004mw_finish:
+ mov ebp,DWORD PTR 28[esp]
+ and ebp,7
+ jnz $L006mw_finish2
+ jmp $L007mw_end
+$L006mw_finish2:
+ ; Tail Round 0
+ mov eax,DWORD PTR [ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR [edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 1
+ mov eax,DWORD PTR 4[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 4[edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 2
+ mov eax,DWORD PTR 8[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 8[edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 3
+ mov eax,DWORD PTR 12[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 12[edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 4
+ mov eax,DWORD PTR 16[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 16[edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 5
+ mov eax,DWORD PTR 20[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 20[edi],eax
+ mov esi,edx
+ dec ebp
+ jz $L007mw_end
+ ; Tail Round 6
+ mov eax,DWORD PTR 24[ebx]
+ mul ecx
+ add eax,esi
+ adc edx,0
+ mov DWORD PTR 24[edi],eax
+ mov esi,edx
+$L007mw_end:
+ mov eax,esi
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_mul_words ENDP
+ALIGN 16
+_bn_sqr_words PROC PUBLIC
+$L_bn_sqr_words_begin::
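+; bn_sqr_words(r, a, n): r[2i] and r[2i+1] receive the low and high words
+; of a[i]*a[i]; no carry crosses between pairs, so each round is independent.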
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 24[esp]
+ mov ebx,DWORD PTR 28[esp]
+ and ebx,4294967288
+ jz $L008sw_finish
+$L009sw_loop:
+ ; Round 0
+ mov eax,DWORD PTR [edi]
+ mul eax
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],edx
+ ; Round 4
+ mov eax,DWORD PTR 4[edi]
+ mul eax
+ mov DWORD PTR 8[esi],eax
+ mov DWORD PTR 12[esi],edx
+ ; Round 8
+ mov eax,DWORD PTR 8[edi]
+ mul eax
+ mov DWORD PTR 16[esi],eax
+ mov DWORD PTR 20[esi],edx
+ ; Round 12
+ mov eax,DWORD PTR 12[edi]
+ mul eax
+ mov DWORD PTR 24[esi],eax
+ mov DWORD PTR 28[esi],edx
+ ; Round 16
+ mov eax,DWORD PTR 16[edi]
+ mul eax
+ mov DWORD PTR 32[esi],eax
+ mov DWORD PTR 36[esi],edx
+ ; Round 20
+ mov eax,DWORD PTR 20[edi]
+ mul eax
+ mov DWORD PTR 40[esi],eax
+ mov DWORD PTR 44[esi],edx
+ ; Round 24
+ mov eax,DWORD PTR 24[edi]
+ mul eax
+ mov DWORD PTR 48[esi],eax
+ mov DWORD PTR 52[esi],edx
+ ; Round 28
+ mov eax,DWORD PTR 28[edi]
+ mul eax
+ mov DWORD PTR 56[esi],eax
+ mov DWORD PTR 60[esi],edx
+ ;
+
+ add edi,32
+ add esi,64
+ sub ebx,8
+ jnz $L009sw_loop
+$L008sw_finish:
+ mov ebx,DWORD PTR 28[esp]
+ and ebx,7
+ jz $L010sw_end
+ ; Tail Round 0
+ mov eax,DWORD PTR [edi]
+ mul eax
+ mov DWORD PTR [esi],eax
+ dec ebx
+ mov DWORD PTR 4[esi],edx
+ jz $L010sw_end
+ ; Tail Round 1
+ mov eax,DWORD PTR 4[edi]
+ mul eax
+ mov DWORD PTR 8[esi],eax
+ dec ebx
+ mov DWORD PTR 12[esi],edx
+ jz $L010sw_end
+ ; Tail Round 2
+ mov eax,DWORD PTR 8[edi]
+ mul eax
+ mov DWORD PTR 16[esi],eax
+ dec ebx
+ mov DWORD PTR 20[esi],edx
+ jz $L010sw_end
+ ; Tail Round 3
+ mov eax,DWORD PTR 12[edi]
+ mul eax
+ mov DWORD PTR 24[esi],eax
+ dec ebx
+ mov DWORD PTR 28[esi],edx
+ jz $L010sw_end
+ ; Tail Round 4
+ mov eax,DWORD PTR 16[edi]
+ mul eax
+ mov DWORD PTR 32[esi],eax
+ dec ebx
+ mov DWORD PTR 36[esi],edx
+ jz $L010sw_end
+ ; Tail Round 5
+ mov eax,DWORD PTR 20[edi]
+ mul eax
+ mov DWORD PTR 40[esi],eax
+ dec ebx
+ mov DWORD PTR 44[esi],edx
+ jz $L010sw_end
+ ; Tail Round 6
+ mov eax,DWORD PTR 24[edi]
+ mul eax
+ mov DWORD PTR 48[esi],eax
+ mov DWORD PTR 52[esi],edx
+$L010sw_end:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_sqr_words ENDP
+ALIGN 16
+_bn_div_words PROC PUBLIC
+$L_bn_div_words_begin::
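+; bn_div_words(h, l, d): one 64-by-32-bit DIV with h in edx and l in eax,
+; returning the quotient (h:l)/d in eax. Callers must keep h < d, since DIV
+; raises #DE when the quotient overflows 32 bits.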
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov edx,DWORD PTR 20[esp]
+ mov eax,DWORD PTR 24[esp]
+ mov ebx,DWORD PTR 28[esp]
+ div ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_div_words ENDP
+ALIGN 16
+_bn_add_words PROC PUBLIC
+$L_bn_add_words_begin::
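+; bn_add_words(r, a, b, n): r[i] = a[i] + b[i] + carry, final carry (0/1)
+; returned in eax. Each round's mov eax,0 / adc eax,eax pair snapshots the
+; carry flag without disturbing it, then adc eax,0 folds in the second
+; addition's carry-out.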
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ mov ebx,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebp,DWORD PTR 32[esp]
+ xor eax,eax
+ and ebp,4294967288
+ jz $L011aw_finish
+$L012aw_loop:
+ ; Round 0
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR [edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR [ebx],ecx
+ ; Round 1
+ mov ecx,DWORD PTR 4[esi]
+ mov edx,DWORD PTR 4[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 4[ebx],ecx
+ ; Round 2
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 8[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 8[ebx],ecx
+ ; Round 3
+ mov ecx,DWORD PTR 12[esi]
+ mov edx,DWORD PTR 12[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 12[ebx],ecx
+ ; Round 4
+ mov ecx,DWORD PTR 16[esi]
+ mov edx,DWORD PTR 16[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 16[ebx],ecx
+ ; Round 5
+ mov ecx,DWORD PTR 20[esi]
+ mov edx,DWORD PTR 20[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 20[ebx],ecx
+ ; Round 6
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 24[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 24[ebx],ecx
+ ; Round 7
+ mov ecx,DWORD PTR 28[esi]
+ mov edx,DWORD PTR 28[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 28[ebx],ecx
+ ;
+
+ add esi,32
+ add edi,32
+ add ebx,32
+ sub ebp,8
+ jnz $L012aw_loop
+$L011aw_finish:
+ mov ebp,DWORD PTR 32[esp]
+ and ebp,7
+ jz $L013aw_end
+ ; Tail Round 0
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR [edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR [ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 1
+ mov ecx,DWORD PTR 4[esi]
+ mov edx,DWORD PTR 4[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 4[ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 2
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 8[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 8[ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 3
+ mov ecx,DWORD PTR 12[esi]
+ mov edx,DWORD PTR 12[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 12[ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 4
+ mov ecx,DWORD PTR 16[esi]
+ mov edx,DWORD PTR 16[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 16[ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 5
+ mov ecx,DWORD PTR 20[esi]
+ mov edx,DWORD PTR 20[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 20[ebx],ecx
+ jz $L013aw_end
+ ; Tail Round 6
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 24[edi]
+ add ecx,eax
+ mov eax,0
+ adc eax,eax
+ add ecx,edx
+ adc eax,0
+ mov DWORD PTR 24[ebx],ecx
+$L013aw_end:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_add_words ENDP
+ALIGN 16
+_bn_sub_words PROC PUBLIC
+$L_bn_sub_words_begin::
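+; bn_sub_words(r, a, b, n): r[i] = a[i] - b[i] - borrow, final borrow (0/1)
+; returned in eax; structurally the same as bn_add_words with sub for add.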
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ mov ebx,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebp,DWORD PTR 32[esp]
+ xor eax,eax
+ and ebp,4294967288
+ jz $L014aw_finish
+$L015aw_loop:
+ ; Round 0
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR [edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR [ebx],ecx
+ ; Round 1
+ mov ecx,DWORD PTR 4[esi]
+ mov edx,DWORD PTR 4[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 4[ebx],ecx
+ ; Round 2
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 8[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 8[ebx],ecx
+ ; Round 3
+ mov ecx,DWORD PTR 12[esi]
+ mov edx,DWORD PTR 12[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 12[ebx],ecx
+ ; Round 4
+ mov ecx,DWORD PTR 16[esi]
+ mov edx,DWORD PTR 16[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 16[ebx],ecx
+ ; Round 5
+ mov ecx,DWORD PTR 20[esi]
+ mov edx,DWORD PTR 20[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 20[ebx],ecx
+ ; Round 6
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 24[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 24[ebx],ecx
+ ; Round 7
+ mov ecx,DWORD PTR 28[esi]
+ mov edx,DWORD PTR 28[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 28[ebx],ecx
+ ;
+
+ add esi,32
+ add edi,32
+ add ebx,32
+ sub ebp,8
+ jnz $L015aw_loop
+$L014aw_finish:
+ mov ebp,DWORD PTR 32[esp]
+ and ebp,7
+ jz $L016aw_end
+ ; Tail Round 0
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR [edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR [ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 1
+ mov ecx,DWORD PTR 4[esi]
+ mov edx,DWORD PTR 4[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 4[ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 2
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 8[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 8[ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 3
+ mov ecx,DWORD PTR 12[esi]
+ mov edx,DWORD PTR 12[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 12[ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 4
+ mov ecx,DWORD PTR 16[esi]
+ mov edx,DWORD PTR 16[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 16[ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 5
+ mov ecx,DWORD PTR 20[esi]
+ mov edx,DWORD PTR 20[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ dec ebp
+ mov DWORD PTR 20[ebx],ecx
+ jz $L016aw_end
+ ; Tail Round 6
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 24[edi]
+ sub ecx,eax
+ mov eax,0
+ adc eax,eax
+ sub ecx,edx
+ adc eax,0
+ mov DWORD PTR 24[ebx],ecx
+$L016aw_end:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_bn_sub_words ENDP
+ALIGN 16
+_bn_mul_comba8 PROC PUBLIC
+$L_bn_mul_comba8_begin::
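+; bn_mul_comba8(r, a, b): fully unrolled 8x8-word multiply in
+; product-scanning ("comba") order -- each output word is the column sum of
+; all a[i]*b[j] with i+j fixed, accumulated in three registers (ebx, ecx,
+; ebp) that rotate through the low/high/overflow roles.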
+ push esi
+ mov esi,DWORD PTR 12[esp]
+ push edi
+ mov edi,DWORD PTR 20[esp]
+ push ebp
+ push ebx
+ xor ebx,ebx
+ mov eax,DWORD PTR [esi]
+ xor ecx,ecx
+ mov edx,DWORD PTR [edi]
+ ; ################## Calculate word 0
+ xor ebp,ebp
+ ; mul a[0]*b[0]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR [edi]
+ adc ebp,0
+ mov DWORD PTR [eax],ebx
+ mov eax,DWORD PTR 4[esi]
+ ; saved r[0]
+ ; ################## Calculate word 1
+ xor ebx,ebx
+ ; mul a[1]*b[0]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR [esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebx,0
+ ; mul a[0]*b[1]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR [edi]
+ adc ebx,0
+ mov DWORD PTR 4[eax],ecx
+ mov eax,DWORD PTR 8[esi]
+ ; saved r[1]
+ ; ################## Calculate word 2
+ xor ecx,ecx
+ ; mul a[2]*b[0]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ecx,0
+ ; mul a[1]*b[1]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR [esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ecx,0
+ ; mul a[0]*b[2]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR [edi]
+ adc ecx,0
+ mov DWORD PTR 8[eax],ebp
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[2]
+ ; ################## Calculate word 3
+ xor ebp,ebp
+ ; mul a[3]*b[0]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebp,0
+ ; mul a[2]*b[1]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebp,0
+ ; mul a[1]*b[2]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR [esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebp,0
+ ; mul a[0]*b[3]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR [edi]
+ adc ebp,0
+ mov DWORD PTR 12[eax],ebx
+ mov eax,DWORD PTR 16[esi]
+ ; saved r[3]
+ ; ################## Calculate word 4
+ xor ebx,ebx
+ ; mul a[4]*b[0]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebx,0
+ ; mul a[3]*b[1]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebx,0
+ ; mul a[2]*b[2]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebx,0
+ ; mul a[1]*b[3]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR [esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebx,0
+ ; mul a[0]*b[4]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR [edi]
+ adc ebx,0
+ mov DWORD PTR 16[eax],ecx
+ mov eax,DWORD PTR 20[esi]
+ ; saved r[4]
+ ; ################## Calculate word 5
+ xor ecx,ecx
+ ; mul a[5]*b[0]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ecx,0
+ ; mul a[4]*b[1]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ecx,0
+ ; mul a[3]*b[2]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ecx,0
+ ; mul a[2]*b[3]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ecx,0
+ ; mul a[1]*b[4]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR [esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ecx,0
+ ; mul a[0]*b[5]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR [edi]
+ adc ecx,0
+ mov DWORD PTR 20[eax],ebp
+ mov eax,DWORD PTR 24[esi]
+ ; saved r[5]
+ ; ################## Calculate word 6
+ xor ebp,ebp
+ ; mul a[6]*b[0]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebp,0
+ ; mul a[5]*b[1]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebp,0
+ ; mul a[4]*b[2]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebp,0
+ ; mul a[3]*b[3]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebp,0
+ ; mul a[2]*b[4]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ebp,0
+ ; mul a[1]*b[5]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR [esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebp,0
+ ; mul a[0]*b[6]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR [edi]
+ adc ebp,0
+ mov DWORD PTR 24[eax],ebx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[6]
+ ; ################## Calculate word 7
+ xor ebx,ebx
+ ; mul a[7]*b[0]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebx,0
+ ; mul a[6]*b[1]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebx,0
+ ; mul a[5]*b[2]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebx,0
+ ; mul a[4]*b[3]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebx,0
+ ; mul a[3]*b[4]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ebx,0
+ ; mul a[2]*b[5]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebx,0
+ ; mul a[1]*b[6]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR [esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebx,0
+ ; mul a[0]*b[7]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebx,0
+ mov DWORD PTR 28[eax],ecx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[7]
+ ; ################## Calculate word 8
+ xor ecx,ecx
+ ; mul a[7]*b[1]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ecx,0
+ ; mul a[6]*b[2]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ecx,0
+ ; mul a[5]*b[3]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ecx,0
+ ; mul a[4]*b[4]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ecx,0
+ ; mul a[3]*b[5]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ecx,0
+ ; mul a[2]*b[6]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ecx,0
+ ; mul a[1]*b[7]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ecx,0
+ mov DWORD PTR 32[eax],ebp
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[8]
+ ; ################## Calculate word 9
+ xor ebp,ebp
+ ; mul a[7]*b[2]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebp,0
+ ; mul a[6]*b[3]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebp,0
+ ; mul a[5]*b[4]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ebp,0
+ ; mul a[4]*b[5]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebp,0
+ ; mul a[3]*b[6]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebp,0
+ ; mul a[2]*b[7]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebp,0
+ mov DWORD PTR 36[eax],ebx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[9]
+ ; ################## Calculate word 10
+ xor ebx,ebx
+ ; mul a[7]*b[3]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebx,0
+ ; mul a[6]*b[4]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ebx,0
+ ; mul a[5]*b[5]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebx,0
+ ; mul a[4]*b[6]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 12[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebx,0
+ ; mul a[3]*b[7]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR 16[edi]
+ adc ebx,0
+ mov DWORD PTR 40[eax],ecx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[10]
+ ; ################## Calculate word 11
+ xor ecx,ecx
+ ; mul a[7]*b[4]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ecx,0
+ ; mul a[6]*b[5]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ecx,0
+ ; mul a[5]*b[6]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 16[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ecx,0
+ ; mul a[4]*b[7]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR 20[edi]
+ adc ecx,0
+ mov DWORD PTR 44[eax],ebp
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[11]
+ ; ################## Calculate word 12
+ xor ebp,ebp
+ ; mul a[7]*b[5]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebp,0
+ ; mul a[6]*b[6]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebp,0
+ ; mul a[5]*b[7]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR 24[edi]
+ adc ebp,0
+ mov DWORD PTR 48[eax],ebx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[12]
+ ; ################## Calculate word 13
+ xor ebx,ebx
+ ; mul a[7]*b[6]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 24[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebx,0
+ ; mul a[6]*b[7]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR 28[edi]
+ adc ebx,0
+ mov DWORD PTR 52[eax],ecx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[13]
+ ; ################## Calculate word 14
+ xor ecx,ecx
+ ; mul a[7]*b[7]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ adc ecx,0
+ mov DWORD PTR 56[eax],ebp
+ ; saved r[14]
+ ; save r[15]
+ mov DWORD PTR 60[eax],ebx
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_bn_mul_comba8 ENDP
+ALIGN 16
+_bn_mul_comba4 PROC PUBLIC
+$L_bn_mul_comba4_begin::
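+; bn_mul_comba4(r, a, b): 4x4-word variant of the comba multiply above,
+; producing an eight-word result.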
+ push esi
+ mov esi,DWORD PTR 12[esp]
+ push edi
+ mov edi,DWORD PTR 20[esp]
+ push ebp
+ push ebx
+ xor ebx,ebx
+ mov eax,DWORD PTR [esi]
+ xor ecx,ecx
+ mov edx,DWORD PTR [edi]
+ ; ################## Calculate word 0
+ xor ebp,ebp
+ ; mul a[0]*b[0]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR [edi]
+ adc ebp,0
+ mov DWORD PTR [eax],ebx
+ mov eax,DWORD PTR 4[esi]
+ ; saved r[0]
+ ; ################## Calculate word 1
+ xor ebx,ebx
+ ; mul a[1]*b[0]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR [esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebx,0
+ ; mul a[0]*b[1]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR [edi]
+ adc ebx,0
+ mov DWORD PTR 4[eax],ecx
+ mov eax,DWORD PTR 8[esi]
+ ; saved r[1]
+ ; ################## Calculate word 2
+ xor ecx,ecx
+ ; mul a[2]*b[0]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ecx,0
+ ; mul a[1]*b[1]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR [esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ecx,0
+ ; mul a[0]*b[2]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR [edi]
+ adc ecx,0
+ mov DWORD PTR 8[eax],ebp
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[2]
+ ; ################## Calculate word 3
+ xor ebp,ebp
+ ; mul a[3]*b[0]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebp,0
+ ; mul a[2]*b[1]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebp,0
+ ; mul a[1]*b[2]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR [esi]
+ adc ecx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebp,0
+ ; mul a[0]*b[3]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ mov edx,DWORD PTR 4[edi]
+ adc ebp,0
+ mov DWORD PTR 12[eax],ebx
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[3]
+ ; ################## Calculate word 4
+ xor ebx,ebx
+ ; mul a[3]*b[1]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebx,0
+ ; mul a[2]*b[2]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 4[esi]
+ adc ebp,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ebx,0
+ ; mul a[1]*b[3]
+ mul edx
+ add ecx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebp,edx
+ mov edx,DWORD PTR 8[edi]
+ adc ebx,0
+ mov DWORD PTR 16[eax],ecx
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[4]
+ ; ################## Calculate word 5
+ xor ecx,ecx
+ ; mul a[3]*b[2]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ecx,0
+ ; mul a[2]*b[3]
+ mul edx
+ add ebp,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ebx,edx
+ mov edx,DWORD PTR 12[edi]
+ adc ecx,0
+ mov DWORD PTR 20[eax],ebp
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[5]
+ ; ################## Calculate word 6
+ xor ebp,ebp
+ ; mul a[3]*b[3]
+ mul edx
+ add ebx,eax
+ mov eax,DWORD PTR 20[esp]
+ adc ecx,edx
+ adc ebp,0
+ mov DWORD PTR 24[eax],ebx
+ ; saved r[6]
+ ; save r[7]
+ mov DWORD PTR 28[eax],ecx
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_bn_mul_comba4 ENDP
+ALIGN 16
+_bn_sqr_comba8 PROC PUBLIC
+$L_bn_sqr_comba8_begin::
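+; bn_sqr_comba8(r, a): comba-style squaring. Each off-diagonal product
+; a[i]*a[j] (i > j) is doubled in-register (add eax,eax / adc edx,edx plus
+; an overflow adc) before accumulation; diagonal a[i]*a[i] terms are added
+; once.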
+ push esi
+ push edi
+ push ebp
+ push ebx
+ mov edi,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ xor ebx,ebx
+ xor ecx,ecx
+ mov eax,DWORD PTR [esi]
+ ; ############### Calculate word 0
+ xor ebp,ebp
+ ; sqr a[0]*a[0]
+ mul eax
+ add ebx,eax
+ adc ecx,edx
+ mov edx,DWORD PTR [esi]
+ adc ebp,0
+ mov DWORD PTR [edi],ebx
+ mov eax,DWORD PTR 4[esi]
+ ; saved r[0]
+ ; ############### Calculate word 1
+ xor ebx,ebx
+ ; sqr a[1]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,0
+ mov DWORD PTR 4[edi],ecx
+ mov edx,DWORD PTR [esi]
+ ; saved r[1]
+ ; ############### Calculate word 2
+ xor ecx,ecx
+ ; sqr a[2]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 4[esi]
+ adc ecx,0
+ ; sqr a[1]*a[1]
+ mul eax
+ add ebp,eax
+ adc ebx,edx
+ mov edx,DWORD PTR [esi]
+ adc ecx,0
+ mov DWORD PTR 8[edi],ebp
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[2]
+ ; ############### Calculate word 3
+ xor ebp,ebp
+ ; sqr a[3]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[2]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 16[esi]
+ adc ebp,0
+ mov DWORD PTR 12[edi],ebx
+ mov edx,DWORD PTR [esi]
+ ; saved r[3]
+ ; ############### Calculate word 4
+ xor ebx,ebx
+ ; sqr a[4]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 12[esi]
+ adc ebx,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[3]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,0
+ ; sqr a[2]*a[2]
+ mul eax
+ add ecx,eax
+ adc ebp,edx
+ mov edx,DWORD PTR [esi]
+ adc ebx,0
+ mov DWORD PTR 16[edi],ecx
+ mov eax,DWORD PTR 20[esi]
+ ; saved r[4]
+ ; ############### Calculate word 5
+ xor ecx,ecx
+ ; sqr a[5]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 16[esi]
+ adc ecx,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[4]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 12[esi]
+ adc ecx,0
+ mov edx,DWORD PTR 8[esi]
+ ; sqr a[3]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ecx,0
+ mov DWORD PTR 20[edi],ebp
+ mov edx,DWORD PTR [esi]
+ ; saved r[5]
+ ; ############### Calculate word 6
+ xor ebp,ebp
+ ; sqr a[6]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 20[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[5]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 16[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 8[esi]
+ ; sqr a[4]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 12[esi]
+ adc ebp,0
+ ; sqr a[3]*a[3]
+ mul eax
+ add ebx,eax
+ adc ecx,edx
+ mov edx,DWORD PTR [esi]
+ adc ebp,0
+ mov DWORD PTR 24[edi],ebx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[6]
+ ; ############### Calculate word 7
+ xor ebx,ebx
+ ; sqr a[7]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ebx,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[6]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 20[esi]
+ adc ebx,0
+ mov edx,DWORD PTR 8[esi]
+ ; sqr a[5]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 16[esi]
+ adc ebx,0
+ mov edx,DWORD PTR 12[esi]
+ ; sqr a[4]*a[3]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 28[esi]
+ adc ebx,0
+ mov DWORD PTR 28[edi],ecx
+ mov edx,DWORD PTR 4[esi]
+ ; saved r[7]
+ ; ############### Calculate word 8
+ xor ecx,ecx
+ ; sqr a[7]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ecx,0
+ mov edx,DWORD PTR 8[esi]
+ ; sqr a[6]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 20[esi]
+ adc ecx,0
+ mov edx,DWORD PTR 12[esi]
+ ; sqr a[5]*a[3]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 16[esi]
+ adc ecx,0
+ ; sqr a[4]*a[4]
+ mul eax
+ add ebp,eax
+ adc ebx,edx
+ mov edx,DWORD PTR 8[esi]
+ adc ecx,0
+ mov DWORD PTR 32[edi],ebp
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[8]
+ ; ############### Calculate word 9
+ xor ebp,ebp
+ ; sqr a[7]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 12[esi]
+ ; sqr a[6]*a[3]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 20[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 16[esi]
+ ; sqr a[5]*a[4]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 28[esi]
+ adc ebp,0
+ mov DWORD PTR 36[edi],ebx
+ mov edx,DWORD PTR 12[esi]
+ ; saved r[9]
+ ; ############### Calculate word 10
+ xor ebx,ebx
+ ; sqr a[7]*a[3]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ebx,0
+ mov edx,DWORD PTR 16[esi]
+ ; sqr a[6]*a[4]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 20[esi]
+ adc ebx,0
+ ; sqr a[5]*a[5]
+ mul eax
+ add ecx,eax
+ adc ebp,edx
+ mov edx,DWORD PTR 16[esi]
+ adc ebx,0
+ mov DWORD PTR 40[edi],ecx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[10]
+ ; ############### Calculate word 11
+ xor ecx,ecx
+ ; sqr a[7]*a[4]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ecx,0
+ mov edx,DWORD PTR 20[esi]
+ ; sqr a[6]*a[5]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 28[esi]
+ adc ecx,0
+ mov DWORD PTR 44[edi],ebp
+ mov edx,DWORD PTR 20[esi]
+ ; saved r[11]
+ ; ############### Calculate word 12
+ xor ebp,ebp
+ ; sqr a[7]*a[5]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 24[esi]
+ adc ebp,0
+ ; sqr a[6]*a[6]
+ mul eax
+ add ebx,eax
+ adc ecx,edx
+ mov edx,DWORD PTR 24[esi]
+ adc ebp,0
+ mov DWORD PTR 48[edi],ebx
+ mov eax,DWORD PTR 28[esi]
+ ; saved r[12]
+ ; ############### Calculate word 13
+ xor ebx,ebx
+ ; sqr a[7]*a[6]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 28[esi]
+ adc ebx,0
+ mov DWORD PTR 52[edi],ecx
+ ; saved r[13]
+ ; ############### Calculate word 14
+ xor ecx,ecx
+ ; sqr a[7]*a[7]
+ mul eax
+ add ebp,eax
+ adc ebx,edx
+ adc ecx,0
+ mov DWORD PTR 56[edi],ebp
+ ; saved r[14]
+ mov DWORD PTR 60[edi],ebx
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_bn_sqr_comba8 ENDP
+ALIGN 16
+_bn_sqr_comba4 PROC PUBLIC
+$L_bn_sqr_comba4_begin::
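+; bn_sqr_comba4(r, a): four-word variant of the comba squaring above.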
+ push esi
+ push edi
+ push ebp
+ push ebx
+ mov edi,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ xor ebx,ebx
+ xor ecx,ecx
+ mov eax,DWORD PTR [esi]
+ ; ############### Calculate word 0
+ xor ebp,ebp
+ ; sqr a[0]*a[0]
+ mul eax
+ add ebx,eax
+ adc ecx,edx
+ mov edx,DWORD PTR [esi]
+ adc ebp,0
+ mov DWORD PTR [edi],ebx
+ mov eax,DWORD PTR 4[esi]
+ ; saved r[0]
+ ; ############### Calculate word 1
+ xor ebx,ebx
+ ; sqr a[1]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,0
+ mov DWORD PTR 4[edi],ecx
+ mov edx,DWORD PTR [esi]
+ ; saved r[1]
+ ; ############### Calculate word 2
+ xor ecx,ecx
+ ; sqr a[2]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 4[esi]
+ adc ecx,0
+ ; sqr a[1]*a[1]
+ mul eax
+ add ebp,eax
+ adc ebx,edx
+ mov edx,DWORD PTR [esi]
+ adc ecx,0
+ mov DWORD PTR 8[edi],ebp
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[2]
+ ; ############### Calculate word 3
+ xor ebp,ebp
+ ; sqr a[3]*a[0]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebp,0
+ mov edx,DWORD PTR 4[esi]
+ ; sqr a[2]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebp,0
+ add ebx,eax
+ adc ecx,edx
+ mov eax,DWORD PTR 12[esi]
+ adc ebp,0
+ mov DWORD PTR 12[edi],ebx
+ mov edx,DWORD PTR 4[esi]
+ ; saved r[3]
+ ; ############### Calculate word 4
+ xor ebx,ebx
+ ; sqr a[3]*a[1]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ebx,0
+ add ecx,eax
+ adc ebp,edx
+ mov eax,DWORD PTR 8[esi]
+ adc ebx,0
+ ; sqr a[2]*a[2]
+ mul eax
+ add ecx,eax
+ adc ebp,edx
+ mov edx,DWORD PTR 8[esi]
+ adc ebx,0
+ mov DWORD PTR 16[edi],ecx
+ mov eax,DWORD PTR 12[esi]
+ ; saved r[4]
+ ; ############### Calculate word 5
+ xor ecx,ecx
+ ; sqr a[3]*a[2]
+ mul edx
+ add eax,eax
+ adc edx,edx
+ adc ecx,0
+ add ebp,eax
+ adc ebx,edx
+ mov eax,DWORD PTR 12[esi]
+ adc ecx,0
+ mov DWORD PTR 20[edi],ebp
+ ; saved r[5]
+ ; ############### Calculate word 6
+ xor ebp,ebp
+ ; sqr a[3]*a[3]
+ mul eax
+ add ebx,eax
+ adc ecx,edx
+ adc ebp,0
+ mov DWORD PTR 24[edi],ebx
+ ; saved r[6]
+ mov DWORD PTR 28[edi],ecx
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_bn_sqr_comba4 ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm b/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
new file mode 100644
index 0000000000..acdf6a2f8b
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
@@ -0,0 +1,2367 @@
+TITLE cmll-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_Camellia_EncryptBlock_Rounds PROC PUBLIC
+$L_Camellia_EncryptBlock_Rounds_begin::
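+; Camellia_EncryptBlock_Rounds(grandRounds, plaintext, keyTable, out):
+; encrypts one 16-byte block. bswap converts the block to big-endian words
+; for the round function and back for the store; the esp arithmetic below
+; builds a 64-byte-aligned frame positioned relative to the key schedule,
+; presumably to limit cache-set aliasing between stack and subkeys.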
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov eax,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebx,esp
+ sub esp,28
+ and esp,-64
+ lea ecx,DWORD PTR [edi-127]
+ sub ecx,esp
+ neg ecx
+ and ecx,960
+ sub esp,ecx
+ add esp,4
+ shl eax,6
+ lea eax,DWORD PTR [eax*1+edi]
+ mov DWORD PTR 20[esp],ebx
+ mov DWORD PTR 16[esp],eax
+ call $L000pic_point
+$L000pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L000pic_point)[ebp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ bswap eax
+ mov edx,DWORD PTR 12[esi]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_encrypt
+ mov esp,DWORD PTR 20[esp]
+ bswap eax
+ mov esi,DWORD PTR 32[esp]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_EncryptBlock_Rounds ENDP
+ALIGN 16
+_Camellia_EncryptBlock PROC PUBLIC
+$L_Camellia_EncryptBlock_begin::
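+; Camellia_EncryptBlock(keyBitLength, ...): derives the grand-round count
+; from the key size -- sub eax,keyBitLength sets carry only for keys longer
+; than 128 bits, so adc turns 3 into 4 for 192/256-bit keys -- then rewrites
+; its first argument and tail-jumps to the _Rounds entry.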
+ mov eax,128
+ sub eax,DWORD PTR 4[esp]
+ mov eax,3
+ adc eax,0
+ mov DWORD PTR 4[esp],eax
+ jmp $L_Camellia_EncryptBlock_Rounds_begin
+_Camellia_EncryptBlock ENDP
+ALIGN 16
+_Camellia_encrypt PROC PUBLIC
+$L_Camellia_encrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebx,esp
+ sub esp,28
+ and esp,-64
+ mov eax,DWORD PTR 272[edi]
+ lea ecx,DWORD PTR [edi-127]
+ sub ecx,esp
+ neg ecx
+ and ecx,960
+ sub esp,ecx
+ add esp,4
+ shl eax,6
+ lea eax,DWORD PTR [eax*1+edi]
+ mov DWORD PTR 20[esp],ebx
+ mov DWORD PTR 16[esp],eax
+ call $L001pic_point
+$L001pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L001pic_point)[ebp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ bswap eax
+ mov edx,DWORD PTR 12[esi]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_encrypt
+ mov esp,DWORD PTR 20[esp]
+ bswap eax
+ mov esi,DWORD PTR 24[esp]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_encrypt ENDP
+ALIGN 16
+__x86_Camellia_encrypt PROC PRIVATE
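+; Inner encrypt: expects the byte-swapped block in eax..edx, the key
+; schedule in edi and the S-box table base in ebp. Each $L002loop pass
+; applies six Feistel rounds (three subkey pairs, with lookups split across
+; the S-box planes at offsets 0/4/2048/2052), then the FL/FL^-1 layer
+; between 64-byte key-schedule chunks, until edi hits the end pointer kept
+; in 20[esp].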
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR 16[edi]
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR 8[esp],ebx
+ mov DWORD PTR 12[esp],ecx
+ mov DWORD PTR 16[esp],edx
+ALIGN 16
+$L002loop:
+ xor eax,esi
+ xor ebx,DWORD PTR 20[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 24[edi]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 28[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 32[edi]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ xor eax,esi
+ xor ebx,DWORD PTR 36[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 40[edi]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 44[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 48[edi]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ xor eax,esi
+ xor ebx,DWORD PTR 52[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 56[edi]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 60[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 64[edi]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ add edi,64
+ cmp edi,DWORD PTR 20[esp]
+ je $L003done
+ and esi,eax
+ mov edx,DWORD PTR 16[esp]
+ rol esi,1
+ mov ecx,edx
+ xor ebx,esi
+ or ecx,DWORD PTR 12[edi]
+ mov DWORD PTR 8[esp],ebx
+ xor ecx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 4[edi]
+ mov DWORD PTR 12[esp],ecx
+ or esi,ebx
+ and ecx,DWORD PTR 8[edi]
+ xor eax,esi
+ rol ecx,1
+ mov DWORD PTR 4[esp],eax
+ xor edx,ecx
+ mov esi,DWORD PTR 16[edi]
+ mov DWORD PTR 16[esp],edx
+ jmp $L002loop
+ALIGN 8
+$L003done:
+ mov ecx,eax
+ mov edx,ebx
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ xor eax,esi
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ ret
+__x86_Camellia_encrypt ENDP
+ALIGN 16
+_Camellia_DecryptBlock_Rounds PROC PUBLIC
+$L_Camellia_DecryptBlock_Rounds_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov eax,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebx,esp
+ sub esp,28
+ and esp,-64
+ lea ecx,DWORD PTR [edi-127]
+ sub ecx,esp
+ neg ecx
+ and ecx,960
+ sub esp,ecx
+ add esp,4
+ shl eax,6
+ mov DWORD PTR 16[esp],edi
+ lea edi,DWORD PTR [eax*1+edi]
+ mov DWORD PTR 20[esp],ebx
+ call $L004pic_point
+$L004pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L004pic_point)[ebp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ bswap eax
+ mov edx,DWORD PTR 12[esi]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_decrypt
+ mov esp,DWORD PTR 20[esp]
+ bswap eax
+ mov esi,DWORD PTR 32[esp]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_DecryptBlock_Rounds ENDP
+ALIGN 16
+_Camellia_DecryptBlock PROC PUBLIC
+$L_Camellia_DecryptBlock_begin::
+ mov eax,128
+ sub eax,DWORD PTR 4[esp]
+ mov eax,3
+ adc eax,0
+ mov DWORD PTR 4[esp],eax
+ jmp $L_Camellia_DecryptBlock_Rounds_begin
+_Camellia_DecryptBlock ENDP
+ALIGN 16
+_Camellia_decrypt PROC PUBLIC
+$L_Camellia_decrypt_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebx,esp
+ sub esp,28
+ and esp,-64
+ mov eax,DWORD PTR 272[edi]
+ lea ecx,DWORD PTR [edi-127]
+ sub ecx,esp
+ neg ecx
+ and ecx,960
+ sub esp,ecx
+ add esp,4
+ shl eax,6
+ mov DWORD PTR 16[esp],edi
+ lea edi,DWORD PTR [eax*1+edi]
+ mov DWORD PTR 20[esp],ebx
+ call $L005pic_point
+$L005pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L005pic_point)[ebp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ bswap eax
+ mov edx,DWORD PTR 12[esi]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_decrypt
+ mov esp,DWORD PTR 20[esp]
+ bswap eax
+ mov esi,DWORD PTR 24[esp]
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_decrypt ENDP
+ALIGN 16
+__x86_Camellia_decrypt PROC PRIVATE
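+; Inner decrypt: same round structure as __x86_Camellia_encrypt, but walks
+; the key schedule backwards -- subkeys are fetched at negative offsets from
+; edi, which is decremented by 64 per pass.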
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov esi,DWORD PTR [edi-8]
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR 8[esp],ebx
+ mov DWORD PTR 12[esp],ecx
+ mov DWORD PTR 16[esp],edx
+ALIGN 16
+$L006loop:
+ xor eax,esi
+ xor ebx,DWORD PTR [edi-4]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-16]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR [edi-12]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-24]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ xor eax,esi
+ xor ebx,DWORD PTR [edi-20]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-32]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR [edi-28]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-40]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ xor eax,esi
+ xor ebx,DWORD PTR [edi-36]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 16[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 12[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-48]
+ xor edx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 12[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR [edi-44]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR 4[esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR [edi-56]
+ xor ebx,eax
+ mov DWORD PTR 8[esp],ebx
+ xor eax,edx
+ mov DWORD PTR 4[esp],eax
+ sub edi,64
+ cmp edi,DWORD PTR 20[esp]
+ je $L007done
+ and esi,eax
+ mov edx,DWORD PTR 16[esp]
+ rol esi,1
+ mov ecx,edx
+ xor ebx,esi
+ or ecx,DWORD PTR 4[edi]
+ mov DWORD PTR 8[esp],ebx
+ xor ecx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 12[edi]
+ mov DWORD PTR 12[esp],ecx
+ or esi,ebx
+ and ecx,DWORD PTR [edi]
+ xor eax,esi
+ rol ecx,1
+ mov DWORD PTR 4[esp],eax
+ xor edx,ecx
+ mov esi,DWORD PTR [edi-8]
+ mov DWORD PTR 16[esp],edx
+ jmp $L006loop
+ALIGN 8
+$L007done:
+ mov ecx,eax
+ mov edx,ebx
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ xor ecx,esi
+ xor edx,DWORD PTR 12[edi]
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ ret
+__x86_Camellia_decrypt ENDP
+ALIGN 16
+_Camellia_Ekeygen PROC PUBLIC
+$L_Camellia_Ekeygen_begin::
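+; Camellia_Ekeygen(keyBitLength, rawKey, keyTable): builds the subkey table.
+; KL is stored byte-swapped first; for 192-bit keys the upper half of KR is
+; the complement of its lower half (the not ecx/not edx at $L0091st192), per
+; the Camellia spec; the derived key material then comes from Feistel passes
+; keyed with the $LCamellia_SIGMA constants.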
+ push ebp
+ push ebx
+ push esi
+ push edi
+ sub esp,16
+ mov ebp,DWORD PTR 36[esp]
+ mov esi,DWORD PTR 40[esp]
+ mov edi,DWORD PTR 44[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ cmp ebp,128
+ je $L0081st128
+ mov eax,DWORD PTR 16[esi]
+ mov ebx,DWORD PTR 20[esi]
+ cmp ebp,192
+ je $L0091st192
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 28[esi]
+ jmp $L0101st256
+ALIGN 4
+$L0091st192:
+ mov ecx,eax
+ mov edx,ebx
+ not ecx
+ not edx
+ALIGN 4
+$L0101st256:
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR 32[edi],eax
+ mov DWORD PTR 36[edi],ebx
+ mov DWORD PTR 40[edi],ecx
+ mov DWORD PTR 44[edi],edx
+ xor eax,DWORD PTR [edi]
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ALIGN 4
+$L0081st128:
+ call $L011pic_point
+$L011pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L011pic_point)[ebp]
+ lea edi,DWORD PTR ($LCamellia_SIGMA-$LCamellia_SBOX)[ebp]
+ mov esi,DWORD PTR [edi]
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov DWORD PTR 8[esp],ecx
+ mov DWORD PTR 12[esp],edx
+ xor eax,esi
+ xor ebx,DWORD PTR 4[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 12[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 8[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 8[edi]
+ xor edx,ecx
+ mov DWORD PTR 12[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 8[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 12[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 4[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR [esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 16[edi]
+ xor ebx,eax
+ mov DWORD PTR 4[esp],ebx
+ xor eax,edx
+ mov DWORD PTR [esp],eax
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 44[esp]
+ xor eax,DWORD PTR [esi]
+ xor ebx,DWORD PTR 4[esi]
+ xor ecx,DWORD PTR 8[esi]
+ xor edx,DWORD PTR 12[esi]
+ mov esi,DWORD PTR 16[edi]
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov DWORD PTR 8[esp],ecx
+ mov DWORD PTR 12[esp],edx
+ xor eax,esi
+ xor ebx,DWORD PTR 20[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 12[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 8[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 24[edi]
+ xor edx,ecx
+ mov DWORD PTR 12[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 8[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 28[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 4[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR [esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 32[edi]
+ xor ebx,eax
+ mov DWORD PTR 4[esp],ebx
+ xor eax,edx
+ mov DWORD PTR [esp],eax
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 36[esp]
+ cmp esi,128
+ jne $L0122nd256
+ mov edi,DWORD PTR 44[esp]
+ lea edi,DWORD PTR 128[edi]
+ mov DWORD PTR [edi-112],eax
+ mov DWORD PTR [edi-108],ebx
+ mov DWORD PTR [edi-104],ecx
+ mov DWORD PTR [edi-100],edx
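+; The shl/shr/or chains below implement 128-bit left rotations spread
+; across eax..edx, slicing the rotated key variables into their round-subkey
+; slots (this is the 128-bit key path; $L0122nd256 handles 192/256).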
+ mov ebp,eax
+ shl eax,15
+ mov esi,ebx
+ shr esi,17
+ shl ebx,15
+ or eax,esi
+ mov esi,ecx
+ shl ecx,15
+ mov DWORD PTR [edi-80],eax
+ shr esi,17
+ or ebx,esi
+ shr ebp,17
+ mov esi,edx
+ shr esi,17
+ mov DWORD PTR [edi-76],ebx
+ shl edx,15
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR [edi-72],ecx
+ mov DWORD PTR [edi-68],edx
+ mov ebp,eax
+ shl eax,15
+ mov esi,ebx
+ shr esi,17
+ shl ebx,15
+ or eax,esi
+ mov esi,ecx
+ shl ecx,15
+ mov DWORD PTR [edi-64],eax
+ shr esi,17
+ or ebx,esi
+ shr ebp,17
+ mov esi,edx
+ shr esi,17
+ mov DWORD PTR [edi-60],ebx
+ shl edx,15
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR [edi-56],ecx
+ mov DWORD PTR [edi-52],edx
+ mov ebp,eax
+ shl eax,15
+ mov esi,ebx
+ shr esi,17
+ shl ebx,15
+ or eax,esi
+ mov esi,ecx
+ shl ecx,15
+ mov DWORD PTR [edi-32],eax
+ shr esi,17
+ or ebx,esi
+ shr ebp,17
+ mov esi,edx
+ shr esi,17
+ mov DWORD PTR [edi-28],ebx
+ shl edx,15
+ or ecx,esi
+ or edx,ebp
+ mov ebp,eax
+ shl eax,15
+ mov esi,ebx
+ shr esi,17
+ shl ebx,15
+ or eax,esi
+ mov esi,ecx
+ shl ecx,15
+ mov DWORD PTR [edi-16],eax
+ shr esi,17
+ or ebx,esi
+ shr ebp,17
+ mov esi,edx
+ shr esi,17
+ mov DWORD PTR [edi-12],ebx
+ shl edx,15
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR [edi-8],ecx
+ mov DWORD PTR [edi-4],edx
+ mov ebp,ebx
+ shl ebx,2
+ mov esi,ecx
+ shr esi,30
+ shl ecx,2
+ or ebx,esi
+ mov esi,edx
+ shl edx,2
+ mov DWORD PTR 32[edi],ebx
+ shr esi,30
+ or ecx,esi
+ shr ebp,30
+ mov esi,eax
+ shr esi,30
+ mov DWORD PTR 36[edi],ecx
+ shl eax,2
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 40[edi],edx
+ mov DWORD PTR 44[edi],eax
+ mov ebp,ebx
+ shl ebx,17
+ mov esi,ecx
+ shr esi,15
+ shl ecx,17
+ or ebx,esi
+ mov esi,edx
+ shl edx,17
+ mov DWORD PTR 64[edi],ebx
+ shr esi,15
+ or ecx,esi
+ shr ebp,15
+ mov esi,eax
+ shr esi,15
+ mov DWORD PTR 68[edi],ecx
+ shl eax,17
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 72[edi],edx
+ mov DWORD PTR 76[edi],eax
+ mov ebx,DWORD PTR [edi-128]
+ mov ecx,DWORD PTR [edi-124]
+ mov edx,DWORD PTR [edi-120]
+ mov eax,DWORD PTR [edi-116]
+ mov ebp,ebx
+ shl ebx,15
+ mov esi,ecx
+ shr esi,17
+ shl ecx,15
+ or ebx,esi
+ mov esi,edx
+ shl edx,15
+ mov DWORD PTR [edi-96],ebx
+ shr esi,17
+ or ecx,esi
+ shr ebp,17
+ mov esi,eax
+ shr esi,17
+ mov DWORD PTR [edi-92],ecx
+ shl eax,15
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR [edi-88],edx
+ mov DWORD PTR [edi-84],eax
+ mov ebp,ebx
+ shl ebx,30
+ mov esi,ecx
+ shr esi,2
+ shl ecx,30
+ or ebx,esi
+ mov esi,edx
+ shl edx,30
+ mov DWORD PTR [edi-48],ebx
+ shr esi,2
+ or ecx,esi
+ shr ebp,2
+ mov esi,eax
+ shr esi,2
+ mov DWORD PTR [edi-44],ecx
+ shl eax,30
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR [edi-40],edx
+ mov DWORD PTR [edi-36],eax
+ mov ebp,ebx
+ shl ebx,15
+ mov esi,ecx
+ shr esi,17
+ shl ecx,15
+ or ebx,esi
+ mov esi,edx
+ shl edx,15
+ shr esi,17
+ or ecx,esi
+ shr ebp,17
+ mov esi,eax
+ shr esi,17
+ shl eax,15
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR [edi-24],edx
+ mov DWORD PTR [edi-20],eax
+ mov ebp,ebx
+ shl ebx,17
+ mov esi,ecx
+ shr esi,15
+ shl ecx,17
+ or ebx,esi
+ mov esi,edx
+ shl edx,17
+ mov DWORD PTR [edi],ebx
+ shr esi,15
+ or ecx,esi
+ shr ebp,15
+ mov esi,eax
+ shr esi,15
+ mov DWORD PTR 4[edi],ecx
+ shl eax,17
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 8[edi],edx
+ mov DWORD PTR 12[edi],eax
+ mov ebp,ebx
+ shl ebx,17
+ mov esi,ecx
+ shr esi,15
+ shl ecx,17
+ or ebx,esi
+ mov esi,edx
+ shl edx,17
+ mov DWORD PTR 16[edi],ebx
+ shr esi,15
+ or ecx,esi
+ shr ebp,15
+ mov esi,eax
+ shr esi,15
+ mov DWORD PTR 20[edi],ecx
+ shl eax,17
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 24[edi],edx
+ mov DWORD PTR 28[edi],eax
+ mov ebp,ebx
+ shl ebx,17
+ mov esi,ecx
+ shr esi,15
+ shl ecx,17
+ or ebx,esi
+ mov esi,edx
+ shl edx,17
+ mov DWORD PTR 48[edi],ebx
+ shr esi,15
+ or ecx,esi
+ shr ebp,15
+ mov esi,eax
+ shr esi,15
+ mov DWORD PTR 52[edi],ecx
+ shl eax,17
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 56[edi],edx
+ mov DWORD PTR 60[edi],eax
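+ ; 128-bit key: schedule complete, eax=3 reports the grand-round count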
+ mov eax,3
+ jmp $L013done
+ALIGN 16
+$L0122nd256:
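+ ; second key-schedule pass, taken only for 192- and 256-bit keys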
+ mov esi,DWORD PTR 44[esp]
+ mov DWORD PTR 48[esi],eax
+ mov DWORD PTR 52[esi],ebx
+ mov DWORD PTR 56[esi],ecx
+ mov DWORD PTR 60[esi],edx
+ xor eax,DWORD PTR 32[esi]
+ xor ebx,DWORD PTR 36[esi]
+ xor ecx,DWORD PTR 40[esi]
+ xor edx,DWORD PTR 44[esi]
+ mov esi,DWORD PTR 32[edi]
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov DWORD PTR 8[esp],ecx
+ mov DWORD PTR 12[esp],edx
+ xor eax,esi
+ xor ebx,DWORD PTR 36[edi]
+ movzx esi,ah
+ mov edx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,al
+ xor edx,DWORD PTR 4[esi*8+ebp]
+ shr eax,16
+ movzx esi,bl
+ mov ecx,DWORD PTR [esi*8+ebp]
+ movzx esi,ah
+ xor edx,DWORD PTR [esi*8+ebp]
+ movzx esi,bh
+ xor ecx,DWORD PTR 4[esi*8+ebp]
+ shr ebx,16
+ movzx eax,al
+ xor edx,DWORD PTR 2048[eax*8+ebp]
+ movzx esi,bh
+ mov eax,DWORD PTR 12[esp]
+ xor ecx,edx
+ ror edx,8
+ xor ecx,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,bl
+ mov ebx,DWORD PTR 8[esp]
+ xor edx,eax
+ xor ecx,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 40[edi]
+ xor edx,ecx
+ mov DWORD PTR 12[esp],edx
+ xor ecx,ebx
+ mov DWORD PTR 8[esp],ecx
+ xor ecx,esi
+ xor edx,DWORD PTR 44[edi]
+ movzx esi,ch
+ mov ebx,DWORD PTR 2052[esi*8+ebp]
+ movzx esi,cl
+ xor ebx,DWORD PTR 4[esi*8+ebp]
+ shr ecx,16
+ movzx esi,dl
+ mov eax,DWORD PTR [esi*8+ebp]
+ movzx esi,ch
+ xor ebx,DWORD PTR [esi*8+ebp]
+ movzx esi,dh
+ xor eax,DWORD PTR 4[esi*8+ebp]
+ shr edx,16
+ movzx ecx,cl
+ xor ebx,DWORD PTR 2048[ecx*8+ebp]
+ movzx esi,dh
+ mov ecx,DWORD PTR 4[esp]
+ xor eax,ebx
+ ror ebx,8
+ xor eax,DWORD PTR 2048[esi*8+ebp]
+ movzx esi,dl
+ mov edx,DWORD PTR [esp]
+ xor ebx,ecx
+ xor eax,DWORD PTR 2052[esi*8+ebp]
+ mov esi,DWORD PTR 48[edi]
+ xor ebx,eax
+ mov DWORD PTR 4[esp],ebx
+ xor eax,edx
+ mov DWORD PTR [esp],eax
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov edi,DWORD PTR 44[esp]
+ lea edi,DWORD PTR 128[edi]
+ mov DWORD PTR [edi-112],eax
+ mov DWORD PTR [edi-108],ebx
+ mov DWORD PTR [edi-104],ecx
+ mov DWORD PTR [edi-100],edx
+ mov ebp,eax
+ shl eax,30
+ mov esi,ebx
+ shr esi,2
+ shl ebx,30
+ or eax,esi
+ mov esi,ecx
+ shl ecx,30
+ mov DWORD PTR [edi-48],eax
+ shr esi,2
+ or ebx,esi
+ shr ebp,2
+ mov esi,edx
+ shr esi,2
+ mov DWORD PTR [edi-44],ebx
+ shl edx,30
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR [edi-40],ecx
+ mov DWORD PTR [edi-36],edx
+ mov ebp,eax
+ shl eax,30
+ mov esi,ebx
+ shr esi,2
+ shl ebx,30
+ or eax,esi
+ mov esi,ecx
+ shl ecx,30
+ mov DWORD PTR 32[edi],eax
+ shr esi,2
+ or ebx,esi
+ shr ebp,2
+ mov esi,edx
+ shr esi,2
+ mov DWORD PTR 36[edi],ebx
+ shl edx,30
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR 40[edi],ecx
+ mov DWORD PTR 44[edi],edx
+ mov ebp,ebx
+ shl ebx,19
+ mov esi,ecx
+ shr esi,13
+ shl ecx,19
+ or ebx,esi
+ mov esi,edx
+ shl edx,19
+ mov DWORD PTR 128[edi],ebx
+ shr esi,13
+ or ecx,esi
+ shr ebp,13
+ mov esi,eax
+ shr esi,13
+ mov DWORD PTR 132[edi],ecx
+ shl eax,19
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 136[edi],edx
+ mov DWORD PTR 140[edi],eax
+ mov ebx,DWORD PTR [edi-96]
+ mov ecx,DWORD PTR [edi-92]
+ mov edx,DWORD PTR [edi-88]
+ mov eax,DWORD PTR [edi-84]
+ mov ebp,ebx
+ shl ebx,15
+ mov esi,ecx
+ shr esi,17
+ shl ecx,15
+ or ebx,esi
+ mov esi,edx
+ shl edx,15
+ mov DWORD PTR [edi-96],ebx
+ shr esi,17
+ or ecx,esi
+ shr ebp,17
+ mov esi,eax
+ shr esi,17
+ mov DWORD PTR [edi-92],ecx
+ shl eax,15
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR [edi-88],edx
+ mov DWORD PTR [edi-84],eax
+ mov ebp,ebx
+ shl ebx,15
+ mov esi,ecx
+ shr esi,17
+ shl ecx,15
+ or ebx,esi
+ mov esi,edx
+ shl edx,15
+ mov DWORD PTR [edi-64],ebx
+ shr esi,17
+ or ecx,esi
+ shr ebp,17
+ mov esi,eax
+ shr esi,17
+ mov DWORD PTR [edi-60],ecx
+ shl eax,15
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR [edi-56],edx
+ mov DWORD PTR [edi-52],eax
+ mov ebp,ebx
+ shl ebx,30
+ mov esi,ecx
+ shr esi,2
+ shl ecx,30
+ or ebx,esi
+ mov esi,edx
+ shl edx,30
+ mov DWORD PTR 16[edi],ebx
+ shr esi,2
+ or ecx,esi
+ shr ebp,2
+ mov esi,eax
+ shr esi,2
+ mov DWORD PTR 20[edi],ecx
+ shl eax,30
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 24[edi],edx
+ mov DWORD PTR 28[edi],eax
+ mov ebp,ecx
+ shl ecx,2
+ mov esi,edx
+ shr esi,30
+ shl edx,2
+ or ecx,esi
+ mov esi,eax
+ shl eax,2
+ mov DWORD PTR 80[edi],ecx
+ shr esi,30
+ or edx,esi
+ shr ebp,30
+ mov esi,ebx
+ shr esi,30
+ mov DWORD PTR 84[edi],edx
+ shl ebx,2
+ or eax,esi
+ or ebx,ebp
+ mov DWORD PTR 88[edi],eax
+ mov DWORD PTR 92[edi],ebx
+ mov ecx,DWORD PTR [edi-80]
+ mov edx,DWORD PTR [edi-76]
+ mov eax,DWORD PTR [edi-72]
+ mov ebx,DWORD PTR [edi-68]
+ mov ebp,ecx
+ shl ecx,15
+ mov esi,edx
+ shr esi,17
+ shl edx,15
+ or ecx,esi
+ mov esi,eax
+ shl eax,15
+ mov DWORD PTR [edi-80],ecx
+ shr esi,17
+ or edx,esi
+ shr ebp,17
+ mov esi,ebx
+ shr esi,17
+ mov DWORD PTR [edi-76],edx
+ shl ebx,15
+ or eax,esi
+ or ebx,ebp
+ mov DWORD PTR [edi-72],eax
+ mov DWORD PTR [edi-68],ebx
+ mov ebp,ecx
+ shl ecx,30
+ mov esi,edx
+ shr esi,2
+ shl edx,30
+ or ecx,esi
+ mov esi,eax
+ shl eax,30
+ mov DWORD PTR [edi-16],ecx
+ shr esi,2
+ or edx,esi
+ shr ebp,2
+ mov esi,ebx
+ shr esi,2
+ mov DWORD PTR [edi-12],edx
+ shl ebx,30
+ or eax,esi
+ or ebx,ebp
+ mov DWORD PTR [edi-8],eax
+ mov DWORD PTR [edi-4],ebx
+ mov DWORD PTR 64[edi],edx
+ mov DWORD PTR 68[edi],eax
+ mov DWORD PTR 72[edi],ebx
+ mov DWORD PTR 76[edi],ecx
+ mov ebp,edx
+ shl edx,17
+ mov esi,eax
+ shr esi,15
+ shl eax,17
+ or edx,esi
+ mov esi,ebx
+ shl ebx,17
+ mov DWORD PTR 96[edi],edx
+ shr esi,15
+ or eax,esi
+ shr ebp,15
+ mov esi,ecx
+ shr esi,15
+ mov DWORD PTR 100[edi],eax
+ shl ecx,17
+ or ebx,esi
+ or ecx,ebp
+ mov DWORD PTR 104[edi],ebx
+ mov DWORD PTR 108[edi],ecx
+ mov edx,DWORD PTR [edi-128]
+ mov eax,DWORD PTR [edi-124]
+ mov ebx,DWORD PTR [edi-120]
+ mov ecx,DWORD PTR [edi-116]
+ mov ebp,eax
+ shl eax,13
+ mov esi,ebx
+ shr esi,19
+ shl ebx,13
+ or eax,esi
+ mov esi,ecx
+ shl ecx,13
+ mov DWORD PTR [edi-32],eax
+ shr esi,19
+ or ebx,esi
+ shr ebp,19
+ mov esi,edx
+ shr esi,19
+ mov DWORD PTR [edi-28],ebx
+ shl edx,13
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR [edi-24],ecx
+ mov DWORD PTR [edi-20],edx
+ mov ebp,eax
+ shl eax,15
+ mov esi,ebx
+ shr esi,17
+ shl ebx,15
+ or eax,esi
+ mov esi,ecx
+ shl ecx,15
+ mov DWORD PTR [edi],eax
+ shr esi,17
+ or ebx,esi
+ shr ebp,17
+ mov esi,edx
+ shr esi,17
+ mov DWORD PTR 4[edi],ebx
+ shl edx,15
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ebp,eax
+ shl eax,17
+ mov esi,ebx
+ shr esi,15
+ shl ebx,17
+ or eax,esi
+ mov esi,ecx
+ shl ecx,17
+ mov DWORD PTR 48[edi],eax
+ shr esi,15
+ or ebx,esi
+ shr ebp,15
+ mov esi,edx
+ shr esi,15
+ mov DWORD PTR 52[edi],ebx
+ shl edx,17
+ or ecx,esi
+ or edx,ebp
+ mov DWORD PTR 56[edi],ecx
+ mov DWORD PTR 60[edi],edx
+ mov ebp,ebx
+ shl ebx,2
+ mov esi,ecx
+ shr esi,30
+ shl ecx,2
+ or ebx,esi
+ mov esi,edx
+ shl edx,2
+ mov DWORD PTR 112[edi],ebx
+ shr esi,30
+ or ecx,esi
+ shr ebp,30
+ mov esi,eax
+ shr esi,30
+ mov DWORD PTR 116[edi],ecx
+ shl eax,2
+ or edx,esi
+ or eax,ebp
+ mov DWORD PTR 120[edi],edx
+ mov DWORD PTR 124[edi],eax
+ mov eax,4
+$L013done:
+ lea edx,DWORD PTR 144[edi]
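+ ; edx = key table + 272; the caller stores the grand-round count there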
+ add esp,16
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_Ekeygen ENDP
+ALIGN 16
+_Camellia_set_key PROC PUBLIC
+$L_Camellia_set_key_begin::
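+ ; Camellia_set_key(userKey, bits, keyTable):
+ ; returns 0 on success, -1 if a pointer is NULL,
+ ; -2 if the key size is not 128, 192 or 256 bits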
+ push ebx
+ mov ecx,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ mov edx,DWORD PTR 16[esp]
+ mov eax,-1
+ test ecx,ecx
+ jz $L014done
+ test edx,edx
+ jz $L014done
+ mov eax,-2
+ cmp ebx,256
+ je $L015arg_ok
+ cmp ebx,192
+ je $L015arg_ok
+ cmp ebx,128
+ jne $L014done
+ALIGN 4
+$L015arg_ok:
+ push edx
+ push ecx
+ push ebx
+ call $L_Camellia_Ekeygen_begin
+ add esp,12
+ mov DWORD PTR [edx],eax
+ xor eax,eax
+ALIGN 4
+$L014done:
+ pop ebx
+ ret
+_Camellia_set_key ENDP
+ALIGN 64
+$LCamellia_SIGMA::
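+ ; SIGMA key-schedule constants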
+DD 2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
+ALIGN 64
+$LCamellia_SBOX::
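+ ; 4KB of combined S-boxes, indexed above at +0, +4, +2048 and +2052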
+DD 1886416896,1886388336
+DD 2189591040,741081132
+DD 741092352,3014852787
+DD 3974949888,3233808576
+DD 3014898432,3840147684
+DD 656877312,1465319511
+DD 3233857536,3941204202
+DD 3857048832,2930639022
+DD 3840205824,589496355
+DD 2240120064,1802174571
+DD 1465341696,1162149957
+DD 892679424,2779054245
+DD 3941263872,3991732461
+DD 202116096,1330577487
+DD 2930683392,488439837
+DD 1094795520,2459041938
+DD 589505280,2256928902
+DD 4025478912,2947481775
+DD 1802201856,2088501372
+DD 2475922176,522125343
+DD 1162167552,1044250686
+DD 421075200,3705405660
+DD 2779096320,1583218782
+DD 555819264,185270283
+DD 3991792896,2795896998
+DD 235802112,960036921
+DD 1330597632,3587506389
+DD 1313754624,1566376029
+DD 488447232,3654877401
+DD 1701143808,1515847770
+DD 2459079168,1364262993
+DD 3183328512,1819017324
+DD 2256963072,2341142667
+DD 3099113472,2593783962
+DD 2947526400,4227531003
+DD 2408550144,2964324528
+DD 2088532992,1953759348
+DD 3958106880,724238379
+DD 522133248,4042260720
+DD 3469659648,2223243396
+DD 1044266496,3755933919
+DD 808464384,3419078859
+DD 3705461760,875823156
+DD 1600085760,1987444854
+DD 1583242752,1835860077
+DD 3318072576,2846425257
+DD 185273088,3520135377
+DD 437918208,67371012
+DD 2795939328,336855060
+DD 3789676800,976879674
+DD 960051456,3739091166
+DD 3402287616,286326801
+DD 3587560704,842137650
+DD 1195853568,2627469468
+DD 1566399744,1397948499
+DD 1027423488,4075946226
+DD 3654932736,4278059262
+DD 16843008,3486449871
+DD 1515870720,3284336835
+DD 3604403712,2054815866
+DD 1364283648,606339108
+DD 1448498688,3907518696
+DD 1819044864,1616904288
+DD 1296911616,1768489065
+DD 2341178112,2863268010
+DD 218959104,2694840480
+DD 2593823232,2711683233
+DD 1717986816,1650589794
+DD 4227595008,1414791252
+DD 3435973632,505282590
+DD 2964369408,3772776672
+DD 757935360,1684275300
+DD 1953788928,269484048
+DD 303174144,0
+DD 724249344,2745368739
+DD 538976256,1970602101
+DD 4042321920,2324299914
+DD 2981212416,3873833190
+DD 2223277056,151584777
+DD 2576980224,3722248413
+DD 3755990784,2273771655
+DD 1280068608,2206400643
+DD 3419130624,3452764365
+DD 3267543552,2425356432
+DD 875836416,1936916595
+DD 2122219008,4143317238
+DD 1987474944,2644312221
+DD 84215040,3216965823
+DD 1835887872,1381105746
+DD 3082270464,3638034648
+DD 2846468352,3368550600
+DD 825307392,3334865094
+DD 3520188672,2172715137
+DD 387389184,1869545583
+DD 67372032,320012307
+DD 3621246720,1667432547
+DD 336860160,3924361449
+DD 1482184704,2812739751
+DD 976894464,2677997727
+DD 1633771776,3166437564
+DD 3739147776,690552873
+DD 454761216,4193845497
+DD 286331136,791609391
+DD 471604224,3031695540
+DD 842150400,2021130360
+DD 252645120,101056518
+DD 2627509248,3890675943
+DD 370546176,1903231089
+DD 1397969664,3570663636
+DD 404232192,2880110763
+DD 4076007936,2290614408
+DD 572662272,2374828173
+DD 4278124032,1920073842
+DD 1145324544,3115909305
+DD 3486502656,4177002744
+DD 2998055424,2896953516
+DD 3284386560,909508662
+DD 3048584448,707395626
+DD 2054846976,1010565180
+DD 2442236160,4059103473
+DD 606348288,1077936192
+DD 134744064,3553820883
+DD 3907577856,3149594811
+DD 2829625344,1128464451
+DD 1616928768,353697813
+DD 4244438016,2913796269
+DD 1768515840,2004287607
+DD 1347440640,2155872384
+DD 2863311360,2189557890
+DD 3503345664,3974889708
+DD 2694881280,656867367
+DD 2105376000,3856990437
+DD 2711724288,2240086149
+DD 2307492096,892665909
+DD 1650614784,202113036
+DD 2543294208,1094778945
+DD 1414812672,4025417967
+DD 1532713728,2475884691
+DD 505290240,421068825
+DD 2509608192,555810849
+DD 3772833792,235798542
+DD 4294967040,1313734734
+DD 1684300800,1701118053
+DD 3537031680,3183280317
+DD 269488128,3099066552
+DD 3301229568,2408513679
+DD 0,3958046955
+DD 1212696576,3469607118
+DD 2745410304,808452144
+DD 4160222976,1600061535
+DD 1970631936,3318022341
+DD 3688618752,437911578
+DD 2324335104,3789619425
+DD 50529024,3402236106
+DD 3873891840,1195835463
+DD 3671775744,1027407933
+DD 151587072,16842753
+DD 1061109504,3604349142
+DD 3722304768,1448476758
+DD 2492765184,1296891981
+DD 2273806080,218955789
+DD 1549556736,1717960806
+DD 2206434048,3435921612
+DD 33686016,757923885
+DD 3452816640,303169554
+DD 1246382592,538968096
+DD 2425393152,2981167281
+DD 858993408,2576941209
+DD 1936945920,1280049228
+DD 1734829824,3267494082
+DD 4143379968,2122186878
+DD 4092850944,84213765
+DD 2644352256,3082223799
+DD 2139062016,825294897
+DD 3217014528,387383319
+DD 3806519808,3621191895
+DD 1381126656,1482162264
+DD 2610666240,1633747041
+DD 3638089728,454754331
+DD 640034304,471597084
+DD 3368601600,252641295
+DD 926365440,370540566
+DD 3334915584,404226072
+DD 993737472,572653602
+DD 2172748032,1145307204
+DD 2526451200,2998010034
+DD 1869573888,3048538293
+DD 1263225600,2442199185
+DD 320017152,134742024
+DD 3200171520,2829582504
+DD 1667457792,4244373756
+DD 774778368,1347420240
+DD 3924420864,3503292624
+DD 2038003968,2105344125
+DD 2812782336,2307457161
+DD 2358021120,2543255703
+DD 2678038272,1532690523
+DD 1852730880,2509570197
+DD 3166485504,4294902015
+DD 2391707136,3536978130
+DD 690563328,3301179588
+DD 4126536960,1212678216
+DD 4193908992,4160159991
+DD 3065427456,3688562907
+DD 791621376,50528259
+DD 4261281024,3671720154
+DD 3031741440,1061093439
+DD 1499027712,2492727444
+DD 2021160960,1549533276
+DD 2560137216,33685506
+DD 101058048,1246363722
+DD 1785358848,858980403
+DD 3890734848,1734803559
+DD 1179010560,4092788979
+DD 1903259904,2139029631
+DD 3132799488,3806462178
+DD 3570717696,2610626715
+DD 623191296,640024614
+DD 2880154368,926351415
+DD 1111638528,993722427
+DD 2290649088,2526412950
+DD 2728567296,1263206475
+DD 2374864128,3200123070
+DD 4210752000,774766638
+DD 1920102912,2037973113
+DD 117901056,2357985420
+DD 3115956480,1852702830
+DD 1431655680,2391670926
+DD 4177065984,4126474485
+DD 4008635904,3065381046
+DD 2896997376,4261216509
+DD 168430080,1499005017
+DD 909522432,2560098456
+DD 1229539584,1785331818
+DD 707406336,1178992710
+DD 1751672832,3132752058
+DD 1010580480,623181861
+DD 943208448,1111621698
+DD 4059164928,2728525986
+DD 2762253312,4210688250
+DD 1077952512,117899271
+DD 673720320,1431634005
+DD 3553874688,4008575214
+DD 2071689984,168427530
+DD 3149642496,1229520969
+DD 3385444608,1751646312
+DD 1128481536,943194168
+DD 3250700544,2762211492
+DD 353703168,673710120
+DD 3823362816,2071658619
+DD 2913840384,3385393353
+DD 4109693952,3250651329
+DD 2004317952,3823304931
+DD 3351758592,4109631732
+DD 2155905024,3351707847
+DD 2661195264,2661154974
+DD 14737632,939538488
+DD 328965,1090535745
+DD 5789784,369104406
+DD 14277081,1979741814
+DD 6776679,3640711641
+DD 5131854,2466288531
+DD 8487297,1610637408
+DD 13355979,4060148466
+DD 13224393,1912631922
+DD 723723,3254829762
+DD 11447982,2868947883
+DD 6974058,2583730842
+DD 14013909,1962964341
+DD 1579032,100664838
+DD 6118749,1459640151
+DD 8553090,2684395680
+DD 4605510,2432733585
+DD 14671839,4144035831
+DD 14079702,3036722613
+DD 2565927,3372272073
+DD 9079434,2717950626
+DD 3289650,2348846220
+DD 4934475,3523269330
+DD 4342338,2415956112
+DD 14408667,4127258358
+DD 1842204,117442311
+DD 10395294,2801837991
+DD 10263708,654321447
+DD 3815994,2382401166
+DD 13290186,2986390194
+DD 2434341,1224755529
+DD 8092539,3724599006
+DD 855309,1124090691
+DD 7434609,1543527516
+DD 6250335,3607156695
+DD 2039583,3338717127
+DD 16316664,1040203326
+DD 14145495,4110480885
+DD 4079166,2399178639
+DD 10329501,1728079719
+DD 8158332,520101663
+DD 6316128,402659352
+DD 12171705,1845522030
+DD 12500670,2936057775
+DD 12369084,788541231
+DD 9145227,3791708898
+DD 1447446,2231403909
+DD 3421236,218107149
+DD 5066061,1392530259
+DD 12829635,4026593520
+DD 7500402,2617285788
+DD 9803157,1694524773
+DD 11250603,3925928682
+DD 9342606,2734728099
+DD 12237498,2919280302
+DD 8026746,2650840734
+DD 11776947,3959483628
+DD 131586,2147516544
+DD 11842740,754986285
+DD 11382189,1795189611
+DD 10658466,2818615464
+DD 11316396,721431339
+DD 14211288,905983542
+DD 10132122,2785060518
+DD 1513239,3305162181
+DD 1710618,2248181382
+DD 3487029,1291865421
+DD 13421772,855651123
+DD 16250871,4244700669
+DD 10066329,1711302246
+DD 6381921,1476417624
+DD 5921370,2516620950
+DD 15263976,973093434
+DD 2368548,150997257
+DD 5658198,2499843477
+DD 4210752,268439568
+DD 14803425,2013296760
+DD 6513507,3623934168
+DD 592137,1107313218
+DD 3355443,3422604492
+DD 12566463,4009816047
+DD 10000536,637543974
+DD 9934743,3842041317
+DD 8750469,1627414881
+DD 6842472,436214298
+DD 16579836,1056980799
+DD 15527148,989870907
+DD 657930,2181071490
+DD 14342874,3053500086
+DD 7303023,3674266587
+DD 5460819,3556824276
+DD 6447714,2550175896
+DD 10724259,3892373736
+DD 3026478,2332068747
+DD 526344,33554946
+DD 11513775,3942706155
+DD 2631720,167774730
+DD 11579568,738208812
+DD 7631988,486546717
+DD 12763842,2952835248
+DD 12434877,1862299503
+DD 3552822,2365623693
+DD 2236962,2281736328
+DD 3684408,234884622
+DD 6579300,419436825
+DD 1973790,2264958855
+DD 3750201,1308642894
+DD 2894892,184552203
+DD 10921638,2835392937
+DD 3158064,201329676
+DD 15066597,2030074233
+DD 4473924,285217041
+DD 16645629,2130739071
+DD 8947848,570434082
+DD 10461087,3875596263
+DD 6645093,1493195097
+DD 8882055,3774931425
+DD 7039851,3657489114
+DD 16053492,1023425853
+DD 2302755,3355494600
+DD 4737096,301994514
+DD 1052688,67109892
+DD 13750737,1946186868
+DD 5329233,1409307732
+DD 12632256,805318704
+DD 16382457,2113961598
+DD 13816530,3019945140
+DD 10526880,671098920
+DD 5592405,1426085205
+DD 10592673,1744857192
+DD 4276545,1342197840
+DD 16448250,3187719870
+DD 4408131,3489714384
+DD 1250067,3288384708
+DD 12895428,822096177
+DD 3092271,3405827019
+DD 11053224,704653866
+DD 11974326,2902502829
+DD 3947580,251662095
+DD 2829099,3389049546
+DD 12698049,1879076976
+DD 16777215,4278255615
+DD 13158600,838873650
+DD 10855845,1761634665
+DD 2105376,134219784
+DD 9013641,1644192354
+DD 0,0
+DD 9474192,603989028
+DD 4671303,3506491857
+DD 15724527,4211145723
+DD 15395562,3120609978
+DD 12040119,3976261101
+DD 1381653,1157645637
+DD 394758,2164294017
+DD 13487565,1929409395
+DD 11908533,1828744557
+DD 1184274,2214626436
+DD 8289918,2667618207
+DD 12303291,3993038574
+DD 2697513,1241533002
+DD 986895,3271607235
+DD 12105912,771763758
+DD 460551,3238052289
+DD 263172,16777473
+DD 10197915,3858818790
+DD 9737364,620766501
+DD 2171169,1207978056
+DD 6710886,2566953369
+DD 15132390,3103832505
+DD 13553358,3003167667
+DD 15592941,2063629179
+DD 15198183,4177590777
+DD 3881787,3456159438
+DD 16711422,3204497343
+DD 8355711,3741376479
+DD 12961221,1895854449
+DD 10790052,687876393
+DD 3618615,3439381965
+DD 11645361,1811967084
+DD 5000268,318771987
+DD 9539985,1677747300
+DD 7237230,2600508315
+DD 9276813,1660969827
+DD 7763574,2634063261
+DD 197379,3221274816
+DD 2960685,1258310475
+DD 14606046,3070277559
+DD 9868950,2768283045
+DD 2500134,2298513801
+DD 8224125,1593859935
+DD 13027014,2969612721
+DD 6052956,385881879
+DD 13882323,4093703412
+DD 15921906,3154164924
+DD 5197647,3540046803
+DD 1644825,1174423110
+DD 4144959,3472936911
+DD 14474460,922761015
+DD 7960953,1577082462
+DD 1907997,1191200583
+DD 5395026,2483066004
+DD 15461355,4194368250
+DD 15987699,4227923196
+DD 7171437,1526750043
+DD 6184542,2533398423
+DD 16514043,4261478142
+DD 6908265,1509972570
+DD 11711154,2885725356
+DD 15790320,1006648380
+DD 3223857,1275087948
+DD 789516,50332419
+DD 13948116,889206069
+DD 13619151,4076925939
+DD 9211020,587211555
+DD 14869218,3087055032
+DD 7697781,1560304989
+DD 11119017,1778412138
+DD 4868682,2449511058
+DD 5723991,3573601749
+DD 8684676,553656609
+DD 1118481,1140868164
+DD 4539717,1358975313
+DD 1776411,3321939654
+DD 16119285,2097184125
+DD 15000804,956315961
+DD 921102,2197848963
+DD 7566195,3691044060
+DD 11184810,2852170410
+DD 15856113,2080406652
+DD 14540253,1996519287
+DD 5855577,1442862678
+DD 1315860,83887365
+DD 7105644,452991771
+DD 9605778,2751505572
+DD 5526612,352326933
+DD 13684944,872428596
+DD 7895160,503324190
+DD 7368816,469769244
+DD 14935011,4160813304
+DD 4802889,1375752786
+DD 8421504,536879136
+DD 5263440,335549460
+DD 10987431,3909151209
+DD 16185078,3170942397
+DD 7829367,3707821533
+DD 9671571,3825263844
+DD 8816262,2701173153
+DD 8618883,3758153952
+DD 2763306,2315291274
+DD 13092807,4043370993
+DD 5987163,3590379222
+DD 15329769,2046851706
+DD 15658734,3137387451
+DD 9408399,3808486371
+DD 65793,1073758272
+DD 4013373,1325420367
+ALIGN 16
+_Camellia_cbc_encrypt PROC PUBLIC
+$L_Camellia_cbc_encrypt_begin::
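+ ; Camellia_cbc_encrypt(in, out, len, key, ivec, enc)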
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ecx,DWORD PTR 28[esp]
+ cmp ecx,0
+ je $L016enc_out
+ pushfd
+ cld
+ mov eax,DWORD PTR 24[esp]
+ mov ebx,DWORD PTR 28[esp]
+ mov edx,DWORD PTR 36[esp]
+ mov ebp,DWORD PTR 40[esp]
+ lea esi,DWORD PTR [esp-64]
+ and esi,-64
+ lea edi,DWORD PTR [edx-127]
+ sub edi,esi
+ neg edi
+ and edi,960
+ sub esi,edi
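+ ; scratch frame is 64-byte aligned and offset relative to the key
+ ; schedule to reduce cache-line aliasing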
+ mov edi,DWORD PTR 44[esp]
+ xchg esp,esi
+ add esp,4
+ mov DWORD PTR 20[esp],esi
+ mov DWORD PTR 24[esp],eax
+ mov DWORD PTR 28[esp],ebx
+ mov DWORD PTR 32[esp],ecx
+ mov DWORD PTR 36[esp],edx
+ mov DWORD PTR 40[esp],ebp
+ call $L017pic_point
+$L017pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($LCamellia_SBOX-$L017pic_point)[ebp]
+ mov esi,32
+ALIGN 4
+$L018prefetch_sbox:
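+ ; touch all 4KB of the S-box (32 iterations x 128 bytes) to warm the cache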
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 64[ebp]
+ mov edx,DWORD PTR 96[ebp]
+ lea ebp,DWORD PTR 128[ebp]
+ dec esi
+ jnz $L018prefetch_sbox
+ mov eax,DWORD PTR 36[esp]
+ sub ebp,4096
+ mov esi,DWORD PTR 24[esp]
+ mov edx,DWORD PTR 272[eax]
+ cmp edi,0
+ je $L019DECRYPT
+ mov ecx,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 40[esp]
+ shl edx,6
+ lea edx,DWORD PTR [edx*1+eax]
+ mov DWORD PTR 16[esp],edx
+ test ecx,4294967280
+ jz $L020enc_tail
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ALIGN 4
+$L021enc_loop:
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ xor eax,DWORD PTR [esi]
+ xor ebx,DWORD PTR 4[esi]
+ xor ecx,DWORD PTR 8[esi]
+ bswap eax
+ xor edx,DWORD PTR 12[esi]
+ bswap ebx
+ mov edi,DWORD PTR 36[esp]
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_encrypt
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ mov DWORD PTR [edi],eax
+ bswap edx
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov ecx,DWORD PTR 32[esp]
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 24[esp],esi
+ lea edx,DWORD PTR 16[edi]
+ mov DWORD PTR 28[esp],edx
+ sub ecx,16
+ test ecx,4294967280
+ mov DWORD PTR 32[esp],ecx
+ jnz $L021enc_loop
+ test ecx,15
+ jnz $L020enc_tail
+ mov esi,DWORD PTR 40[esp]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ mov esp,DWORD PTR 20[esp]
+ popfd
+$L016enc_out:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ pushfd
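+ ; never executed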
+ALIGN 4
+$L020enc_tail:
+ mov eax,edi
+ mov edi,DWORD PTR 28[esp]
+ push eax
+ mov ebx,16
+ sub ebx,ecx
+ cmp edi,esi
+ je $L022enc_in_place
+ALIGN 4
+DD 2767451785
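+ ; machine code for: mov esi,esi / rep movsb (copy the tail input bytes)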
+ jmp $L023enc_skip_in_place
+$L022enc_in_place:
+ lea edi,DWORD PTR [ecx*1+edi]
+$L023enc_skip_in_place:
+ mov ecx,ebx
+ xor eax,eax
+ALIGN 4
+DD 2868115081
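+ ; machine code for: mov esi,esi / rep stosb (zero-fill the rest of the block)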
+ pop edi
+ mov esi,DWORD PTR 28[esp]
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov DWORD PTR 32[esp],16
+ jmp $L021enc_loop
+ALIGN 16
+$L019DECRYPT:
+ shl edx,6
+ lea edx,DWORD PTR [edx*1+eax]
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 36[esp],edx
+ cmp esi,DWORD PTR 28[esp]
+ je $L024dec_in_place
+ mov edi,DWORD PTR 40[esp]
+ mov DWORD PTR 44[esp],edi
+ALIGN 4
+$L025dec_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ bswap eax
+ mov edx,DWORD PTR 12[esi]
+ bswap ebx
+ mov edi,DWORD PTR 36[esp]
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_decrypt
+ mov edi,DWORD PTR 44[esp]
+ mov esi,DWORD PTR 32[esp]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ xor eax,DWORD PTR [edi]
+ bswap edx
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ sub esi,16
+ jc $L026dec_partial
+ mov DWORD PTR 32[esp],esi
+ mov esi,DWORD PTR 24[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov DWORD PTR 44[esp],esi
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 24[esp],esi
+ lea edi,DWORD PTR 16[edi]
+ mov DWORD PTR 28[esp],edi
+ jnz $L025dec_loop
+ mov edi,DWORD PTR 44[esp]
+$L027dec_end:
+ mov esi,DWORD PTR 40[esp]
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ jmp $L028dec_out
+ALIGN 4
+$L026dec_partial:
+ lea edi,DWORD PTR 44[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ lea ecx,DWORD PTR 16[esi]
+ mov esi,edi
+ mov edi,DWORD PTR 28[esp]
+DD 2767451785
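+ ; rep movsb: copy the partial final block from the stack buffer to the output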
+ mov edi,DWORD PTR 24[esp]
+ jmp $L027dec_end
+ALIGN 4
+$L024dec_in_place:
+$L029dec_in_place_loop:
+ lea edi,DWORD PTR 44[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ bswap eax
+ mov DWORD PTR 12[edi],edx
+ bswap ebx
+ mov edi,DWORD PTR 36[esp]
+ bswap ecx
+ bswap edx
+ call __x86_Camellia_decrypt
+ mov edi,DWORD PTR 40[esp]
+ mov esi,DWORD PTR 28[esp]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ xor eax,DWORD PTR [edi]
+ bswap edx
+ xor ebx,DWORD PTR 4[edi]
+ xor ecx,DWORD PTR 8[edi]
+ xor edx,DWORD PTR 12[edi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 28[esp],esi
+ lea esi,DWORD PTR 44[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ mov DWORD PTR 8[edi],ecx
+ mov DWORD PTR 12[edi],edx
+ mov esi,DWORD PTR 24[esp]
+ lea esi,DWORD PTR 16[esi]
+ mov DWORD PTR 24[esp],esi
+ mov ecx,DWORD PTR 32[esp]
+ sub ecx,16
+ jc $L030dec_in_place_partial
+ mov DWORD PTR 32[esp],ecx
+ jnz $L029dec_in_place_loop
+ jmp $L028dec_out
+ALIGN 4
+$L030dec_in_place_partial:
+ mov edi,DWORD PTR 28[esp]
+ lea esi,DWORD PTR 44[esp]
+ lea edi,DWORD PTR [ecx*1+edi]
+ lea esi,DWORD PTR 16[ecx*1+esi]
+ neg ecx
+DD 2767451785
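+ ; rep movsb: copy the remaining plaintext bytes into place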
+ALIGN 4
+$L028dec_out:
+ mov esp,DWORD PTR 20[esp]
+ popfd
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_Camellia_cbc_encrypt ENDP
+DB 67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
+DB 32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
+DB 115,108,46,111,114,103,62,0
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm b/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
new file mode 100644
index 0000000000..1f2f0708a5
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
@@ -0,0 +1,950 @@
+TITLE cast-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+EXTERN _CAST_S_table0:NEAR
+EXTERN _CAST_S_table1:NEAR
+EXTERN _CAST_S_table2:NEAR
+EXTERN _CAST_S_table3:NEAR
+ALIGN 16
+_CAST_encrypt PROC PUBLIC
+$L_CAST_encrypt_begin::
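+ ; 16 rounds cycling through the three CAST-128 round functions
+ ; (RFC 2144); short keys finish after round 11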
+ ;
+
+ push ebp
+ push ebx
+ mov ebx,DWORD PTR 12[esp]
+ mov ebp,DWORD PTR 16[esp]
+ push esi
+ push edi
+ ; Load the 2 words
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ ; Get short key flag
+ mov eax,DWORD PTR 128[ebp]
+ push eax
+ xor eax,eax
+ ; round 0
+ mov edx,DWORD PTR [ebp]
+ mov ecx,DWORD PTR 4[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 1
+ mov edx,DWORD PTR 8[ebp]
+ mov ecx,DWORD PTR 12[ebp]
+ xor edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor esi,ecx
+ ; round 2
+ mov edx,DWORD PTR 16[ebp]
+ mov ecx,DWORD PTR 20[ebp]
+ sub edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor edi,ecx
+ ; round 3
+ mov edx,DWORD PTR 24[ebp]
+ mov ecx,DWORD PTR 28[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+ ; round 4
+ mov edx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 36[ebp]
+ xor edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor edi,ecx
+ ; round 5
+ mov edx,DWORD PTR 40[ebp]
+ mov ecx,DWORD PTR 44[ebp]
+ sub edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor esi,ecx
+ ; round 6
+ mov edx,DWORD PTR 48[ebp]
+ mov ecx,DWORD PTR 52[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 7
+ mov edx,DWORD PTR 56[ebp]
+ mov ecx,DWORD PTR 60[ebp]
+ xor edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor esi,ecx
+ ; round 8
+ mov edx,DWORD PTR 64[ebp]
+ mov ecx,DWORD PTR 68[ebp]
+ sub edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor edi,ecx
+ ; round 9
+ mov edx,DWORD PTR 72[ebp]
+ mov ecx,DWORD PTR 76[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+ ; round 10
+ mov edx,DWORD PTR 80[ebp]
+ mov ecx,DWORD PTR 84[ebp]
+ xor edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor edi,ecx
+ ; round 11
+ mov edx,DWORD PTR 88[ebp]
+ mov ecx,DWORD PTR 92[ebp]
+ sub edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor esi,ecx
+ ; test short key flag
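+ ; keys of 80 bits or less use only 12 rounds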
+ pop edx
+ or edx,edx
+ jnz $L000cast_enc_done
+ ; round 12
+ mov edx,DWORD PTR 96[ebp]
+ mov ecx,DWORD PTR 100[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 13
+ mov edx,DWORD PTR 104[ebp]
+ mov ecx,DWORD PTR 108[ebp]
+ xor edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor esi,ecx
+ ; round 14
+ mov edx,DWORD PTR 112[ebp]
+ mov ecx,DWORD PTR 116[ebp]
+ sub edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor edi,ecx
+ ; round 15
+ mov edx,DWORD PTR 120[ebp]
+ mov ecx,DWORD PTR 124[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+$L000cast_enc_done:
+ nop
+ mov eax,DWORD PTR 20[esp]
+ mov DWORD PTR 4[eax],edi
+ mov DWORD PTR [eax],esi
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_CAST_encrypt ENDP
+EXTERN _CAST_S_table0:NEAR
+EXTERN _CAST_S_table1:NEAR
+EXTERN _CAST_S_table2:NEAR
+EXTERN _CAST_S_table3:NEAR
+ALIGN 16
+_CAST_decrypt PROC PUBLIC
+$L_CAST_decrypt_begin::
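+ ; decryption applies the rounds in reverse; rounds 15-12 are skipped for short keys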
+ ;
+
+ push ebp
+ push ebx
+ mov ebx,DWORD PTR 12[esp]
+ mov ebp,DWORD PTR 16[esp]
+ push esi
+ push edi
+ ; Load the 2 words
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ ; Get short key flag
+ mov eax,DWORD PTR 128[ebp]
+ or eax,eax
+ jnz $L001cast_dec_skip
+ xor eax,eax
+ ; round 15
+ mov edx,DWORD PTR 120[ebp]
+ mov ecx,DWORD PTR 124[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 14
+ mov edx,DWORD PTR 112[ebp]
+ mov ecx,DWORD PTR 116[ebp]
+ sub edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor esi,ecx
+ ; round 13
+ mov edx,DWORD PTR 104[ebp]
+ mov ecx,DWORD PTR 108[ebp]
+ xor edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor edi,ecx
+ ; round 12
+ mov edx,DWORD PTR 96[ebp]
+ mov ecx,DWORD PTR 100[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+$L001cast_dec_skip:
+ ; round 11
+ mov edx,DWORD PTR 88[ebp]
+ mov ecx,DWORD PTR 92[ebp]
+ sub edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor edi,ecx
+ ; round 10
+ mov edx,DWORD PTR 80[ebp]
+ mov ecx,DWORD PTR 84[ebp]
+ xor edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor esi,ecx
+ ; round 9
+ mov edx,DWORD PTR 72[ebp]
+ mov ecx,DWORD PTR 76[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 8
+ mov edx,DWORD PTR 64[ebp]
+ mov ecx,DWORD PTR 68[ebp]
+ sub edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor esi,ecx
+ ; round 7
+ mov edx,DWORD PTR 56[ebp]
+ mov ecx,DWORD PTR 60[ebp]
+ xor edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor edi,ecx
+ ; round 6
+ mov edx,DWORD PTR 48[ebp]
+ mov ecx,DWORD PTR 52[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+ ; round 5
+ mov edx,DWORD PTR 40[ebp]
+ mov ecx,DWORD PTR 44[ebp]
+ sub edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor edi,ecx
+ ; round 4
+ mov edx,DWORD PTR 32[ebp]
+ mov ecx,DWORD PTR 36[ebp]
+ xor edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor esi,ecx
+ ; round 3
+ mov edx,DWORD PTR 24[ebp]
+ mov ecx,DWORD PTR 28[ebp]
+ add edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor edi,ecx
+ ; round 2
+ mov edx,DWORD PTR 16[ebp]
+ mov ecx,DWORD PTR 20[ebp]
+ sub edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ sub ecx,ebx
+ xor esi,ecx
+ ; round 1
+ mov edx,DWORD PTR 8[ebp]
+ mov ecx,DWORD PTR 12[ebp]
+ xor edx,esi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ add ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ xor ecx,ebx
+ xor edi,ecx
+ ; round 0
+ mov edx,DWORD PTR [ebp]
+ mov ecx,DWORD PTR 4[ebp]
+ add edx,edi
+ rol edx,cl
+ mov ebx,edx
+ xor ecx,ecx
+ mov cl,dh
+ and ebx,255
+ shr edx,16
+ xor eax,eax
+ mov al,dh
+ and edx,255
+ mov ecx,DWORD PTR _CAST_S_table0[ecx*4]
+ mov ebx,DWORD PTR _CAST_S_table1[ebx*4]
+ xor ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table2[eax*4]
+ sub ecx,ebx
+ mov ebx,DWORD PTR _CAST_S_table3[edx*4]
+ add ecx,ebx
+ xor esi,ecx
+ nop
+ mov eax,DWORD PTR 20[esp]
+ mov DWORD PTR 4[eax],edi
+ mov DWORD PTR [eax],esi
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_CAST_decrypt ENDP
+ALIGN 16
+_CAST_cbc_encrypt PROC PUBLIC
+$L_CAST_cbc_encrypt_begin::
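+ ; CAST_cbc_encrypt(in, out, length, schedule, ivec, enc)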
+ ;
+
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 28[esp]
+ ; getting iv ptr from parameter 4
+ mov ebx,DWORD PTR 36[esp]
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ push edi
+ push esi
+ push edi
+ push esi
+ mov ebx,esp
+ mov esi,DWORD PTR 36[esp]
+ mov edi,DWORD PTR 40[esp]
+ ; getting encrypt flag from parameter 5
+ mov ecx,DWORD PTR 56[esp]
+ ; get and push parameter 3
+ mov eax,DWORD PTR 48[esp]
+ push eax
+ push ebx
+ cmp ecx,0
+ jz $L002decrypt
+ and ebp,4294967288
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ jz $L003encrypt_finish
+$L004encrypt_loop:
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR 4[esi]
+ xor eax,ecx
+ xor ebx,edx
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_CAST_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L004encrypt_loop
+$L003encrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L005finish
+ call $L006PIC_point
+$L006PIC_point:
+ pop edx
+ lea ecx,DWORD PTR ($L007cbc_enc_jmp_table-$L006PIC_point)[edx]
+ mov ebp,DWORD PTR [ebp*4+ecx]
+ add ebp,edx
+ xor ecx,ecx
+ xor edx,edx
+ jmp ebp
+$L008ej7:
+ mov dh,BYTE PTR 6[esi]
+ shl edx,8
+$L009ej6:
+ mov dh,BYTE PTR 5[esi]
+$L010ej5:
+ mov dl,BYTE PTR 4[esi]
+$L011ej4:
+ mov ecx,DWORD PTR [esi]
+ jmp $L012ejend
+$L013ej3:
+ mov ch,BYTE PTR 2[esi]
+ shl ecx,8
+$L014ej2:
+ mov ch,BYTE PTR 1[esi]
+$L015ej1:
+ mov cl,BYTE PTR [esi]
+$L012ejend:
+ xor eax,ecx
+ xor ebx,edx
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_CAST_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ jmp $L005finish
+$L002decrypt:
+ and ebp,4294967288
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ jz $L016decrypt_finish
+$L017decrypt_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_CAST_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR [edi],ecx
+ mov DWORD PTR 4[edi],edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L017decrypt_loop
+$L016decrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L005finish
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ bswap eax
+ bswap ebx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_CAST_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ bswap eax
+ bswap ebx
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
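+ ; store the bytes of the final partial plaintext block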
+$L018dj7:
+ ror edx,16
+ mov BYTE PTR 6[edi],dl
+ shr edx,16
+$L019dj6:
+ mov BYTE PTR 5[edi],dh
+$L020dj5:
+ mov BYTE PTR 4[edi],dl
+$L021dj4:
+ mov DWORD PTR [edi],ecx
+ jmp $L022djend
+$L023dj3:
+ ror ecx,16
+ mov BYTE PTR 2[edi],cl
+ shr ecx,16
+$L024dj2:
+ mov BYTE PTR 1[edi],ch
+$L025dj1:
+ mov BYTE PTR [edi],cl
+$L022djend:
+ jmp $L005finish
+$L005finish:
+ mov ecx,DWORD PTR 60[esp]
+ add esp,24
+ mov DWORD PTR [ecx],eax
+ mov DWORD PTR 4[ecx],ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L007cbc_enc_jmp_table:
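+ ; PIC-relative offsets of the 1..7-byte encrypt tail entry points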
+DD 0
+DD $L015ej1-$L006PIC_point
+DD $L014ej2-$L006PIC_point
+DD $L013ej3-$L006PIC_point
+DD $L011ej4-$L006PIC_point
+DD $L010ej5-$L006PIC_point
+DD $L009ej6-$L006PIC_point
+DD $L008ej7-$L006PIC_point
+ALIGN 64
+_CAST_cbc_encrypt ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/des/crypt586.asm b/deps/openssl/asm/x86-win32-masm/des/crypt586.asm
new file mode 100644
index 0000000000..24e474dfc5
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/des/crypt586.asm
@@ -0,0 +1,909 @@
+TITLE crypt586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+EXTERN _DES_SPtrans:NEAR
+ALIGN 16
+_fcrypt_body PROC PUBLIC
+$L_fcrypt_body_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ ;
+
+ ; Load the 2 words
+ xor edi,edi
+ xor esi,esi
+ lea edx,DWORD PTR _DES_SPtrans
+ push edx
+ mov ebp,DWORD PTR 28[esp]
+ push 25
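+ ; loop count: crypt(3) runs the DES rounds 25 times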
+$L000start:
+ ;
+
+ ; Round 0
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR [ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 4[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 1
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 8[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 12[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 2
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 16[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 20[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 3
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 24[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 28[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 4
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 32[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 36[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 5
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 40[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 44[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 6
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 48[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 52[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 7
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 56[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 60[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 8
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 64[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 68[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 9
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 72[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 76[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 10
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 80[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 84[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 11
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 88[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 92[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 12
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 96[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 100[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 13
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 104[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 108[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 14
+ mov eax,DWORD PTR 36[esp]
+ mov edx,esi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,esi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 112[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 116[ebp]
+ xor eax,esi
+ xor edx,esi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor edi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ ;
+
+ ; Round 15
+ mov eax,DWORD PTR 36[esp]
+ mov edx,edi
+ shr edx,16
+ mov ecx,DWORD PTR 40[esp]
+ xor edx,edi
+ and eax,edx
+ and edx,ecx
+ mov ebx,eax
+ shl ebx,16
+ mov ecx,edx
+ shl ecx,16
+ xor eax,ebx
+ xor edx,ecx
+ mov ebx,DWORD PTR 120[ebp]
+ xor eax,ebx
+ mov ecx,DWORD PTR 124[ebp]
+ xor eax,edi
+ xor edx,edi
+ xor edx,ecx
+ and eax,0fcfcfcfch
+ xor ebx,ebx
+ and edx,0cfcfcfcfh
+ xor ecx,ecx
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ mov ebp,DWORD PTR 4[esp]
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ mov ebx,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0700h[ecx*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,ebx
+ mov ebx,DWORD PTR 0500h[edx*1+ebp]
+ xor esi,ebx
+ mov ebp,DWORD PTR 32[esp]
+ mov ebx,DWORD PTR [esp]
+ mov eax,edi
+ dec ebx
+ mov edi,esi
+ mov esi,eax
+ mov DWORD PTR [esp],ebx
+ jnz $L000start
+ ;
+
+ ; FP
+ mov edx,DWORD PTR 28[esp]
+ ror edi,1
+ mov eax,esi
+ xor esi,edi
+ and esi,0aaaaaaaah
+ xor eax,esi
+ xor edi,esi
+ ;
+
+ rol eax,23
+ mov esi,eax
+ xor eax,edi
+ and eax,003fc03fch
+ xor esi,eax
+ xor edi,eax
+ ;
+
+ rol esi,10
+ mov eax,esi
+ xor esi,edi
+ and esi,033333333h
+ xor eax,esi
+ xor edi,esi
+ ;
+
+ rol edi,18
+ mov esi,edi
+ xor edi,eax
+ and edi,0fff0000fh
+ xor esi,edi
+ xor eax,edi
+ ;
+
+ rol esi,12
+ mov edi,esi
+ xor esi,eax
+ and esi,0f0f0f0f0h
+ xor edi,esi
+ xor eax,esi
+ ;
+
+ ror eax,4
+ mov DWORD PTR [edx],eax
+ mov DWORD PTR 4[edx],edi
+ add esp,8
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_fcrypt_body ENDP
+.text$ ENDS
+END
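
Editorial note: every round in the fcrypt body above expands the same DES Feistel round: XOR two key-schedule words into the right half, carve the result into eight 6-bit S-box indices with the 0fcfcfcfch/0cfcfcfcfh masks and a 4-bit rotate, and XOR eight _DES_SPtrans entries into the left half. A minimal C sketch of that common core follows, assuming DES_SPtrans is laid out as eight 64-entry tables (matching the 0000h..0700h byte offsets, 0x100 apart); the names des_round and ROTATE are ours, and the fcrypt-specific salt perturbation at the top of each round (the shr/xor/and/shl block using 36[esp] and 40[esp]) is deliberately omitted.

#include <stdint.h>

extern const uint32_t DES_SPtrans[8][64];

#define ROTATE(x, n) (((x) >> (n)) | ((x) << (32 - (n))))   /* ror */

/* One Feistel round: s[0] and s[1] are the two round-key words the
 * assembly loads from the schedule (e.g. "DWORD PTR 32[ebp]" and
 * "DWORD PTR 36[ebp]" in Round 4). The masked bytes, used as byte
 * offsets in the assembly, become (word >> 2) & 0x3f dword indices. */
static void des_round(uint32_t *l, uint32_t r, const uint32_t s[2])
{
    uint32_t u = (r ^ s[0]) & 0xfcfcfcfcU;    /* and eax,0fcfcfcfch */
    uint32_t t = (r ^ s[1]) & 0xcfcfcfcfU;    /* and edx,0cfcfcfcfh */

    t = ROTATE(t, 4);                         /* ror edx,4 */
    *l ^= DES_SPtrans[0][(u >>  2) & 0x3f] ^  /* [ebx*1+ebp]      */
          DES_SPtrans[2][(u >> 10) & 0x3f] ^  /* 0200h[ecx*1+ebp] */
          DES_SPtrans[4][(u >> 18) & 0x3f] ^  /* 0400h[eax*1+ebp] */
          DES_SPtrans[6][(u >> 26) & 0x3f] ^  /* 0600h[ebx*1+ebp] */
          DES_SPtrans[1][(t >>  2) & 0x3f] ^
          DES_SPtrans[3][(t >> 10) & 0x3f] ^
          DES_SPtrans[5][(t >> 18) & 0x3f] ^
          DES_SPtrans[7][(t >> 26) & 0x3f];
}

The byte-register shuffling in the assembly (bl/cl from al/ah/dl/dh) exists only to turn those eight shifted-and-masked fields into address bytes without extra shift instructions.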
diff --git a/deps/openssl/asm/x86-win32-masm/des/des-586.asm b/deps/openssl/asm/x86-win32-masm/des/des-586.asm
new file mode 100644
index 0000000000..3c630daff9
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/des/des-586.asm
@@ -0,0 +1,1878 @@
+TITLE des-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+PUBLIC _DES_SPtrans
+ALIGN 16
+__x86_DES_encrypt PROC PRIVATE
+ push ecx
+ ; Round 0
+ mov eax,DWORD PTR [ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 4[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 1
+ mov eax,DWORD PTR 8[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 12[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 2
+ mov eax,DWORD PTR 16[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 20[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 3
+ mov eax,DWORD PTR 24[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 28[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 4
+ mov eax,DWORD PTR 32[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 36[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 5
+ mov eax,DWORD PTR 40[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 44[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 6
+ mov eax,DWORD PTR 48[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 52[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 7
+ mov eax,DWORD PTR 56[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 60[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 8
+ mov eax,DWORD PTR 64[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 68[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 9
+ mov eax,DWORD PTR 72[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 76[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 10
+ mov eax,DWORD PTR 80[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 84[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 11
+ mov eax,DWORD PTR 88[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 92[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 12
+ mov eax,DWORD PTR 96[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 100[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 13
+ mov eax,DWORD PTR 104[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 108[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 14
+ mov eax,DWORD PTR 112[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 116[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 15
+ mov eax,DWORD PTR 120[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 124[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ add esp,4
+ ret
+__x86_DES_encrypt ENDP
+ALIGN 16
+__x86_DES_decrypt PROC PRIVATE
+ push ecx
+ ; Round 15
+ mov eax,DWORD PTR 120[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 124[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 14
+ mov eax,DWORD PTR 112[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 116[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 13
+ mov eax,DWORD PTR 104[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 108[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 12
+ mov eax,DWORD PTR 96[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 100[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 11
+ mov eax,DWORD PTR 88[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 92[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 10
+ mov eax,DWORD PTR 80[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 84[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 9
+ mov eax,DWORD PTR 72[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 76[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 8
+ mov eax,DWORD PTR 64[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 68[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 7
+ mov eax,DWORD PTR 56[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 60[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 6
+ mov eax,DWORD PTR 48[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 52[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 5
+ mov eax,DWORD PTR 40[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 44[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 4
+ mov eax,DWORD PTR 32[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 36[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 3
+ mov eax,DWORD PTR 24[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 28[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 2
+ mov eax,DWORD PTR 16[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 20[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 1
+ mov eax,DWORD PTR 8[ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 12[ecx]
+ xor eax,esi
+ xor ecx,ecx
+ xor edx,esi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor edi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor edi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor edi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor edi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor edi,DWORD PTR 0600h[ebx*1+ebp]
+ xor edi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor edi,DWORD PTR 0400h[eax*1+ebp]
+ xor edi,DWORD PTR 0500h[edx*1+ebp]
+ ; Round 0
+ mov eax,DWORD PTR [ecx]
+ xor ebx,ebx
+ mov edx,DWORD PTR 4[ecx]
+ xor eax,edi
+ xor ecx,ecx
+ xor edx,edi
+ and eax,0fcfcfcfch
+ and edx,0cfcfcfcfh
+ mov bl,al
+ mov cl,ah
+ ror edx,4
+ xor esi,DWORD PTR [ebx*1+ebp]
+ mov bl,dl
+ xor esi,DWORD PTR 0200h[ecx*1+ebp]
+ mov cl,dh
+ shr eax,16
+ xor esi,DWORD PTR 0100h[ebx*1+ebp]
+ mov bl,ah
+ shr edx,16
+ xor esi,DWORD PTR 0300h[ecx*1+ebp]
+ mov cl,dh
+ and eax,0ffh
+ and edx,0ffh
+ xor esi,DWORD PTR 0600h[ebx*1+ebp]
+ xor esi,DWORD PTR 0700h[ecx*1+ebp]
+ mov ecx,DWORD PTR [esp]
+ xor esi,DWORD PTR 0400h[eax*1+ebp]
+ xor esi,DWORD PTR 0500h[edx*1+ebp]
+ add esp,4
+ ret
+__x86_DES_decrypt ENDP
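
Editorial note: __x86_DES_encrypt and __x86_DES_decrypt above are the same sixteen rounds; the only differences are the direction the key schedule is walked (offsets 0..124 forward versus 124..0 backward) and which half each round XORs into. A compact C rendering of that structure, reusing the des_round helper sketched earlier; des_rounds and the flat ks[32] layout (16 rounds x 2 words, as the DWORD PTR loads address it) are our naming assumptions.

/* Sixteen rounds over halves l/r: enc walks keys forward
 * (Round 0..15 in __x86_DES_encrypt), otherwise backward
 * (Round 15..0 in __x86_DES_decrypt). */
static void des_rounds(uint32_t *l, uint32_t *r,
                       const uint32_t ks[32], int enc)
{
    for (int i = 0; i < 16; i++) {
        const uint32_t *s = enc ? &ks[2 * i] : &ks[2 * (15 - i)];
        if ((i & 1) == 0)
            des_round(l, *r, s);   /* even rounds XOR into one half */
        else
            des_round(r, *l, s);   /* odd rounds XOR into the other */
    }
}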
+ALIGN 16
+_DES_encrypt1 PROC PUBLIC
+$L_DES_encrypt1_begin::
+ push esi
+ push edi
+ ;
+
+ ; Load the 2 words
+ mov esi,DWORD PTR 12[esp]
+ xor ecx,ecx
+ push ebx
+ push ebp
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 28[esp]
+ mov edi,DWORD PTR 4[esi]
+ ;
+
+ ; IP
+ rol eax,4
+ mov esi,eax
+ xor eax,edi
+ and eax,0f0f0f0f0h
+ xor esi,eax
+ xor edi,eax
+ ;
+
+ rol edi,20
+ mov eax,edi
+ xor edi,esi
+ and edi,0fff0000fh
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol eax,14
+ mov edi,eax
+ xor eax,esi
+ and eax,033333333h
+ xor edi,eax
+ xor esi,eax
+ ;
+
+ rol esi,22
+ mov eax,esi
+ xor esi,edi
+ and esi,003fc03fch
+ xor eax,esi
+ xor edi,esi
+ ;
+
+ rol eax,9
+ mov esi,eax
+ xor eax,edi
+ and eax,0aaaaaaaah
+ xor esi,eax
+ xor edi,eax
+ ;
+
+ rol edi,1
+ call $L000pic_point
+$L000pic_point:
+ pop ebp
+ lea ebp,DWORD PTR (_DES_SPtrans-$L000pic_point)[ebp]
+ mov ecx,DWORD PTR 24[esp]
+ cmp ebx,0
+ je $L001decrypt
+ call __x86_DES_encrypt
+ jmp $L002done
+$L001decrypt:
+ call __x86_DES_decrypt
+$L002done:
+ ;
+
+ ; FP
+ mov edx,DWORD PTR 20[esp]
+ ror esi,1
+ mov eax,edi
+ xor edi,esi
+ and edi,0aaaaaaaah
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol eax,23
+ mov edi,eax
+ xor eax,esi
+ and eax,003fc03fch
+ xor edi,eax
+ xor esi,eax
+ ;
+
+ rol edi,10
+ mov eax,edi
+ xor edi,esi
+ and edi,033333333h
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol esi,18
+ mov edi,esi
+ xor esi,eax
+ and esi,0fff0000fh
+ xor edi,esi
+ xor eax,esi
+ ;
+
+ rol edi,12
+ mov esi,edi
+ xor edi,eax
+ and edi,0f0f0f0f0h
+ xor esi,edi
+ xor eax,edi
+ ;
+
+ ror eax,4
+ mov DWORD PTR [edx],eax
+ mov DWORD PTR 4[edx],esi
+ pop ebp
+ pop ebx
+ pop edi
+ pop esi
+ ret
+_DES_encrypt1 ENDP
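
Editorial note: each five-instruction rol/xor/and/xor/xor group in the IP and FP blocks of DES_encrypt1 is one instance of a single bit-swap idiom (PERM_OP in OpenSSL's C reference code): rotate one word, then exchange the bits selected by a mask between the two words. A sketch of that one step in C; perm_op and ROTL are our names, and the initial permutation is five such swaps chained with the masks 0f0f0f0f0h, 0fff0000fh, 033333333h, 003fc03fch and 0aaaaaaaah.

#include <stdint.h>

#define ROTL(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

/* Swap the bits selected by m between rotl(a, n) and b, e.g. the
 * first IP group:  rol eax,4 / mov esi,eax / xor eax,edi /
 *                  and eax,0f0f0f0f0h / xor esi,eax / xor edi,eax */
static void perm_op(uint32_t *a, uint32_t *b, int n, uint32_t m)
{
    uint32_t ra = ROTL(*a, n);       /* rol eax,n   */
    uint32_t t  = (ra ^ *b) & m;     /* xor + and   */

    *a = ra ^ t;                     /* xor esi,eax */
    *b ^= t;                         /* xor edi,eax */
}

Because every group both rotates and swaps, the FP block simply runs the inverse chain with complementary rotate counts, which is why IP and FP look like mirror images in the listing.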
+ALIGN 16
+_DES_encrypt2 PROC PUBLIC
+$L_DES_encrypt2_begin::
+ push esi
+ push edi
+ ;
+
+ ; Load the 2 words
+ mov eax,DWORD PTR 12[esp]
+ xor ecx,ecx
+ push ebx
+ push ebp
+ mov esi,DWORD PTR [eax]
+ mov ebx,DWORD PTR 28[esp]
+ rol esi,3
+ mov edi,DWORD PTR 4[eax]
+ rol edi,3
+ call $L003pic_point
+$L003pic_point:
+ pop ebp
+ lea ebp,DWORD PTR (_DES_SPtrans-$L003pic_point)[ebp]
+ mov ecx,DWORD PTR 24[esp]
+ cmp ebx,0
+ je $L004decrypt
+ call __x86_DES_encrypt
+ jmp $L005done
+$L004decrypt:
+ call __x86_DES_decrypt
+$L005done:
+ ;
+
+ ; Fixup
+ ror edi,3
+ mov eax,DWORD PTR 20[esp]
+ ror esi,3
+ mov DWORD PTR [eax],edi
+ mov DWORD PTR 4[eax],esi
+ pop ebp
+ pop ebx
+ pop edi
+ pop esi
+ ret
+_DES_encrypt2 ENDP
+ALIGN 16
+_DES_encrypt3 PROC PUBLIC
+$L_DES_encrypt3_begin::
+ push ebx
+ mov ebx,DWORD PTR 8[esp]
+ push ebp
+ push esi
+ push edi
+ ;
+
+ ; Load the data words
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ sub esp,12
+ ;
+
+ ; IP
+ rol edi,4
+ mov edx,edi
+ xor edi,esi
+ and edi,0f0f0f0f0h
+ xor edx,edi
+ xor esi,edi
+ ;
+
+ rol esi,20
+ mov edi,esi
+ xor esi,edx
+ and esi,0fff0000fh
+ xor edi,esi
+ xor edx,esi
+ ;
+
+ rol edi,14
+ mov esi,edi
+ xor edi,edx
+ and edi,033333333h
+ xor esi,edi
+ xor edx,edi
+ ;
+
+ rol edx,22
+ mov edi,edx
+ xor edx,esi
+ and edx,003fc03fch
+ xor edi,edx
+ xor esi,edx
+ ;
+
+ rol edi,9
+ mov edx,edi
+ xor edi,esi
+ and edi,0aaaaaaaah
+ xor edx,edi
+ xor esi,edi
+ ;
+
+ ror edx,3
+ ror esi,2
+ mov DWORD PTR 4[ebx],esi
+ mov eax,DWORD PTR 36[esp]
+ mov DWORD PTR [ebx],edx
+ mov edi,DWORD PTR 40[esp]
+ mov esi,DWORD PTR 44[esp]
+ mov DWORD PTR 8[esp],1
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ mov DWORD PTR 8[esp],0
+ mov DWORD PTR 4[esp],edi
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ mov DWORD PTR 8[esp],1
+ mov DWORD PTR 4[esp],esi
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ add esp,12
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ ;
+
+ ; FP
+ rol esi,2
+ rol edi,3
+ mov eax,edi
+ xor edi,esi
+ and edi,0aaaaaaaah
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol eax,23
+ mov edi,eax
+ xor eax,esi
+ and eax,003fc03fch
+ xor edi,eax
+ xor esi,eax
+ ;
+
+ rol edi,10
+ mov eax,edi
+ xor edi,esi
+ and edi,033333333h
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol esi,18
+ mov edi,esi
+ xor esi,eax
+ and esi,0fff0000fh
+ xor edi,esi
+ xor eax,esi
+ ;
+
+ rol edi,12
+ mov esi,edi
+ xor edi,eax
+ and edi,0f0f0f0f0h
+ xor esi,edi
+ xor eax,edi
+ ;
+
+ ror eax,4
+ mov DWORD PTR [ebx],eax
+ mov DWORD PTR 4[ebx],esi
+ pop edi
+ pop esi
+ pop ebp
+ pop ebx
+ ret
+_DES_encrypt3 ENDP
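
Editorial note: DES_encrypt3 above is where triple DES is assembled: one IP, then three passes of DES_encrypt2 (which skips IP/FP itself and only pre-rotates the halves by 3, per its "Fixup" block), with the flag stored at 8[esp] set to 1/0/1 for the encrypt-decrypt-encrypt order. A C sketch of the call pattern; DES_encrypt2 and DES_key_schedule are the real OpenSSL names, while des_encrypt3_sketch and the placeholder IP/FP comments are ours.

#include <stdint.h>

typedef uint32_t DES_LONG;
typedef struct DES_ks DES_key_schedule;       /* opaque in this sketch */

void DES_encrypt2(DES_LONG *data, DES_key_schedule *ks, int enc);

static void des_encrypt3_sketch(DES_LONG data[2],
                                DES_key_schedule *ks1,
                                DES_key_schedule *ks2,
                                DES_key_schedule *ks3)
{
    /* IP(data[0], data[1]);  -- the rol/xor chain before the calls */
    DES_encrypt2(data, ks1, 1);   /* mov DWORD PTR 8[esp],1 */
    DES_encrypt2(data, ks2, 0);   /* mov DWORD PTR 8[esp],0 */
    DES_encrypt2(data, ks3, 1);
    /* FP(data[0], data[1]);  -- the rol/xor chain after the calls */
}

DES_decrypt3, which follows, runs the mirror image D(ks3)/E(ks2)/D(ks1): the same three calls with flags 0/1/0 and the key-schedule parameters consumed in reverse order.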
+ALIGN 16
+_DES_decrypt3 PROC PUBLIC
+$L_DES_decrypt3_begin::
+ push ebx
+ mov ebx,DWORD PTR 8[esp]
+ push ebp
+ push esi
+ push edi
+ ;
+
+ ; Load the data words
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ sub esp,12
+ ;
+
+ ; IP
+ rol edi,4
+ mov edx,edi
+ xor edi,esi
+ and edi,0f0f0f0f0h
+ xor edx,edi
+ xor esi,edi
+ ;
+
+ rol esi,20
+ mov edi,esi
+ xor esi,edx
+ and esi,0fff0000fh
+ xor edi,esi
+ xor edx,esi
+ ;
+
+ rol edi,14
+ mov esi,edi
+ xor edi,edx
+ and edi,033333333h
+ xor esi,edi
+ xor edx,edi
+ ;
+
+ rol edx,22
+ mov edi,edx
+ xor edx,esi
+ and edx,003fc03fch
+ xor edi,edx
+ xor esi,edx
+ ;
+
+ rol edi,9
+ mov edx,edi
+ xor edi,esi
+ and edi,0aaaaaaaah
+ xor edx,edi
+ xor esi,edi
+ ;
+
+ ror edx,3
+ ror esi,2
+ mov DWORD PTR 4[ebx],esi
+ mov esi,DWORD PTR 36[esp]
+ mov DWORD PTR [ebx],edx
+ mov edi,DWORD PTR 40[esp]
+ mov eax,DWORD PTR 44[esp]
+ mov DWORD PTR 8[esp],0
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ mov DWORD PTR 8[esp],1
+ mov DWORD PTR 4[esp],edi
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ mov DWORD PTR 8[esp],0
+ mov DWORD PTR 4[esp],esi
+ mov DWORD PTR [esp],ebx
+ call $L_DES_encrypt2_begin
+ add esp,12
+ mov edi,DWORD PTR [ebx]
+ mov esi,DWORD PTR 4[ebx]
+ ;
+
+ ; FP
+ rol esi,2
+ rol edi,3
+ mov eax,edi
+ xor edi,esi
+ and edi,0aaaaaaaah
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol eax,23
+ mov edi,eax
+ xor eax,esi
+ and eax,003fc03fch
+ xor edi,eax
+ xor esi,eax
+ ;
+
+ rol edi,10
+ mov eax,edi
+ xor edi,esi
+ and edi,033333333h
+ xor eax,edi
+ xor esi,edi
+ ;
+
+ rol esi,18
+ mov edi,esi
+ xor esi,eax
+ and esi,0fff0000fh
+ xor edi,esi
+ xor eax,esi
+ ;
+
+ rol edi,12
+ mov esi,edi
+ xor edi,eax
+ and edi,0f0f0f0f0h
+ xor esi,edi
+ xor eax,edi
+ ;
+
+ ror eax,4
+ mov DWORD PTR [ebx],eax
+ mov DWORD PTR 4[ebx],esi
+ pop edi
+ pop esi
+ pop ebp
+ pop ebx
+ ret
+_DES_decrypt3 ENDP
+ALIGN 16
+_DES_ncbc_encrypt PROC PUBLIC
+$L_DES_ncbc_encrypt_begin::
+ ;
+
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 28[esp]
+ ; getting iv ptr from parameter 4
+ mov ebx,DWORD PTR 36[esp]
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ push edi
+ push esi
+ push edi
+ push esi
+ mov ebx,esp
+ mov esi,DWORD PTR 36[esp]
+ mov edi,DWORD PTR 40[esp]
+ ; getting encrypt flag from parameter 5
+ mov ecx,DWORD PTR 56[esp]
+ ; get and push parameter 5
+ push ecx
+ ; get and push parameter 3
+ mov eax,DWORD PTR 52[esp]
+ push eax
+ push ebx
+ cmp ecx,0
+ jz $L006decrypt
+ and ebp,4294967288
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ jz $L007encrypt_finish
+$L008encrypt_loop:
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR 4[esi]
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 12[esp],eax
+ mov DWORD PTR 16[esp],ebx
+ call $L_DES_encrypt1_begin
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L008encrypt_loop
+$L007encrypt_finish:
+ mov ebp,DWORD PTR 56[esp]
+ and ebp,7
+ jz $L009finish
+ call $L010PIC_point
+$L010PIC_point:
+ pop edx
+ lea ecx,DWORD PTR ($L011cbc_enc_jmp_table-$L010PIC_point)[edx]
+ mov ebp,DWORD PTR [ebp*4+ecx]
+ add ebp,edx
+ xor ecx,ecx
+ xor edx,edx
+ jmp ebp
+$L012ej7:
+ mov dh,BYTE PTR 6[esi]
+ shl edx,8
+$L013ej6:
+ mov dh,BYTE PTR 5[esi]
+$L014ej5:
+ mov dl,BYTE PTR 4[esi]
+$L015ej4:
+ mov ecx,DWORD PTR [esi]
+ jmp $L016ejend
+$L017ej3:
+ mov ch,BYTE PTR 2[esi]
+ shl ecx,8
+$L018ej2:
+ mov ch,BYTE PTR 1[esi]
+$L019ej1:
+ mov cl,BYTE PTR [esi]
+$L016ejend:
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 12[esp],eax
+ mov DWORD PTR 16[esp],ebx
+ call $L_DES_encrypt1_begin
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ jmp $L009finish
+$L006decrypt:
+ and ebp,4294967288
+ mov eax,DWORD PTR 20[esp]
+ mov ebx,DWORD PTR 24[esp]
+ jz $L020decrypt_finish
+$L021decrypt_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 12[esp],eax
+ mov DWORD PTR 16[esp],ebx
+ call $L_DES_encrypt1_begin
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ mov ecx,DWORD PTR 20[esp]
+ mov edx,DWORD PTR 24[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR [edi],ecx
+ mov DWORD PTR 4[edi],edx
+ mov DWORD PTR 20[esp],eax
+ mov DWORD PTR 24[esp],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L021decrypt_loop
+$L020decrypt_finish:
+ mov ebp,DWORD PTR 56[esp]
+ and ebp,7
+ jz $L009finish
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 12[esp],eax
+ mov DWORD PTR 16[esp],ebx
+ call $L_DES_encrypt1_begin
+ mov eax,DWORD PTR 12[esp]
+ mov ebx,DWORD PTR 16[esp]
+ mov ecx,DWORD PTR 20[esp]
+ mov edx,DWORD PTR 24[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+$L022dj7:
+ ror edx,16
+ mov BYTE PTR 6[edi],dl
+ shr edx,16
+$L023dj6:
+ mov BYTE PTR 5[edi],dh
+$L024dj5:
+ mov BYTE PTR 4[edi],dl
+$L025dj4:
+ mov DWORD PTR [edi],ecx
+ jmp $L026djend
+$L027dj3:
+ ror ecx,16
+ mov BYTE PTR 2[edi],cl
+ shr ecx,16
+$L028dj2:
+ mov BYTE PTR 1[edi],ch
+$L029dj1:
+ mov BYTE PTR [edi],cl
+$L026djend:
+ jmp $L009finish
+$L009finish:
+ mov ecx,DWORD PTR 64[esp]
+ add esp,28
+ mov DWORD PTR [ecx],eax
+ mov DWORD PTR 4[ecx],ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L011cbc_enc_jmp_table:
+DD 0
+DD $L019ej1-$L010PIC_point
+DD $L018ej2-$L010PIC_point
+DD $L017ej3-$L010PIC_point
+DD $L015ej4-$L010PIC_point
+DD $L014ej5-$L010PIC_point
+DD $L013ej6-$L010PIC_point
+DD $L012ej7-$L010PIC_point
+ALIGN 64
+_DES_ncbc_encrypt ENDP
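
Editorial note: DES_ncbc_encrypt above is a standard CBC driver around DES_encrypt1: the IV is copied onto the stack, whole 8-byte blocks run through encrypt_loop/decrypt_loop, a 1..7-byte tail is zero-padded (the xor ecx,ecx / xor edx,edx before the computed jump) and loaded via the ej1..ej7 table, and the running IV is written back at $L009finish. A sketch of the encrypt path in C; decrypt mirrors it with the XOR applied after the cipher call. ncbc_encrypt_sketch is our name; DES_encrypt1 is the real routine.

#include <stdint.h>
#include <string.h>

typedef uint32_t DES_LONG;
typedef struct DES_ks DES_key_schedule;

void DES_encrypt1(DES_LONG *data, DES_key_schedule *ks, int enc);

static void ncbc_encrypt_sketch(const unsigned char *in, unsigned char *out,
                                long len, DES_key_schedule *ks,
                                unsigned char ivec[8])
{
    DES_LONG tin[2], blk[2];

    memcpy(tin, ivec, 8);             /* "getting iv ptr from parameter 4" */
    for (; len > 0; len -= 8, in += 8, out += 8) {
        blk[0] = blk[1] = 0;          /* tail is zero-padded, as in the asm */
        memcpy(blk, in, (size_t)(len >= 8 ? 8 : len));
        tin[0] ^= blk[0];             /* xor eax,ecx */
        tin[1] ^= blk[1];             /* xor ebx,edx */
        DES_encrypt1(tin, ks, 1);     /* call $L_DES_encrypt1_begin */
        memcpy(out, tin, 8);          /* ciphertext doubles as next IV */
    }
    memcpy(ivec, tin, 8);             /* IV write-back at $L009finish */
}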
+ALIGN 16
+_DES_ede3_cbc_encrypt PROC PUBLIC
+$L_DES_ede3_cbc_encrypt_begin::
+ ;
+
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 28[esp]
+ ; getting iv ptr from parameter 6
+ mov ebx,DWORD PTR 44[esp]
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ push edi
+ push esi
+ push edi
+ push esi
+ mov ebx,esp
+ mov esi,DWORD PTR 36[esp]
+ mov edi,DWORD PTR 40[esp]
+ ; getting encrypt flag from parameter 7
+ mov ecx,DWORD PTR 64[esp]
+ ; get and push parameter 5
+ mov eax,DWORD PTR 56[esp]
+ push eax
+ ; get and push parameter 4
+ mov eax,DWORD PTR 56[esp]
+ push eax
+ ; get and push parameter 3
+ mov eax,DWORD PTR 56[esp]
+ push eax
+ push ebx
+ cmp ecx,0
+ jz $L030decrypt
+ and ebp,4294967288
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ jz $L031encrypt_finish
+$L032encrypt_loop:
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR 4[esi]
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ call $L_DES_encrypt3_begin
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L032encrypt_loop
+$L031encrypt_finish:
+ mov ebp,DWORD PTR 60[esp]
+ and ebp,7
+ jz $L033finish
+ call $L034PIC_point
+$L034PIC_point:
+ pop edx
+ lea ecx,DWORD PTR ($L035cbc_enc_jmp_table-$L034PIC_point)[edx]
+ mov ebp,DWORD PTR [ebp*4+ecx]
+ add ebp,edx
+ xor ecx,ecx
+ xor edx,edx
+ jmp ebp
+$L036ej7:
+ mov dh,BYTE PTR 6[esi]
+ shl edx,8
+$L037ej6:
+ mov dh,BYTE PTR 5[esi]
+$L038ej5:
+ mov dl,BYTE PTR 4[esi]
+$L039ej4:
+ mov ecx,DWORD PTR [esi]
+ jmp $L040ejend
+$L041ej3:
+ mov ch,BYTE PTR 2[esi]
+ shl ecx,8
+$L042ej2:
+ mov ch,BYTE PTR 1[esi]
+$L043ej1:
+ mov cl,BYTE PTR [esi]
+$L040ejend:
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ call $L_DES_encrypt3_begin
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ jmp $L033finish
+$L030decrypt:
+ and ebp,4294967288
+ mov eax,DWORD PTR 24[esp]
+ mov ebx,DWORD PTR 28[esp]
+ jz $L044decrypt_finish
+$L045decrypt_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ call $L_DES_decrypt3_begin
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ mov ecx,DWORD PTR 24[esp]
+ mov edx,DWORD PTR 28[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR [edi],ecx
+ mov DWORD PTR 4[edi],edx
+ mov DWORD PTR 24[esp],eax
+ mov DWORD PTR 28[esp],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L045decrypt_loop
+$L044decrypt_finish:
+ mov ebp,DWORD PTR 60[esp]
+ and ebp,7
+ jz $L033finish
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ call $L_DES_decrypt3_begin
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ mov ecx,DWORD PTR 24[esp]
+ mov edx,DWORD PTR 28[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+$L046dj7:
+ ror edx,16
+ mov BYTE PTR 6[edi],dl
+ shr edx,16
+$L047dj6:
+ mov BYTE PTR 5[edi],dh
+$L048dj5:
+ mov BYTE PTR 4[edi],dl
+$L049dj4:
+ mov DWORD PTR [edi],ecx
+ jmp $L050djend
+$L051dj3:
+ ror ecx,16
+ mov BYTE PTR 2[edi],cl
+ shr ecx,16
+$L052dj2:
+ mov BYTE PTR 1[edi],ch
+$L053dj1:
+ mov BYTE PTR [edi],cl
+$L050djend:
+ jmp $L033finish
+$L033finish:
+ mov ecx,DWORD PTR 76[esp]
+ add esp,32
+ mov DWORD PTR [ecx],eax
+ mov DWORD PTR 4[ecx],ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L035cbc_enc_jmp_table:
+DD 0
+DD $L043ej1-$L034PIC_point
+DD $L042ej2-$L034PIC_point
+DD $L041ej3-$L034PIC_point
+DD $L039ej4-$L034PIC_point
+DD $L038ej5-$L034PIC_point
+DD $L037ej6-$L034PIC_point
+DD $L036ej7-$L034PIC_point
+ALIGN 64
+_DES_ede3_cbc_encrypt ENDP
+ALIGN 64
+_DES_SPtrans::
+DD 34080768,524288,33554434,34080770
+DD 33554432,526338,524290,33554434
+DD 526338,34080768,34078720,2050
+DD 33556482,33554432,0,524290
+DD 524288,2,33556480,526336
+DD 34080770,34078720,2050,33556480
+DD 2,2048,526336,34078722
+DD 2048,33556482,34078722,0
+DD 0,34080770,33556480,524290
+DD 34080768,524288,2050,33556480
+DD 34078722,2048,526336,33554434
+DD 526338,2,33554434,34078720
+DD 34080770,526336,34078720,33556482
+DD 33554432,2050,524290,0
+DD 524288,33554432,33556482,34080768
+DD 2,34078722,2048,526338
+DD 1074823184,0,1081344,1074790400
+DD 1073741840,32784,1073774592,1081344
+DD 32768,1074790416,16,1073774592
+DD 1048592,1074823168,1074790400,16
+DD 1048576,1073774608,1074790416,32768
+DD 1081360,1073741824,0,1048592
+DD 1073774608,1081360,1074823168,1073741840
+DD 1073741824,1048576,32784,1074823184
+DD 1048592,1074823168,1073774592,1081360
+DD 1074823184,1048592,1073741840,0
+DD 1073741824,32784,1048576,1074790416
+DD 32768,1073741824,1081360,1073774608
+DD 1074823168,32768,0,1073741840
+DD 16,1074823184,1081344,1074790400
+DD 1074790416,1048576,32784,1073774592
+DD 1073774608,16,1074790400,1081344
+DD 67108865,67371264,256,67109121
+DD 262145,67108864,67109121,262400
+DD 67109120,262144,67371008,1
+DD 67371265,257,1,67371009
+DD 0,262145,67371264,256
+DD 257,67371265,262144,67108865
+DD 67371009,67109120,262401,67371008
+DD 262400,0,67108864,262401
+DD 67371264,256,1,262144
+DD 257,262145,67371008,67109121
+DD 0,67371264,262400,67371009
+DD 262145,67108864,67371265,1
+DD 262401,67108865,67108864,67371265
+DD 262144,67109120,67109121,262400
+DD 67109120,0,67371009,257
+DD 67108865,262401,256,67371008
+DD 4198408,268439552,8,272633864
+DD 0,272629760,268439560,4194312
+DD 272633856,268435464,268435456,4104
+DD 268435464,4198408,4194304,268435456
+DD 272629768,4198400,4096,8
+DD 4198400,268439560,272629760,4096
+DD 4104,0,4194312,272633856
+DD 268439552,272629768,272633864,4194304
+DD 272629768,4104,4194304,268435464
+DD 4198400,268439552,8,272629760
+DD 268439560,0,4096,4194312
+DD 0,272629768,272633856,4096
+DD 268435456,272633864,4198408,4194304
+DD 272633864,8,268439552,4198408
+DD 4194312,4198400,272629760,268439560
+DD 4104,268435456,268435464,272633856
+DD 134217728,65536,1024,134284320
+DD 134283296,134218752,66592,134283264
+DD 65536,32,134217760,66560
+DD 134218784,134283296,134284288,0
+DD 66560,134217728,65568,1056
+DD 134218752,66592,0,134217760
+DD 32,134218784,134284320,65568
+DD 134283264,1024,1056,134284288
+DD 134284288,134218784,65568,134283264
+DD 65536,32,134217760,134218752
+DD 134217728,66560,134284320,0
+DD 66592,134217728,1024,65568
+DD 134218784,1024,0,134284320
+DD 134283296,134284288,1056,65536
+DD 66560,134283296,134218752,1056
+DD 32,66592,134283264,134217760
+DD 2147483712,2097216,0,2149588992
+DD 2097216,8192,2147491904,2097152
+DD 8256,2149589056,2105344,2147483648
+DD 2147491840,2147483712,2149580800,2105408
+DD 2097152,2147491904,2149580864,0
+DD 8192,64,2149588992,2149580864
+DD 2149589056,2149580800,2147483648,8256
+DD 64,2105344,2105408,2147491840
+DD 8256,2147483648,2147491840,2105408
+DD 2149588992,2097216,0,2147491840
+DD 2147483648,8192,2149580864,2097152
+DD 2097216,2149589056,2105344,64
+DD 2149589056,2105344,2097152,2147491904
+DD 2147483712,2149580800,2105408,0
+DD 8192,2147483712,2147491904,2149588992
+DD 2149580800,8256,64,2149580864
+DD 16384,512,16777728,16777220
+DD 16794116,16388,16896,0
+DD 16777216,16777732,516,16793600
+DD 4,16794112,16793600,516
+DD 16777732,16384,16388,16794116
+DD 0,16777728,16777220,16896
+DD 16793604,16900,16794112,4
+DD 16900,16793604,512,16777216
+DD 16900,16793600,16793604,516
+DD 16384,512,16777216,16793604
+DD 16777732,16900,16896,0
+DD 512,16777220,4,16777728
+DD 0,16777732,16777728,16896
+DD 516,16384,16794116,16777216
+DD 16794112,4,16388,16794116
+DD 16777220,16794112,16793600,16388
+DD 545259648,545390592,131200,0
+DD 537001984,8388736,545259520,545390720
+DD 128,536870912,8519680,131200
+DD 8519808,537002112,536871040,545259520
+DD 131072,8519808,8388736,537001984
+DD 545390720,536871040,0,8519680
+DD 536870912,8388608,537002112,545259648
+DD 8388608,131072,545390592,128
+DD 8388608,131072,536871040,545390720
+DD 131200,536870912,0,8519680
+DD 545259648,537002112,537001984,8388736
+DD 545390592,128,8388736,537001984
+DD 545390720,8388608,545259520,536871040
+DD 8519680,131200,537002112,545259520
+DD 128,545390592,8519808,0
+DD 536870912,545259648,131072,8519808
+.text$ ENDS
+END
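
Editorial note: both CBC routines in des-586.asm handle the 1..7-byte tail with a computed jump: the cbc_enc_jmp_table DD entries hold label-minus-PIC-point deltas, ebp indexes the table by the residual length, add ebp,edx rebases the delta, and execution falls through ej7..ej1 so each entry point loads exactly the remaining bytes into two zeroed registers. The C equivalent of that shape is a switch with deliberate fall-through; load_tail is our name for this sketch, and the memcpy for the low word assumes little-endian layout (as on x86, matching mov ecx,[esi]).

#include <stdint.h>
#include <string.h>

/* Load the n (1..7) remaining input bytes into two zeroed words,
 * entering at the case matching n and falling through, exactly as
 * the jmp ebp into $L012ej7..$L019ej1 does (n == 0 never reaches
 * the table; the assembly tests it with jz beforehand). */
static void load_tail(uint32_t *c, uint32_t *d,
                      const unsigned char *in, int n)
{
    *c = *d = 0;                              /* xor ecx,ecx / xor edx,edx */
    switch (n) {
    case 7: *d  = (uint32_t)in[6] << 16;      /* ej7: mov dh,6[esi]; shl edx,8 */
    case 6: *d |= (uint32_t)in[5] << 8;       /* ej6 */
    case 5: *d |= (uint32_t)in[4];            /* ej5 */
    case 4: memcpy(c, in, 4);                 /* ej4: mov ecx,[esi] */
            break;                            /* jmp ejend */
    case 3: *c  = (uint32_t)in[2] << 16;      /* ej3: mov ch,2[esi]; shl ecx,8 */
    case 2: *c |= (uint32_t)in[1] << 8;       /* ej2 */
    case 1: *c |= (uint32_t)in[0];            /* ej1 */
    }
}

Storing offsets rather than absolute addresses in the table is what keeps the code position-independent, which matters because these files are built into a shared object.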
diff --git a/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm b/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
new file mode 100644
index 0000000000..c8edae762d
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
@@ -0,0 +1,693 @@
+TITLE ../openssl/crypto/md5/asm/md5-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_md5_block_asm_data_order PROC PUBLIC
+$L_md5_block_asm_data_order_begin::
+ push esi
+ push edi
+ mov edi,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 16[esp]
+ mov ecx,DWORD PTR 20[esp]
+ push ebp
+ shl ecx,6
+ push ebx
+ add ecx,esi
+ sub ecx,64
+ mov eax,DWORD PTR [edi]
+ push ecx
+ mov ebx,DWORD PTR 4[edi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+$L000start:
+ ;
+
+ ; R0 section
+ mov edi,ecx
+ mov ebp,DWORD PTR [esi]
+ ; R0 0
+ xor edi,edx
+ and edi,ebx
+ lea eax,DWORD PTR 3614090360[ebp*1+eax]
+ xor edi,edx
+ add eax,edi
+ mov edi,ebx
+ rol eax,7
+ mov ebp,DWORD PTR 4[esi]
+ add eax,ebx
+ ; R0 1
+ xor edi,ecx
+ and edi,eax
+ lea edx,DWORD PTR 3905402710[ebp*1+edx]
+ xor edi,ecx
+ add edx,edi
+ mov edi,eax
+ rol edx,12
+ mov ebp,DWORD PTR 8[esi]
+ add edx,eax
+ ; R0 2
+ xor edi,ebx
+ and edi,edx
+ lea ecx,DWORD PTR 606105819[ebp*1+ecx]
+ xor edi,ebx
+ add ecx,edi
+ mov edi,edx
+ rol ecx,17
+ mov ebp,DWORD PTR 12[esi]
+ add ecx,edx
+ ; R0 3
+ xor edi,eax
+ and edi,ecx
+ lea ebx,DWORD PTR 3250441966[ebp*1+ebx]
+ xor edi,eax
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,22
+ mov ebp,DWORD PTR 16[esi]
+ add ebx,ecx
+ ; R0 4
+ xor edi,edx
+ and edi,ebx
+ lea eax,DWORD PTR 4118548399[ebp*1+eax]
+ xor edi,edx
+ add eax,edi
+ mov edi,ebx
+ rol eax,7
+ mov ebp,DWORD PTR 20[esi]
+ add eax,ebx
+ ; R0 5
+ xor edi,ecx
+ and edi,eax
+ lea edx,DWORD PTR 1200080426[ebp*1+edx]
+ xor edi,ecx
+ add edx,edi
+ mov edi,eax
+ rol edx,12
+ mov ebp,DWORD PTR 24[esi]
+ add edx,eax
+ ; R0 6
+ xor edi,ebx
+ and edi,edx
+ lea ecx,DWORD PTR 2821735955[ebp*1+ecx]
+ xor edi,ebx
+ add ecx,edi
+ mov edi,edx
+ rol ecx,17
+ mov ebp,DWORD PTR 28[esi]
+ add ecx,edx
+ ; R0 7
+ xor edi,eax
+ and edi,ecx
+ lea ebx,DWORD PTR 4249261313[ebp*1+ebx]
+ xor edi,eax
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,22
+ mov ebp,DWORD PTR 32[esi]
+ add ebx,ecx
+ ; R0 8
+ xor edi,edx
+ and edi,ebx
+ lea eax,DWORD PTR 1770035416[ebp*1+eax]
+ xor edi,edx
+ add eax,edi
+ mov edi,ebx
+ rol eax,7
+ mov ebp,DWORD PTR 36[esi]
+ add eax,ebx
+ ; R0 9
+ xor edi,ecx
+ and edi,eax
+ lea edx,DWORD PTR 2336552879[ebp*1+edx]
+ xor edi,ecx
+ add edx,edi
+ mov edi,eax
+ rol edx,12
+ mov ebp,DWORD PTR 40[esi]
+ add edx,eax
+ ; R0 10
+ xor edi,ebx
+ and edi,edx
+ lea ecx,DWORD PTR 4294925233[ebp*1+ecx]
+ xor edi,ebx
+ add ecx,edi
+ mov edi,edx
+ rol ecx,17
+ mov ebp,DWORD PTR 44[esi]
+ add ecx,edx
+ ; R0 11
+ xor edi,eax
+ and edi,ecx
+ lea ebx,DWORD PTR 2304563134[ebp*1+ebx]
+ xor edi,eax
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,22
+ mov ebp,DWORD PTR 48[esi]
+ add ebx,ecx
+ ; R0 12
+ xor edi,edx
+ and edi,ebx
+ lea eax,DWORD PTR 1804603682[ebp*1+eax]
+ xor edi,edx
+ add eax,edi
+ mov edi,ebx
+ rol eax,7
+ mov ebp,DWORD PTR 52[esi]
+ add eax,ebx
+ ; R0 13
+ xor edi,ecx
+ and edi,eax
+ lea edx,DWORD PTR 4254626195[ebp*1+edx]
+ xor edi,ecx
+ add edx,edi
+ mov edi,eax
+ rol edx,12
+ mov ebp,DWORD PTR 56[esi]
+ add edx,eax
+ ; R0 14
+ xor edi,ebx
+ and edi,edx
+ lea ecx,DWORD PTR 2792965006[ebp*1+ecx]
+ xor edi,ebx
+ add ecx,edi
+ mov edi,edx
+ rol ecx,17
+ mov ebp,DWORD PTR 60[esi]
+ add ecx,edx
+ ; R0 15
+ xor edi,eax
+ and edi,ecx
+ lea ebx,DWORD PTR 1236535329[ebp*1+ebx]
+ xor edi,eax
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,22
+ mov ebp,DWORD PTR 4[esi]
+ add ebx,ecx
+ ;
+
+ ; R1 section
+ ; R1 16
+ lea eax,DWORD PTR 4129170786[ebp*1+eax]
+ xor edi,ebx
+ and edi,edx
+ mov ebp,DWORD PTR 24[esi]
+ xor edi,ecx
+ add eax,edi
+ mov edi,ebx
+ rol eax,5
+ add eax,ebx
+ ; R1 17
+ lea edx,DWORD PTR 3225465664[ebp*1+edx]
+ xor edi,eax
+ and edi,ecx
+ mov ebp,DWORD PTR 44[esi]
+ xor edi,ebx
+ add edx,edi
+ mov edi,eax
+ rol edx,9
+ add edx,eax
+ ; R1 18
+ lea ecx,DWORD PTR 643717713[ebp*1+ecx]
+ xor edi,edx
+ and edi,ebx
+ mov ebp,DWORD PTR [esi]
+ xor edi,eax
+ add ecx,edi
+ mov edi,edx
+ rol ecx,14
+ add ecx,edx
+ ; R1 19
+ lea ebx,DWORD PTR 3921069994[ebp*1+ebx]
+ xor edi,ecx
+ and edi,eax
+ mov ebp,DWORD PTR 20[esi]
+ xor edi,edx
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,20
+ add ebx,ecx
+ ; R1 20
+ lea eax,DWORD PTR 3593408605[ebp*1+eax]
+ xor edi,ebx
+ and edi,edx
+ mov ebp,DWORD PTR 40[esi]
+ xor edi,ecx
+ add eax,edi
+ mov edi,ebx
+ rol eax,5
+ add eax,ebx
+ ; R1 21
+ lea edx,DWORD PTR 38016083[ebp*1+edx]
+ xor edi,eax
+ and edi,ecx
+ mov ebp,DWORD PTR 60[esi]
+ xor edi,ebx
+ add edx,edi
+ mov edi,eax
+ rol edx,9
+ add edx,eax
+ ; R1 22
+ lea ecx,DWORD PTR 3634488961[ebp*1+ecx]
+ xor edi,edx
+ and edi,ebx
+ mov ebp,DWORD PTR 16[esi]
+ xor edi,eax
+ add ecx,edi
+ mov edi,edx
+ rol ecx,14
+ add ecx,edx
+ ; R1 23
+ lea ebx,DWORD PTR 3889429448[ebp*1+ebx]
+ xor edi,ecx
+ and edi,eax
+ mov ebp,DWORD PTR 36[esi]
+ xor edi,edx
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,20
+ add ebx,ecx
+ ; R1 24
+ lea eax,DWORD PTR 568446438[ebp*1+eax]
+ xor edi,ebx
+ and edi,edx
+ mov ebp,DWORD PTR 56[esi]
+ xor edi,ecx
+ add eax,edi
+ mov edi,ebx
+ rol eax,5
+ add eax,ebx
+ ; R1 25
+ lea edx,DWORD PTR 3275163606[ebp*1+edx]
+ xor edi,eax
+ and edi,ecx
+ mov ebp,DWORD PTR 12[esi]
+ xor edi,ebx
+ add edx,edi
+ mov edi,eax
+ rol edx,9
+ add edx,eax
+ ; R1 26
+ lea ecx,DWORD PTR 4107603335[ebp*1+ecx]
+ xor edi,edx
+ and edi,ebx
+ mov ebp,DWORD PTR 32[esi]
+ xor edi,eax
+ add ecx,edi
+ mov edi,edx
+ rol ecx,14
+ add ecx,edx
+ ; R1 27
+ lea ebx,DWORD PTR 1163531501[ebp*1+ebx]
+ xor edi,ecx
+ and edi,eax
+ mov ebp,DWORD PTR 52[esi]
+ xor edi,edx
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,20
+ add ebx,ecx
+ ; R1 28
+ lea eax,DWORD PTR 2850285829[ebp*1+eax]
+ xor edi,ebx
+ and edi,edx
+ mov ebp,DWORD PTR 8[esi]
+ xor edi,ecx
+ add eax,edi
+ mov edi,ebx
+ rol eax,5
+ add eax,ebx
+ ; R1 29
+ lea edx,DWORD PTR 4243563512[ebp*1+edx]
+ xor edi,eax
+ and edi,ecx
+ mov ebp,DWORD PTR 28[esi]
+ xor edi,ebx
+ add edx,edi
+ mov edi,eax
+ rol edx,9
+ add edx,eax
+ ; R1 30
+ lea ecx,DWORD PTR 1735328473[ebp*1+ecx]
+ xor edi,edx
+ and edi,ebx
+ mov ebp,DWORD PTR 48[esi]
+ xor edi,eax
+ add ecx,edi
+ mov edi,edx
+ rol ecx,14
+ add ecx,edx
+ ; R1 31
+ lea ebx,DWORD PTR 2368359562[ebp*1+ebx]
+ xor edi,ecx
+ and edi,eax
+ mov ebp,DWORD PTR 20[esi]
+ xor edi,edx
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,20
+ add ebx,ecx
+ ;
+
+ ; R2 section
+ ; R2 32
+ xor edi,edx
+ xor edi,ebx
+ lea eax,DWORD PTR 4294588738[ebp*1+eax]
+ add eax,edi
+ rol eax,4
+ mov ebp,DWORD PTR 32[esi]
+ mov edi,ebx
+ ; R2 33
+ lea edx,DWORD PTR 2272392833[ebp*1+edx]
+ add eax,ebx
+ xor edi,ecx
+ xor edi,eax
+ mov ebp,DWORD PTR 44[esi]
+ add edx,edi
+ mov edi,eax
+ rol edx,11
+ add edx,eax
+ ; R2 34
+ xor edi,ebx
+ xor edi,edx
+ lea ecx,DWORD PTR 1839030562[ebp*1+ecx]
+ add ecx,edi
+ rol ecx,16
+ mov ebp,DWORD PTR 56[esi]
+ mov edi,edx
+ ; R2 35
+ lea ebx,DWORD PTR 4259657740[ebp*1+ebx]
+ add ecx,edx
+ xor edi,eax
+ xor edi,ecx
+ mov ebp,DWORD PTR 4[esi]
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,23
+ add ebx,ecx
+ ; R2 36
+ xor edi,edx
+ xor edi,ebx
+ lea eax,DWORD PTR 2763975236[ebp*1+eax]
+ add eax,edi
+ rol eax,4
+ mov ebp,DWORD PTR 16[esi]
+ mov edi,ebx
+ ; R2 37
+ lea edx,DWORD PTR 1272893353[ebp*1+edx]
+ add eax,ebx
+ xor edi,ecx
+ xor edi,eax
+ mov ebp,DWORD PTR 28[esi]
+ add edx,edi
+ mov edi,eax
+ rol edx,11
+ add edx,eax
+ ; R2 38
+ xor edi,ebx
+ xor edi,edx
+ lea ecx,DWORD PTR 4139469664[ebp*1+ecx]
+ add ecx,edi
+ rol ecx,16
+ mov ebp,DWORD PTR 40[esi]
+ mov edi,edx
+ ; R2 39
+ lea ebx,DWORD PTR 3200236656[ebp*1+ebx]
+ add ecx,edx
+ xor edi,eax
+ xor edi,ecx
+ mov ebp,DWORD PTR 52[esi]
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,23
+ add ebx,ecx
+ ; R2 40
+ xor edi,edx
+ xor edi,ebx
+ lea eax,DWORD PTR 681279174[ebp*1+eax]
+ add eax,edi
+ rol eax,4
+ mov ebp,DWORD PTR [esi]
+ mov edi,ebx
+ ; R2 41
+ lea edx,DWORD PTR 3936430074[ebp*1+edx]
+ add eax,ebx
+ xor edi,ecx
+ xor edi,eax
+ mov ebp,DWORD PTR 12[esi]
+ add edx,edi
+ mov edi,eax
+ rol edx,11
+ add edx,eax
+ ; R2 42
+ xor edi,ebx
+ xor edi,edx
+ lea ecx,DWORD PTR 3572445317[ebp*1+ecx]
+ add ecx,edi
+ rol ecx,16
+ mov ebp,DWORD PTR 24[esi]
+ mov edi,edx
+ ; R2 43
+ lea ebx,DWORD PTR 76029189[ebp*1+ebx]
+ add ecx,edx
+ xor edi,eax
+ xor edi,ecx
+ mov ebp,DWORD PTR 36[esi]
+ add ebx,edi
+ mov edi,ecx
+ rol ebx,23
+ add ebx,ecx
+ ; R2 44
+ xor edi,edx
+ xor edi,ebx
+ lea eax,DWORD PTR 3654602809[ebp*1+eax]
+ add eax,edi
+ rol eax,4
+ mov ebp,DWORD PTR 48[esi]
+ mov edi,ebx
+ ; R2 45
+ lea edx,DWORD PTR 3873151461[ebp*1+edx]
+ add eax,ebx
+ xor edi,ecx
+ xor edi,eax
+ mov ebp,DWORD PTR 60[esi]
+ add edx,edi
+ mov edi,eax
+ rol edx,11
+ add edx,eax
+ ; R2 46
+ xor edi,ebx
+ xor edi,edx
+ lea ecx,DWORD PTR 530742520[ebp*1+ecx]
+ add ecx,edi
+ rol ecx,16
+ mov ebp,DWORD PTR 8[esi]
+ mov edi,edx
+ ; R2 47
+ lea ebx,DWORD PTR 3299628645[ebp*1+ebx]
+ add ecx,edx
+ xor edi,eax
+ xor edi,ecx
+ mov ebp,DWORD PTR [esi]
+ add ebx,edi
+ mov edi,-1
+ rol ebx,23
+ add ebx,ecx
+ ;
+
+ ; R3 section
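+	; R3 steps 48-63 use I(x,y,z) = y XOR (x OR NOT z); the
+	; "mov edi,-1" / "xor edi,reg" pairs compute NOT reg, and the
+	; rotations are 6, 10, 15 and 21 bits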
+ ; R3 48
+ xor edi,edx
+ or edi,ebx
+ lea eax,DWORD PTR 4096336452[ebp*1+eax]
+ xor edi,ecx
+ mov ebp,DWORD PTR 28[esi]
+ add eax,edi
+ mov edi,-1
+ rol eax,6
+ xor edi,ecx
+ add eax,ebx
+ ; R3 49
+ or edi,eax
+ lea edx,DWORD PTR 1126891415[ebp*1+edx]
+ xor edi,ebx
+ mov ebp,DWORD PTR 56[esi]
+ add edx,edi
+ mov edi,-1
+ rol edx,10
+ xor edi,ebx
+ add edx,eax
+ ; R3 50
+ or edi,edx
+ lea ecx,DWORD PTR 2878612391[ebp*1+ecx]
+ xor edi,eax
+ mov ebp,DWORD PTR 20[esi]
+ add ecx,edi
+ mov edi,-1
+ rol ecx,15
+ xor edi,eax
+ add ecx,edx
+ ; R3 51
+ or edi,ecx
+ lea ebx,DWORD PTR 4237533241[ebp*1+ebx]
+ xor edi,edx
+ mov ebp,DWORD PTR 48[esi]
+ add ebx,edi
+ mov edi,-1
+ rol ebx,21
+ xor edi,edx
+ add ebx,ecx
+ ; R3 52
+ or edi,ebx
+ lea eax,DWORD PTR 1700485571[ebp*1+eax]
+ xor edi,ecx
+ mov ebp,DWORD PTR 12[esi]
+ add eax,edi
+ mov edi,-1
+ rol eax,6
+ xor edi,ecx
+ add eax,ebx
+ ; R3 53
+ or edi,eax
+ lea edx,DWORD PTR 2399980690[ebp*1+edx]
+ xor edi,ebx
+ mov ebp,DWORD PTR 40[esi]
+ add edx,edi
+ mov edi,-1
+ rol edx,10
+ xor edi,ebx
+ add edx,eax
+ ; R3 54
+ or edi,edx
+ lea ecx,DWORD PTR 4293915773[ebp*1+ecx]
+ xor edi,eax
+ mov ebp,DWORD PTR 4[esi]
+ add ecx,edi
+ mov edi,-1
+ rol ecx,15
+ xor edi,eax
+ add ecx,edx
+ ; R3 55
+ or edi,ecx
+ lea ebx,DWORD PTR 2240044497[ebp*1+ebx]
+ xor edi,edx
+ mov ebp,DWORD PTR 32[esi]
+ add ebx,edi
+ mov edi,-1
+ rol ebx,21
+ xor edi,edx
+ add ebx,ecx
+ ; R3 56
+ or edi,ebx
+ lea eax,DWORD PTR 1873313359[ebp*1+eax]
+ xor edi,ecx
+ mov ebp,DWORD PTR 60[esi]
+ add eax,edi
+ mov edi,-1
+ rol eax,6
+ xor edi,ecx
+ add eax,ebx
+ ; R3 57
+ or edi,eax
+ lea edx,DWORD PTR 4264355552[ebp*1+edx]
+ xor edi,ebx
+ mov ebp,DWORD PTR 24[esi]
+ add edx,edi
+ mov edi,-1
+ rol edx,10
+ xor edi,ebx
+ add edx,eax
+ ; R3 58
+ or edi,edx
+ lea ecx,DWORD PTR 2734768916[ebp*1+ecx]
+ xor edi,eax
+ mov ebp,DWORD PTR 52[esi]
+ add ecx,edi
+ mov edi,-1
+ rol ecx,15
+ xor edi,eax
+ add ecx,edx
+ ; R3 59
+ or edi,ecx
+ lea ebx,DWORD PTR 1309151649[ebp*1+ebx]
+ xor edi,edx
+ mov ebp,DWORD PTR 16[esi]
+ add ebx,edi
+ mov edi,-1
+ rol ebx,21
+ xor edi,edx
+ add ebx,ecx
+ ; R3 60
+ or edi,ebx
+ lea eax,DWORD PTR 4149444226[ebp*1+eax]
+ xor edi,ecx
+ mov ebp,DWORD PTR 44[esi]
+ add eax,edi
+ mov edi,-1
+ rol eax,6
+ xor edi,ecx
+ add eax,ebx
+ ; R3 61
+ or edi,eax
+ lea edx,DWORD PTR 3174756917[ebp*1+edx]
+ xor edi,ebx
+ mov ebp,DWORD PTR 8[esi]
+ add edx,edi
+ mov edi,-1
+ rol edx,10
+ xor edi,ebx
+ add edx,eax
+ ; R3 62
+ or edi,edx
+ lea ecx,DWORD PTR 718787259[ebp*1+ecx]
+ xor edi,eax
+ mov ebp,DWORD PTR 36[esi]
+ add ecx,edi
+ mov edi,-1
+ rol ecx,15
+ xor edi,eax
+ add ecx,edx
+ ; R3 63
+ or edi,ecx
+ lea ebx,DWORD PTR 3951481745[ebp*1+ebx]
+ xor edi,edx
+ mov ebp,DWORD PTR 24[esp]
+ add ebx,edi
+ add esi,64
+ rol ebx,21
+ mov edi,DWORD PTR [ebp]
+ add ebx,ecx
+ add eax,edi
+ mov edi,DWORD PTR 4[ebp]
+ add ebx,edi
+ mov edi,DWORD PTR 8[ebp]
+ add ecx,edi
+ mov edi,DWORD PTR 12[ebp]
+ add edx,edi
+ mov DWORD PTR [ebp],eax
+ mov DWORD PTR 4[ebp],ebx
+ mov edi,DWORD PTR [esp]
+ mov DWORD PTR 8[ebp],ecx
+ mov DWORD PTR 12[ebp],edx
+ cmp edi,esi
+ jae $L000start
+ pop eax
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_md5_block_asm_data_order ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm b/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
new file mode 100644
index 0000000000..3eb66f7350
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
@@ -0,0 +1,239 @@
+TITLE rc4-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_RC4 PROC PUBLIC
+$L_RC4_begin::
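+	; RC4(key, len, inp, out). The PRGA below is i = i+1, j = j+S[i],
+	; swap S[i] and S[j], output byte = S[(S[i]+S[j]) AND 255] XOR input.
+	; The table is held either as 256 dwords or as 256 bytes; RC4_set_key
+	; plants a -1 dword right after a byte-format table (dword entries
+	; are never -1), which routes execution to the RC4_CHAR path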
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov edi,DWORD PTR 20[esp]
+ mov edx,DWORD PTR 24[esp]
+ mov esi,DWORD PTR 28[esp]
+ mov ebp,DWORD PTR 32[esp]
+ xor eax,eax
+ xor ebx,ebx
+ cmp edx,0
+ je $L000abort
+ mov al,BYTE PTR [edi]
+ mov bl,BYTE PTR 4[edi]
+ add edi,8
+ lea ecx,DWORD PTR [edx*1+esi]
+ sub ebp,esi
+ mov DWORD PTR 24[esp],ecx
+ inc al
+ cmp DWORD PTR 256[edi],-1
+ je $L001RC4_CHAR
+ mov ecx,DWORD PTR [eax*4+edi]
+ and edx,-4
+ jz $L002loop1
+ lea edx,DWORD PTR [edx*1+esi-4]
+ mov DWORD PTR 28[esp],edx
+ mov DWORD PTR 32[esp],ebp
+ALIGN 16
+$L003loop4:
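+	; four keystream bytes per iteration, accumulated into ebp with
+	; ror/or so a full dword can be XORed with the input and stored
+	; in one go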
+ add bl,cl
+ mov edx,DWORD PTR [ebx*4+edi]
+ mov DWORD PTR [ebx*4+edi],ecx
+ mov DWORD PTR [eax*4+edi],edx
+ add edx,ecx
+ inc al
+ and edx,255
+ mov ecx,DWORD PTR [eax*4+edi]
+ mov ebp,DWORD PTR [edx*4+edi]
+ add bl,cl
+ mov edx,DWORD PTR [ebx*4+edi]
+ mov DWORD PTR [ebx*4+edi],ecx
+ mov DWORD PTR [eax*4+edi],edx
+ add edx,ecx
+ inc al
+ and edx,255
+ ror ebp,8
+ mov ecx,DWORD PTR [eax*4+edi]
+ or ebp,DWORD PTR [edx*4+edi]
+ add bl,cl
+ mov edx,DWORD PTR [ebx*4+edi]
+ mov DWORD PTR [ebx*4+edi],ecx
+ mov DWORD PTR [eax*4+edi],edx
+ add edx,ecx
+ inc al
+ and edx,255
+ ror ebp,8
+ mov ecx,DWORD PTR [eax*4+edi]
+ or ebp,DWORD PTR [edx*4+edi]
+ add bl,cl
+ mov edx,DWORD PTR [ebx*4+edi]
+ mov DWORD PTR [ebx*4+edi],ecx
+ mov DWORD PTR [eax*4+edi],edx
+ add edx,ecx
+ inc al
+ and edx,255
+ ror ebp,8
+ mov ecx,DWORD PTR 32[esp]
+ or ebp,DWORD PTR [edx*4+edi]
+ ror ebp,8
+ xor ebp,DWORD PTR [esi]
+ cmp esi,DWORD PTR 28[esp]
+ mov DWORD PTR [esi*1+ecx],ebp
+ lea esi,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR [eax*4+edi]
+ jb $L003loop4
+ cmp esi,DWORD PTR 24[esp]
+ je $L004done
+ mov ebp,DWORD PTR 32[esp]
+ALIGN 16
+$L002loop1:
+ add bl,cl
+ mov edx,DWORD PTR [ebx*4+edi]
+ mov DWORD PTR [ebx*4+edi],ecx
+ mov DWORD PTR [eax*4+edi],edx
+ add edx,ecx
+ inc al
+ and edx,255
+ mov edx,DWORD PTR [edx*4+edi]
+ xor dl,BYTE PTR [esi]
+ lea esi,DWORD PTR 1[esi]
+ mov ecx,DWORD PTR [eax*4+edi]
+ cmp esi,DWORD PTR 24[esp]
+ mov BYTE PTR [esi*1+ebp-1],dl
+ jb $L002loop1
+ jmp $L004done
+ALIGN 16
+$L001RC4_CHAR:
+ movzx ecx,BYTE PTR [eax*1+edi]
+$L005cloop1:
+ add bl,cl
+ movzx edx,BYTE PTR [ebx*1+edi]
+ mov BYTE PTR [ebx*1+edi],cl
+ mov BYTE PTR [eax*1+edi],dl
+ add dl,cl
+ movzx edx,BYTE PTR [edx*1+edi]
+ add al,1
+ xor dl,BYTE PTR [esi]
+ lea esi,DWORD PTR 1[esi]
+ movzx ecx,BYTE PTR [eax*1+edi]
+ cmp esi,DWORD PTR 24[esp]
+ mov BYTE PTR [esi*1+ebp-1],dl
+ jb $L005cloop1
+$L004done:
+ dec al
+ mov BYTE PTR [edi-4],bl
+ mov BYTE PTR [edi-8],al
+$L000abort:
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_RC4 ENDP
+;EXTERN _OPENSSL_ia32cap_P:NEAR
+ALIGN 16
+_RC4_set_key PROC PUBLIC
+$L_RC4_set_key_begin::
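+	; KSA for RC4_set_key(key, len, data): S[i] = i for i = 0..255,
+	; then j = j + S[i] + data[i mod len] with an S[i]/S[j] swap per
+	; step; bit 20 of OPENSSL_ia32cap_P diverts to the byte-array
+	; (RC4_CHAR) variant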
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov edi,DWORD PTR 20[esp]
+ mov ebp,DWORD PTR 24[esp]
+ mov esi,DWORD PTR 28[esp]
+ lea edx,DWORD PTR _OPENSSL_ia32cap_P
+ lea edi,DWORD PTR 8[edi]
+ lea esi,DWORD PTR [ebp*1+esi]
+ neg ebp
+ xor eax,eax
+ mov DWORD PTR [edi-4],ebp
+ bt DWORD PTR [edx],20
+ jc $L006c1stloop
+ALIGN 16
+$L007w1stloop:
+ mov DWORD PTR [eax*4+edi],eax
+ add al,1
+ jnc $L007w1stloop
+ xor ecx,ecx
+ xor edx,edx
+ALIGN 16
+$L008w2ndloop:
+ mov eax,DWORD PTR [ecx*4+edi]
+ add dl,BYTE PTR [ebp*1+esi]
+ add dl,al
+ add ebp,1
+ mov ebx,DWORD PTR [edx*4+edi]
+ jnz $L009wnowrap
+ mov ebp,DWORD PTR [edi-4]
+$L009wnowrap:
+ mov DWORD PTR [edx*4+edi],eax
+ mov DWORD PTR [ecx*4+edi],ebx
+ add cl,1
+ jnc $L008w2ndloop
+ jmp $L010exit
+ALIGN 16
+$L006c1stloop:
+ mov BYTE PTR [eax*1+edi],al
+ add al,1
+ jnc $L006c1stloop
+ xor ecx,ecx
+ xor edx,edx
+ xor ebx,ebx
+ALIGN 16
+$L011c2ndloop:
+ mov al,BYTE PTR [ecx*1+edi]
+ add dl,BYTE PTR [ebp*1+esi]
+ add dl,al
+ add ebp,1
+ mov bl,BYTE PTR [edx*1+edi]
+ jnz $L012cnowrap
+ mov ebp,DWORD PTR [edi-4]
+$L012cnowrap:
+ mov BYTE PTR [edx*1+edi],al
+ mov BYTE PTR [ecx*1+edi],bl
+ add cl,1
+ jnc $L011c2ndloop
+ mov DWORD PTR 256[edi],-1
+$L010exit:
+ xor eax,eax
+ mov DWORD PTR [edi-8],eax
+ mov DWORD PTR [edi-4],eax
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_RC4_set_key ENDP
+ALIGN 16
+_RC4_options PROC PUBLIC
+$L_RC4_options_begin::
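+	; returns a pointer into the strings below: "rc4(4x,int)" by
+	; default, or "rc4(1x,char)" (12 bytes further on) when the
+	; ia32cap bit selects the byte-array code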
+ call $L013pic_point
+$L013pic_point:
+ pop eax
+ lea eax,DWORD PTR ($L014opts-$L013pic_point)[eax]
+ lea edx,DWORD PTR _OPENSSL_ia32cap_P
+ bt DWORD PTR [edx],20
+ jnc $L015skip
+ add eax,12
+$L015skip:
+ ret
+ALIGN 64
+$L014opts:
+DB 114,99,52,40,52,120,44,105,110,116,41,0
+DB 114,99,52,40,49,120,44,99,104,97,114,41,0
+DB 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
+DB 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
+DB 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+ALIGN 64
+_RC4_options ENDP
+.text$ ENDS
+.bss SEGMENT 'BSS'
+COMM _OPENSSL_ia32cap_P:DWORD
+.bss ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm b/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
new file mode 100644
index 0000000000..e699d9173f
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
@@ -0,0 +1,573 @@
+TITLE rc5-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_RC5_32_encrypt PROC PUBLIC
+$L_RC5_32_encrypt_begin::
+ ;
+
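+	; each half-round below is A = ((A XOR B) <<< B) + S[k], with edi
+	; and esi trading the A and B roles; [ebp] holds the round count
+	; and S starts at 4[ebp], so 8- and 12-round keys exit the
+	; 16-round chain early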
+ push ebp
+ push esi
+ push edi
+ mov edx,DWORD PTR 16[esp]
+ mov ebp,DWORD PTR 20[esp]
+ ; Load the 2 words
+ mov edi,DWORD PTR [edx]
+ mov esi,DWORD PTR 4[edx]
+ push ebx
+ mov ebx,DWORD PTR [ebp]
+ add edi,DWORD PTR 4[ebp]
+ add esi,DWORD PTR 8[ebp]
+ xor edi,esi
+ mov eax,DWORD PTR 12[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 16[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 20[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 24[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 28[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 32[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 36[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 40[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 44[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 48[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 52[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 56[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 60[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 64[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 68[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 72[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ cmp ebx,8
+ je $L000rc5_exit
+ xor edi,esi
+ mov eax,DWORD PTR 76[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 80[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 84[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 88[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 92[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 96[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 100[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 104[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ cmp ebx,12
+ je $L000rc5_exit
+ xor edi,esi
+ mov eax,DWORD PTR 108[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 112[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 116[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 120[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 124[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 128[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+ xor edi,esi
+ mov eax,DWORD PTR 132[ebp]
+ mov ecx,esi
+ rol edi,cl
+ add edi,eax
+ xor esi,edi
+ mov eax,DWORD PTR 136[ebp]
+ mov ecx,edi
+ rol esi,cl
+ add esi,eax
+$L000rc5_exit:
+ mov DWORD PTR [edx],edi
+ mov DWORD PTR 4[edx],esi
+ pop ebx
+ pop edi
+ pop esi
+ pop ebp
+ ret
+_RC5_32_encrypt ENDP
+ALIGN 16
+_RC5_32_decrypt PROC PUBLIC
+$L_RC5_32_decrypt_begin::
+ ;
+
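+	; the inverse rounds: B = ((B - S[k]) >>> A) XOR A, walking the
+	; key schedule backwards; the round count at [ebp] selects the
+	; entry point (16 rounds fall through from here, 12 and 8 jump
+	; into the chain below)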
+ push ebp
+ push esi
+ push edi
+ mov edx,DWORD PTR 16[esp]
+ mov ebp,DWORD PTR 20[esp]
+ ; Load the 2 words
+ mov edi,DWORD PTR [edx]
+ mov esi,DWORD PTR 4[edx]
+ push ebx
+ mov ebx,DWORD PTR [ebp]
+ cmp ebx,12
+ je $L001rc5_dec_12
+ cmp ebx,8
+ je $L002rc5_dec_8
+ mov eax,DWORD PTR 136[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 132[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 128[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 124[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 120[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 116[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 112[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 108[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+$L001rc5_dec_12:
+ mov eax,DWORD PTR 104[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 100[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 96[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 92[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 88[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 84[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 80[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 76[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+$L002rc5_dec_8:
+ mov eax,DWORD PTR 72[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 68[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 64[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 60[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 56[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 52[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 48[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 44[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 40[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 36[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 32[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 28[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 24[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 20[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ mov eax,DWORD PTR 16[ebp]
+ sub esi,eax
+ mov ecx,edi
+ ror esi,cl
+ xor esi,edi
+ mov eax,DWORD PTR 12[ebp]
+ sub edi,eax
+ mov ecx,esi
+ ror edi,cl
+ xor edi,esi
+ sub esi,DWORD PTR 8[ebp]
+ sub edi,DWORD PTR 4[ebp]
+$L003rc5_exit:
+ mov DWORD PTR [edx],edi
+ mov DWORD PTR 4[edx],esi
+ pop ebx
+ pop edi
+ pop esi
+ pop ebp
+ ret
+_RC5_32_decrypt ENDP
+ALIGN 16
+_RC5_32_cbc_encrypt PROC PUBLIC
+$L_RC5_32_cbc_encrypt_begin::
+ ;
+
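+	; standard CBC driver: when encrypting, each 8-byte block is
+	; XORed with the previous ciphertext (the IV at first) before
+	; $L_RC5_32_encrypt_begin; when decrypting, the XOR happens after
+	; $L_RC5_32_decrypt_begin. Partial final blocks take the ej*/dj*
+	; byte ladders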
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 28[esp]
+ ; getting iv ptr from parameter 4
+ mov ebx,DWORD PTR 36[esp]
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ push edi
+ push esi
+ push edi
+ push esi
+ mov ebx,esp
+ mov esi,DWORD PTR 36[esp]
+ mov edi,DWORD PTR 40[esp]
+ ; getting encrypt flag from parameter 5
+ mov ecx,DWORD PTR 56[esp]
+ ; get and push parameter 3
+ mov eax,DWORD PTR 48[esp]
+ push eax
+ push ebx
+ cmp ecx,0
+ jz $L004decrypt
+ and ebp,4294967288
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ jz $L005encrypt_finish
+$L006encrypt_loop:
+ mov ecx,DWORD PTR [esi]
+ mov edx,DWORD PTR 4[esi]
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_RC5_32_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L006encrypt_loop
+$L005encrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L007finish
+ call $L008PIC_point
+$L008PIC_point:
+ pop edx
+ lea ecx,DWORD PTR ($L009cbc_enc_jmp_table-$L008PIC_point)[edx]
+ mov ebp,DWORD PTR [ebp*4+ecx]
+ add ebp,edx
+ xor ecx,ecx
+ xor edx,edx
+ jmp ebp
+$L010ej7:
+ mov dh,BYTE PTR 6[esi]
+ shl edx,8
+$L011ej6:
+ mov dh,BYTE PTR 5[esi]
+$L012ej5:
+ mov dl,BYTE PTR 4[esi]
+$L013ej4:
+ mov ecx,DWORD PTR [esi]
+ jmp $L014ejend
+$L015ej3:
+ mov ch,BYTE PTR 2[esi]
+ shl ecx,8
+$L016ej2:
+ mov ch,BYTE PTR 1[esi]
+$L017ej1:
+ mov cl,BYTE PTR [esi]
+$L014ejend:
+ xor eax,ecx
+ xor ebx,edx
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_RC5_32_encrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ mov DWORD PTR [edi],eax
+ mov DWORD PTR 4[edi],ebx
+ jmp $L007finish
+$L004decrypt:
+ and ebp,4294967288
+ mov eax,DWORD PTR 16[esp]
+ mov ebx,DWORD PTR 20[esp]
+ jz $L018decrypt_finish
+$L019decrypt_loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_RC5_32_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR [edi],ecx
+ mov DWORD PTR 4[edi],edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ add esi,8
+ add edi,8
+ sub ebp,8
+ jnz $L019decrypt_loop
+$L018decrypt_finish:
+ mov ebp,DWORD PTR 52[esp]
+ and ebp,7
+ jz $L007finish
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ call $L_RC5_32_decrypt_begin
+ mov eax,DWORD PTR 8[esp]
+ mov ebx,DWORD PTR 12[esp]
+ mov ecx,DWORD PTR 16[esp]
+ mov edx,DWORD PTR 20[esp]
+ xor ecx,eax
+ xor edx,ebx
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+$L020dj7:
+ ror edx,16
+ mov BYTE PTR 6[edi],dl
+ shr edx,16
+$L021dj6:
+ mov BYTE PTR 5[edi],dh
+$L022dj5:
+ mov BYTE PTR 4[edi],dl
+$L023dj4:
+ mov DWORD PTR [edi],ecx
+ jmp $L024djend
+$L025dj3:
+ ror ecx,16
+ mov BYTE PTR 2[edi],cl
+	shr	ecx,16
+$L026dj2:
+	mov	BYTE PTR 1[edi],ch
+$L027dj1:
+	mov	BYTE PTR [edi],cl
+$L024djend:
+ jmp $L007finish
+$L007finish:
+ mov ecx,DWORD PTR 60[esp]
+ add esp,24
+ mov DWORD PTR [ecx],eax
+ mov DWORD PTR 4[ecx],ebx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L009cbc_enc_jmp_table:
+DD 0
+DD $L017ej1-$L008PIC_point
+DD $L016ej2-$L008PIC_point
+DD $L015ej3-$L008PIC_point
+DD $L013ej4-$L008PIC_point
+DD $L012ej5-$L008PIC_point
+DD $L011ej6-$L008PIC_point
+DD $L010ej7-$L008PIC_point
+ALIGN 64
+_RC5_32_cbc_encrypt ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm b/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
new file mode 100644
index 0000000000..8fa61f8f98
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
@@ -0,0 +1,1976 @@
+TITLE ../openssl/crypto/ripemd/asm/rmd-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_ripemd160_block_asm_data_order PROC PUBLIC
+$L_ripemd160_block_asm_data_order_begin::
+ mov edx,DWORD PTR 4[esp]
+ mov eax,DWORD PTR 8[esp]
+ push esi
+ mov ecx,DWORD PTR [edx]
+ push edi
+ mov esi,DWORD PTR 4[edx]
+ push ebp
+ mov edi,DWORD PTR 8[edx]
+ push ebx
+ sub esp,108
+$L000start:
+ ;
+
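+	; RIPEMD-160 runs two parallel lines of 80 steps over the same
+	; 16 message words, copied to [esp]..60[esp] below. Steps 0-15
+	; use the plain XOR function with no additive constant; the left
+	; line's result is parked at 64[esp]..80[esp] before the right
+	; line (steps 80-159) starts, and the two are merged at the end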
+ mov ebx,DWORD PTR [eax]
+ mov ebp,DWORD PTR 4[eax]
+ mov DWORD PTR [esp],ebx
+ mov DWORD PTR 4[esp],ebp
+ mov ebx,DWORD PTR 8[eax]
+ mov ebp,DWORD PTR 12[eax]
+ mov DWORD PTR 8[esp],ebx
+ mov DWORD PTR 12[esp],ebp
+ mov ebx,DWORD PTR 16[eax]
+ mov ebp,DWORD PTR 20[eax]
+ mov DWORD PTR 16[esp],ebx
+ mov DWORD PTR 20[esp],ebp
+ mov ebx,DWORD PTR 24[eax]
+ mov ebp,DWORD PTR 28[eax]
+ mov DWORD PTR 24[esp],ebx
+ mov DWORD PTR 28[esp],ebp
+ mov ebx,DWORD PTR 32[eax]
+ mov ebp,DWORD PTR 36[eax]
+ mov DWORD PTR 32[esp],ebx
+ mov DWORD PTR 36[esp],ebp
+ mov ebx,DWORD PTR 40[eax]
+ mov ebp,DWORD PTR 44[eax]
+ mov DWORD PTR 40[esp],ebx
+ mov DWORD PTR 44[esp],ebp
+ mov ebx,DWORD PTR 48[eax]
+ mov ebp,DWORD PTR 52[eax]
+ mov DWORD PTR 48[esp],ebx
+ mov DWORD PTR 52[esp],ebp
+ mov ebx,DWORD PTR 56[eax]
+ mov ebp,DWORD PTR 60[eax]
+ mov DWORD PTR 56[esp],ebx
+ mov DWORD PTR 60[esp],ebp
+ mov eax,edi
+ mov ebx,DWORD PTR 12[edx]
+ mov ebp,DWORD PTR 16[edx]
+ ; 0
+ xor eax,ebx
+ mov edx,DWORD PTR [esp]
+ xor eax,esi
+ add ecx,edx
+ rol edi,10
+ add ecx,eax
+ mov eax,esi
+ rol ecx,11
+ add ecx,ebp
+ ; 1
+ xor eax,edi
+ mov edx,DWORD PTR 4[esp]
+ xor eax,ecx
+ add ebp,eax
+ mov eax,ecx
+ rol esi,10
+ add ebp,edx
+ xor eax,esi
+ rol ebp,14
+ add ebp,ebx
+ ; 2
+ mov edx,DWORD PTR 8[esp]
+ xor eax,ebp
+ add ebx,edx
+ rol ecx,10
+ add ebx,eax
+ mov eax,ebp
+ rol ebx,15
+ add ebx,edi
+ ; 3
+ xor eax,ecx
+ mov edx,DWORD PTR 12[esp]
+ xor eax,ebx
+ add edi,eax
+ mov eax,ebx
+ rol ebp,10
+ add edi,edx
+ xor eax,ebp
+ rol edi,12
+ add edi,esi
+ ; 4
+ mov edx,DWORD PTR 16[esp]
+ xor eax,edi
+ add esi,edx
+ rol ebx,10
+ add esi,eax
+ mov eax,edi
+ rol esi,5
+ add esi,ecx
+ ; 5
+ xor eax,ebx
+ mov edx,DWORD PTR 20[esp]
+ xor eax,esi
+ add ecx,eax
+ mov eax,esi
+ rol edi,10
+ add ecx,edx
+ xor eax,edi
+ rol ecx,8
+ add ecx,ebp
+ ; 6
+ mov edx,DWORD PTR 24[esp]
+ xor eax,ecx
+ add ebp,edx
+ rol esi,10
+ add ebp,eax
+ mov eax,ecx
+ rol ebp,7
+ add ebp,ebx
+ ; 7
+ xor eax,esi
+ mov edx,DWORD PTR 28[esp]
+ xor eax,ebp
+ add ebx,eax
+ mov eax,ebp
+ rol ecx,10
+ add ebx,edx
+ xor eax,ecx
+ rol ebx,9
+ add ebx,edi
+ ; 8
+ mov edx,DWORD PTR 32[esp]
+ xor eax,ebx
+ add edi,edx
+ rol ebp,10
+ add edi,eax
+ mov eax,ebx
+ rol edi,11
+ add edi,esi
+ ; 9
+ xor eax,ebp
+ mov edx,DWORD PTR 36[esp]
+ xor eax,edi
+ add esi,eax
+ mov eax,edi
+ rol ebx,10
+ add esi,edx
+ xor eax,ebx
+ rol esi,13
+ add esi,ecx
+ ; 10
+ mov edx,DWORD PTR 40[esp]
+ xor eax,esi
+ add ecx,edx
+ rol edi,10
+ add ecx,eax
+ mov eax,esi
+ rol ecx,14
+ add ecx,ebp
+ ; 11
+ xor eax,edi
+ mov edx,DWORD PTR 44[esp]
+ xor eax,ecx
+ add ebp,eax
+ mov eax,ecx
+ rol esi,10
+ add ebp,edx
+ xor eax,esi
+ rol ebp,15
+ add ebp,ebx
+ ; 12
+ mov edx,DWORD PTR 48[esp]
+ xor eax,ebp
+ add ebx,edx
+ rol ecx,10
+ add ebx,eax
+ mov eax,ebp
+ rol ebx,6
+ add ebx,edi
+ ; 13
+ xor eax,ecx
+ mov edx,DWORD PTR 52[esp]
+ xor eax,ebx
+ add edi,eax
+ mov eax,ebx
+ rol ebp,10
+ add edi,edx
+ xor eax,ebp
+ rol edi,7
+ add edi,esi
+ ; 14
+ mov edx,DWORD PTR 56[esp]
+ xor eax,edi
+ add esi,edx
+ rol ebx,10
+ add esi,eax
+ mov eax,edi
+ rol esi,9
+ add esi,ecx
+ ; 15
+ xor eax,ebx
+ mov edx,DWORD PTR 60[esp]
+ xor eax,esi
+ add ecx,eax
+ mov eax,-1
+ rol edi,10
+ add ecx,edx
+ mov edx,DWORD PTR 28[esp]
+ rol ecx,8
+ add ecx,ebp
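+	; steps 16-31: F2(x,y,z) = (x AND y) OR (NOT x AND z), with NOT x
+	; obtained as "mov reg,-1" / "sub reg,x"; additive constant
+	; 1518500249 (5A827999h)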
+ ; 16
+ add ebp,edx
+ mov edx,esi
+ sub eax,ecx
+ and edx,ecx
+ and eax,edi
+ or edx,eax
+ mov eax,DWORD PTR 16[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1518500249[edx*1+ebp]
+ mov edx,-1
+ rol ebp,7
+ add ebp,ebx
+ ; 17
+ add ebx,eax
+ mov eax,ecx
+ sub edx,ebp
+ and eax,ebp
+ and edx,esi
+ or eax,edx
+ mov edx,DWORD PTR 52[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1518500249[eax*1+ebx]
+ mov eax,-1
+ rol ebx,6
+ add ebx,edi
+ ; 18
+ add edi,edx
+ mov edx,ebp
+ sub eax,ebx
+ and edx,ebx
+ and eax,ecx
+ or edx,eax
+ mov eax,DWORD PTR 4[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1518500249[edx*1+edi]
+ mov edx,-1
+ rol edi,8
+ add edi,esi
+ ; 19
+ add esi,eax
+ mov eax,ebx
+ sub edx,edi
+ and eax,edi
+ and edx,ebp
+ or eax,edx
+ mov edx,DWORD PTR 40[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1518500249[eax*1+esi]
+ mov eax,-1
+ rol esi,13
+ add esi,ecx
+ ; 20
+ add ecx,edx
+ mov edx,edi
+ sub eax,esi
+ and edx,esi
+ and eax,ebx
+ or edx,eax
+ mov eax,DWORD PTR 24[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1518500249[edx*1+ecx]
+ mov edx,-1
+ rol ecx,11
+ add ecx,ebp
+ ; 21
+ add ebp,eax
+ mov eax,esi
+ sub edx,ecx
+ and eax,ecx
+ and edx,edi
+ or eax,edx
+ mov edx,DWORD PTR 60[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1518500249[eax*1+ebp]
+ mov eax,-1
+ rol ebp,9
+ add ebp,ebx
+ ; 22
+ add ebx,edx
+ mov edx,ecx
+ sub eax,ebp
+ and edx,ebp
+ and eax,esi
+ or edx,eax
+ mov eax,DWORD PTR 12[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1518500249[edx*1+ebx]
+ mov edx,-1
+ rol ebx,7
+ add ebx,edi
+ ; 23
+ add edi,eax
+ mov eax,ebp
+ sub edx,ebx
+ and eax,ebx
+ and edx,ecx
+ or eax,edx
+ mov edx,DWORD PTR 48[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1518500249[eax*1+edi]
+ mov eax,-1
+ rol edi,15
+ add edi,esi
+ ; 24
+ add esi,edx
+ mov edx,ebx
+ sub eax,edi
+ and edx,edi
+ and eax,ebp
+ or edx,eax
+ mov eax,DWORD PTR [esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1518500249[edx*1+esi]
+ mov edx,-1
+ rol esi,7
+ add esi,ecx
+ ; 25
+ add ecx,eax
+ mov eax,edi
+ sub edx,esi
+ and eax,esi
+ and edx,ebx
+ or eax,edx
+ mov edx,DWORD PTR 36[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1518500249[eax*1+ecx]
+ mov eax,-1
+ rol ecx,12
+ add ecx,ebp
+ ; 26
+ add ebp,edx
+ mov edx,esi
+ sub eax,ecx
+ and edx,ecx
+ and eax,edi
+ or edx,eax
+ mov eax,DWORD PTR 20[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1518500249[edx*1+ebp]
+ mov edx,-1
+ rol ebp,15
+ add ebp,ebx
+ ; 27
+ add ebx,eax
+ mov eax,ecx
+ sub edx,ebp
+ and eax,ebp
+ and edx,esi
+ or eax,edx
+ mov edx,DWORD PTR 8[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1518500249[eax*1+ebx]
+ mov eax,-1
+ rol ebx,9
+ add ebx,edi
+ ; 28
+ add edi,edx
+ mov edx,ebp
+ sub eax,ebx
+ and edx,ebx
+ and eax,ecx
+ or edx,eax
+ mov eax,DWORD PTR 56[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1518500249[edx*1+edi]
+ mov edx,-1
+ rol edi,11
+ add edi,esi
+ ; 29
+ add esi,eax
+ mov eax,ebx
+ sub edx,edi
+ and eax,edi
+ and edx,ebp
+ or eax,edx
+ mov edx,DWORD PTR 44[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1518500249[eax*1+esi]
+ mov eax,-1
+ rol esi,7
+ add esi,ecx
+ ; 30
+ add ecx,edx
+ mov edx,edi
+ sub eax,esi
+ and edx,esi
+ and eax,ebx
+ or edx,eax
+ mov eax,DWORD PTR 32[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1518500249[edx*1+ecx]
+ mov edx,-1
+ rol ecx,13
+ add ecx,ebp
+ ; 31
+ add ebp,eax
+ mov eax,esi
+ sub edx,ecx
+ and eax,ecx
+ and edx,edi
+ or eax,edx
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1518500249[eax*1+ebp]
+ sub edx,ecx
+ rol ebp,12
+ add ebp,ebx
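+	; steps 32-47: F3(x,y,z) = (x OR NOT y) XOR z, constant
+	; 1859775393 (6ED9EBA1h)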
+ ; 32
+ mov eax,DWORD PTR 12[esp]
+ or edx,ebp
+ add ebx,eax
+ xor edx,esi
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1859775393[edx*1+ebx]
+ sub eax,ebp
+ rol ebx,11
+ add ebx,edi
+ ; 33
+ mov edx,DWORD PTR 40[esp]
+ or eax,ebx
+ add edi,edx
+ xor eax,ecx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1859775393[eax*1+edi]
+ sub edx,ebx
+ rol edi,13
+ add edi,esi
+ ; 34
+ mov eax,DWORD PTR 56[esp]
+ or edx,edi
+ add esi,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1859775393[edx*1+esi]
+ sub eax,edi
+ rol esi,6
+ add esi,ecx
+ ; 35
+ mov edx,DWORD PTR 16[esp]
+ or eax,esi
+ add ecx,edx
+ xor eax,ebx
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1859775393[eax*1+ecx]
+ sub edx,esi
+ rol ecx,7
+ add ecx,ebp
+ ; 36
+ mov eax,DWORD PTR 36[esp]
+ or edx,ecx
+ add ebp,eax
+ xor edx,edi
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1859775393[edx*1+ebp]
+ sub eax,ecx
+ rol ebp,14
+ add ebp,ebx
+ ; 37
+ mov edx,DWORD PTR 60[esp]
+ or eax,ebp
+ add ebx,edx
+ xor eax,esi
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1859775393[eax*1+ebx]
+ sub edx,ebp
+ rol ebx,9
+ add ebx,edi
+ ; 38
+ mov eax,DWORD PTR 32[esp]
+ or edx,ebx
+ add edi,eax
+ xor edx,ecx
+ mov eax,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1859775393[edx*1+edi]
+ sub eax,ebx
+ rol edi,13
+ add edi,esi
+ ; 39
+ mov edx,DWORD PTR 4[esp]
+ or eax,edi
+ add esi,edx
+ xor eax,ebp
+ mov edx,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1859775393[eax*1+esi]
+ sub edx,edi
+ rol esi,15
+ add esi,ecx
+ ; 40
+ mov eax,DWORD PTR 8[esp]
+ or edx,esi
+ add ecx,eax
+ xor edx,ebx
+ mov eax,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1859775393[edx*1+ecx]
+ sub eax,esi
+ rol ecx,14
+ add ecx,ebp
+ ; 41
+ mov edx,DWORD PTR 28[esp]
+ or eax,ecx
+ add ebp,edx
+ xor eax,edi
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1859775393[eax*1+ebp]
+ sub edx,ecx
+ rol ebp,8
+ add ebp,ebx
+ ; 42
+ mov eax,DWORD PTR [esp]
+ or edx,ebp
+ add ebx,eax
+ xor edx,esi
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1859775393[edx*1+ebx]
+ sub eax,ebp
+ rol ebx,13
+ add ebx,edi
+ ; 43
+ mov edx,DWORD PTR 24[esp]
+ or eax,ebx
+ add edi,edx
+ xor eax,ecx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1859775393[eax*1+edi]
+ sub edx,ebx
+ rol edi,6
+ add edi,esi
+ ; 44
+ mov eax,DWORD PTR 52[esp]
+ or edx,edi
+ add esi,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1859775393[edx*1+esi]
+ sub eax,edi
+ rol esi,5
+ add esi,ecx
+ ; 45
+ mov edx,DWORD PTR 44[esp]
+ or eax,esi
+ add ecx,edx
+ xor eax,ebx
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1859775393[eax*1+ecx]
+ sub edx,esi
+ rol ecx,12
+ add ecx,ebp
+ ; 46
+ mov eax,DWORD PTR 20[esp]
+ or edx,ecx
+ add ebp,eax
+ xor edx,edi
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1859775393[edx*1+ebp]
+ sub eax,ecx
+ rol ebp,7
+ add ebp,ebx
+ ; 47
+ mov edx,DWORD PTR 48[esp]
+ or eax,ebp
+ add ebx,edx
+ xor eax,esi
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1859775393[eax*1+ebx]
+ mov eax,ecx
+ rol ebx,5
+ add ebx,edi
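+	; steps 48-63: F4(x,y,z) = (x AND z) OR (y AND NOT z), constant
+	; 2400959708 (8F1BBCDCh)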
+ ; 48
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 4[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,11
+ add edi,esi
+ ; 49
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 36[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2400959708[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,12
+ add esi,ecx
+ ; 50
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR 44[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2400959708[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,14
+ add ecx,ebp
+ ; 51
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 40[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2400959708[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,15
+ add ebp,ebx
+ ; 52
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR [esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2400959708[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,14
+ add ebx,edi
+ ; 53
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 32[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,15
+ add edi,esi
+ ; 54
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 48[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2400959708[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,9
+ add esi,ecx
+ ; 55
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR 16[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2400959708[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,8
+ add ecx,ebp
+ ; 56
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 52[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2400959708[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,9
+ add ebp,ebx
+ ; 57
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR 12[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2400959708[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,14
+ add ebx,edi
+ ; 58
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 28[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,5
+ add edi,esi
+ ; 59
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 60[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2400959708[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,6
+ add esi,ecx
+ ; 60
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR 56[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2400959708[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,8
+ add ecx,ebp
+ ; 61
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 20[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2400959708[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,6
+ add ebp,ebx
+ ; 62
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR 24[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2400959708[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,5
+ add ebx,edi
+ ; 63
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 8[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ sub edx,ebp
+ rol edi,12
+ add edi,esi
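+	; steps 64-79: F5(x,y,z) = x XOR (y OR NOT z), constant
+	; 2840853838 (A953FD4Eh)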
+ ; 64
+ mov eax,DWORD PTR 16[esp]
+ or edx,ebx
+ add esi,eax
+ xor edx,edi
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 2840853838[edx*1+esi]
+ sub eax,ebx
+ rol esi,9
+ add esi,ecx
+ ; 65
+ mov edx,DWORD PTR [esp]
+ or eax,edi
+ add ecx,edx
+ xor eax,esi
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 2840853838[eax*1+ecx]
+ sub edx,edi
+ rol ecx,15
+ add ecx,ebp
+ ; 66
+ mov eax,DWORD PTR 20[esp]
+ or edx,esi
+ add ebp,eax
+ xor edx,ecx
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 2840853838[edx*1+ebp]
+ sub eax,esi
+ rol ebp,5
+ add ebp,ebx
+ ; 67
+ mov edx,DWORD PTR 36[esp]
+ or eax,ecx
+ add ebx,edx
+ xor eax,ebp
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 2840853838[eax*1+ebx]
+ sub edx,ecx
+ rol ebx,11
+ add ebx,edi
+ ; 68
+ mov eax,DWORD PTR 28[esp]
+ or edx,ebp
+ add edi,eax
+ xor edx,ebx
+ mov eax,-1
+ rol ebp,10
+ lea edi,DWORD PTR 2840853838[edx*1+edi]
+ sub eax,ebp
+ rol edi,6
+ add edi,esi
+ ; 69
+ mov edx,DWORD PTR 48[esp]
+ or eax,ebx
+ add esi,edx
+ xor eax,edi
+ mov edx,-1
+ rol ebx,10
+ lea esi,DWORD PTR 2840853838[eax*1+esi]
+ sub edx,ebx
+ rol esi,8
+ add esi,ecx
+ ; 70
+ mov eax,DWORD PTR 8[esp]
+ or edx,edi
+ add ecx,eax
+ xor edx,esi
+ mov eax,-1
+ rol edi,10
+ lea ecx,DWORD PTR 2840853838[edx*1+ecx]
+ sub eax,edi
+ rol ecx,13
+ add ecx,ebp
+ ; 71
+ mov edx,DWORD PTR 40[esp]
+ or eax,esi
+ add ebp,edx
+ xor eax,ecx
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 2840853838[eax*1+ebp]
+ sub edx,esi
+ rol ebp,12
+ add ebp,ebx
+ ; 72
+ mov eax,DWORD PTR 56[esp]
+ or edx,ecx
+ add ebx,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 2840853838[edx*1+ebx]
+ sub eax,ecx
+ rol ebx,5
+ add ebx,edi
+ ; 73
+ mov edx,DWORD PTR 4[esp]
+ or eax,ebp
+ add edi,edx
+ xor eax,ebx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 2840853838[eax*1+edi]
+ sub edx,ebp
+ rol edi,12
+ add edi,esi
+ ; 74
+ mov eax,DWORD PTR 12[esp]
+ or edx,ebx
+ add esi,eax
+ xor edx,edi
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 2840853838[edx*1+esi]
+ sub eax,ebx
+ rol esi,13
+ add esi,ecx
+ ; 75
+ mov edx,DWORD PTR 32[esp]
+ or eax,edi
+ add ecx,edx
+ xor eax,esi
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 2840853838[eax*1+ecx]
+ sub edx,edi
+ rol ecx,14
+ add ecx,ebp
+ ; 76
+ mov eax,DWORD PTR 44[esp]
+ or edx,esi
+ add ebp,eax
+ xor edx,ecx
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 2840853838[edx*1+ebp]
+ sub eax,esi
+ rol ebp,11
+ add ebp,ebx
+ ; 77
+ mov edx,DWORD PTR 24[esp]
+ or eax,ecx
+ add ebx,edx
+ xor eax,ebp
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 2840853838[eax*1+ebx]
+ sub edx,ecx
+ rol ebx,8
+ add ebx,edi
+ ; 78
+ mov eax,DWORD PTR 60[esp]
+ or edx,ebp
+ add edi,eax
+ xor edx,ebx
+ mov eax,-1
+ rol ebp,10
+ lea edi,DWORD PTR 2840853838[edx*1+edi]
+ sub eax,ebp
+ rol edi,5
+ add edi,esi
+ ; 79
+ mov edx,DWORD PTR 52[esp]
+ or eax,ebx
+ add esi,edx
+ xor eax,edi
+ mov edx,DWORD PTR 128[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2840853838[eax*1+esi]
+ mov DWORD PTR 64[esp],ecx
+ rol esi,6
+ add esi,ecx
+ mov ecx,DWORD PTR [edx]
+ mov DWORD PTR 68[esp],esi
+ mov DWORD PTR 72[esp],edi
+ mov esi,DWORD PTR 4[edx]
+ mov DWORD PTR 76[esp],ebx
+ mov edi,DWORD PTR 8[edx]
+ mov DWORD PTR 80[esp],ebp
+ mov ebx,DWORD PTR 12[edx]
+ mov ebp,DWORD PTR 16[edx]
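+	; right line: the five function groups run in the reverse order
+	; (F5 down to F1) with constants 1352829926 (50A28BE6h),
+	; 1548603684 (5C4DD124h), 1836072691 (6D703EF3h), 2053994217
+	; (7A6D76E9h) and 0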
+ ; 80
+ mov edx,-1
+ sub edx,ebx
+ mov eax,DWORD PTR 20[esp]
+ or edx,edi
+ add ecx,eax
+ xor edx,esi
+ mov eax,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1352829926[edx*1+ecx]
+ sub eax,edi
+ rol ecx,8
+ add ecx,ebp
+ ; 81
+ mov edx,DWORD PTR 56[esp]
+ or eax,esi
+ add ebp,edx
+ xor eax,ecx
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1352829926[eax*1+ebp]
+ sub edx,esi
+ rol ebp,9
+ add ebp,ebx
+ ; 82
+ mov eax,DWORD PTR 28[esp]
+ or edx,ecx
+ add ebx,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1352829926[edx*1+ebx]
+ sub eax,ecx
+ rol ebx,9
+ add ebx,edi
+ ; 83
+ mov edx,DWORD PTR [esp]
+ or eax,ebp
+ add edi,edx
+ xor eax,ebx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1352829926[eax*1+edi]
+ sub edx,ebp
+ rol edi,11
+ add edi,esi
+ ; 84
+ mov eax,DWORD PTR 36[esp]
+ or edx,ebx
+ add esi,eax
+ xor edx,edi
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1352829926[edx*1+esi]
+ sub eax,ebx
+ rol esi,13
+ add esi,ecx
+ ; 85
+ mov edx,DWORD PTR 8[esp]
+ or eax,edi
+ add ecx,edx
+ xor eax,esi
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1352829926[eax*1+ecx]
+ sub edx,edi
+ rol ecx,15
+ add ecx,ebp
+ ; 86
+ mov eax,DWORD PTR 44[esp]
+ or edx,esi
+ add ebp,eax
+ xor edx,ecx
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1352829926[edx*1+ebp]
+ sub eax,esi
+ rol ebp,15
+ add ebp,ebx
+ ; 87
+ mov edx,DWORD PTR 16[esp]
+ or eax,ecx
+ add ebx,edx
+ xor eax,ebp
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1352829926[eax*1+ebx]
+ sub edx,ecx
+ rol ebx,5
+ add ebx,edi
+ ; 88
+ mov eax,DWORD PTR 52[esp]
+ or edx,ebp
+ add edi,eax
+ xor edx,ebx
+ mov eax,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1352829926[edx*1+edi]
+ sub eax,ebp
+ rol edi,7
+ add edi,esi
+ ; 89
+ mov edx,DWORD PTR 24[esp]
+ or eax,ebx
+ add esi,edx
+ xor eax,edi
+ mov edx,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1352829926[eax*1+esi]
+ sub edx,ebx
+ rol esi,7
+ add esi,ecx
+ ; 90
+ mov eax,DWORD PTR 60[esp]
+ or edx,edi
+ add ecx,eax
+ xor edx,esi
+ mov eax,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1352829926[edx*1+ecx]
+ sub eax,edi
+ rol ecx,8
+ add ecx,ebp
+ ; 91
+ mov edx,DWORD PTR 32[esp]
+ or eax,esi
+ add ebp,edx
+ xor eax,ecx
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1352829926[eax*1+ebp]
+ sub edx,esi
+ rol ebp,11
+ add ebp,ebx
+ ; 92
+ mov eax,DWORD PTR 4[esp]
+ or edx,ecx
+ add ebx,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1352829926[edx*1+ebx]
+ sub eax,ecx
+ rol ebx,14
+ add ebx,edi
+ ; 93
+ mov edx,DWORD PTR 40[esp]
+ or eax,ebp
+ add edi,edx
+ xor eax,ebx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1352829926[eax*1+edi]
+ sub edx,ebp
+ rol edi,14
+ add edi,esi
+ ; 94
+ mov eax,DWORD PTR 12[esp]
+ or edx,ebx
+ add esi,eax
+ xor edx,edi
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1352829926[edx*1+esi]
+ sub eax,ebx
+ rol esi,12
+ add esi,ecx
+ ; 95
+ mov edx,DWORD PTR 48[esp]
+ or eax,edi
+ add ecx,edx
+ xor eax,esi
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1352829926[eax*1+ecx]
+ mov eax,edi
+ rol ecx,6
+ add ecx,ebp
+ ; 96
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 24[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1548603684[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,9
+ add ebp,ebx
+ ; 97
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR 44[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1548603684[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,13
+ add ebx,edi
+ ; 98
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 12[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1548603684[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,15
+ add edi,esi
+ ; 99
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 28[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1548603684[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,7
+ add esi,ecx
+ ; 100
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR [esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1548603684[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,12
+ add ecx,ebp
+ ; 101
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 52[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1548603684[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,8
+ add ebp,ebx
+ ; 102
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR 20[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1548603684[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,9
+ add ebx,edi
+ ; 103
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 40[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1548603684[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,11
+ add edi,esi
+ ; 104
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 56[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1548603684[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,7
+ add esi,ecx
+ ; 105
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR 60[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1548603684[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,7
+ add ecx,ebp
+ ; 106
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 32[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1548603684[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ mov eax,esi
+ rol ebp,12
+ add ebp,ebx
+ ; 107
+ sub edx,esi
+ and eax,ebp
+ and edx,ecx
+ or edx,eax
+ mov eax,DWORD PTR 48[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1548603684[edx*1+ebx]
+ mov edx,-1
+ add ebx,eax
+ mov eax,ecx
+ rol ebx,7
+ add ebx,edi
+ ; 108
+ sub edx,ecx
+ and eax,ebx
+ and edx,ebp
+ or edx,eax
+ mov eax,DWORD PTR 16[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 1548603684[edx*1+edi]
+ mov edx,-1
+ add edi,eax
+ mov eax,ebp
+ rol edi,6
+ add edi,esi
+ ; 109
+ sub edx,ebp
+ and eax,edi
+ and edx,ebx
+ or edx,eax
+ mov eax,DWORD PTR 36[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 1548603684[edx*1+esi]
+ mov edx,-1
+ add esi,eax
+ mov eax,ebx
+ rol esi,15
+ add esi,ecx
+ ; 110
+ sub edx,ebx
+ and eax,esi
+ and edx,edi
+ or edx,eax
+ mov eax,DWORD PTR 4[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 1548603684[edx*1+ecx]
+ mov edx,-1
+ add ecx,eax
+ mov eax,edi
+ rol ecx,13
+ add ecx,ebp
+ ; 111
+ sub edx,edi
+ and eax,ecx
+ and edx,esi
+ or edx,eax
+ mov eax,DWORD PTR 8[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 1548603684[edx*1+ebp]
+ mov edx,-1
+ add ebp,eax
+ sub edx,ecx
+ rol ebp,11
+ add ebp,ebx
+ ; 112
+ mov eax,DWORD PTR 60[esp]
+ or edx,ebp
+ add ebx,eax
+ xor edx,esi
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1836072691[edx*1+ebx]
+ sub eax,ebp
+ rol ebx,9
+ add ebx,edi
+ ; 113
+ mov edx,DWORD PTR 20[esp]
+ or eax,ebx
+ add edi,edx
+ xor eax,ecx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1836072691[eax*1+edi]
+ sub edx,ebx
+ rol edi,7
+ add edi,esi
+ ; 114
+ mov eax,DWORD PTR 4[esp]
+ or edx,edi
+ add esi,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1836072691[edx*1+esi]
+ sub eax,edi
+ rol esi,15
+ add esi,ecx
+ ; 115
+ mov edx,DWORD PTR 12[esp]
+ or eax,esi
+ add ecx,edx
+ xor eax,ebx
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1836072691[eax*1+ecx]
+ sub edx,esi
+ rol ecx,11
+ add ecx,ebp
+ ; 116
+ mov eax,DWORD PTR 28[esp]
+ or edx,ecx
+ add ebp,eax
+ xor edx,edi
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1836072691[edx*1+ebp]
+ sub eax,ecx
+ rol ebp,8
+ add ebp,ebx
+ ; 117
+ mov edx,DWORD PTR 56[esp]
+ or eax,ebp
+ add ebx,edx
+ xor eax,esi
+ mov edx,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1836072691[eax*1+ebx]
+ sub edx,ebp
+ rol ebx,6
+ add ebx,edi
+ ; 118
+ mov eax,DWORD PTR 24[esp]
+ or edx,ebx
+ add edi,eax
+ xor edx,ecx
+ mov eax,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1836072691[edx*1+edi]
+ sub eax,ebx
+ rol edi,6
+ add edi,esi
+ ; 119
+ mov edx,DWORD PTR 36[esp]
+ or eax,edi
+ add esi,edx
+ xor eax,ebp
+ mov edx,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1836072691[eax*1+esi]
+ sub edx,edi
+ rol esi,14
+ add esi,ecx
+ ; 120
+ mov eax,DWORD PTR 44[esp]
+ or edx,esi
+ add ecx,eax
+ xor edx,ebx
+ mov eax,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1836072691[edx*1+ecx]
+ sub eax,esi
+ rol ecx,12
+ add ecx,ebp
+ ; 121
+ mov edx,DWORD PTR 32[esp]
+ or eax,ecx
+ add ebp,edx
+ xor eax,edi
+ mov edx,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1836072691[eax*1+ebp]
+ sub edx,ecx
+ rol ebp,13
+ add ebp,ebx
+ ; 122
+ mov eax,DWORD PTR 48[esp]
+ or edx,ebp
+ add ebx,eax
+ xor edx,esi
+ mov eax,-1
+ rol ecx,10
+ lea ebx,DWORD PTR 1836072691[edx*1+ebx]
+ sub eax,ebp
+ rol ebx,5
+ add ebx,edi
+ ; 123
+ mov edx,DWORD PTR 8[esp]
+ or eax,ebx
+ add edi,edx
+ xor eax,ecx
+ mov edx,-1
+ rol ebp,10
+ lea edi,DWORD PTR 1836072691[eax*1+edi]
+ sub edx,ebx
+ rol edi,14
+ add edi,esi
+ ; 124
+ mov eax,DWORD PTR 40[esp]
+ or edx,edi
+ add esi,eax
+ xor edx,ebp
+ mov eax,-1
+ rol ebx,10
+ lea esi,DWORD PTR 1836072691[edx*1+esi]
+ sub eax,edi
+ rol esi,13
+ add esi,ecx
+ ; 125
+ mov edx,DWORD PTR [esp]
+ or eax,esi
+ add ecx,edx
+ xor eax,ebx
+ mov edx,-1
+ rol edi,10
+ lea ecx,DWORD PTR 1836072691[eax*1+ecx]
+ sub edx,esi
+ rol ecx,13
+ add ecx,ebp
+ ; 126
+ mov eax,DWORD PTR 16[esp]
+ or edx,ecx
+ add ebp,eax
+ xor edx,edi
+ mov eax,-1
+ rol esi,10
+ lea ebp,DWORD PTR 1836072691[edx*1+ebp]
+ sub eax,ecx
+ rol ebp,7
+ add ebp,ebx
+ ; 127
+ mov edx,DWORD PTR 52[esp]
+ or eax,ebp
+ add ebx,edx
+ xor eax,esi
+ mov edx,DWORD PTR 32[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 1836072691[eax*1+ebx]
+ mov eax,-1
+ rol ebx,5
+ add ebx,edi
+ ; 128
+ add edi,edx
+ mov edx,ebp
+ sub eax,ebx
+ and edx,ebx
+ and eax,ecx
+ or edx,eax
+ mov eax,DWORD PTR 24[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2053994217[edx*1+edi]
+ mov edx,-1
+ rol edi,15
+ add edi,esi
+ ; 129
+ add esi,eax
+ mov eax,ebx
+ sub edx,edi
+ and eax,edi
+ and edx,ebp
+ or eax,edx
+ mov edx,DWORD PTR 16[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2053994217[eax*1+esi]
+ mov eax,-1
+ rol esi,5
+ add esi,ecx
+ ; 130
+ add ecx,edx
+ mov edx,edi
+ sub eax,esi
+ and edx,esi
+ and eax,ebx
+ or edx,eax
+ mov eax,DWORD PTR 4[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2053994217[edx*1+ecx]
+ mov edx,-1
+ rol ecx,8
+ add ecx,ebp
+ ; 131
+ add ebp,eax
+ mov eax,esi
+ sub edx,ecx
+ and eax,ecx
+ and edx,edi
+ or eax,edx
+ mov edx,DWORD PTR 12[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2053994217[eax*1+ebp]
+ mov eax,-1
+ rol ebp,11
+ add ebp,ebx
+ ; 132
+ add ebx,edx
+ mov edx,ecx
+ sub eax,ebp
+ and edx,ebp
+ and eax,esi
+ or edx,eax
+ mov eax,DWORD PTR 44[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2053994217[edx*1+ebx]
+ mov edx,-1
+ rol ebx,14
+ add ebx,edi
+ ; 133
+ add edi,eax
+ mov eax,ebp
+ sub edx,ebx
+ and eax,ebx
+ and edx,ecx
+ or eax,edx
+ mov edx,DWORD PTR 60[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2053994217[eax*1+edi]
+ mov eax,-1
+ rol edi,14
+ add edi,esi
+ ; 134
+ add esi,edx
+ mov edx,ebx
+ sub eax,edi
+ and edx,edi
+ and eax,ebp
+ or edx,eax
+ mov eax,DWORD PTR [esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2053994217[edx*1+esi]
+ mov edx,-1
+ rol esi,6
+ add esi,ecx
+ ; 135
+ add ecx,eax
+ mov eax,edi
+ sub edx,esi
+ and eax,esi
+ and edx,ebx
+ or eax,edx
+ mov edx,DWORD PTR 20[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2053994217[eax*1+ecx]
+ mov eax,-1
+ rol ecx,14
+ add ecx,ebp
+ ; 136
+ add ebp,edx
+ mov edx,esi
+ sub eax,ecx
+ and edx,ecx
+ and eax,edi
+ or edx,eax
+ mov eax,DWORD PTR 48[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2053994217[edx*1+ebp]
+ mov edx,-1
+ rol ebp,6
+ add ebp,ebx
+ ; 137
+ add ebx,eax
+ mov eax,ecx
+ sub edx,ebp
+ and eax,ebp
+ and edx,esi
+ or eax,edx
+ mov edx,DWORD PTR 8[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2053994217[eax*1+ebx]
+ mov eax,-1
+ rol ebx,9
+ add ebx,edi
+ ; 138
+ add edi,edx
+ mov edx,ebp
+ sub eax,ebx
+ and edx,ebx
+ and eax,ecx
+ or edx,eax
+ mov eax,DWORD PTR 52[esp]
+ rol ebp,10
+ lea edi,DWORD PTR 2053994217[edx*1+edi]
+ mov edx,-1
+ rol edi,12
+ add edi,esi
+ ; 139
+ add esi,eax
+ mov eax,ebx
+ sub edx,edi
+ and eax,edi
+ and edx,ebp
+ or eax,edx
+ mov edx,DWORD PTR 36[esp]
+ rol ebx,10
+ lea esi,DWORD PTR 2053994217[eax*1+esi]
+ mov eax,-1
+ rol esi,9
+ add esi,ecx
+ ; 140
+ add ecx,edx
+ mov edx,edi
+ sub eax,esi
+ and edx,esi
+ and eax,ebx
+ or edx,eax
+ mov eax,DWORD PTR 28[esp]
+ rol edi,10
+ lea ecx,DWORD PTR 2053994217[edx*1+ecx]
+ mov edx,-1
+ rol ecx,12
+ add ecx,ebp
+ ; 141
+ add ebp,eax
+ mov eax,esi
+ sub edx,ecx
+ and eax,ecx
+ and edx,edi
+ or eax,edx
+ mov edx,DWORD PTR 40[esp]
+ rol esi,10
+ lea ebp,DWORD PTR 2053994217[eax*1+ebp]
+ mov eax,-1
+ rol ebp,5
+ add ebp,ebx
+ ; 142
+ add ebx,edx
+ mov edx,ecx
+ sub eax,ebp
+ and edx,ebp
+ and eax,esi
+ or edx,eax
+ mov eax,DWORD PTR 56[esp]
+ rol ecx,10
+ lea ebx,DWORD PTR 2053994217[edx*1+ebx]
+ mov edx,-1
+ rol ebx,15
+ add ebx,edi
+ ; 143
+ add edi,eax
+ mov eax,ebp
+ sub edx,ebx
+ and eax,ebx
+ and edx,ecx
+ or edx,eax
+ mov eax,ebx
+ rol ebp,10
+ lea edi,DWORD PTR 2053994217[edx*1+edi]
+ xor eax,ebp
+ rol edi,8
+ add edi,esi
+ ; 144
+ mov edx,DWORD PTR 48[esp]
+ xor eax,edi
+ add esi,edx
+ rol ebx,10
+ add esi,eax
+ mov eax,edi
+ rol esi,8
+ add esi,ecx
+ ; 145
+ xor eax,ebx
+ mov edx,DWORD PTR 60[esp]
+ xor eax,esi
+ add ecx,eax
+ mov eax,esi
+ rol edi,10
+ add ecx,edx
+ xor eax,edi
+ rol ecx,5
+ add ecx,ebp
+ ; 146
+ mov edx,DWORD PTR 40[esp]
+ xor eax,ecx
+ add ebp,edx
+ rol esi,10
+ add ebp,eax
+ mov eax,ecx
+ rol ebp,12
+ add ebp,ebx
+ ; 147
+ xor eax,esi
+ mov edx,DWORD PTR 16[esp]
+ xor eax,ebp
+ add ebx,eax
+ mov eax,ebp
+ rol ecx,10
+ add ebx,edx
+ xor eax,ecx
+ rol ebx,9
+ add ebx,edi
+ ; 148
+ mov edx,DWORD PTR 4[esp]
+ xor eax,ebx
+ add edi,edx
+ rol ebp,10
+ add edi,eax
+ mov eax,ebx
+ rol edi,12
+ add edi,esi
+ ; 149
+ xor eax,ebp
+ mov edx,DWORD PTR 20[esp]
+ xor eax,edi
+ add esi,eax
+ mov eax,edi
+ rol ebx,10
+ add esi,edx
+ xor eax,ebx
+ rol esi,5
+ add esi,ecx
+ ; 150
+ mov edx,DWORD PTR 32[esp]
+ xor eax,esi
+ add ecx,edx
+ rol edi,10
+ add ecx,eax
+ mov eax,esi
+ rol ecx,14
+ add ecx,ebp
+ ; 151
+ xor eax,edi
+ mov edx,DWORD PTR 28[esp]
+ xor eax,ecx
+ add ebp,eax
+ mov eax,ecx
+ rol esi,10
+ add ebp,edx
+ xor eax,esi
+ rol ebp,6
+ add ebp,ebx
+ ; 152
+ mov edx,DWORD PTR 24[esp]
+ xor eax,ebp
+ add ebx,edx
+ rol ecx,10
+ add ebx,eax
+ mov eax,ebp
+ rol ebx,8
+ add ebx,edi
+ ; 153
+ xor eax,ecx
+ mov edx,DWORD PTR 8[esp]
+ xor eax,ebx
+ add edi,eax
+ mov eax,ebx
+ rol ebp,10
+ add edi,edx
+ xor eax,ebp
+ rol edi,13
+ add edi,esi
+ ; 154
+ mov edx,DWORD PTR 52[esp]
+ xor eax,edi
+ add esi,edx
+ rol ebx,10
+ add esi,eax
+ mov eax,edi
+ rol esi,6
+ add esi,ecx
+ ; 155
+ xor eax,ebx
+ mov edx,DWORD PTR 56[esp]
+ xor eax,esi
+ add ecx,eax
+ mov eax,esi
+ rol edi,10
+ add ecx,edx
+ xor eax,edi
+ rol ecx,5
+ add ecx,ebp
+ ; 156
+ mov edx,DWORD PTR [esp]
+ xor eax,ecx
+ add ebp,edx
+ rol esi,10
+ add ebp,eax
+ mov eax,ecx
+ rol ebp,15
+ add ebp,ebx
+ ; 157
+ xor eax,esi
+ mov edx,DWORD PTR 12[esp]
+ xor eax,ebp
+ add ebx,eax
+ mov eax,ebp
+ rol ecx,10
+ add ebx,edx
+ xor eax,ecx
+ rol ebx,13
+ add ebx,edi
+ ; 158
+ mov edx,DWORD PTR 36[esp]
+ xor eax,ebx
+ add edi,edx
+ rol ebp,10
+ add edi,eax
+ mov eax,ebx
+ rol edi,11
+ add edi,esi
+ ; 159
+ xor eax,ebp
+ mov edx,DWORD PTR 44[esp]
+ xor eax,edi
+ add esi,eax
+ rol ebx,10
+ add esi,edx
+ mov edx,DWORD PTR 128[esp]
+ rol esi,11
+ add esi,ecx
+ mov eax,DWORD PTR 4[edx]
+ add ebx,eax
+ mov eax,DWORD PTR 72[esp]
+ add ebx,eax
+ mov eax,DWORD PTR 8[edx]
+ add ebp,eax
+ mov eax,DWORD PTR 76[esp]
+ add ebp,eax
+ mov eax,DWORD PTR 12[edx]
+ add ecx,eax
+ mov eax,DWORD PTR 80[esp]
+ add ecx,eax
+ mov eax,DWORD PTR 16[edx]
+ add esi,eax
+ mov eax,DWORD PTR 64[esp]
+ add esi,eax
+ mov eax,DWORD PTR [edx]
+ add edi,eax
+ mov eax,DWORD PTR 68[esp]
+ add edi,eax
+ mov eax,DWORD PTR 136[esp]
+ mov DWORD PTR [edx],ebx
+ mov DWORD PTR 4[edx],ebp
+ mov DWORD PTR 8[edx],ecx
+ sub eax,1
+ mov DWORD PTR 12[edx],esi
+ mov DWORD PTR 16[edx],edi
+ jle $L001get_out
+ mov DWORD PTR 136[esp],eax
+ mov edi,ecx
+ mov eax,DWORD PTR 132[esp]
+ mov ecx,ebx
+ add eax,64
+ mov esi,ebp
+ mov DWORD PTR 132[esp],eax
+ jmp $L000start
+$L001get_out:
+ add esp,108
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_ripemd160_block_asm_data_order ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
new file mode 100644
index 0000000000..ce9f8d5b45
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
@@ -0,0 +1,1452 @@
+TITLE sha1-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_sha1_block_data_order PROC PUBLIC
+$L_sha1_block_data_order_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov ebp,DWORD PTR 20[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov eax,DWORD PTR 28[esp]
+ sub esp,64
+ shl eax,6
+ add eax,esi
+ mov DWORD PTR 92[esp],eax
+ mov edi,DWORD PTR 16[ebp]
+ALIGN 16
+$L000loop:
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov DWORD PTR 8[esp],ecx
+ mov DWORD PTR 12[esp],edx
+ mov eax,DWORD PTR 16[esi]
+ mov ebx,DWORD PTR 20[esi]
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 28[esi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR 16[esp],eax
+ mov DWORD PTR 20[esp],ebx
+ mov DWORD PTR 24[esp],ecx
+ mov DWORD PTR 28[esp],edx
+ mov eax,DWORD PTR 32[esi]
+ mov ebx,DWORD PTR 36[esi]
+ mov ecx,DWORD PTR 40[esi]
+ mov edx,DWORD PTR 44[esi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR 32[esp],eax
+ mov DWORD PTR 36[esp],ebx
+ mov DWORD PTR 40[esp],ecx
+ mov DWORD PTR 44[esp],edx
+ mov eax,DWORD PTR 48[esi]
+ mov ebx,DWORD PTR 52[esi]
+ mov ecx,DWORD PTR 56[esi]
+ mov edx,DWORD PTR 60[esi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ mov DWORD PTR 48[esp],eax
+ mov DWORD PTR 52[esp],ebx
+ mov DWORD PTR 56[esp],ecx
+ mov DWORD PTR 60[esp],edx
+ mov DWORD PTR 88[esp],esi
+ mov eax,DWORD PTR [ebp]
+ mov ebx,DWORD PTR 4[ebp]
+ mov ecx,DWORD PTR 8[ebp]
+ mov edx,DWORD PTR 12[ebp]
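+	; the message block was byte-swapped onto the stack above; rounds
+	; 0-19 use Ch(b,c,d) = (b AND c) OR (NOT b AND d), folded to
+	; ((c XOR d) AND b) XOR d, with K = 1518500249 (5A827999h)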
+ ; 00_15 0
+ mov esi,ecx
+ mov ebp,eax
+ rol ebp,5
+ xor esi,edx
+ add ebp,edi
+ and esi,ebx
+ mov edi,DWORD PTR [esp]
+ xor esi,edx
+ ror ebx,2
+ lea ebp,DWORD PTR 1518500249[edi*1+ebp]
+ add ebp,esi
+ ; 00_15 1
+ mov edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ xor edi,ecx
+ add ebp,edx
+ and edi,eax
+ mov edx,DWORD PTR 4[esp]
+ xor edi,ecx
+ ror eax,2
+ lea ebp,DWORD PTR 1518500249[edx*1+ebp]
+ add ebp,edi
+ ; 00_15 2
+ mov edx,eax
+ mov edi,ebp
+ rol ebp,5
+ xor edx,ebx
+ add ebp,ecx
+ and edx,esi
+ mov ecx,DWORD PTR 8[esp]
+ xor edx,ebx
+ ror esi,2
+ lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
+ add ebp,edx
+ ; 00_15 3
+ mov ecx,esi
+ mov edx,ebp
+ rol ebp,5
+ xor ecx,eax
+ add ebp,ebx
+ and ecx,edi
+ mov ebx,DWORD PTR 12[esp]
+ xor ecx,eax
+ ror edi,2
+ lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
+ add ebp,ecx
+ ; 00_15 4
+ mov ebx,edi
+ mov ecx,ebp
+ rol ebp,5
+ xor ebx,esi
+ add ebp,eax
+ and ebx,edx
+ mov eax,DWORD PTR 16[esp]
+ xor ebx,esi
+ ror edx,2
+ lea ebp,DWORD PTR 1518500249[eax*1+ebp]
+ add ebp,ebx
+ ; 00_15 5
+ mov eax,edx
+ mov ebx,ebp
+ rol ebp,5
+ xor eax,edi
+ add ebp,esi
+ and eax,ecx
+ mov esi,DWORD PTR 20[esp]
+ xor eax,edi
+ ror ecx,2
+ lea ebp,DWORD PTR 1518500249[esi*1+ebp]
+ add ebp,eax
+ ; 00_15 6
+ mov esi,ecx
+ mov eax,ebp
+ rol ebp,5
+ xor esi,edx
+ add ebp,edi
+ and esi,ebx
+ mov edi,DWORD PTR 24[esp]
+ xor esi,edx
+ ror ebx,2
+ lea ebp,DWORD PTR 1518500249[edi*1+ebp]
+ add ebp,esi
+ ; 00_15 7
+ mov edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ xor edi,ecx
+ add ebp,edx
+ and edi,eax
+ mov edx,DWORD PTR 28[esp]
+ xor edi,ecx
+ ror eax,2
+ lea ebp,DWORD PTR 1518500249[edx*1+ebp]
+ add ebp,edi
+ ; 00_15 8
+ mov edx,eax
+ mov edi,ebp
+ rol ebp,5
+ xor edx,ebx
+ add ebp,ecx
+ and edx,esi
+ mov ecx,DWORD PTR 32[esp]
+ xor edx,ebx
+ ror esi,2
+ lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
+ add ebp,edx
+ ; 00_15 9
+ mov ecx,esi
+ mov edx,ebp
+ rol ebp,5
+ xor ecx,eax
+ add ebp,ebx
+ and ecx,edi
+ mov ebx,DWORD PTR 36[esp]
+ xor ecx,eax
+ ror edi,2
+ lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
+ add ebp,ecx
+ ; 00_15 10
+ mov ebx,edi
+ mov ecx,ebp
+ rol ebp,5
+ xor ebx,esi
+ add ebp,eax
+ and ebx,edx
+ mov eax,DWORD PTR 40[esp]
+ xor ebx,esi
+ ror edx,2
+ lea ebp,DWORD PTR 1518500249[eax*1+ebp]
+ add ebp,ebx
+ ; 00_15 11
+ mov eax,edx
+ mov ebx,ebp
+ rol ebp,5
+ xor eax,edi
+ add ebp,esi
+ and eax,ecx
+ mov esi,DWORD PTR 44[esp]
+ xor eax,edi
+ ror ecx,2
+ lea ebp,DWORD PTR 1518500249[esi*1+ebp]
+ add ebp,eax
+ ; 00_15 12
+ mov esi,ecx
+ mov eax,ebp
+ rol ebp,5
+ xor esi,edx
+ add ebp,edi
+ and esi,ebx
+ mov edi,DWORD PTR 48[esp]
+ xor esi,edx
+ ror ebx,2
+ lea ebp,DWORD PTR 1518500249[edi*1+ebp]
+ add ebp,esi
+ ; 00_15 13
+ mov edi,ebx
+ mov esi,ebp
+ rol ebp,5
+ xor edi,ecx
+ add ebp,edx
+ and edi,eax
+ mov edx,DWORD PTR 52[esp]
+ xor edi,ecx
+ ror eax,2
+ lea ebp,DWORD PTR 1518500249[edx*1+ebp]
+ add ebp,edi
+ ; 00_15 14
+ mov edx,eax
+ mov edi,ebp
+ rol ebp,5
+ xor edx,ebx
+ add ebp,ecx
+ and edx,esi
+ mov ecx,DWORD PTR 56[esp]
+ xor edx,ebx
+ ror esi,2
+ lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
+ add ebp,edx
+ ; 00_15 15
+ mov ecx,esi
+ mov edx,ebp
+ rol ebp,5
+ xor ecx,eax
+ add ebp,ebx
+ and ecx,edi
+ mov ebx,DWORD PTR 60[esp]
+ xor ecx,eax
+ ror edi,2
+ lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
+ add ecx,ebp
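+	; rounds 16..19: schedule expansion begins, W[i] = rol(W[i-3] xor W[i-8] xor W[i-14] xor W[i-16], 1), stored back in place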
+ ; 16_19 16
+ mov ebx,DWORD PTR [esp]
+ mov ebp,edi
+ xor ebx,DWORD PTR 8[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 32[esp]
+ and ebp,edx
+ ror edx,2
+ xor ebx,DWORD PTR 52[esp]
+ rol ebx,1
+ xor ebp,esi
+ mov DWORD PTR [esp],ebx
+ lea ebx,DWORD PTR 1518500249[eax*1+ebx]
+ mov eax,ecx
+ rol eax,5
+ add ebx,ebp
+ add ebx,eax
+ ; 16_19 17
+ mov eax,DWORD PTR 4[esp]
+ mov ebp,edx
+ xor eax,DWORD PTR 12[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 36[esp]
+ and ebp,ecx
+ ror ecx,2
+ xor eax,DWORD PTR 56[esp]
+ rol eax,1
+ xor ebp,edi
+ mov DWORD PTR 4[esp],eax
+ lea eax,DWORD PTR 1518500249[esi*1+eax]
+ mov esi,ebx
+ rol esi,5
+ add eax,ebp
+ add eax,esi
+ ; 16_19 18
+ mov esi,DWORD PTR 8[esp]
+ mov ebp,ecx
+ xor esi,DWORD PTR 16[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 40[esp]
+ and ebp,ebx
+ ror ebx,2
+ xor esi,DWORD PTR 60[esp]
+ rol esi,1
+ xor ebp,edx
+ mov DWORD PTR 8[esp],esi
+ lea esi,DWORD PTR 1518500249[edi*1+esi]
+ mov edi,eax
+ rol edi,5
+ add esi,ebp
+ add esi,edi
+ ; 16_19 19
+ mov edi,DWORD PTR 12[esp]
+ mov ebp,ebx
+ xor edi,DWORD PTR 20[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 44[esp]
+ and ebp,eax
+ ror eax,2
+ xor edi,DWORD PTR [esp]
+ rol edi,1
+ xor ebp,ecx
+ mov DWORD PTR 12[esp],edi
+ lea edi,DWORD PTR 1518500249[edx*1+edi]
+ mov edx,esi
+ rol edx,5
+ add edi,ebp
+ add edi,edx
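+	; rounds 20..39: F = b xor c xor d (parity), K = 0x6ED9EBA1 (1859775393)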
+ ; 20_39 20
+ mov ebp,esi
+ mov edx,DWORD PTR 16[esp]
+ ror esi,2
+ xor edx,DWORD PTR 24[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 48[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 4[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 16[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 21
+ mov ebp,edi
+ mov ecx,DWORD PTR 20[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 28[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 52[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 8[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 20[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 22
+ mov ebp,edx
+ mov ebx,DWORD PTR 24[esp]
+ ror edx,2
+ xor ebx,DWORD PTR 32[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 56[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 12[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR 24[esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 23
+ mov ebp,ecx
+ mov eax,DWORD PTR 28[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 36[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 60[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 16[esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 28[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
+ ; 20_39 24
+ mov ebp,ebx
+ mov esi,DWORD PTR 32[esp]
+ ror ebx,2
+ xor esi,DWORD PTR 40[esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR [esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 20[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 32[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
+ ; 20_39 25
+ mov ebp,eax
+ mov edi,DWORD PTR 36[esp]
+ ror eax,2
+ xor edi,DWORD PTR 44[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 4[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 24[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 36[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
+ ; 20_39 26
+ mov ebp,esi
+ mov edx,DWORD PTR 40[esp]
+ ror esi,2
+ xor edx,DWORD PTR 48[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 8[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 28[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 40[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 27
+ mov ebp,edi
+ mov ecx,DWORD PTR 44[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 52[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 12[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 32[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 44[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 28
+ mov ebp,edx
+ mov ebx,DWORD PTR 48[esp]
+ ror edx,2
+ xor ebx,DWORD PTR 56[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 16[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 36[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR 48[esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 29
+ mov ebp,ecx
+ mov eax,DWORD PTR 52[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 60[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 20[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 40[esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 52[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
+ ; 20_39 30
+ mov ebp,ebx
+ mov esi,DWORD PTR 56[esp]
+ ror ebx,2
+ xor esi,DWORD PTR [esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR 24[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 44[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 56[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
+ ; 20_39 31
+ mov ebp,eax
+ mov edi,DWORD PTR 60[esp]
+ ror eax,2
+ xor edi,DWORD PTR 4[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 28[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 48[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 60[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
+ ; 20_39 32
+ mov ebp,esi
+ mov edx,DWORD PTR [esp]
+ ror esi,2
+ xor edx,DWORD PTR 8[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 32[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 52[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR [esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 33
+ mov ebp,edi
+ mov ecx,DWORD PTR 4[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 12[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 36[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 56[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 4[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 34
+ mov ebp,edx
+ mov ebx,DWORD PTR 8[esp]
+ ror edx,2
+ xor ebx,DWORD PTR 16[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 40[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 60[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR 8[esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 35
+ mov ebp,ecx
+ mov eax,DWORD PTR 12[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 20[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 44[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR [esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 12[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
+ ; 20_39 36
+ mov ebp,ebx
+ mov esi,DWORD PTR 16[esp]
+ ror ebx,2
+ xor esi,DWORD PTR 24[esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR 48[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 4[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 16[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
+ ; 20_39 37
+ mov ebp,eax
+ mov edi,DWORD PTR 20[esp]
+ ror eax,2
+ xor edi,DWORD PTR 28[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 52[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 8[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 20[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
+ ; 20_39 38
+ mov ebp,esi
+ mov edx,DWORD PTR 24[esp]
+ ror esi,2
+ xor edx,DWORD PTR 32[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 56[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 12[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 24[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 39
+ mov ebp,edi
+ mov ecx,DWORD PTR 28[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 36[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 60[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 16[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 28[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
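+	; rounds 40..59: F = Maj(b,c,d), computed as ((b or c) and d) or (b and c), K = 0x8F1BBCDC (2400959708)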
+ ; 40_59 40
+ mov ebx,DWORD PTR 32[esp]
+ mov ebp,DWORD PTR 40[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor ebx,ebp
+ mov ebp,edx
+ rol ebx,1
+ or ebp,edi
+ mov DWORD PTR 32[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
+ ror edx,2
+ and eax,edi
+ or ebp,eax
+ mov eax,ecx
+ rol eax,5
+ add ebx,ebp
+ add ebx,eax
+ ; 40_59 41
+ mov eax,DWORD PTR 36[esp]
+ mov ebp,DWORD PTR 44[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor eax,ebp
+ mov ebp,ecx
+ rol eax,1
+ or ebp,edx
+ mov DWORD PTR 36[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
+ ror ecx,2
+ and esi,edx
+ or ebp,esi
+ mov esi,ebx
+ rol esi,5
+ add eax,ebp
+ add eax,esi
+ ; 40_59 42
+ mov esi,DWORD PTR 40[esp]
+ mov ebp,DWORD PTR 48[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor esi,ebp
+ mov ebp,ebx
+ rol esi,1
+ or ebp,ecx
+ mov DWORD PTR 40[esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
+ ror ebx,2
+ and edi,ecx
+ or ebp,edi
+ mov edi,eax
+ rol edi,5
+ add esi,ebp
+ add esi,edi
+ ; 40_59 43
+ mov edi,DWORD PTR 44[esp]
+ mov ebp,DWORD PTR 52[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor edi,ebp
+ mov ebp,eax
+ rol edi,1
+ or ebp,ebx
+ mov DWORD PTR 44[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
+ ror eax,2
+ and edx,ebx
+ or ebp,edx
+ mov edx,esi
+ rol edx,5
+ add edi,ebp
+ add edi,edx
+ ; 40_59 44
+ mov edx,DWORD PTR 48[esp]
+ mov ebp,DWORD PTR 56[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 16[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 36[esp]
+ xor edx,ebp
+ mov ebp,esi
+ rol edx,1
+ or ebp,eax
+ mov DWORD PTR 48[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
+ ror esi,2
+ and ecx,eax
+ or ebp,ecx
+ mov ecx,edi
+ rol ecx,5
+ add edx,ebp
+ add edx,ecx
+ ; 40_59 45
+ mov ecx,DWORD PTR 52[esp]
+ mov ebp,DWORD PTR 60[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 40[esp]
+ xor ecx,ebp
+ mov ebp,edi
+ rol ecx,1
+ or ebp,esi
+ mov DWORD PTR 52[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
+ ror edi,2
+ and ebx,esi
+ or ebp,ebx
+ mov ebx,edx
+ rol ebx,5
+ add ecx,ebp
+ add ecx,ebx
+ ; 40_59 46
+ mov ebx,DWORD PTR 56[esp]
+ mov ebp,DWORD PTR [esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 44[esp]
+ xor ebx,ebp
+ mov ebp,edx
+ rol ebx,1
+ or ebp,edi
+ mov DWORD PTR 56[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
+ ror edx,2
+ and eax,edi
+ or ebp,eax
+ mov eax,ecx
+ rol eax,5
+ add ebx,ebp
+ add ebx,eax
+ ; 40_59 47
+ mov eax,DWORD PTR 60[esp]
+ mov ebp,DWORD PTR 4[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 48[esp]
+ xor eax,ebp
+ mov ebp,ecx
+ rol eax,1
+ or ebp,edx
+ mov DWORD PTR 60[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
+ ror ecx,2
+ and esi,edx
+ or ebp,esi
+ mov esi,ebx
+ rol esi,5
+ add eax,ebp
+ add eax,esi
+ ; 40_59 48
+ mov esi,DWORD PTR [esp]
+ mov ebp,DWORD PTR 8[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 52[esp]
+ xor esi,ebp
+ mov ebp,ebx
+ rol esi,1
+ or ebp,ecx
+ mov DWORD PTR [esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
+ ror ebx,2
+ and edi,ecx
+ or ebp,edi
+ mov edi,eax
+ rol edi,5
+ add esi,ebp
+ add esi,edi
+ ; 40_59 49
+ mov edi,DWORD PTR 4[esp]
+ mov ebp,DWORD PTR 12[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 36[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 56[esp]
+ xor edi,ebp
+ mov ebp,eax
+ rol edi,1
+ or ebp,ebx
+ mov DWORD PTR 4[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
+ ror eax,2
+ and edx,ebx
+ or ebp,edx
+ mov edx,esi
+ rol edx,5
+ add edi,ebp
+ add edi,edx
+ ; 40_59 50
+ mov edx,DWORD PTR 8[esp]
+ mov ebp,DWORD PTR 16[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 40[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 60[esp]
+ xor edx,ebp
+ mov ebp,esi
+ rol edx,1
+ or ebp,eax
+ mov DWORD PTR 8[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
+ ror esi,2
+ and ecx,eax
+ or ebp,ecx
+ mov ecx,edi
+ rol ecx,5
+ add edx,ebp
+ add edx,ecx
+ ; 40_59 51
+ mov ecx,DWORD PTR 12[esp]
+ mov ebp,DWORD PTR 20[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 44[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor ecx,ebp
+ mov ebp,edi
+ rol ecx,1
+ or ebp,esi
+ mov DWORD PTR 12[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
+ ror edi,2
+ and ebx,esi
+ or ebp,ebx
+ mov ebx,edx
+ rol ebx,5
+ add ecx,ebp
+ add ecx,ebx
+ ; 40_59 52
+ mov ebx,DWORD PTR 16[esp]
+ mov ebp,DWORD PTR 24[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 48[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor ebx,ebp
+ mov ebp,edx
+ rol ebx,1
+ or ebp,edi
+ mov DWORD PTR 16[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
+ ror edx,2
+ and eax,edi
+ or ebp,eax
+ mov eax,ecx
+ rol eax,5
+ add ebx,ebp
+ add ebx,eax
+ ; 40_59 53
+ mov eax,DWORD PTR 20[esp]
+ mov ebp,DWORD PTR 28[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 52[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor eax,ebp
+ mov ebp,ecx
+ rol eax,1
+ or ebp,edx
+ mov DWORD PTR 20[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
+ ror ecx,2
+ and esi,edx
+ or ebp,esi
+ mov esi,ebx
+ rol esi,5
+ add eax,ebp
+ add eax,esi
+ ; 40_59 54
+ mov esi,DWORD PTR 24[esp]
+ mov ebp,DWORD PTR 32[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 56[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor esi,ebp
+ mov ebp,ebx
+ rol esi,1
+ or ebp,ecx
+ mov DWORD PTR 24[esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
+ ror ebx,2
+ and edi,ecx
+ or ebp,edi
+ mov edi,eax
+ rol edi,5
+ add esi,ebp
+ add esi,edi
+ ; 40_59 55
+ mov edi,DWORD PTR 28[esp]
+ mov ebp,DWORD PTR 36[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 60[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 16[esp]
+ xor edi,ebp
+ mov ebp,eax
+ rol edi,1
+ or ebp,ebx
+ mov DWORD PTR 28[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
+ ror eax,2
+ and edx,ebx
+ or ebp,edx
+ mov edx,esi
+ rol edx,5
+ add edi,ebp
+ add edi,edx
+ ; 40_59 56
+ mov edx,DWORD PTR 32[esp]
+ mov ebp,DWORD PTR 40[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor edx,ebp
+ mov ebp,esi
+ rol edx,1
+ or ebp,eax
+ mov DWORD PTR 32[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
+ ror esi,2
+ and ecx,eax
+ or ebp,ecx
+ mov ecx,edi
+ rol ecx,5
+ add edx,ebp
+ add edx,ecx
+ ; 40_59 57
+ mov ecx,DWORD PTR 36[esp]
+ mov ebp,DWORD PTR 44[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor ecx,ebp
+ mov ebp,edi
+ rol ecx,1
+ or ebp,esi
+ mov DWORD PTR 36[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
+ ror edi,2
+ and ebx,esi
+ or ebp,ebx
+ mov ebx,edx
+ rol ebx,5
+ add ecx,ebp
+ add ecx,ebx
+ ; 40_59 58
+ mov ebx,DWORD PTR 40[esp]
+ mov ebp,DWORD PTR 48[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor ebx,ebp
+ mov ebp,edx
+ rol ebx,1
+ or ebp,edi
+ mov DWORD PTR 40[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
+ ror edx,2
+ and eax,edi
+ or ebp,eax
+ mov eax,ecx
+ rol eax,5
+ add ebx,ebp
+ add ebx,eax
+ ; 40_59 59
+ mov eax,DWORD PTR 44[esp]
+ mov ebp,DWORD PTR 52[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor eax,ebp
+ mov ebp,ecx
+ rol eax,1
+ or ebp,edx
+ mov DWORD PTR 44[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
+ ror ecx,2
+ and esi,edx
+ or ebp,esi
+ mov esi,ebx
+ rol esi,5
+ add eax,ebp
+ add eax,esi
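+	; rounds 60..79: F = parity again, K = 0xCA62C1D6 (3395469782)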
+ ; 20_39 60
+ mov ebp,ebx
+ mov esi,DWORD PTR 48[esp]
+ ror ebx,2
+ xor esi,DWORD PTR 56[esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR 16[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 36[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 48[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
+ ; 20_39 61
+ mov ebp,eax
+ mov edi,DWORD PTR 52[esp]
+ ror eax,2
+ xor edi,DWORD PTR 60[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 20[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 40[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 52[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
+ ; 20_39 62
+ mov ebp,esi
+ mov edx,DWORD PTR 56[esp]
+ ror esi,2
+ xor edx,DWORD PTR [esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 24[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 44[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 56[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 63
+ mov ebp,edi
+ mov ecx,DWORD PTR 60[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 4[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 28[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 48[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 60[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 64
+ mov ebp,edx
+ mov ebx,DWORD PTR [esp]
+ ror edx,2
+ xor ebx,DWORD PTR 8[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 32[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 52[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR [esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 65
+ mov ebp,ecx
+ mov eax,DWORD PTR 4[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 12[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 36[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 56[esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 4[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
+ ; 20_39 66
+ mov ebp,ebx
+ mov esi,DWORD PTR 8[esp]
+ ror ebx,2
+ xor esi,DWORD PTR 16[esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR 40[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 60[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 8[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
+ ; 20_39 67
+ mov ebp,eax
+ mov edi,DWORD PTR 12[esp]
+ ror eax,2
+ xor edi,DWORD PTR 20[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 44[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR [esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 12[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
+ ; 20_39 68
+ mov ebp,esi
+ mov edx,DWORD PTR 16[esp]
+ ror esi,2
+ xor edx,DWORD PTR 24[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 48[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 4[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 16[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 69
+ mov ebp,edi
+ mov ecx,DWORD PTR 20[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 28[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 52[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 8[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 20[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 70
+ mov ebp,edx
+ mov ebx,DWORD PTR 24[esp]
+ ror edx,2
+ xor ebx,DWORD PTR 32[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 56[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 12[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR 24[esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 71
+ mov ebp,ecx
+ mov eax,DWORD PTR 28[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 36[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 60[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 16[esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 28[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
+ ; 20_39 72
+ mov ebp,ebx
+ mov esi,DWORD PTR 32[esp]
+ ror ebx,2
+ xor esi,DWORD PTR 40[esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR [esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 20[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 32[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
+ ; 20_39 73
+ mov ebp,eax
+ mov edi,DWORD PTR 36[esp]
+ ror eax,2
+ xor edi,DWORD PTR 44[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 4[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 24[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 36[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
+ ; 20_39 74
+ mov ebp,esi
+ mov edx,DWORD PTR 40[esp]
+ ror esi,2
+ xor edx,DWORD PTR 48[esp]
+ xor ebp,eax
+ xor edx,DWORD PTR 8[esp]
+ xor ebp,ebx
+ xor edx,DWORD PTR 28[esp]
+ rol edx,1
+ add ebp,ecx
+ mov DWORD PTR 40[esp],edx
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
+ ; 20_39 75
+ mov ebp,edi
+ mov ecx,DWORD PTR 44[esp]
+ ror edi,2
+ xor ecx,DWORD PTR 52[esp]
+ xor ebp,esi
+ xor ecx,DWORD PTR 12[esp]
+ xor ebp,eax
+ xor ecx,DWORD PTR 32[esp]
+ rol ecx,1
+ add ebp,ebx
+ mov DWORD PTR 44[esp],ecx
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
+ ; 20_39 76
+ mov ebp,edx
+ mov ebx,DWORD PTR 48[esp]
+ ror edx,2
+ xor ebx,DWORD PTR 56[esp]
+ xor ebp,edi
+ xor ebx,DWORD PTR 16[esp]
+ xor ebp,esi
+ xor ebx,DWORD PTR 36[esp]
+ rol ebx,1
+ add ebp,eax
+ mov DWORD PTR 48[esp],ebx
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
+ ; 20_39 77
+ mov ebp,ecx
+ mov eax,DWORD PTR 52[esp]
+ ror ecx,2
+ xor eax,DWORD PTR 60[esp]
+ xor ebp,edx
+ xor eax,DWORD PTR 20[esp]
+ xor ebp,edi
+ xor eax,DWORD PTR 40[esp]
+ rol eax,1
+ add ebp,esi
+ mov DWORD PTR 52[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
+ ; 20_39 78
+ mov ebp,ebx
+ mov esi,DWORD PTR 56[esp]
+ ror ebx,2
+ xor esi,DWORD PTR [esp]
+ xor ebp,ecx
+ xor esi,DWORD PTR 24[esp]
+ xor ebp,edx
+ xor esi,DWORD PTR 44[esp]
+ rol esi,1
+ add ebp,edi
+ mov DWORD PTR 56[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
+ ; 20_39 79
+ mov ebp,eax
+ mov edi,DWORD PTR 60[esp]
+ ror eax,2
+ xor edi,DWORD PTR 4[esp]
+ xor ebp,ebx
+ xor edi,DWORD PTR 28[esp]
+ xor ebp,ecx
+ xor edi,DWORD PTR 48[esp]
+ rol edi,1
+ add ebp,edx
+ mov DWORD PTR 60[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
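+	; all 80 rounds done: reload ctx/data pointers, fold the working variables into h0..h4, loop while input remains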
+ mov ebp,DWORD PTR 84[esp]
+ mov edx,DWORD PTR 88[esp]
+ add edi,DWORD PTR [ebp]
+ add esi,DWORD PTR 4[ebp]
+ add eax,DWORD PTR 8[ebp]
+ add ebx,DWORD PTR 12[ebp]
+ add ecx,DWORD PTR 16[ebp]
+ mov DWORD PTR [ebp],edi
+ add edx,64
+ mov DWORD PTR 4[ebp],esi
+ cmp edx,DWORD PTR 92[esp]
+ mov DWORD PTR 8[ebp],eax
+ mov edi,ecx
+ mov DWORD PTR 12[ebp],ebx
+ mov esi,edx
+ mov DWORD PTR 16[ebp],ecx
+ jb $L000loop
+ add esp,64
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_sha1_block_data_order ENDP
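+; ASCII "SHA1 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"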
+DB 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
+DB 102,111,114,109,32,102,111,114,32,120,56,54,44,32,67,82
+DB 89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112
+DB 114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
new file mode 100644
index 0000000000..75b1dc8ac5
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
@@ -0,0 +1,271 @@
+TITLE	sha256-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_sha256_block_data_order PROC PUBLIC
+$L_sha256_block_data_order_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 24[esp]
+ mov eax,DWORD PTR 28[esp]
+ mov ebx,esp
+ call $L000pic_point
+$L000pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($L001K256-$L000pic_point)[ebp]
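+	; call/pop PIC idiom: ebp now addresses the $L001K256 constant table position-independently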
+ sub esp,16
+ and esp,-64
+ shl eax,6
+ add eax,edi
+ mov DWORD PTR [esp],esi
+ mov DWORD PTR 4[esp],edi
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ALIGN 16
+$L002loop:
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 16[edi]
+ mov ebx,DWORD PTR 20[edi]
+ mov ecx,DWORD PTR 24[edi]
+ mov edx,DWORD PTR 28[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 32[edi]
+ mov ebx,DWORD PTR 36[edi]
+ mov ecx,DWORD PTR 40[edi]
+ mov edx,DWORD PTR 44[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 48[edi]
+ mov ebx,DWORD PTR 52[edi]
+ mov ecx,DWORD PTR 56[edi]
+ mov edx,DWORD PTR 60[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ add edi,64
+ sub esp,32
+ mov DWORD PTR 100[esp],edi
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edi,DWORD PTR 12[esi]
+ mov DWORD PTR 4[esp],ebx
+ mov DWORD PTR 8[esp],ecx
+ mov DWORD PTR 12[esp],edi
+ mov edx,DWORD PTR 16[esi]
+ mov ebx,DWORD PTR 20[esi]
+ mov ecx,DWORD PTR 24[esi]
+ mov edi,DWORD PTR 28[esi]
+ mov DWORD PTR 20[esp],ebx
+ mov DWORD PTR 24[esp],ecx
+ mov DWORD PTR 28[esp],edi
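+	; rounds 0..15: one round per pass; the frame slides down 4 bytes each round (sub esp,4 below)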
+ALIGN 16
+$L00300_15:
+ mov ebx,DWORD PTR 92[esp]
+ mov ecx,edx
+ ror ecx,6
+ mov edi,edx
+ ror edi,11
+ mov esi,DWORD PTR 20[esp]
+ xor ecx,edi
+ ror edi,14
+ xor ecx,edi
+ mov edi,DWORD PTR 24[esp]
+ add ebx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor esi,edi
+ mov ecx,eax
+ and esi,edx
+ mov edx,DWORD PTR 12[esp]
+ xor esi,edi
+ mov edi,eax
+ add ebx,esi
+ ror ecx,2
+ add ebx,DWORD PTR 28[esp]
+ ror edi,13
+ mov esi,DWORD PTR 4[esp]
+ xor ecx,edi
+ ror edi,9
+ add edx,ebx
+ xor ecx,edi
+ mov edi,DWORD PTR 8[esp]
+ add ebx,ecx
+ mov DWORD PTR [esp],eax
+ mov ecx,eax
+ sub esp,4
+ or eax,esi
+ and ecx,esi
+ and eax,edi
+ mov esi,DWORD PTR [ebp]
+ or eax,ecx
+ add ebp,4
+ add eax,ebx
+ add edx,esi
+ add eax,esi
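+	; esi still holds the K256 constant just consumed; 3248222580 = 0xC19BF174 = K[15], the last constant of rounds 0..15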
+ cmp esi,3248222580
+ jne $L00300_15
+ mov ebx,DWORD PTR 152[esp]
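+	; rounds 16..63: expand W[i] = sigma1(W[i-2]) + W[i-7] + sigma0(W[i-15]) + W[i-16], then run the same round body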
+ALIGN 16
+$L00416_63:
+ mov esi,ebx
+ mov ecx,DWORD PTR 100[esp]
+ shr ebx,3
+ ror esi,7
+ xor ebx,esi
+ ror esi,11
+ mov edi,ecx
+ xor ebx,esi
+ shr ecx,10
+ mov esi,DWORD PTR 156[esp]
+ ror edi,17
+ xor ecx,edi
+ ror edi,2
+ add ebx,esi
+ xor edi,ecx
+ add ebx,edi
+ mov ecx,edx
+ add ebx,DWORD PTR 120[esp]
+ ror ecx,6
+ mov edi,edx
+ ror edi,11
+ mov esi,DWORD PTR 20[esp]
+ xor ecx,edi
+ ror edi,14
+ mov DWORD PTR 92[esp],ebx
+ xor ecx,edi
+ mov edi,DWORD PTR 24[esp]
+ add ebx,ecx
+ mov DWORD PTR 16[esp],edx
+ xor esi,edi
+ mov ecx,eax
+ and esi,edx
+ mov edx,DWORD PTR 12[esp]
+ xor esi,edi
+ mov edi,eax
+ add ebx,esi
+ ror ecx,2
+ add ebx,DWORD PTR 28[esp]
+ ror edi,13
+ mov esi,DWORD PTR 4[esp]
+ xor ecx,edi
+ ror edi,9
+ add edx,ebx
+ xor ecx,edi
+ mov edi,DWORD PTR 8[esp]
+ add ebx,ecx
+ mov DWORD PTR [esp],eax
+ mov ecx,eax
+ sub esp,4
+ or eax,esi
+ and ecx,esi
+ and eax,edi
+ mov esi,DWORD PTR [ebp]
+ or eax,ecx
+ add ebp,4
+ add eax,ebx
+ mov ebx,DWORD PTR 152[esp]
+ add edx,esi
+ add eax,esi
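+	; 3329325298 = 0xC67178F2 = K[63], so this exits after the final round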
+ cmp esi,3329325298
+ jne $L00416_63
+ mov esi,DWORD PTR 352[esp]
+ mov ebx,DWORD PTR 4[esp]
+ mov ecx,DWORD PTR 8[esp]
+ mov edi,DWORD PTR 12[esp]
+ add eax,DWORD PTR [esi]
+ add ebx,DWORD PTR 4[esi]
+ add ecx,DWORD PTR 8[esi]
+ add edi,DWORD PTR 12[esi]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edi
+ mov eax,DWORD PTR 20[esp]
+ mov ebx,DWORD PTR 24[esp]
+ mov ecx,DWORD PTR 28[esp]
+ mov edi,DWORD PTR 356[esp]
+ add edx,DWORD PTR 16[esi]
+ add eax,DWORD PTR 20[esi]
+ add ebx,DWORD PTR 24[esi]
+ add ecx,DWORD PTR 28[esi]
+ mov DWORD PTR 16[esi],edx
+ mov DWORD PTR 20[esi],eax
+ mov DWORD PTR 24[esi],ebx
+ mov DWORD PTR 28[esi],ecx
+ add esp,352
+ sub ebp,256
+ cmp edi,DWORD PTR 8[esp]
+ jb $L002loop
+ mov esp,DWORD PTR 12[esp]
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L001K256:
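+; SHA-256 round constants K[0..63] (first 32 bits of the fractional parts of the cube roots of the first 64 primes)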
+DD 1116352408,1899447441,3049323471,3921009573
+DD 961987163,1508970993,2453635748,2870763221
+DD 3624381080,310598401,607225278,1426881987
+DD 1925078388,2162078206,2614888103,3248222580
+DD 3835390401,4022224774,264347078,604807628
+DD 770255983,1249150122,1555081692,1996064986
+DD 2554220882,2821834349,2952996808,3210313671
+DD 3336571891,3584528711,113926993,338241895
+DD 666307205,773529912,1294757372,1396182291
+DD 1695183700,1986661051,2177026350,2456956037
+DD 2730485921,2820302411,3259730800,3345764771
+DD 3516065817,3600352804,4094571909,275423344
+DD 430227734,506948616,659060556,883997877
+DD 958139571,1322822218,1537002063,1747873779
+DD 1955562222,2024104815,2227730452,2361852424
+DD 2428436474,2756734187,3204031479,3329325298
+_sha256_block_data_order ENDP
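+; ASCII "SHA256 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"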
+DB 83,72,65,50,53,54,32,98,108,111,99,107,32,116,114,97
+DB 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+DB 62,0
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
new file mode 100644
index 0000000000..9f3249762b
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
@@ -0,0 +1,573 @@
+TITLE sha512-586.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_sha512_block_data_order PROC PUBLIC
+$L_sha512_block_data_order_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 24[esp]
+ mov eax,DWORD PTR 28[esp]
+ mov ebx,esp
+ call $L000pic_point
+$L000pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($L001K512-$L000pic_point)[ebp]
+ sub esp,16
+ and esp,-64
+ shl eax,7
+ add eax,edi
+ mov DWORD PTR [esp],esi
+ mov DWORD PTR 4[esp],edi
+ mov DWORD PTR 8[esp],eax
+ mov DWORD PTR 12[esp],ebx
+ALIGN 16
+$L002loop_x86:
+ mov eax,DWORD PTR [edi]
+ mov ebx,DWORD PTR 4[edi]
+ mov ecx,DWORD PTR 8[edi]
+ mov edx,DWORD PTR 12[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 16[edi]
+ mov ebx,DWORD PTR 20[edi]
+ mov ecx,DWORD PTR 24[edi]
+ mov edx,DWORD PTR 28[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 32[edi]
+ mov ebx,DWORD PTR 36[edi]
+ mov ecx,DWORD PTR 40[edi]
+ mov edx,DWORD PTR 44[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 48[edi]
+ mov ebx,DWORD PTR 52[edi]
+ mov ecx,DWORD PTR 56[edi]
+ mov edx,DWORD PTR 60[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 64[edi]
+ mov ebx,DWORD PTR 68[edi]
+ mov ecx,DWORD PTR 72[edi]
+ mov edx,DWORD PTR 76[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 80[edi]
+ mov ebx,DWORD PTR 84[edi]
+ mov ecx,DWORD PTR 88[edi]
+ mov edx,DWORD PTR 92[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 96[edi]
+ mov ebx,DWORD PTR 100[edi]
+ mov ecx,DWORD PTR 104[edi]
+ mov edx,DWORD PTR 108[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ mov eax,DWORD PTR 112[edi]
+ mov ebx,DWORD PTR 116[edi]
+ mov ecx,DWORD PTR 120[edi]
+ mov edx,DWORD PTR 124[edi]
+ bswap eax
+ bswap ebx
+ bswap ecx
+ bswap edx
+ push eax
+ push ebx
+ push ecx
+ push edx
+ add edi,128
+ sub esp,72
+ mov DWORD PTR 204[esp],edi
+ lea edi,DWORD PTR 8[esp]
+ mov ecx,16
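+	; 0xA5F3F689 assembles to "mov esi,esi / rep movsd": copy the 16-dword (64-byte) hash state from [esi] to 8[esp]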
+DD 2784229001
+ALIGN 16
+$L00300_15_x86:
+ mov ecx,DWORD PTR 40[esp]
+ mov edx,DWORD PTR 44[esp]
+ mov esi,ecx
+ shr ecx,9
+ mov edi,edx
+ shr edx,9
+ mov ebx,ecx
+ shl esi,14
+ mov eax,edx
+ shl edi,14
+ xor ebx,esi
+ shr ecx,5
+ xor eax,edi
+ shr edx,5
+ xor eax,ecx
+ shl esi,4
+ xor ebx,edx
+ shl edi,4
+ xor ebx,esi
+ shr ecx,4
+ xor eax,edi
+ shr edx,4
+ xor eax,ecx
+ shl esi,5
+ xor ebx,edx
+ shl edi,5
+ xor eax,esi
+ xor ebx,edi
+ mov ecx,DWORD PTR 48[esp]
+ mov edx,DWORD PTR 52[esp]
+ mov esi,DWORD PTR 56[esp]
+ mov edi,DWORD PTR 60[esp]
+ add eax,DWORD PTR 64[esp]
+ adc ebx,DWORD PTR 68[esp]
+ xor ecx,esi
+ xor edx,edi
+ and ecx,DWORD PTR 40[esp]
+ and edx,DWORD PTR 44[esp]
+ add eax,DWORD PTR 192[esp]
+ adc ebx,DWORD PTR 196[esp]
+ xor ecx,esi
+ xor edx,edi
+ mov esi,DWORD PTR [ebp]
+ mov edi,DWORD PTR 4[ebp]
+ add eax,ecx
+ adc ebx,edx
+ mov ecx,DWORD PTR 32[esp]
+ mov edx,DWORD PTR 36[esp]
+ add eax,esi
+ adc ebx,edi
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ add eax,ecx
+ adc ebx,edx
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov DWORD PTR 32[esp],eax
+ mov DWORD PTR 36[esp],ebx
+ mov esi,ecx
+ shr ecx,2
+ mov edi,edx
+ shr edx,2
+ mov ebx,ecx
+ shl esi,4
+ mov eax,edx
+ shl edi,4
+ xor ebx,esi
+ shr ecx,5
+ xor eax,edi
+ shr edx,5
+ xor ebx,ecx
+ shl esi,21
+ xor eax,edx
+ shl edi,21
+ xor eax,esi
+ shr ecx,21
+ xor ebx,edi
+ shr edx,21
+ xor eax,ecx
+ shl esi,5
+ xor ebx,edx
+ shl edi,5
+ xor eax,esi
+ xor ebx,edi
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 16[esp]
+ mov edi,DWORD PTR 20[esp]
+ add eax,DWORD PTR [esp]
+ adc ebx,DWORD PTR 4[esp]
+ or ecx,esi
+ or edx,edi
+ and ecx,DWORD PTR 24[esp]
+ and edx,DWORD PTR 28[esp]
+ and esi,DWORD PTR 8[esp]
+ and edi,DWORD PTR 12[esp]
+ or ecx,esi
+ or edx,edi
+ add eax,ecx
+ adc ebx,edx
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov dl,BYTE PTR [ebp]
+ sub esp,8
+ lea ebp,DWORD PTR 8[ebp]
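+	; dl holds the low byte of the K512 entry just used; 148 = 0x94 ends rounds 0..15 (K512[15] = 0xC19BF174CF692694)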
+ cmp dl,148
+ jne $L00300_15_x86
+ALIGN 16
+$L00416_79_x86:
+ mov ecx,DWORD PTR 312[esp]
+ mov edx,DWORD PTR 316[esp]
+ mov esi,ecx
+ shr ecx,1
+ mov edi,edx
+ shr edx,1
+ mov eax,ecx
+ shl esi,24
+ mov ebx,edx
+ shl edi,24
+ xor ebx,esi
+ shr ecx,6
+ xor eax,edi
+ shr edx,6
+ xor eax,ecx
+ shl esi,7
+ xor ebx,edx
+ shl edi,1
+ xor ebx,esi
+ shr ecx,1
+ xor eax,edi
+ shr edx,1
+ xor eax,ecx
+ shl edi,6
+ xor ebx,edx
+ xor eax,edi
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov ecx,DWORD PTR 208[esp]
+ mov edx,DWORD PTR 212[esp]
+ mov esi,ecx
+ shr ecx,6
+ mov edi,edx
+ shr edx,6
+ mov eax,ecx
+ shl esi,3
+ mov ebx,edx
+ shl edi,3
+ xor eax,esi
+ shr ecx,13
+ xor ebx,edi
+ shr edx,13
+ xor eax,ecx
+ shl esi,10
+ xor ebx,edx
+ shl edi,10
+ xor ebx,esi
+ shr ecx,10
+ xor eax,edi
+ shr edx,10
+ xor ebx,ecx
+ shl edi,13
+ xor eax,edx
+ xor eax,edi
+ mov ecx,DWORD PTR 320[esp]
+ mov edx,DWORD PTR 324[esp]
+ add eax,DWORD PTR [esp]
+ adc ebx,DWORD PTR 4[esp]
+ mov esi,DWORD PTR 248[esp]
+ mov edi,DWORD PTR 252[esp]
+ add eax,ecx
+ adc ebx,edx
+ add eax,esi
+ adc ebx,edi
+ mov DWORD PTR 192[esp],eax
+ mov DWORD PTR 196[esp],ebx
+ mov ecx,DWORD PTR 40[esp]
+ mov edx,DWORD PTR 44[esp]
+ mov esi,ecx
+ shr ecx,9
+ mov edi,edx
+ shr edx,9
+ mov ebx,ecx
+ shl esi,14
+ mov eax,edx
+ shl edi,14
+ xor ebx,esi
+ shr ecx,5
+ xor eax,edi
+ shr edx,5
+ xor eax,ecx
+ shl esi,4
+ xor ebx,edx
+ shl edi,4
+ xor ebx,esi
+ shr ecx,4
+ xor eax,edi
+ shr edx,4
+ xor eax,ecx
+ shl esi,5
+ xor ebx,edx
+ shl edi,5
+ xor eax,esi
+ xor ebx,edi
+ mov ecx,DWORD PTR 48[esp]
+ mov edx,DWORD PTR 52[esp]
+ mov esi,DWORD PTR 56[esp]
+ mov edi,DWORD PTR 60[esp]
+ add eax,DWORD PTR 64[esp]
+ adc ebx,DWORD PTR 68[esp]
+ xor ecx,esi
+ xor edx,edi
+ and ecx,DWORD PTR 40[esp]
+ and edx,DWORD PTR 44[esp]
+ add eax,DWORD PTR 192[esp]
+ adc ebx,DWORD PTR 196[esp]
+ xor ecx,esi
+ xor edx,edi
+ mov esi,DWORD PTR [ebp]
+ mov edi,DWORD PTR 4[ebp]
+ add eax,ecx
+ adc ebx,edx
+ mov ecx,DWORD PTR 32[esp]
+ mov edx,DWORD PTR 36[esp]
+ add eax,esi
+ adc ebx,edi
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ add eax,ecx
+ adc ebx,edx
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov DWORD PTR 32[esp],eax
+ mov DWORD PTR 36[esp],ebx
+ mov esi,ecx
+ shr ecx,2
+ mov edi,edx
+ shr edx,2
+ mov ebx,ecx
+ shl esi,4
+ mov eax,edx
+ shl edi,4
+ xor ebx,esi
+ shr ecx,5
+ xor eax,edi
+ shr edx,5
+ xor ebx,ecx
+ shl esi,21
+ xor eax,edx
+ shl edi,21
+ xor eax,esi
+ shr ecx,21
+ xor ebx,edi
+ shr edx,21
+ xor eax,ecx
+ shl esi,5
+ xor ebx,edx
+ shl edi,5
+ xor eax,esi
+ xor ebx,edi
+ mov ecx,DWORD PTR 8[esp]
+ mov edx,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 16[esp]
+ mov edi,DWORD PTR 20[esp]
+ add eax,DWORD PTR [esp]
+ adc ebx,DWORD PTR 4[esp]
+ or ecx,esi
+ or edx,edi
+ and ecx,DWORD PTR 24[esp]
+ and edx,DWORD PTR 28[esp]
+ and esi,DWORD PTR 8[esp]
+ and edi,DWORD PTR 12[esp]
+ or ecx,esi
+ or edx,edi
+ add eax,ecx
+ adc ebx,edx
+ mov DWORD PTR [esp],eax
+ mov DWORD PTR 4[esp],ebx
+ mov dl,BYTE PTR [ebp]
+ sub esp,8
+ lea ebp,DWORD PTR 8[ebp]
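+	; 23 = 0x17, the low byte of K512[79] = 0x6C44198C4A475817: stop after the 80th round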
+ cmp dl,23
+ jne $L00416_79_x86
+ mov esi,DWORD PTR 840[esp]
+ mov edi,DWORD PTR 844[esp]
+ mov eax,DWORD PTR [esi]
+ mov ebx,DWORD PTR 4[esi]
+ mov ecx,DWORD PTR 8[esi]
+ mov edx,DWORD PTR 12[esi]
+ add eax,DWORD PTR 8[esp]
+ adc ebx,DWORD PTR 12[esp]
+ mov DWORD PTR [esi],eax
+ mov DWORD PTR 4[esi],ebx
+ add ecx,DWORD PTR 16[esp]
+ adc edx,DWORD PTR 20[esp]
+ mov DWORD PTR 8[esi],ecx
+ mov DWORD PTR 12[esi],edx
+ mov eax,DWORD PTR 16[esi]
+ mov ebx,DWORD PTR 20[esi]
+ mov ecx,DWORD PTR 24[esi]
+ mov edx,DWORD PTR 28[esi]
+ add eax,DWORD PTR 24[esp]
+ adc ebx,DWORD PTR 28[esp]
+ mov DWORD PTR 16[esi],eax
+ mov DWORD PTR 20[esi],ebx
+ add ecx,DWORD PTR 32[esp]
+ adc edx,DWORD PTR 36[esp]
+ mov DWORD PTR 24[esi],ecx
+ mov DWORD PTR 28[esi],edx
+ mov eax,DWORD PTR 32[esi]
+ mov ebx,DWORD PTR 36[esi]
+ mov ecx,DWORD PTR 40[esi]
+ mov edx,DWORD PTR 44[esi]
+ add eax,DWORD PTR 40[esp]
+ adc ebx,DWORD PTR 44[esp]
+ mov DWORD PTR 32[esi],eax
+ mov DWORD PTR 36[esi],ebx
+ add ecx,DWORD PTR 48[esp]
+ adc edx,DWORD PTR 52[esp]
+ mov DWORD PTR 40[esi],ecx
+ mov DWORD PTR 44[esi],edx
+ mov eax,DWORD PTR 48[esi]
+ mov ebx,DWORD PTR 52[esi]
+ mov ecx,DWORD PTR 56[esi]
+ mov edx,DWORD PTR 60[esi]
+ add eax,DWORD PTR 56[esp]
+ adc ebx,DWORD PTR 60[esp]
+ mov DWORD PTR 48[esi],eax
+ mov DWORD PTR 52[esi],ebx
+ add ecx,DWORD PTR 64[esp]
+ adc edx,DWORD PTR 68[esp]
+ mov DWORD PTR 56[esi],ecx
+ mov DWORD PTR 60[esi],edx
+ add esp,840
+ sub ebp,640
+ cmp edi,DWORD PTR 8[esp]
+ jb $L002loop_x86
+ mov esp,DWORD PTR 12[esp]
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L001K512:
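+; SHA-512 round constants K[0..79], each stored low dword first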
+DD 3609767458,1116352408
+DD 602891725,1899447441
+DD 3964484399,3049323471
+DD 2173295548,3921009573
+DD 4081628472,961987163
+DD 3053834265,1508970993
+DD 2937671579,2453635748
+DD 3664609560,2870763221
+DD 2734883394,3624381080
+DD 1164996542,310598401
+DD 1323610764,607225278
+DD 3590304994,1426881987
+DD 4068182383,1925078388
+DD 991336113,2162078206
+DD 633803317,2614888103
+DD 3479774868,3248222580
+DD 2666613458,3835390401
+DD 944711139,4022224774
+DD 2341262773,264347078
+DD 2007800933,604807628
+DD 1495990901,770255983
+DD 1856431235,1249150122
+DD 3175218132,1555081692
+DD 2198950837,1996064986
+DD 3999719339,2554220882
+DD 766784016,2821834349
+DD 2566594879,2952996808
+DD 3203337956,3210313671
+DD 1034457026,3336571891
+DD 2466948901,3584528711
+DD 3758326383,113926993
+DD 168717936,338241895
+DD 1188179964,666307205
+DD 1546045734,773529912
+DD 1522805485,1294757372
+DD 2643833823,1396182291
+DD 2343527390,1695183700
+DD 1014477480,1986661051
+DD 1206759142,2177026350
+DD 344077627,2456956037
+DD 1290863460,2730485921
+DD 3158454273,2820302411
+DD 3505952657,3259730800
+DD 106217008,3345764771
+DD 3606008344,3516065817
+DD 1432725776,3600352804
+DD 1467031594,4094571909
+DD 851169720,275423344
+DD 3100823752,430227734
+DD 1363258195,506948616
+DD 3750685593,659060556
+DD 3785050280,883997877
+DD 3318307427,958139571
+DD 3812723403,1322822218
+DD 2003034995,1537002063
+DD 3602036899,1747873779
+DD 1575990012,1955562222
+DD 1125592928,2024104815
+DD 2716904306,2227730452
+DD 442776044,2361852424
+DD 593698344,2428436474
+DD 3733110249,2756734187
+DD 2999351573,3204031479
+DD 3815920427,3329325298
+DD 3928383900,3391569614
+DD 566280711,3515267271
+DD 3454069534,3940187606
+DD 4000239992,4118630271
+DD 1914138554,116418474
+DD 2731055270,174292421
+DD 3203993006,289380356
+DD 320620315,460393269
+DD 587496836,685471733
+DD 1086792851,852142971
+DD 365543100,1017036298
+DD 2618297676,1126000580
+DD 3409855158,1288033470
+DD 4234509866,1501505948
+DD 987167468,1607167915
+DD 1246189591,1816402316
+_sha512_block_data_order ENDP
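+; ASCII "SHA512 block transform for x86, CRYPTOGAMS by <appro@openssl.org>"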
+DB 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97
+DB 110,115,102,111,114,109,32,102,111,114,32,120,56,54,44,32
+DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
+DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
+DB 62,0
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm b/deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm
new file mode 100644
index 0000000000..9fa36662ce
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/whrlpool/wp-mmx.asm
@@ -0,0 +1,1122 @@
+TITLE wp-mmx.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.686
+.XMM
+IF @Version LT 800
+XMMWORD STRUCT 16
+DQ 2 dup (?)
+XMMWORD ENDS
+ENDIF
+
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_whirlpool_block_mmx PROC PUBLIC
+$L_whirlpool_block_mmx_begin::
+ push ebp
+ push ebx
+ push esi
+ push edi
+ mov esi,DWORD PTR 20[esp]
+ mov edi,DWORD PTR 24[esp]
+ mov ebp,DWORD PTR 28[esp]
+ mov eax,esp
+ sub esp,148
+ and esp,-64
+ lea ebx,DWORD PTR 128[esp]
+ mov DWORD PTR [ebx],esi
+ mov DWORD PTR 4[ebx],edi
+ mov DWORD PTR 8[ebx],ebp
+ mov DWORD PTR 16[ebx],eax
+ call $L000pic_point
+$L000pic_point:
+ pop ebp
+ lea ebp,DWORD PTR ($L001table-$L000pic_point)[ebp]
+ xor ecx,ecx
+ xor edx,edx
+ movq mm0,QWORD PTR [esi]
+ movq mm1,QWORD PTR 8[esi]
+ movq mm2,QWORD PTR 16[esi]
+ movq mm3,QWORD PTR 24[esi]
+ movq mm4,QWORD PTR 32[esi]
+ movq mm5,QWORD PTR 40[esi]
+ movq mm6,QWORD PTR 48[esi]
+ movq mm7,QWORD PTR 56[esi]
+$L002outerloop:
+ movq QWORD PTR [esp],mm0
+ movq QWORD PTR 8[esp],mm1
+ movq QWORD PTR 16[esp],mm2
+ movq QWORD PTR 24[esp],mm3
+ movq QWORD PTR 32[esp],mm4
+ movq QWORD PTR 40[esp],mm5
+ movq QWORD PTR 48[esp],mm6
+ movq QWORD PTR 56[esp],mm7
+ pxor mm0,QWORD PTR [edi]
+ pxor mm1,QWORD PTR 8[edi]
+ pxor mm2,QWORD PTR 16[edi]
+ pxor mm3,QWORD PTR 24[edi]
+ pxor mm4,QWORD PTR 32[edi]
+ pxor mm5,QWORD PTR 40[edi]
+ pxor mm6,QWORD PTR 48[edi]
+ pxor mm7,QWORD PTR 56[edi]
+ movq QWORD PTR 64[esp],mm0
+ movq QWORD PTR 72[esp],mm1
+ movq QWORD PTR 80[esp],mm2
+ movq QWORD PTR 88[esp],mm3
+ movq QWORD PTR 96[esp],mm4
+ movq QWORD PTR 104[esp],mm5
+ movq QWORD PTR 112[esp],mm6
+ movq QWORD PTR 120[esp],mm7
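+	; key half K at [esp]..56[esp] (copy of the hash state), data half S = K xor block at 64[esp]..120[esp]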
+ xor esi,esi
+ mov DWORD PTR 12[ebx],esi
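+	; one Whirlpool round per pass: mm0 seeds with round constant rc[esi] (stored after the 4KB table); each state byte selects an 8-byte table row; esi counts rounds 0..9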
+ALIGN 16
+$L003round:
+ movq mm0,QWORD PTR 4096[esi*8+ebp]
+ mov eax,DWORD PTR [esp]
+ mov ebx,DWORD PTR 4[esp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm0,QWORD PTR [esi*8+ebp]
+ movq mm1,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 8[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ movq mm2,QWORD PTR 6[esi*8+ebp]
+ movq mm3,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ movq mm4,QWORD PTR 4[esi*8+ebp]
+ movq mm5,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 12[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ movq mm6,QWORD PTR 2[esi*8+ebp]
+ movq mm7,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm1,QWORD PTR [esi*8+ebp]
+ pxor mm2,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 16[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm3,QWORD PTR 6[esi*8+ebp]
+ pxor mm4,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm5,QWORD PTR 4[esi*8+ebp]
+ pxor mm6,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 20[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm7,QWORD PTR 2[esi*8+ebp]
+ pxor mm0,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm2,QWORD PTR [esi*8+ebp]
+ pxor mm3,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 24[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm4,QWORD PTR 6[esi*8+ebp]
+ pxor mm5,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm6,QWORD PTR 4[esi*8+ebp]
+ pxor mm7,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 28[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm0,QWORD PTR 2[esi*8+ebp]
+ pxor mm1,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm3,QWORD PTR [esi*8+ebp]
+ pxor mm4,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 32[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm5,QWORD PTR 6[esi*8+ebp]
+ pxor mm6,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm7,QWORD PTR 4[esi*8+ebp]
+ pxor mm0,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 36[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm1,QWORD PTR 2[esi*8+ebp]
+ pxor mm2,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm4,QWORD PTR [esi*8+ebp]
+ pxor mm5,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 40[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm6,QWORD PTR 6[esi*8+ebp]
+ pxor mm7,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm0,QWORD PTR 4[esi*8+ebp]
+ pxor mm1,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 44[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm2,QWORD PTR 2[esi*8+ebp]
+ pxor mm3,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm5,QWORD PTR [esi*8+ebp]
+ pxor mm6,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 48[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm7,QWORD PTR 6[esi*8+ebp]
+ pxor mm0,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm1,QWORD PTR 4[esi*8+ebp]
+ pxor mm2,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 52[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm3,QWORD PTR 2[esi*8+ebp]
+ pxor mm4,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm6,QWORD PTR [esi*8+ebp]
+ pxor mm7,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 56[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm0,QWORD PTR 6[esi*8+ebp]
+ pxor mm1,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm2,QWORD PTR 4[esi*8+ebp]
+ pxor mm3,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 60[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm4,QWORD PTR 2[esi*8+ebp]
+ pxor mm5,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm7,QWORD PTR [esi*8+ebp]
+ pxor mm0,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 64[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm1,QWORD PTR 6[esi*8+ebp]
+ pxor mm2,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm3,QWORD PTR 4[esi*8+ebp]
+ pxor mm4,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 68[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm5,QWORD PTR 2[esi*8+ebp]
+ pxor mm6,QWORD PTR 1[edi*8+ebp]
+ movq QWORD PTR [esp],mm0
+ movq QWORD PTR 8[esp],mm1
+ movq QWORD PTR 16[esp],mm2
+ movq QWORD PTR 24[esp],mm3
+ movq QWORD PTR 32[esp],mm4
+ movq QWORD PTR 40[esp],mm5
+ movq QWORD PTR 48[esp],mm6
+ movq QWORD PTR 56[esp],mm7
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm0,QWORD PTR [esi*8+ebp]
+ pxor mm1,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 72[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm2,QWORD PTR 6[esi*8+ebp]
+ pxor mm3,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm4,QWORD PTR 4[esi*8+ebp]
+ pxor mm5,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 76[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm6,QWORD PTR 2[esi*8+ebp]
+ pxor mm7,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm1,QWORD PTR [esi*8+ebp]
+ pxor mm2,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 80[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm3,QWORD PTR 6[esi*8+ebp]
+ pxor mm4,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm5,QWORD PTR 4[esi*8+ebp]
+ pxor mm6,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 84[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm7,QWORD PTR 2[esi*8+ebp]
+ pxor mm0,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm2,QWORD PTR [esi*8+ebp]
+ pxor mm3,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 88[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm4,QWORD PTR 6[esi*8+ebp]
+ pxor mm5,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm6,QWORD PTR 4[esi*8+ebp]
+ pxor mm7,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 92[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm0,QWORD PTR 2[esi*8+ebp]
+ pxor mm1,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm3,QWORD PTR [esi*8+ebp]
+ pxor mm4,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 96[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm5,QWORD PTR 6[esi*8+ebp]
+ pxor mm6,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm7,QWORD PTR 4[esi*8+ebp]
+ pxor mm0,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 100[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm1,QWORD PTR 2[esi*8+ebp]
+ pxor mm2,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm4,QWORD PTR [esi*8+ebp]
+ pxor mm5,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 104[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm6,QWORD PTR 6[esi*8+ebp]
+ pxor mm7,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm0,QWORD PTR 4[esi*8+ebp]
+ pxor mm1,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 108[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm2,QWORD PTR 2[esi*8+ebp]
+ pxor mm3,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm5,QWORD PTR [esi*8+ebp]
+ pxor mm6,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 112[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm7,QWORD PTR 6[esi*8+ebp]
+ pxor mm0,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm1,QWORD PTR 4[esi*8+ebp]
+ pxor mm2,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 116[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm3,QWORD PTR 2[esi*8+ebp]
+ pxor mm4,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm6,QWORD PTR [esi*8+ebp]
+ pxor mm7,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ mov eax,DWORD PTR 120[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm0,QWORD PTR 6[esi*8+ebp]
+ pxor mm1,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm2,QWORD PTR 4[esi*8+ebp]
+ pxor mm3,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ mov ebx,DWORD PTR 124[esp]
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm4,QWORD PTR 2[esi*8+ebp]
+ pxor mm5,QWORD PTR 1[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr eax,16
+ pxor mm7,QWORD PTR [esi*8+ebp]
+ pxor mm0,QWORD PTR 7[edi*8+ebp]
+ mov cl,al
+ mov dl,ah
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm1,QWORD PTR 6[esi*8+ebp]
+ pxor mm2,QWORD PTR 5[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ shr ebx,16
+ pxor mm3,QWORD PTR 4[esi*8+ebp]
+ pxor mm4,QWORD PTR 3[edi*8+ebp]
+ mov cl,bl
+ mov dl,bh
+ lea esi,DWORD PTR [ecx*1+ecx]
+ lea edi,DWORD PTR [edx*1+edx]
+ pxor mm5,QWORD PTR 2[esi*8+ebp]
+ pxor mm6,QWORD PTR 1[edi*8+ebp]
+ lea ebx,DWORD PTR 128[esp]
+ mov esi,DWORD PTR 12[ebx]
+ add esi,1
+ cmp esi,10
+ je $L004roundsdone
+ mov DWORD PTR 12[ebx],esi
+ movq QWORD PTR 64[esp],mm0
+ movq QWORD PTR 72[esp],mm1
+ movq QWORD PTR 80[esp],mm2
+ movq QWORD PTR 88[esp],mm3
+ movq QWORD PTR 96[esp],mm4
+ movq QWORD PTR 104[esp],mm5
+ movq QWORD PTR 112[esp],mm6
+ movq QWORD PTR 120[esp],mm7
+ jmp $L003round
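+	; ten rounds done: Miyaguchi-Preneel feed-forward, new hash = E_H(m) xor m xor H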
+ALIGN 16
+$L004roundsdone:
+ mov esi,DWORD PTR [ebx]
+ mov edi,DWORD PTR 4[ebx]
+ mov eax,DWORD PTR 8[ebx]
+ pxor mm0,QWORD PTR [edi]
+ pxor mm1,QWORD PTR 8[edi]
+ pxor mm2,QWORD PTR 16[edi]
+ pxor mm3,QWORD PTR 24[edi]
+ pxor mm4,QWORD PTR 32[edi]
+ pxor mm5,QWORD PTR 40[edi]
+ pxor mm6,QWORD PTR 48[edi]
+ pxor mm7,QWORD PTR 56[edi]
+ pxor mm0,QWORD PTR [esi]
+ pxor mm1,QWORD PTR 8[esi]
+ pxor mm2,QWORD PTR 16[esi]
+ pxor mm3,QWORD PTR 24[esi]
+ pxor mm4,QWORD PTR 32[esi]
+ pxor mm5,QWORD PTR 40[esi]
+ pxor mm6,QWORD PTR 48[esi]
+ pxor mm7,QWORD PTR 56[esi]
+ movq QWORD PTR [esi],mm0
+ movq QWORD PTR 8[esi],mm1
+ movq QWORD PTR 16[esi],mm2
+ movq QWORD PTR 24[esi],mm3
+ movq QWORD PTR 32[esi],mm4
+ movq QWORD PTR 40[esi],mm5
+ movq QWORD PTR 48[esi],mm6
+ movq QWORD PTR 56[esi],mm7
+ lea edi,DWORD PTR 64[edi]
+ sub eax,1
+ jz $L005alldone
+ mov DWORD PTR 4[ebx],edi
+ mov DWORD PTR 8[ebx],eax
+ jmp $L002outerloop
+$L005alldone:
+ emms
+ mov esp,DWORD PTR 16[ebx]
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+ALIGN 64
+$L001table:
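+; 256 table rows of 8 bytes, each stored twice (16-byte stride) so byte-offset loads like 7[edi*8+ebp] return rotated copies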
+DB 24,24,96,24,192,120,48,216
+DB 24,24,96,24,192,120,48,216
+DB 35,35,140,35,5,175,70,38
+DB 35,35,140,35,5,175,70,38
+DB 198,198,63,198,126,249,145,184
+DB 198,198,63,198,126,249,145,184
+DB 232,232,135,232,19,111,205,251
+DB 232,232,135,232,19,111,205,251
+DB 135,135,38,135,76,161,19,203
+DB 135,135,38,135,76,161,19,203
+DB 184,184,218,184,169,98,109,17
+DB 184,184,218,184,169,98,109,17
+DB 1,1,4,1,8,5,2,9
+DB 1,1,4,1,8,5,2,9
+DB 79,79,33,79,66,110,158,13
+DB 79,79,33,79,66,110,158,13
+DB 54,54,216,54,173,238,108,155
+DB 54,54,216,54,173,238,108,155
+DB 166,166,162,166,89,4,81,255
+DB 166,166,162,166,89,4,81,255
+DB 210,210,111,210,222,189,185,12
+DB 210,210,111,210,222,189,185,12
+DB 245,245,243,245,251,6,247,14
+DB 245,245,243,245,251,6,247,14
+DB 121,121,249,121,239,128,242,150
+DB 121,121,249,121,239,128,242,150
+DB 111,111,161,111,95,206,222,48
+DB 111,111,161,111,95,206,222,48
+DB 145,145,126,145,252,239,63,109
+DB 145,145,126,145,252,239,63,109
+DB 82,82,85,82,170,7,164,248
+DB 82,82,85,82,170,7,164,248
+DB 96,96,157,96,39,253,192,71
+DB 96,96,157,96,39,253,192,71
+DB 188,188,202,188,137,118,101,53
+DB 188,188,202,188,137,118,101,53
+DB 155,155,86,155,172,205,43,55
+DB 155,155,86,155,172,205,43,55
+DB 142,142,2,142,4,140,1,138
+DB 142,142,2,142,4,140,1,138
+DB 163,163,182,163,113,21,91,210
+DB 163,163,182,163,113,21,91,210
+DB 12,12,48,12,96,60,24,108
+DB 12,12,48,12,96,60,24,108
+DB 123,123,241,123,255,138,246,132
+DB 123,123,241,123,255,138,246,132
+DB 53,53,212,53,181,225,106,128
+DB 53,53,212,53,181,225,106,128
+DB 29,29,116,29,232,105,58,245
+DB 29,29,116,29,232,105,58,245
+DB 224,224,167,224,83,71,221,179
+DB 224,224,167,224,83,71,221,179
+DB 215,215,123,215,246,172,179,33
+DB 215,215,123,215,246,172,179,33
+DB 194,194,47,194,94,237,153,156
+DB 194,194,47,194,94,237,153,156
+DB 46,46,184,46,109,150,92,67
+DB 46,46,184,46,109,150,92,67
+DB 75,75,49,75,98,122,150,41
+DB 75,75,49,75,98,122,150,41
+DB 254,254,223,254,163,33,225,93
+DB 254,254,223,254,163,33,225,93
+DB 87,87,65,87,130,22,174,213
+DB 87,87,65,87,130,22,174,213
+DB 21,21,84,21,168,65,42,189
+DB 21,21,84,21,168,65,42,189
+DB 119,119,193,119,159,182,238,232
+DB 119,119,193,119,159,182,238,232
+DB 55,55,220,55,165,235,110,146
+DB 55,55,220,55,165,235,110,146
+DB 229,229,179,229,123,86,215,158
+DB 229,229,179,229,123,86,215,158
+DB 159,159,70,159,140,217,35,19
+DB 159,159,70,159,140,217,35,19
+DB 240,240,231,240,211,23,253,35
+DB 240,240,231,240,211,23,253,35
+DB 74,74,53,74,106,127,148,32
+DB 74,74,53,74,106,127,148,32
+DB 218,218,79,218,158,149,169,68
+DB 218,218,79,218,158,149,169,68
+DB 88,88,125,88,250,37,176,162
+DB 88,88,125,88,250,37,176,162
+DB 201,201,3,201,6,202,143,207
+DB 201,201,3,201,6,202,143,207
+DB 41,41,164,41,85,141,82,124
+DB 41,41,164,41,85,141,82,124
+DB 10,10,40,10,80,34,20,90
+DB 10,10,40,10,80,34,20,90
+DB 177,177,254,177,225,79,127,80
+DB 177,177,254,177,225,79,127,80
+DB 160,160,186,160,105,26,93,201
+DB 160,160,186,160,105,26,93,201
+DB 107,107,177,107,127,218,214,20
+DB 107,107,177,107,127,218,214,20
+DB 133,133,46,133,92,171,23,217
+DB 133,133,46,133,92,171,23,217
+DB 189,189,206,189,129,115,103,60
+DB 189,189,206,189,129,115,103,60
+DB 93,93,105,93,210,52,186,143
+DB 93,93,105,93,210,52,186,143
+DB 16,16,64,16,128,80,32,144
+DB 16,16,64,16,128,80,32,144
+DB 244,244,247,244,243,3,245,7
+DB 244,244,247,244,243,3,245,7
+DB 203,203,11,203,22,192,139,221
+DB 203,203,11,203,22,192,139,221
+DB 62,62,248,62,237,198,124,211
+DB 62,62,248,62,237,198,124,211
+DB 5,5,20,5,40,17,10,45
+DB 5,5,20,5,40,17,10,45
+DB 103,103,129,103,31,230,206,120
+DB 103,103,129,103,31,230,206,120
+DB 228,228,183,228,115,83,213,151
+DB 228,228,183,228,115,83,213,151
+DB 39,39,156,39,37,187,78,2
+DB 39,39,156,39,37,187,78,2
+DB 65,65,25,65,50,88,130,115
+DB 65,65,25,65,50,88,130,115
+DB 139,139,22,139,44,157,11,167
+DB 139,139,22,139,44,157,11,167
+DB 167,167,166,167,81,1,83,246
+DB 167,167,166,167,81,1,83,246
+DB 125,125,233,125,207,148,250,178
+DB 125,125,233,125,207,148,250,178
+DB 149,149,110,149,220,251,55,73
+DB 149,149,110,149,220,251,55,73
+DB 216,216,71,216,142,159,173,86
+DB 216,216,71,216,142,159,173,86
+DB 251,251,203,251,139,48,235,112
+DB 251,251,203,251,139,48,235,112
+DB 238,238,159,238,35,113,193,205
+DB 238,238,159,238,35,113,193,205
+DB 124,124,237,124,199,145,248,187
+DB 124,124,237,124,199,145,248,187
+DB 102,102,133,102,23,227,204,113
+DB 102,102,133,102,23,227,204,113
+DB 221,221,83,221,166,142,167,123
+DB 221,221,83,221,166,142,167,123
+DB 23,23,92,23,184,75,46,175
+DB 23,23,92,23,184,75,46,175
+DB 71,71,1,71,2,70,142,69
+DB 71,71,1,71,2,70,142,69
+DB 158,158,66,158,132,220,33,26
+DB 158,158,66,158,132,220,33,26
+DB 202,202,15,202,30,197,137,212
+DB 202,202,15,202,30,197,137,212
+DB 45,45,180,45,117,153,90,88
+DB 45,45,180,45,117,153,90,88
+DB 191,191,198,191,145,121,99,46
+DB 191,191,198,191,145,121,99,46
+DB 7,7,28,7,56,27,14,63
+DB 7,7,28,7,56,27,14,63
+DB 173,173,142,173,1,35,71,172
+DB 173,173,142,173,1,35,71,172
+DB 90,90,117,90,234,47,180,176
+DB 90,90,117,90,234,47,180,176
+DB 131,131,54,131,108,181,27,239
+DB 131,131,54,131,108,181,27,239
+DB 51,51,204,51,133,255,102,182
+DB 51,51,204,51,133,255,102,182
+DB 99,99,145,99,63,242,198,92
+DB 99,99,145,99,63,242,198,92
+DB 2,2,8,2,16,10,4,18
+DB 2,2,8,2,16,10,4,18
+DB 170,170,146,170,57,56,73,147
+DB 170,170,146,170,57,56,73,147
+DB 113,113,217,113,175,168,226,222
+DB 113,113,217,113,175,168,226,222
+DB 200,200,7,200,14,207,141,198
+DB 200,200,7,200,14,207,141,198
+DB 25,25,100,25,200,125,50,209
+DB 25,25,100,25,200,125,50,209
+DB 73,73,57,73,114,112,146,59
+DB 73,73,57,73,114,112,146,59
+DB 217,217,67,217,134,154,175,95
+DB 217,217,67,217,134,154,175,95
+DB 242,242,239,242,195,29,249,49
+DB 242,242,239,242,195,29,249,49
+DB 227,227,171,227,75,72,219,168
+DB 227,227,171,227,75,72,219,168
+DB 91,91,113,91,226,42,182,185
+DB 91,91,113,91,226,42,182,185
+DB 136,136,26,136,52,146,13,188
+DB 136,136,26,136,52,146,13,188
+DB 154,154,82,154,164,200,41,62
+DB 154,154,82,154,164,200,41,62
+DB 38,38,152,38,45,190,76,11
+DB 38,38,152,38,45,190,76,11
+DB 50,50,200,50,141,250,100,191
+DB 50,50,200,50,141,250,100,191
+DB 176,176,250,176,233,74,125,89
+DB 176,176,250,176,233,74,125,89
+DB 233,233,131,233,27,106,207,242
+DB 233,233,131,233,27,106,207,242
+DB 15,15,60,15,120,51,30,119
+DB 15,15,60,15,120,51,30,119
+DB 213,213,115,213,230,166,183,51
+DB 213,213,115,213,230,166,183,51
+DB 128,128,58,128,116,186,29,244
+DB 128,128,58,128,116,186,29,244
+DB 190,190,194,190,153,124,97,39
+DB 190,190,194,190,153,124,97,39
+DB 205,205,19,205,38,222,135,235
+DB 205,205,19,205,38,222,135,235
+DB 52,52,208,52,189,228,104,137
+DB 52,52,208,52,189,228,104,137
+DB 72,72,61,72,122,117,144,50
+DB 72,72,61,72,122,117,144,50
+DB 255,255,219,255,171,36,227,84
+DB 255,255,219,255,171,36,227,84
+DB 122,122,245,122,247,143,244,141
+DB 122,122,245,122,247,143,244,141
+DB 144,144,122,144,244,234,61,100
+DB 144,144,122,144,244,234,61,100
+DB 95,95,97,95,194,62,190,157
+DB 95,95,97,95,194,62,190,157
+DB 32,32,128,32,29,160,64,61
+DB 32,32,128,32,29,160,64,61
+DB 104,104,189,104,103,213,208,15
+DB 104,104,189,104,103,213,208,15
+DB 26,26,104,26,208,114,52,202
+DB 26,26,104,26,208,114,52,202
+DB 174,174,130,174,25,44,65,183
+DB 174,174,130,174,25,44,65,183
+DB 180,180,234,180,201,94,117,125
+DB 180,180,234,180,201,94,117,125
+DB 84,84,77,84,154,25,168,206
+DB 84,84,77,84,154,25,168,206
+DB 147,147,118,147,236,229,59,127
+DB 147,147,118,147,236,229,59,127
+DB 34,34,136,34,13,170,68,47
+DB 34,34,136,34,13,170,68,47
+DB 100,100,141,100,7,233,200,99
+DB 100,100,141,100,7,233,200,99
+DB 241,241,227,241,219,18,255,42
+DB 241,241,227,241,219,18,255,42
+DB 115,115,209,115,191,162,230,204
+DB 115,115,209,115,191,162,230,204
+DB 18,18,72,18,144,90,36,130
+DB 18,18,72,18,144,90,36,130
+DB 64,64,29,64,58,93,128,122
+DB 64,64,29,64,58,93,128,122
+DB 8,8,32,8,64,40,16,72
+DB 8,8,32,8,64,40,16,72
+DB 195,195,43,195,86,232,155,149
+DB 195,195,43,195,86,232,155,149
+DB 236,236,151,236,51,123,197,223
+DB 236,236,151,236,51,123,197,223
+DB 219,219,75,219,150,144,171,77
+DB 219,219,75,219,150,144,171,77
+DB 161,161,190,161,97,31,95,192
+DB 161,161,190,161,97,31,95,192
+DB 141,141,14,141,28,131,7,145
+DB 141,141,14,141,28,131,7,145
+DB 61,61,244,61,245,201,122,200
+DB 61,61,244,61,245,201,122,200
+DB 151,151,102,151,204,241,51,91
+DB 151,151,102,151,204,241,51,91
+DB 0,0,0,0,0,0,0,0
+DB 0,0,0,0,0,0,0,0
+DB 207,207,27,207,54,212,131,249
+DB 207,207,27,207,54,212,131,249
+DB 43,43,172,43,69,135,86,110
+DB 43,43,172,43,69,135,86,110
+DB 118,118,197,118,151,179,236,225
+DB 118,118,197,118,151,179,236,225
+DB 130,130,50,130,100,176,25,230
+DB 130,130,50,130,100,176,25,230
+DB 214,214,127,214,254,169,177,40
+DB 214,214,127,214,254,169,177,40
+DB 27,27,108,27,216,119,54,195
+DB 27,27,108,27,216,119,54,195
+DB 181,181,238,181,193,91,119,116
+DB 181,181,238,181,193,91,119,116
+DB 175,175,134,175,17,41,67,190
+DB 175,175,134,175,17,41,67,190
+DB 106,106,181,106,119,223,212,29
+DB 106,106,181,106,119,223,212,29
+DB 80,80,93,80,186,13,160,234
+DB 80,80,93,80,186,13,160,234
+DB 69,69,9,69,18,76,138,87
+DB 69,69,9,69,18,76,138,87
+DB 243,243,235,243,203,24,251,56
+DB 243,243,235,243,203,24,251,56
+DB 48,48,192,48,157,240,96,173
+DB 48,48,192,48,157,240,96,173
+DB 239,239,155,239,43,116,195,196
+DB 239,239,155,239,43,116,195,196
+DB 63,63,252,63,229,195,126,218
+DB 63,63,252,63,229,195,126,218
+DB 85,85,73,85,146,28,170,199
+DB 85,85,73,85,146,28,170,199
+DB 162,162,178,162,121,16,89,219
+DB 162,162,178,162,121,16,89,219
+DB 234,234,143,234,3,101,201,233
+DB 234,234,143,234,3,101,201,233
+DB 101,101,137,101,15,236,202,106
+DB 101,101,137,101,15,236,202,106
+DB 186,186,210,186,185,104,105,3
+DB 186,186,210,186,185,104,105,3
+DB 47,47,188,47,101,147,94,74
+DB 47,47,188,47,101,147,94,74
+DB 192,192,39,192,78,231,157,142
+DB 192,192,39,192,78,231,157,142
+DB 222,222,95,222,190,129,161,96
+DB 222,222,95,222,190,129,161,96
+DB 28,28,112,28,224,108,56,252
+DB 28,28,112,28,224,108,56,252
+DB 253,253,211,253,187,46,231,70
+DB 253,253,211,253,187,46,231,70
+DB 77,77,41,77,82,100,154,31
+DB 77,77,41,77,82,100,154,31
+DB 146,146,114,146,228,224,57,118
+DB 146,146,114,146,228,224,57,118
+DB 117,117,201,117,143,188,234,250
+DB 117,117,201,117,143,188,234,250
+DB 6,6,24,6,48,30,12,54
+DB 6,6,24,6,48,30,12,54
+DB 138,138,18,138,36,152,9,174
+DB 138,138,18,138,36,152,9,174
+DB 178,178,242,178,249,64,121,75
+DB 178,178,242,178,249,64,121,75
+DB 230,230,191,230,99,89,209,133
+DB 230,230,191,230,99,89,209,133
+DB 14,14,56,14,112,54,28,126
+DB 14,14,56,14,112,54,28,126
+DB 31,31,124,31,248,99,62,231
+DB 31,31,124,31,248,99,62,231
+DB 98,98,149,98,55,247,196,85
+DB 98,98,149,98,55,247,196,85
+DB 212,212,119,212,238,163,181,58
+DB 212,212,119,212,238,163,181,58
+DB 168,168,154,168,41,50,77,129
+DB 168,168,154,168,41,50,77,129
+DB 150,150,98,150,196,244,49,82
+DB 150,150,98,150,196,244,49,82
+DB 249,249,195,249,155,58,239,98
+DB 249,249,195,249,155,58,239,98
+DB 197,197,51,197,102,246,151,163
+DB 197,197,51,197,102,246,151,163
+DB 37,37,148,37,53,177,74,16
+DB 37,37,148,37,53,177,74,16
+DB 89,89,121,89,242,32,178,171
+DB 89,89,121,89,242,32,178,171
+DB 132,132,42,132,84,174,21,208
+DB 132,132,42,132,84,174,21,208
+DB 114,114,213,114,183,167,228,197
+DB 114,114,213,114,183,167,228,197
+DB 57,57,228,57,213,221,114,236
+DB 57,57,228,57,213,221,114,236
+DB 76,76,45,76,90,97,152,22
+DB 76,76,45,76,90,97,152,22
+DB 94,94,101,94,202,59,188,148
+DB 94,94,101,94,202,59,188,148
+DB 120,120,253,120,231,133,240,159
+DB 120,120,253,120,231,133,240,159
+DB 56,56,224,56,221,216,112,229
+DB 56,56,224,56,221,216,112,229
+DB 140,140,10,140,20,134,5,152
+DB 140,140,10,140,20,134,5,152
+DB 209,209,99,209,198,178,191,23
+DB 209,209,99,209,198,178,191,23
+DB 165,165,174,165,65,11,87,228
+DB 165,165,174,165,65,11,87,228
+DB 226,226,175,226,67,77,217,161
+DB 226,226,175,226,67,77,217,161
+DB 97,97,153,97,47,248,194,78
+DB 97,97,153,97,47,248,194,78
+DB 179,179,246,179,241,69,123,66
+DB 179,179,246,179,241,69,123,66
+DB 33,33,132,33,21,165,66,52
+DB 33,33,132,33,21,165,66,52
+DB 156,156,74,156,148,214,37,8
+DB 156,156,74,156,148,214,37,8
+DB 30,30,120,30,240,102,60,238
+DB 30,30,120,30,240,102,60,238
+DB 67,67,17,67,34,82,134,97
+DB 67,67,17,67,34,82,134,97
+DB 199,199,59,199,118,252,147,177
+DB 199,199,59,199,118,252,147,177
+DB 252,252,215,252,179,43,229,79
+DB 252,252,215,252,179,43,229,79
+DB 4,4,16,4,32,20,8,36
+DB 4,4,16,4,32,20,8,36
+DB 81,81,89,81,178,8,162,227
+DB 81,81,89,81,178,8,162,227
+DB 153,153,94,153,188,199,47,37
+DB 153,153,94,153,188,199,47,37
+DB 109,109,169,109,79,196,218,34
+DB 109,109,169,109,79,196,218,34
+DB 13,13,52,13,104,57,26,101
+DB 13,13,52,13,104,57,26,101
+DB 250,250,207,250,131,53,233,121
+DB 250,250,207,250,131,53,233,121
+DB 223,223,91,223,182,132,163,105
+DB 223,223,91,223,182,132,163,105
+DB 126,126,229,126,215,155,252,169
+DB 126,126,229,126,215,155,252,169
+DB 36,36,144,36,61,180,72,25
+DB 36,36,144,36,61,180,72,25
+DB 59,59,236,59,197,215,118,254
+DB 59,59,236,59,197,215,118,254
+DB 171,171,150,171,49,61,75,154
+DB 171,171,150,171,49,61,75,154
+DB 206,206,31,206,62,209,129,240
+DB 206,206,31,206,62,209,129,240
+DB 17,17,68,17,136,85,34,153
+DB 17,17,68,17,136,85,34,153
+DB 143,143,6,143,12,137,3,131
+DB 143,143,6,143,12,137,3,131
+DB 78,78,37,78,74,107,156,4
+DB 78,78,37,78,74,107,156,4
+DB 183,183,230,183,209,81,115,102
+DB 183,183,230,183,209,81,115,102
+DB 235,235,139,235,11,96,203,224
+DB 235,235,139,235,11,96,203,224
+DB 60,60,240,60,253,204,120,193
+DB 60,60,240,60,253,204,120,193
+DB 129,129,62,129,124,191,31,253
+DB 129,129,62,129,124,191,31,253
+DB 148,148,106,148,212,254,53,64
+DB 148,148,106,148,212,254,53,64
+DB 247,247,251,247,235,12,243,28
+DB 247,247,251,247,235,12,243,28
+DB 185,185,222,185,161,103,111,24
+DB 185,185,222,185,161,103,111,24
+DB 19,19,76,19,152,95,38,139
+DB 19,19,76,19,152,95,38,139
+DB 44,44,176,44,125,156,88,81
+DB 44,44,176,44,125,156,88,81
+DB 211,211,107,211,214,184,187,5
+DB 211,211,107,211,214,184,187,5
+DB 231,231,187,231,107,92,211,140
+DB 231,231,187,231,107,92,211,140
+DB 110,110,165,110,87,203,220,57
+DB 110,110,165,110,87,203,220,57
+DB 196,196,55,196,110,243,149,170
+DB 196,196,55,196,110,243,149,170
+DB 3,3,12,3,24,15,6,27
+DB 3,3,12,3,24,15,6,27
+DB 86,86,69,86,138,19,172,220
+DB 86,86,69,86,138,19,172,220
+DB 68,68,13,68,26,73,136,94
+DB 68,68,13,68,26,73,136,94
+DB 127,127,225,127,223,158,254,160
+DB 127,127,225,127,223,158,254,160
+DB 169,169,158,169,33,55,79,136
+DB 169,169,158,169,33,55,79,136
+DB 42,42,168,42,77,130,84,103
+DB 42,42,168,42,77,130,84,103
+DB 187,187,214,187,177,109,107,10
+DB 187,187,214,187,177,109,107,10
+DB 193,193,35,193,70,226,159,135
+DB 193,193,35,193,70,226,159,135
+DB 83,83,81,83,162,2,166,241
+DB 83,83,81,83,162,2,166,241
+DB 220,220,87,220,174,139,165,114
+DB 220,220,87,220,174,139,165,114
+DB 11,11,44,11,88,39,22,83
+DB 11,11,44,11,88,39,22,83
+DB 157,157,78,157,156,211,39,1
+DB 157,157,78,157,156,211,39,1
+DB 108,108,173,108,71,193,216,43
+DB 108,108,173,108,71,193,216,43
+DB 49,49,196,49,149,245,98,164
+DB 49,49,196,49,149,245,98,164
+DB 116,116,205,116,135,185,232,243
+DB 116,116,205,116,135,185,232,243
+DB 246,246,255,246,227,9,241,21
+DB 246,246,255,246,227,9,241,21
+DB 70,70,5,70,10,67,140,76
+DB 70,70,5,70,10,67,140,76
+DB 172,172,138,172,9,38,69,165
+DB 172,172,138,172,9,38,69,165
+DB 137,137,30,137,60,151,15,181
+DB 137,137,30,137,60,151,15,181
+DB 20,20,80,20,160,68,40,180
+DB 20,20,80,20,160,68,40,180
+DB 225,225,163,225,91,66,223,186
+DB 225,225,163,225,91,66,223,186
+DB 22,22,88,22,176,78,44,166
+DB 22,22,88,22,176,78,44,166
+DB 58,58,232,58,205,210,116,247
+DB 58,58,232,58,205,210,116,247
+DB 105,105,185,105,111,208,210,6
+DB 105,105,185,105,111,208,210,6
+DB 9,9,36,9,72,45,18,65
+DB 9,9,36,9,72,45,18,65
+DB 112,112,221,112,167,173,224,215
+DB 112,112,221,112,167,173,224,215
+DB 182,182,226,182,217,84,113,111
+DB 182,182,226,182,217,84,113,111
+DB 208,208,103,208,206,183,189,30
+DB 208,208,103,208,206,183,189,30
+DB 237,237,147,237,59,126,199,214
+DB 237,237,147,237,59,126,199,214
+DB 204,204,23,204,46,219,133,226
+DB 204,204,23,204,46,219,133,226
+DB 66,66,21,66,42,87,132,104
+DB 66,66,21,66,42,87,132,104
+DB 152,152,90,152,180,194,45,44
+DB 152,152,90,152,180,194,45,44
+DB 164,164,170,164,73,14,85,237
+DB 164,164,170,164,73,14,85,237
+DB 40,40,160,40,93,136,80,117
+DB 40,40,160,40,93,136,80,117
+DB 92,92,109,92,218,49,184,134
+DB 92,92,109,92,218,49,184,134
+DB 248,248,199,248,147,63,237,107
+DB 248,248,199,248,147,63,237,107
+DB 134,134,34,134,68,164,17,194
+DB 134,134,34,134,68,164,17,194
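+; the final 80 bytes are the 10 Whirlpool round constants, 8 bytes
+; per round (these are not doubled)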
+DB 24,35,198,232,135,184,1,79
+DB 54,166,210,245,121,111,145,82
+DB 96,188,155,142,163,12,123,53
+DB 29,224,215,194,46,75,254,87
+DB 21,119,55,229,159,240,74,218
+DB 88,201,41,10,177,160,107,133
+DB 189,93,16,244,203,62,5,103
+DB 228,39,65,139,167,125,149,216
+DB 251,238,124,102,221,23,71,158
+DB 202,45,191,7,173,90,131,51
+_whirlpool_block_mmx ENDP
+.text$ ENDS
+END
diff --git a/deps/openssl/asm/x86-win32-masm/x86cpuid.asm b/deps/openssl/asm/x86-win32-masm/x86cpuid.asm
new file mode 100644
index 0000000000..7e663d6645
--- /dev/null
+++ b/deps/openssl/asm/x86-win32-masm/x86cpuid.asm
@@ -0,0 +1,277 @@
+TITLE x86cpuid.asm
+IF @Version LT 800
+ECHO MASM version 8.00 or later is strongly recommended.
+ENDIF
+.586
+.MODEL FLAT
+OPTION DOTNAME
+IF @Version LT 800
+.text$ SEGMENT PAGE 'CODE'
+ELSE
+.text$ SEGMENT ALIGN(64) 'CODE'
+ENDIF
+ALIGN 16
+_OPENSSL_ia32_cpuid PROC PUBLIC
+$L_OPENSSL_ia32_cpuid_begin::
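+; probe for CPUID support by toggling the ID flag in EFLAGS
+; (bit 21, mask 2097152 = 0x200000); if the bit cannot be
+; flipped, the CPU has no CPUID and we bail out with zero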
+ push ebp
+ push ebx
+ push esi
+ push edi
+ xor edx,edx
+ pushfd
+ pop eax
+ mov ecx,eax
+ xor eax,2097152
+ push eax
+ popfd
+ pushfd
+ pop eax
+ xor ecx,eax
+ bt ecx,21
+ jnc $L000done
+ xor eax,eax
+ cpuid
+ mov edi,eax
+ xor eax,eax
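+; vendor check: the literals below are the vendor string as
+; little-endian dwords -- ebx/edx/ecx = "Genu"/"ineI"/"ntel" for
+; Intel, then "Auth"/"enti"/"cAMD" for AMD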
+ cmp ebx,1970169159
+ setne al
+ mov ebp,eax
+ cmp edx,1231384169
+ setne al
+ or ebp,eax
+ cmp ecx,1818588270
+ setne al
+ or ebp,eax
+ jz $L001intel
+ cmp ebx,1752462657
+ setne al
+ mov esi,eax
+ cmp edx,1769238117
+ setne al
+ or esi,eax
+ cmp ecx,1145913699
+ setne al
+ or esi,eax
+ jnz $L001intel
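+; AuthenticAMD path: query extended leaf 0x80000008 (2147483656)
+; for the core count and clear the HTT bit (bit 28, mask
+; 4026531839 = 0xEFFFFFFF) when the logical-CPU count reported by
+; leaf 1 does not exceed the physical core count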
+ mov eax,2147483648
+ cpuid
+ cmp eax,2147483656
+ jb $L001intel
+ mov eax,2147483656
+ cpuid
+ movzx esi,cl
+ inc esi
+ mov eax,1
+ cpuid
+ bt edx,28
+ jnc $L000done
+ shr ebx,16
+ and ebx,255
+ cmp ebx,esi
+ ja $L000done
+ and edx,4026531839
+ jmp $L000done
+$L001intel:
+ cmp edi,4
+ mov edi,-1
+ jb $L002nocacheinfo
+ mov eax,4
+ mov ecx,0
+ cpuid
+ mov edi,eax
+ shr edi,14
+ and edi,4095
+$L002nocacheinfo:
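+; leaf 1: on Intel (ebp==0 after the vendor checks), family 0xF
+; (NetBurst/P4) gets reserved bit 20 (1048576) set, which OpenSSL
+; uses internally to select P4-friendly code paths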
+ mov eax,1
+ cpuid
+ cmp ebp,0
+ jne $L003notP4
+ and ah,15
+ cmp ah,15
+ jne $L003notP4
+ or edx,1048576
+$L003notP4:
+ bt edx,28
+ jnc $L000done
+ and edx,4026531839
+ cmp edi,0
+ je $L000done
+ or edx,268435456
+ shr ebx,16
+ cmp bl,1
+ ja $L000done
+ and edx,4026531839
+$L000done:
+ mov eax,edx
+ mov edx,ecx
+ pop edi
+ pop esi
+ pop ebx
+ pop ebp
+ ret
+_OPENSSL_ia32_cpuid ENDP
+;EXTERN _OPENSSL_ia32cap_P:NEAR
+ALIGN 16
+_OPENSSL_rdtsc PROC PUBLIC
+$L_OPENSSL_rdtsc_begin::
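+; return the time-stamp counter in edx:eax if bit 4 (TSC) of
+; _OPENSSL_ia32cap_P is set, otherwise return 0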
+ xor eax,eax
+ xor edx,edx
+ lea ecx,DWORD PTR _OPENSSL_ia32cap_P
+ bt DWORD PTR [ecx],4
+ jnc $L004notsc
+ rdtsc
+$L004notsc:
+ ret
+_OPENSSL_rdtsc ENDP
+ALIGN 16
+_OPENSSL_instrument_halt PROC PUBLIC
+$L_OPENSSL_instrument_halt_begin::
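+; measure how many TSC ticks a 'hlt' takes; bails out unless the TSC
+; exists, the code runs in ring 0 (the DD below hand-encodes
+; 'push cs / pop eax' so the privilege level can be read from the
+; selector's low two bits) and interrupts are enabled (EFLAGS bit 9),
+; since 'hlt' would otherwise fault or never wake up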
+ lea ecx,DWORD PTR _OPENSSL_ia32cap_P
+ bt DWORD PTR [ecx],4
+ jnc $L005nohalt
+DD 2421723150
+ and eax,3
+ jnz $L005nohalt
+ pushfd
+ pop eax
+ bt eax,9
+ jnc $L005nohalt
+ rdtsc
+ push edx
+ push eax
+ hlt
+ rdtsc
+ sub eax,DWORD PTR [esp]
+ sbb edx,DWORD PTR 4[esp]
+ add esp,8
+ ret
+$L005nohalt:
+ xor eax,eax
+ xor edx,edx
+ ret
+_OPENSSL_instrument_halt ENDP
+ALIGN 16
+_OPENSSL_far_spin PROC PUBLIC
+$L_OPENSSL_far_spin_begin::
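+; spin while the dword at [ecx] keeps its value, returning the
+; iteration count in eax; the DD words appear to hand-encode loading
+; the segment selector passed as the first argument into ds and
+; restoring it on the way out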
+ pushfd
+ pop eax
+ bt eax,9
+ jnc $L006nospin
+ mov eax,DWORD PTR 4[esp]
+ mov ecx,DWORD PTR 8[esp]
+DD 2430111262
+ xor eax,eax
+ mov edx,DWORD PTR [ecx]
+ jmp $L007spin
+ALIGN 16
+$L007spin:
+ inc eax
+ cmp edx,DWORD PTR [ecx]
+ je $L007spin
+DD 529567888
+ ret
+$L006nospin:
+ xor eax,eax
+ xor edx,edx
+ ret
+_OPENSSL_far_spin ENDP
+ALIGN 16
+_OPENSSL_wipe_cpu PROC PUBLIC
+$L_OPENSSL_wipe_cpu_begin::
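+; scrub CPU state between calls: the DD sequence below hand-encodes
+; eight 'fldz' pushes followed by 'fwait; fninit', zeroing the
+; x87/MMX register file when an FPU is present; returns the caller's
+; stack pointer in eax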
+ xor eax,eax
+ xor edx,edx
+ lea ecx,DWORD PTR _OPENSSL_ia32cap_P
+ mov ecx,DWORD PTR [ecx]
+ bt DWORD PTR [ecx],1
+ jnc $L008no_x87
+DD 4007259865,4007259865,4007259865,4007259865,2430851995
+$L008no_x87:
+ lea eax,DWORD PTR 4[esp]
+ ret
+_OPENSSL_wipe_cpu ENDP
+ALIGN 16
+_OPENSSL_atomic_add PROC PUBLIC
+$L_OPENSSL_atomic_add_begin::
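+; atomically add ecx to the dword at [edx] and return the new value;
+; the DD below hand-encodes 'lock cmpxchg DWORD PTR [edx],ebx'
+; (bytes F0 0F B1 1A), retrying until the compare-exchange succeeds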
+ mov edx,DWORD PTR 4[esp]
+ mov ecx,DWORD PTR 8[esp]
+ push ebx
+ nop
+ mov eax,DWORD PTR [edx]
+$L009spin:
+ lea ebx,DWORD PTR [ecx*1+eax]
+ nop
+DD 447811568
+ jne $L009spin
+ mov eax,ebx
+ pop ebx
+ ret
+_OPENSSL_atomic_add ENDP
+ALIGN 16
+_OPENSSL_indirect_call PROC PUBLIC
+$L_OPENSSL_indirect_call_begin::
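+; call the function pointer passed as the first argument, forwarding
+; up to seven dword arguments via a freshly built stack frame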
+ push ebp
+ mov ebp,esp
+ sub esp,28
+ mov ecx,DWORD PTR 12[ebp]
+ mov DWORD PTR [esp],ecx
+ mov edx,DWORD PTR 16[ebp]
+ mov DWORD PTR 4[esp],edx
+ mov eax,DWORD PTR 20[ebp]
+ mov DWORD PTR 8[esp],eax
+ mov eax,DWORD PTR 24[ebp]
+ mov DWORD PTR 12[esp],eax
+ mov eax,DWORD PTR 28[ebp]
+ mov DWORD PTR 16[esp],eax
+ mov eax,DWORD PTR 32[ebp]
+ mov DWORD PTR 20[esp],eax
+ mov eax,DWORD PTR 36[ebp]
+ mov DWORD PTR 24[esp],eax
+ call DWORD PTR 8[ebp]
+ mov esp,ebp
+ pop ebp
+ ret
+_OPENSSL_indirect_call ENDP
+ALIGN 16
+_OPENSSL_cleanse PROC PUBLIC
+$L_OPENSSL_cleanse_begin::
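+; OPENSSL_cleanse(ptr=edx, len=ecx): zero memory byte-by-byte for
+; short or unaligned stretches and a dword at a time once edx is
+; 4-byte aligned -- kept in assembly so a compiler cannot optimize
+; the stores away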
+ mov edx,DWORD PTR 4[esp]
+ mov ecx,DWORD PTR 8[esp]
+ xor eax,eax
+ cmp ecx,7
+ jae $L010lot
+ cmp ecx,0
+ je $L011ret
+$L012little:
+ mov BYTE PTR [edx],al
+ sub ecx,1
+ lea edx,DWORD PTR 1[edx]
+ jnz $L012little
+$L011ret:
+ ret
+ALIGN 16
+$L010lot:
+ test edx,3
+ jz $L013aligned
+ mov BYTE PTR [edx],al
+ lea ecx,DWORD PTR [ecx-1]
+ lea edx,DWORD PTR 1[edx]
+ jmp $L010lot
+$L013aligned:
+ mov DWORD PTR [edx],eax
+ lea ecx,DWORD PTR [ecx-4]
+ test ecx,-4
+ lea edx,DWORD PTR 4[edx]
+ jnz $L013aligned
+ cmp ecx,0
+ jne $L012little
+ ret
+_OPENSSL_cleanse ENDP
+.text$ ENDS
+.bss SEGMENT 'BSS'
+COMM _OPENSSL_ia32cap_P:DWORD
+.bss ENDS
+.CRT$XCU SEGMENT DWORD PUBLIC 'DATA'
+EXTERN _OPENSSL_cpuid_setup:NEAR
+DD _OPENSSL_cpuid_setup
+.CRT$XCU ENDS
+END