Diffstat (limited to 'deps/openssl/asm')
-rw-r--r--  deps/openssl/asm/Makefile | 30
-rw-r--r--  deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s | 25
-rw-r--r--  deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s | 1311
-rw-r--r--  deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s | 734
-rw-r--r--  deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s | 3531
-rw-r--r--  deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s | 2144
-rw-r--r--  deps/openssl/asm/x64-elf-gas/x86_64cpuid.s | 70
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s | 17
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s | 1309
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s | 1
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s | 731
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s | 3529
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s | 2144
-rw-r--r--  deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s | 69
-rw-r--r--  deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm | 45
-rw-r--r--  deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm | 1428
-rw-r--r--  deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm | 4
-rw-r--r--  deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm | 280
-rw-r--r--  deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm | 744
-rw-r--r--  deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm | 3624
-rw-r--r--  deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm | 2248
-rw-r--r--  deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm | 204
-rw-r--r--  deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm | 72
-rw-r--r--  deps/openssl/asm/x86-elf-gas/aes/aes-586.s | 22
-rw-r--r--  deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s | 10
-rw-r--r--  deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s | 218
-rw-r--r--  deps/openssl/asm/x86-elf-gas/sha/sha1-586.s | 1262
-rw-r--r--  deps/openssl/asm/x86-elf-gas/sha/sha256-586.s | 65
-rw-r--r--  deps/openssl/asm/x86-elf-gas/x86cpuid.s | 135
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/aes/aes-586.s | 24
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s | 6
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/des/crypt586.s | 13
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s | 231
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s | 1262
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s | 65
-rw-r--r--  deps/openssl/asm/x86-macosx-gas/x86cpuid.s | 149
-rw-r--r--  deps/openssl/asm/x86-win32-masm/aes/aes-586.asm | 14
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bf/bf-686.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/bn/x86.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm | 8
-rw-r--r--  deps/openssl/asm/x86-win32-masm/cast/cast-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/des/crypt586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/des/des-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/md5/md5-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm | 225
-rw-r--r--  deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm | 1264
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm | 67
-rw-r--r--  deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm | 2
-rw-r--r--  deps/openssl/asm/x86-win32-masm/x86cpuid.asm | 132
52 files changed, 10738 insertions(+), 18748 deletions(-)
diff --git a/deps/openssl/asm/Makefile b/deps/openssl/asm/Makefile
index 9f54785b44..383d5f64c0 100644
--- a/deps/openssl/asm/Makefile
+++ b/deps/openssl/asm/Makefile
@@ -3,7 +3,6 @@ PERL += -I../openssl/crypto/perlasm -I../openssl/crypto/bn/asm
OUTPUTS = \
x86-elf-gas/aes/aes-586.s \
- x86-elf-gas/aes/aesni-x86.s \
x86-elf-gas/bf/bf-686.s \
x86-elf-gas/bn/x86-mont.s \
x86-elf-gas/bn/x86.s \
@@ -21,20 +20,15 @@ OUTPUTS = \
x86-elf-gas/whrlpool/wp-mmx.s \
x86-elf-gas/x86cpuid.s \
x64-elf-gas/aes/aes-x86_64.s \
- x64-elf-gas/aes/aesni-x86_64.s \
- x64-elf-gas/aes/aesni-sha1-x86_64.s \
- x64-elf-gas/bn/modexp512-x86_64.s \
x64-elf-gas/bn/x86_64-mont.s \
x64-elf-gas/camellia/cmll-x86_64.s \
x64-elf-gas/md5/md5-x86_64.s \
x64-elf-gas/rc4/rc4-x86_64.s \
- x64-elf-gas/rc4/rc4-md5-x86_64.s \
x64-elf-gas/sha/sha1-x86_64.s \
x64-elf-gas/sha/sha512-x86_64.s \
x64-elf-gas/whrlpool/wp-x86_64.s \
x64-elf-gas/x86_64cpuid.s \
x86-macosx-gas/aes/aes-586.s \
- x86-macosx-gas/aes/aesni-x86.s \
x86-macosx-gas/bf/bf-686.s \
x86-macosx-gas/bn/x86-mont.s \
x86-macosx-gas/bn/x86.s \
@@ -52,20 +46,15 @@ OUTPUTS = \
x86-macosx-gas/whrlpool/wp-mmx.s \
x86-macosx-gas/x86cpuid.s \
x64-macosx-gas/aes/aes-x86_64.s \
- x64-macosx-gas/aes/aesni-x86_64.s \
- x64-macosx-gas/aes/aesni-sha1-x86_64.s \
- x64-macosx-gas/bn/modexp512-x86_64.s \
x64-macosx-gas/bn/x86_64-mont.s \
x64-macosx-gas/camellia/cmll-x86_64.s \
x64-macosx-gas/md5/md5-x86_64.s \
x64-macosx-gas/rc4/rc4-x86_64.s \
- x64-macosx-gas/rc4/rc4-md5-x86_64.s \
x64-macosx-gas/sha/sha1-x86_64.s \
x64-macosx-gas/sha/sha512-x86_64.s \
x64-macosx-gas/whrlpool/wp-x86_64.s \
x64-macosx-gas/x86_64cpuid.s \
x86-win32-masm/aes/aes-586.asm \
- x86-win32-masm/aes/aesni-x86.asm \
x86-win32-masm/bf/bf-686.asm \
x86-win32-masm/bn/x86-mont.asm \
x86-win32-masm/bn/x86.asm \
@@ -83,14 +72,10 @@ OUTPUTS = \
x86-win32-masm/whrlpool/wp-mmx.asm \
x86-win32-masm/x86cpuid.asm \
x64-win32-masm/aes/aes-x86_64.asm \
- x64-win32-masm/aes/aesni-x86_64.asm \
- x64-win32-masm/aes/aesni-sha1-x86_64.asm \
- x64-win32-masm/bn/modexp512-x86_64.asm \
x64-win32-masm/bn/x86_64-mont.asm \
x64-win32-masm/camellia/cmll-x86_64.asm \
x64-win32-masm/md5/md5-x86_64.asm \
x64-win32-masm/rc4/rc4-x86_64.asm \
- x64-win32-masm/rc4/rc4-md5-x86_64.asm \
x64-win32-masm/sha/sha1-x86_64.asm \
x64-win32-masm/sha/sha512-x86_64.asm \
x64-win32-masm/whrlpool/wp-x86_64.asm \
@@ -118,46 +103,33 @@ clean:
find . -iname '*.s' -exec rm "{}" \;
x64-elf-gas/aes/aes-x86_64.s: ../openssl/crypto/aes/asm/aes-x86_64.pl
-x64-elf-gas/aes/aesni-x86_64.s: ../openssl/crypto/aes/asm/aesni-x86_64.pl
-x64-elf-gas/aes/aesni-sha1-x86_64.s: ../openssl/crypto/aes/asm/aesni-sha1-x86_64.pl
-x64-elf-gas/bn/modexp512-x86_64.s: ../openssl/crypto/bn/asm/modexp512-x86_64.pl
x64-elf-gas/bn/x86_64-mont.s: ../openssl/crypto/bn/asm/x86_64-mont.pl
x64-elf-gas/camellia/cmll-x86_64.s: ../openssl/crypto/camellia/asm/cmll-x86_64.pl
x64-elf-gas/md5/md5-x86_64.s: ../openssl/crypto/md5/asm/md5-x86_64.pl
x64-elf-gas/rc4/rc4-x86_64.s: ../openssl/crypto/rc4/asm/rc4-x86_64.pl
-x64-elf-gas/rc4/rc4-md5-x86_64.s: ../openssl/crypto/rc4/asm/rc4-md5-x86_64.pl
x64-elf-gas/sha/sha1-x86_64.s: ../openssl/crypto/sha/asm/sha1-x86_64.pl
x64-elf-gas/sha/sha512-x86_64.s: ../openssl/crypto/sha/asm/sha512-x86_64.pl
x64-elf-gas/whrlpool/wp-x86_64.s: ../openssl/crypto/whrlpool/asm/wp-x86_64.pl
x64-elf-gas/x86_64cpuid.s: ../openssl/crypto/x86_64cpuid.pl
x64-macosx-gas/aes/aes-x86_64.s: ../openssl/crypto/aes/asm/aes-x86_64.pl
-x64-macosx-gas/aes/aesni-x86_64.s: ../openssl/crypto/aes/asm/aesni-x86_64.pl
-x64-macosx-gas/aes/aesni-sha1-x86_64.s: ../openssl/crypto/aes/asm/aesni-sha1-x86_64.pl
-x64-macosx-gas/bn/modexp512-x86_64.s: ../openssl/crypto/bn/asm/modexp512-x86_64.pl
x64-macosx-gas/bn/x86_64-mont.s: ../openssl/crypto/bn/asm/x86_64-mont.pl
x64-macosx-gas/camellia/cmll-x86_64.s: ../openssl/crypto/camellia/asm/cmll-x86_64.pl
x64-macosx-gas/md5/md5-x86_64.s: ../openssl/crypto/md5/asm/md5-x86_64.pl
x64-macosx-gas/rc4/rc4-x86_64.s: ../openssl/crypto/rc4/asm/rc4-x86_64.pl
-x64-macosx-gas/rc4/rc4-md5-x86_64.s: ../openssl/crypto/rc4/asm/rc4-md5-x86_64.pl
x64-macosx-gas/sha/sha1-x86_64.s: ../openssl/crypto/sha/asm/sha1-x86_64.pl
x64-macosx-gas/sha/sha512-x86_64.s: ../openssl/crypto/sha/asm/sha512-x86_64.pl
x64-macosx-gas/whrlpool/wp-x86_64.s: ../openssl/crypto/whrlpool/asm/wp-x86_64.pl
x64-macosx-gas/x86_64cpuid.s: ../openssl/crypto/x86_64cpuid.pl
x64-win32-masm/aes/aes-x86_64.asm: ../openssl/crypto/aes/asm/aes-x86_64.pl
-x64-win32-masm/aes/aesni-x86_64.asm: ../openssl/crypto/aes/asm/aesni-x86_64.pl
-x64-win32-masm/aes/aesni-sha1-x86_64.asm: ../openssl/crypto/aes/asm/aesni-sha1-x86_64.pl
-x64-win32-masm/bn/modexp512-x86_64.asm: ../openssl/crypto/bn/asm/modexp512-x86_64.pl
x64-win32-masm/bn/x86_64-mont.asm: ../openssl/crypto/bn/asm/x86_64-mont.pl
x64-win32-masm/camellia/cmll-x86_64.asm: ../openssl/crypto/camellia/asm/cmll-x86_64.pl
x64-win32-masm/md5/md5-x86_64.asm: ../openssl/crypto/md5/asm/md5-x86_64.pl
x64-win32-masm/rc4/rc4-x86_64.asm: ../openssl/crypto/rc4/asm/rc4-x86_64.pl
-x64-win32-masm/rc4/rc4-md5-x86_64.asm: ../openssl/crypto/rc4/asm/rc4-md5-x86_64.pl
x64-win32-masm/sha/sha1-x86_64.asm: ../openssl/crypto/sha/asm/sha1-x86_64.pl
x64-win32-masm/sha/sha512-x86_64.asm: ../openssl/crypto/sha/asm/sha512-x86_64.pl
x64-win32-masm/whrlpool/wp-x86_64.asm: ../openssl/crypto/whrlpool/asm/wp-x86_64.pl
x64-win32-masm/x86_64cpuid.asm: ../openssl/crypto/x86_64cpuid.pl
x86-elf-gas/aes/aes-586.s: ../openssl/crypto/aes/asm/aes-586.pl
-x86-elf-gas/aes/aesni-x86.s: ../openssl/crypto/aes/asm/aesni-x86.pl
x86-elf-gas/bf/bf-686.s: ../openssl/crypto/bf/asm/bf-686.pl
x86-elf-gas/bn/x86-mont.s: ../openssl/crypto/bn/asm/x86-mont.pl
x86-elf-gas/bn/x86.s: ../openssl/crypto/bn/asm/x86.pl
@@ -175,7 +147,6 @@ x86-elf-gas/sha/sha512-586.s: ../openssl/crypto/sha/asm/sha512-586.pl
x86-elf-gas/whrlpool/wp-mmx.s: ../openssl/crypto/whrlpool/asm/wp-mmx.pl
x86-elf-gas/x86cpuid.s: ../openssl/crypto/x86cpuid.pl
x86-macosx-gas/aes/aes-586.s: ../openssl/crypto/aes/asm/aes-586.pl
-x86-macosx-gas/aes/aesni-x86.s: ../openssl/crypto/aes/asm/aesni-x86.pl
x86-macosx-gas/bf/bf-686.s: ../openssl/crypto/bf/asm/bf-686.pl
x86-macosx-gas/bn/x86-mont.s: ../openssl/crypto/bn/asm/x86-mont.pl
x86-macosx-gas/bn/x86.s: ../openssl/crypto/bn/asm/x86.pl
@@ -193,7 +164,6 @@ x86-macosx-gas/sha/sha512-586.s: ../openssl/crypto/sha/asm/sha512-586.pl
x86-macosx-gas/whrlpool/wp-mmx.s: ../openssl/crypto/whrlpool/asm/wp-mmx.pl
x86-macosx-gas/x86cpuid.s: ../openssl/crypto/x86cpuid.pl
x86-win32-masm/aes/aes-586.asm: ../openssl/crypto/aes/asm/aes-586.pl
-x86-win32-masm/aes/aesni-x86.asm: ../openssl/crypto/aes/asm/aesni-x86.pl
x86-win32-masm/bf/bf-686.asm: ../openssl/crypto/bf/asm/bf-686.pl
x86-win32-masm/bn/x86.asm: ../openssl/crypto/bn/asm/x86.pl
x86-win32-masm/bn/x86-mont.asm: ../openssl/crypto/bn/asm/x86-mont.pl
diff --git a/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s b/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
index e7c261fe43..d7feffbfa5 100644
--- a/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
+++ b/deps/openssl/asm/x64-elf-gas/aes/aes-x86_64.s
@@ -333,9 +333,6 @@ _x86_64_AES_encrypt_compact:
.globl AES_encrypt
.type AES_encrypt,@function
.align 16
-.globl asm_AES_encrypt
-.hidden asm_AES_encrypt
-asm_AES_encrypt:
AES_encrypt:
pushq %rbx
pushq %rbp
@@ -783,9 +780,6 @@ _x86_64_AES_decrypt_compact:
.globl AES_decrypt
.type AES_decrypt,@function
.align 16
-.globl asm_AES_decrypt
-.hidden asm_AES_decrypt
-asm_AES_decrypt:
AES_decrypt:
pushq %rbx
pushq %rbp
@@ -849,10 +843,10 @@ AES_decrypt:
.Ldec_epilogue:
.byte 0xf3,0xc3
.size AES_decrypt,.-AES_decrypt
-.globl private_AES_set_encrypt_key
-.type private_AES_set_encrypt_key,@function
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
.align 16
-private_AES_set_encrypt_key:
+AES_set_encrypt_key:
pushq %rbx
pushq %rbp
pushq %r12
@@ -873,7 +867,7 @@ private_AES_set_encrypt_key:
addq $56,%rsp
.Lenc_key_epilogue:
.byte 0xf3,0xc3
-.size private_AES_set_encrypt_key,.-private_AES_set_encrypt_key
+.size AES_set_encrypt_key,.-AES_set_encrypt_key
.type _x86_64_AES_set_encrypt_key,@function
.align 16
@@ -1115,10 +1109,10 @@ _x86_64_AES_set_encrypt_key:
.byte 0xf3,0xc3
.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
-.globl private_AES_set_decrypt_key
-.type private_AES_set_decrypt_key,@function
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
.align 16
-private_AES_set_decrypt_key:
+AES_set_decrypt_key:
pushq %rbx
pushq %rbp
pushq %r12
@@ -1301,14 +1295,11 @@ private_AES_set_decrypt_key:
addq $56,%rsp
.Ldec_key_epilogue:
.byte 0xf3,0xc3
-.size private_AES_set_decrypt_key,.-private_AES_set_decrypt_key
+.size AES_set_decrypt_key,.-AES_set_decrypt_key
.globl AES_cbc_encrypt
.type AES_cbc_encrypt,@function
.align 16
-.globl asm_AES_cbc_encrypt
-.hidden asm_AES_cbc_encrypt
-asm_AES_cbc_encrypt:
AES_cbc_encrypt:
cmpq $0,%rdx
je .Lcbc_epilogue
diff --git a/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s b/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
index ea12bd408c..2dbcffc59d 100644
--- a/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
+++ b/deps/openssl/asm/x64-elf-gas/bn/x86_64-mont.s
@@ -5,16 +5,6 @@
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
- testl $3,%r9d
- jnz .Lmul_enter
- cmpl $8,%r9d
- jb .Lmul_enter
- cmpq %rsi,%rdx
- jne .Lmul4x_enter
- jmp .Lsqr4x_enter
-
-.align 16
-.Lmul_enter:
pushq %rbx
pushq %rbp
pushq %r12
@@ -30,63 +20,48 @@ bn_mul_mont:
andq $-1024,%rsp
movq %r11,8(%rsp,%r9,8)
-.Lmul_body:
+.Lprologue:
movq %rdx,%r12
+
movq (%r8),%r8
- movq (%r12),%rbx
- movq (%rsi),%rax
xorq %r14,%r14
xorq %r15,%r15
- movq %r8,%rbp
+ movq (%r12),%rbx
+ movq (%rsi),%rax
mulq %rbx
movq %rax,%r10
- movq (%rcx),%rax
-
- imulq %r10,%rbp
movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx)
+ addq %r10,%rax
adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
- jmp .L1st_enter
-
-.align 16
.L1st:
- addq %rax,%r13
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%r13
- movq %r10,%r11
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
-.L1st_enter:
mulq %rbx
- addq %rax,%r11
- movq (%rcx,%r15,8),%rax
+ addq %r11,%rax
adcq $0,%rdx
- leaq 1(%r15),%r15
- movq %rdx,%r10
+ movq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ movq %rdx,%r11
mulq %rbp
- cmpq %r9,%r15
- jne .L1st
-
- addq %rax,%r13
- movq (%rsi),%rax
+ addq %r13,%rax
+ leaq 1(%r15),%r15
adcq $0,%rdx
- addq %r11,%r13
+ addq %r10,%rax
adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
+ movq %rax,-16(%rsp,%r15,8)
+ cmpq %r9,%r15
movq %rdx,%r13
- movq %r10,%r11
+ jl .L1st
xorq %rdx,%rdx
addq %r11,%r13
@@ -95,64 +70,50 @@ bn_mul_mont:
movq %rdx,(%rsp,%r9,8)
leaq 1(%r14),%r14
- jmp .Louter
-.align 16
+.align 4
.Louter:
- movq (%r12,%r14,8),%rbx
xorq %r15,%r15
- movq %r8,%rbp
- movq (%rsp),%r10
+
+ movq (%r12,%r14,8),%rbx
+ movq (%rsi),%rax
mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
+ addq (%rsp),%rax
adcq $0,%rdx
-
- imulq %r10,%rbp
+ movq %rax,%r10
movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx,%r15,8)
+ addq %r10,%rax
movq 8(%rsp),%r10
+ adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
- jmp .Linner_enter
-
-.align 16
+.align 4
.Linner:
- addq %rax,%r13
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
-.Linner_enter:
mulq %rbx
- addq %rax,%r11
+ addq %r11,%rax
+ adcq $0,%rdx
+ addq %rax,%r10
movq (%rcx,%r15,8),%rax
adcq $0,%rdx
- addq %r11,%r10
movq %rdx,%r11
- adcq $0,%r11
- leaq 1(%r15),%r15
mulq %rbp
- cmpq %r9,%r15
- jne .Linner
-
- addq %rax,%r13
- movq (%rsi),%rax
+ addq %r13,%rax
+ leaq 1(%r15),%r15
adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
+ addq %r10,%rax
adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
+ movq (%rsp,%r15,8),%r10
+ cmpq %r9,%r15
+ movq %rax,-16(%rsp,%r15,8)
movq %rdx,%r13
+ jl .Linner
xorq %rdx,%rdx
addq %r11,%r13
@@ -166,434 +127,35 @@ bn_mul_mont:
cmpq %r9,%r14
jl .Louter
- xorq %r14,%r14
- movq (%rsp),%rax
leaq (%rsp),%rsi
- movq %r9,%r15
+ leaq -1(%r9),%r15
+
+ movq (%rsi),%rax
+ xorq %r14,%r14
jmp .Lsub
.align 16
.Lsub: sbbq (%rcx,%r14,8),%rax
movq %rax,(%rdi,%r14,8)
+ decq %r15
movq 8(%rsi,%r14,8),%rax
leaq 1(%r14),%r14
- decq %r15
- jnz .Lsub
+ jge .Lsub
sbbq $0,%rax
- xorq %r14,%r14
andq %rax,%rsi
notq %rax
movq %rdi,%rcx
andq %rax,%rcx
- movq %r9,%r15
+ leaq -1(%r9),%r15
orq %rcx,%rsi
.align 16
.Lcopy:
- movq (%rsi,%r14,8),%rax
- movq %r14,(%rsp,%r14,8)
- movq %rax,(%rdi,%r14,8)
- leaq 1(%r14),%r14
- subq $1,%r15
- jnz .Lcopy
-
- movq 8(%rsp,%r9,8),%rsi
- movq $1,%rax
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
-.Lmul_epilogue:
- .byte 0xf3,0xc3
-.size bn_mul_mont,.-bn_mul_mont
-.type bn_mul4x_mont,@function
-.align 16
-bn_mul4x_mont:
-.Lmul4x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
-
- movl %r9d,%r9d
- leaq 4(%r9),%r10
- movq %rsp,%r11
- negq %r10
- leaq (%rsp,%r10,8),%rsp
- andq $-1024,%rsp
-
- movq %r11,8(%rsp,%r9,8)
-.Lmul4x_body:
- movq %rdi,16(%rsp,%r9,8)
- movq %rdx,%r12
- movq (%r8),%r8
- movq (%r12),%rbx
- movq (%rsi),%rax
-
- xorq %r14,%r14
- xorq %r15,%r15
-
- movq %r8,%rbp
- mulq %rbx
- movq %rax,%r10
- movq (%rcx),%rax
-
- imulq %r10,%rbp
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp .L1st4x
-.align 16
-.L1st4x:
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jl .L1st4x
-
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
-
- leaq 1(%r14),%r14
-.align 4
-.Louter4x:
- movq (%r12,%r14,8),%rbx
- xorq %r15,%r15
- movq (%rsp),%r10
- movq %r8,%rbp
- mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
- adcq $0,%rdx
-
- imulq %r10,%rbp
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- addq 8(%rsp),%r11
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp .Linner4x
-.align 16
-.Linner4x:
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq 8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jl .Linner4x
-
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 1(%r14),%r14
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- addq (%rsp,%r9,8),%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
-
- cmpq %r9,%r14
- jl .Louter4x
- movq 16(%rsp,%r9,8),%rdi
- movq 0(%rsp),%rax
- pxor %xmm0,%xmm0
- movq 8(%rsp),%rdx
- shrq $2,%r9
- leaq (%rsp),%rsi
- xorq %r14,%r14
-
- subq 0(%rcx),%rax
- movq 16(%rsi),%rbx
- movq 24(%rsi),%rbp
- sbbq 8(%rcx),%rdx
- leaq -1(%r9),%r15
- jmp .Lsub4x
-.align 16
-.Lsub4x:
- movq %rax,0(%rdi,%r14,8)
- movq %rdx,8(%rdi,%r14,8)
- sbbq 16(%rcx,%r14,8),%rbx
- movq 32(%rsi,%r14,8),%rax
- movq 40(%rsi,%r14,8),%rdx
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
- movq %rbp,24(%rdi,%r14,8)
- sbbq 32(%rcx,%r14,8),%rax
- movq 48(%rsi,%r14,8),%rbx
- movq 56(%rsi,%r14,8),%rbp
- sbbq 40(%rcx,%r14,8),%rdx
- leaq 4(%r14),%r14
- decq %r15
- jnz .Lsub4x
-
- movq %rax,0(%rdi,%r14,8)
- movq 32(%rsi,%r14,8),%rax
- sbbq 16(%rcx,%r14,8),%rbx
- movq %rdx,8(%rdi,%r14,8)
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
-
- sbbq $0,%rax
- movq %rbp,24(%rdi,%r14,8)
- xorq %r14,%r14
- andq %rax,%rsi
- notq %rax
- movq %rdi,%rcx
- andq %rax,%rcx
- leaq -1(%r9),%r15
- orq %rcx,%rsi
-
- movdqu (%rsi),%xmm1
- movdqa %xmm0,(%rsp)
- movdqu %xmm1,(%rdi)
- jmp .Lcopy4x
-.align 16
-.Lcopy4x:
- movdqu 16(%rsi,%r14,1),%xmm2
- movdqu 32(%rsi,%r14,1),%xmm1
- movdqa %xmm0,16(%rsp,%r14,1)
- movdqu %xmm2,16(%rdi,%r14,1)
- movdqa %xmm0,32(%rsp,%r14,1)
- movdqu %xmm1,32(%rdi,%r14,1)
- leaq 32(%r14),%r14
+ movq %rax,(%rdi,%r15,8)
+ movq %r14,(%rsp,%r15,8)
decq %r15
- jnz .Lcopy4x
+ jge .Lcopy
- shlq $2,%r9
- movdqu 16(%rsi,%r14,1),%xmm2
- movdqa %xmm0,16(%rsp,%r14,1)
- movdqu %xmm2,16(%rdi,%r14,1)
movq 8(%rsp,%r9,8),%rsi
movq $1,%rax
movq (%rsi),%r15
@@ -603,773 +165,8 @@ bn_mul4x_mont:
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp
-.Lmul4x_epilogue:
- .byte 0xf3,0xc3
-.size bn_mul4x_mont,.-bn_mul4x_mont
-.type bn_sqr4x_mont,@function
-.align 16
-bn_sqr4x_mont:
-.Lsqr4x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
-
- shll $3,%r9d
- xorq %r10,%r10
- movq %rsp,%r11
- subq %r9,%r10
- movq (%r8),%r8
- leaq -72(%rsp,%r10,2),%rsp
- andq $-1024,%rsp
-
-
-
-
-
-
-
-
-
-
-
- movq %rdi,32(%rsp)
- movq %rcx,40(%rsp)
- movq %r8,48(%rsp)
- movq %r11,56(%rsp)
-.Lsqr4x_body:
-
-
-
-
-
-
-
- leaq 32(%r10),%rbp
- leaq (%rsi,%r9,1),%rsi
-
- movq %r9,%rcx
-
-
- movq -32(%rsi,%rbp,1),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi,%rbp,1),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi,%rbp,1),%rbx
- movq %rax,%r15
-
- mulq %r14
- movq %rax,%r10
- movq %rbx,%rax
- movq %rdx,%r11
- movq %r10,-24(%rdi,%rbp,1)
-
- xorq %r10,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi,%rbp,1)
-
- leaq -16(%rbp),%rcx
-
-
- movq 8(%rsi,%rcx,1),%rbx
- mulq %r15
- movq %rax,%r12
- movq %rbx,%rax
- movq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 16(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
- jmp .Lsqr4x_1st
-
-.align 16
-.Lsqr4x_1st:
- movq (%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,(%rdi,%rcx,1)
-
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,8(%rdi,%rcx,1)
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,16(%rdi,%rcx,1)
-
-
- movq 24(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 32(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
-
- cmpq $0,%rcx
- jne .Lsqr4x_1st
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- leaq 16(%rbp),%rbp
- movq %r12,8(%rdi)
- jmp .Lsqr4x_outer
-
-.align 16
-.Lsqr4x_outer:
- movq -32(%rsi,%rbp,1),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi,%rbp,1),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi,%rbp,1),%rbx
- movq %rax,%r15
-
- movq -24(%rdi,%rbp,1),%r10
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-24(%rdi,%rbp,1)
-
- xorq %r10,%r10
- addq -16(%rdi,%rbp,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi,%rbp,1)
-
- leaq -16(%rbp),%rcx
- xorq %r12,%r12
-
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq 8(%rdi,%rcx,1),%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,8(%rdi,%rcx,1)
-
- leaq 16(%rcx),%rcx
- jmp .Lsqr4x_inner
-
-.align 16
-.Lsqr4x_inner:
- movq (%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- addq (%rdi,%rcx,1),%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,(%rdi,%rcx,1)
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq 8(%rdi,%rcx,1),%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 16(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
-
- cmpq $0,%rcx
- jne .Lsqr4x_inner
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- movq %r12,8(%rdi)
-
- addq $16,%rbp
- jnz .Lsqr4x_outer
-
-
- movq -32(%rsi),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi),%rbx
- movq %rax,%r15
-
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-24(%rdi)
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi)
-
- movq -8(%rsi),%rbx
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq $0,%rdx
-
- xorq %r11,%r11
- addq %r12,%r10
- movq %rdx,%r13
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi)
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq -16(%rsi),%rax
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- movq %r12,8(%rdi)
-
- mulq %rbx
- addq $16,%rbp
- xorq %r14,%r14
- subq %r9,%rbp
- xorq %r15,%r15
-
- addq %r12,%rax
- adcq $0,%rdx
- movq %rax,8(%rdi)
- movq %rdx,16(%rdi)
- movq %r15,24(%rdi)
-
- movq -16(%rsi,%rbp,1),%rax
- leaq 64(%rsp,%r9,2),%rdi
- xorq %r10,%r10
- movq -24(%rdi,%rbp,2),%r11
-
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq -8(%rsi,%rbp,1),%rax
- movq %r12,-32(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 0(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 8(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 0(%rsi,%rbp,1),%rax
- movq %rbx,-16(%rdi,%rbp,2)
- adcq %rdx,%r8
- leaq 16(%rbp),%rbp
- movq %r8,-40(%rdi,%rbp,2)
- sbbq %r15,%r15
- jmp .Lsqr4x_shift_n_add
-
-.align 16
-.Lsqr4x_shift_n_add:
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq -8(%rsi,%rbp,1),%rax
- movq %r12,-32(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 0(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 8(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 0(%rsi,%rbp,1),%rax
- movq %rbx,-16(%rdi,%rbp,2)
- adcq %rdx,%r8
-
- leaq (%r14,%r10,2),%r12
- movq %r8,-8(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq 16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 24(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq 8(%rsi,%rbp,1),%rax
- movq %r12,0(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,8(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 32(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 40(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 16(%rsi,%rbp,1),%rax
- movq %rbx,16(%rdi,%rbp,2)
- adcq %rdx,%r8
- movq %r8,24(%rdi,%rbp,2)
- sbbq %r15,%r15
- addq $32,%rbp
- jnz .Lsqr4x_shift_n_add
-
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi),%r11
- adcq %rax,%r12
- movq -8(%rsi),%rax
- movq %r12,-32(%rdi)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- mulq %rax
- negq %r15
- adcq %rax,%rbx
- adcq %rdx,%r8
- movq %rbx,-16(%rdi)
- movq %r8,-8(%rdi)
- movq 40(%rsp),%rsi
- movq 48(%rsp),%r8
- xorq %rcx,%rcx
- movq %r9,0(%rsp)
- subq %r9,%rcx
- movq 64(%rsp),%r10
- movq %r8,%r14
- leaq 64(%rsp,%r9,2),%rax
- leaq 64(%rsp,%r9,1),%rdi
- movq %rax,8(%rsp)
- leaq (%rsi,%r9,1),%rsi
- xorq %rbp,%rbp
-
- movq 0(%rsi,%rcx,1),%rax
- movq 8(%rsi,%rcx,1),%r9
- imulq %r10,%r14
- movq %rax,%rbx
- jmp .Lsqr4x_mont_outer
-
-.align 16
-.Lsqr4x_mont_outer:
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
- movq %r8,%r15
-
- xorq %r10,%r10
- addq 8(%rdi,%rcx,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
-
- imulq %r11,%r15
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq 16(%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 24(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,16(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 24(%rdi,%rcx,1),%r11
- leaq 32(%rcx),%rcx
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- jmp .Lsqr4x_mont_inner
-
-.align 16
-.Lsqr4x_mont_inner:
- movq (%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,-8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq (%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 8(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 8(%rdi,%rcx,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
-
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq 16(%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 24(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,16(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 24(%rdi,%rcx,1),%r11
- leaq 32(%rcx),%rcx
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- cmpq $0,%rcx
- jne .Lsqr4x_mont_inner
-
- subq 0(%rsp),%rcx
- movq %r8,%r14
-
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %r9,%rax
- adcq %rdx,%r13
- movq %r12,-8(%rdi)
-
- xorq %r11,%r11
- addq (%rdi),%r10
- adcq $0,%r11
- movq 0(%rsi,%rcx,1),%rbx
- addq %rbp,%r10
- adcq $0,%r11
-
- imulq 16(%rdi,%rcx,1),%r14
- xorq %r12,%r12
- movq 8(%rsi,%rcx,1),%r9
- addq %r10,%r13
- movq 16(%rdi,%rcx,1),%r10
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
- movq %r13,(%rdi)
-
- xorq %rbp,%rbp
- addq 8(%rdi),%r12
- adcq %rbp,%rbp
- addq %r11,%r12
- leaq 16(%rdi),%rdi
- adcq $0,%rbp
- movq %r12,-8(%rdi)
- cmpq 8(%rsp),%rdi
- jb .Lsqr4x_mont_outer
-
- movq 0(%rsp),%r9
- movq %rbp,(%rdi)
- movq 64(%rsp,%r9,1),%rax
- leaq 64(%rsp,%r9,1),%rbx
- movq 40(%rsp),%rsi
- shrq $5,%r9
- movq 8(%rbx),%rdx
- xorq %rbp,%rbp
-
- movq 32(%rsp),%rdi
- subq 0(%rsi),%rax
- movq 16(%rbx),%r10
- movq 24(%rbx),%r11
- sbbq 8(%rsi),%rdx
- leaq -1(%r9),%rcx
- jmp .Lsqr4x_sub
-.align 16
-.Lsqr4x_sub:
- movq %rax,0(%rdi,%rbp,8)
- movq %rdx,8(%rdi,%rbp,8)
- sbbq 16(%rsi,%rbp,8),%r10
- movq 32(%rbx,%rbp,8),%rax
- movq 40(%rbx,%rbp,8),%rdx
- sbbq 24(%rsi,%rbp,8),%r11
- movq %r10,16(%rdi,%rbp,8)
- movq %r11,24(%rdi,%rbp,8)
- sbbq 32(%rsi,%rbp,8),%rax
- movq 48(%rbx,%rbp,8),%r10
- movq 56(%rbx,%rbp,8),%r11
- sbbq 40(%rsi,%rbp,8),%rdx
- leaq 4(%rbp),%rbp
- decq %rcx
- jnz .Lsqr4x_sub
-
- movq %rax,0(%rdi,%rbp,8)
- movq 32(%rbx,%rbp,8),%rax
- sbbq 16(%rsi,%rbp,8),%r10
- movq %rdx,8(%rdi,%rbp,8)
- sbbq 24(%rsi,%rbp,8),%r11
- movq %r10,16(%rdi,%rbp,8)
-
- sbbq $0,%rax
- movq %r11,24(%rdi,%rbp,8)
- xorq %rbp,%rbp
- andq %rax,%rbx
- notq %rax
- movq %rdi,%rsi
- andq %rax,%rsi
- leaq -1(%r9),%rcx
- orq %rsi,%rbx
-
- pxor %xmm0,%xmm0
- leaq 64(%rsp,%r9,8),%rsi
- movdqu (%rbx),%xmm1
- leaq (%rsi,%r9,8),%rsi
- movdqa %xmm0,64(%rsp)
- movdqa %xmm0,(%rsi)
- movdqu %xmm1,(%rdi)
- jmp .Lsqr4x_copy
-.align 16
-.Lsqr4x_copy:
- movdqu 16(%rbx,%rbp,1),%xmm2
- movdqu 32(%rbx,%rbp,1),%xmm1
- movdqa %xmm0,80(%rsp,%rbp,1)
- movdqa %xmm0,96(%rsp,%rbp,1)
- movdqa %xmm0,16(%rsi,%rbp,1)
- movdqa %xmm0,32(%rsi,%rbp,1)
- movdqu %xmm2,16(%rdi,%rbp,1)
- movdqu %xmm1,32(%rdi,%rbp,1)
- leaq 32(%rbp),%rbp
- decq %rcx
- jnz .Lsqr4x_copy
-
- movdqu 16(%rbx,%rbp,1),%xmm2
- movdqa %xmm0,80(%rsp,%rbp,1)
- movdqa %xmm0,16(%rsi,%rbp,1)
- movdqu %xmm2,16(%rdi,%rbp,1)
- movq 56(%rsp),%rsi
- movq $1,%rax
- movq 0(%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
-.Lsqr4x_epilogue:
+.Lepilogue:
.byte 0xf3,0xc3
-.size bn_sqr4x_mont,.-bn_sqr4x_mont
+.size bn_mul_mont,.-bn_mul_mont
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 16
diff --git a/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s b/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
index f2b8a8bc04..1bafefeb02 100644
--- a/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
+++ b/deps/openssl/asm/x64-elf-gas/rc4/rc4-x86_64.s
@@ -1,7 +1,6 @@
.text
-
.globl RC4
.type RC4,@function
.align 16
@@ -13,511 +12,316 @@ RC4: orq %rsi,%rsi
pushq %r12
pushq %r13
.Lprologue:
- movq %rsi,%r11
- movq %rdx,%r12
- movq %rcx,%r13
- xorq %r10,%r10
- xorq %rcx,%rcx
- leaq 8(%rdi),%rdi
- movb -8(%rdi),%r10b
- movb -4(%rdi),%cl
+ addq $8,%rdi
+ movl -8(%rdi),%r8d
+ movl -4(%rdi),%r12d
cmpl $-1,256(%rdi)
je .LRC4_CHAR
- movl OPENSSL_ia32cap_P(%rip),%r8d
- xorq %rbx,%rbx
- incb %r10b
- subq %r10,%rbx
- subq %r12,%r13
- movl (%rdi,%r10,4),%eax
- testq $-16,%r11
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ testq $-8,%rsi
jz .Lloop1
- btl $30,%r8d
- jc .Lintel
- andq $7,%rbx
- leaq 1(%r10),%rsi
- jz .Loop8
- subq %rbx,%r11
-.Loop8_warmup:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
- incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %rbx
- jnz .Loop8_warmup
-
- leaq 1(%r10),%rsi
- jmp .Loop8
+ jmp .Lloop8
.align 16
-.Loop8:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 0(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,0(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 4(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,4(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 8(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,8(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 12(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,12(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 16(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,16(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 20(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,20(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 24(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,24(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb $8,%sil
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl -4(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,28(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb $8,%r10b
- rorq $8,%r8
- subq $8,%r11
-
- xorq (%r12),%r8
- movq %r8,(%r13,%r12,1)
- leaq 8(%r12),%r12
-
- testq $-8,%r11
- jnz .Loop8
- cmpq $0,%r11
- jne .Lloop1
- jmp .Lexit
-
-.align 16
-.Lintel:
- testq $-32,%r11
- jz .Lloop1
- andq $15,%rbx
- jz .Loop16_is_hot
- subq %rbx,%r11
-.Loop16_warmup:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
+.Lloop8:
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %rbx
- jnz .Loop16_warmup
-
- movq %rcx,%rbx
- xorq %rcx,%rcx
- movb %bl,%cl
-
-.Loop16_is_hot:
- leaq (%rdi,%r10,4),%rsi
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- pxor %xmm0,%xmm0
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 4(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,0(%rsi)
- addb %bl,%cl
- pinsrw $0,(%rdi,%rax,4),%xmm0
- jmp .Loop16_enter
-.align 16
-.Loop16:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- pxor %xmm0,%xmm2
- psllq $8,%xmm1
- pxor %xmm0,%xmm0
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 4(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,0(%rsi)
- pxor %xmm1,%xmm2
- addb %bl,%cl
- pinsrw $0,(%rdi,%rax,4),%xmm0
- movdqu %xmm2,(%r13,%r12,1)
- leaq 16(%r12),%r12
-.Loop16_enter:
- movl (%rdi,%rcx,4),%edx
- pxor %xmm1,%xmm1
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 8(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,4(%rsi)
- addb %al,%cl
- pinsrw $0,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 12(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,8(%rsi)
- addb %bl,%cl
- pinsrw $1,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 16(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,12(%rsi)
- addb %al,%cl
- pinsrw $1,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 20(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,16(%rsi)
- addb %bl,%cl
- pinsrw $2,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 24(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,20(%rsi)
- addb %al,%cl
- pinsrw $2,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 28(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,24(%rsi)
- addb %bl,%cl
- pinsrw $3,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 32(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,28(%rsi)
- addb %al,%cl
- pinsrw $3,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 36(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,32(%rsi)
- addb %bl,%cl
- pinsrw $4,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 40(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,36(%rsi)
- addb %al,%cl
- pinsrw $4,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 44(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,40(%rsi)
- addb %bl,%cl
- pinsrw $5,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 48(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,44(%rsi)
- addb %al,%cl
- pinsrw $5,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 52(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,48(%rsi)
- addb %bl,%cl
- pinsrw $6,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 56(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,52(%rsi)
- addb %al,%cl
- pinsrw $6,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 60(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,56(%rsi)
- addb %bl,%cl
- pinsrw $7,(%rdi,%rax,4),%xmm0
- addb $16,%r10b
- movdqu (%r12),%xmm2
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movzbl %bl,%ebx
- movl %edx,60(%rsi)
- leaq (%rdi,%r10,4),%rsi
- pinsrw $7,(%rdi,%rbx,4),%xmm1
- movl (%rsi),%eax
- movq %rcx,%rbx
- xorq %rcx,%rcx
- subq $16,%r11
- movb %bl,%cl
- testq $-16,%r11
- jnz .Loop16
-
- psllq $8,%xmm1
- pxor %xmm0,%xmm2
- pxor %xmm1,%xmm2
- movdqu %xmm2,(%r13,%r12,1)
- leaq 16(%r12),%r12
-
- cmpq $0,%r11
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ rorq $8,%rax
+ subq $8,%rsi
+
+ xorq (%rdx),%rax
+ addq $8,%rdx
+ movq %rax,(%rcx)
+ addq $8,%rcx
+
+ testq $-8,%rsi
+ jnz .Lloop8
+ cmpq $0,%rsi
jne .Lloop1
jmp .Lexit
.align 16
.Lloop1:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
- incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %r11
+ addb %r9b,%r12b
+ movl (%rdi,%r12,4),%r13d
+ movl %r9d,(%rdi,%r12,4)
+ movl %r13d,(%rdi,%r8,4)
+ addb %r13b,%r9b
+ incb %r8b
+ movl (%rdi,%r9,4),%r13d
+ movl (%rdi,%r8,4),%r9d
+ xorb (%rdx),%r13b
+ incq %rdx
+ movb %r13b,(%rcx)
+ incq %rcx
+ decq %rsi
jnz .Lloop1
jmp .Lexit
.align 16
.LRC4_CHAR:
- addb $1,%r10b
- movzbl (%rdi,%r10,1),%eax
- testq $-8,%r11
+ addb $1,%r8b
+ movzbl (%rdi,%r8,1),%r9d
+ testq $-8,%rsi
jz .Lcloop1
+ cmpl $0,260(%rdi)
+ jnz .Lcloop1
jmp .Lcloop8
.align 16
.Lcloop8:
- movl (%r12),%r8d
- movl 4(%r12),%r9d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ movl (%rdx),%eax
+ movl 4(%rdx),%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne .Lcmov0
- movq %rax,%rbx
+ movq %r9,%r11
.Lcmov0:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne .Lcmov1
- movq %rbx,%rax
+ movq %r11,%r9
.Lcmov1:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne .Lcmov2
- movq %rax,%rbx
+ movq %r9,%r11
.Lcmov2:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne .Lcmov3
- movq %rbx,%rax
+ movq %r11,%r9
.Lcmov3:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne .Lcmov4
- movq %rax,%rbx
+ movq %r9,%r11
.Lcmov4:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne .Lcmov5
- movq %rbx,%rax
+ movq %r11,%r9
.Lcmov5:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne .Lcmov6
- movq %rax,%rbx
+ movq %r9,%r11
.Lcmov6:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne .Lcmov7
- movq %rbx,%rax
+ movq %r11,%r9
.Lcmov7:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- leaq -8(%r11),%r11
- movl %r8d,(%r13)
- leaq 8(%r12),%r12
- movl %r9d,4(%r13)
- leaq 8(%r13),%r13
-
- testq $-8,%r11
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ leaq -8(%rsi),%rsi
+ movl %eax,(%rcx)
+ leaq 8(%rdx),%rdx
+ movl %ebx,4(%rcx)
+ leaq 8(%rcx),%rcx
+
+ testq $-8,%rsi
jnz .Lcloop8
- cmpq $0,%r11
+ cmpq $0,%rsi
jne .Lcloop1
jmp .Lexit
.align 16
.Lcloop1:
- addb %al,%cl
- movzbl %cl,%ecx
- movzbl (%rdi,%rcx,1),%edx
- movb %al,(%rdi,%rcx,1)
- movb %dl,(%rdi,%r10,1)
- addb %al,%dl
- addb $1,%r10b
- movzbl %dl,%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%rdx,1),%edx
- movzbl (%rdi,%r10,1),%eax
- xorb (%r12),%dl
- leaq 1(%r12),%r12
- movb %dl,(%r13)
- leaq 1(%r13),%r13
- subq $1,%r11
+ addb %r9b,%r12b
+ movzbl (%rdi,%r12,1),%r13d
+ movb %r9b,(%rdi,%r12,1)
+ movb %r13b,(%rdi,%r8,1)
+ addb %r9b,%r13b
+ addb $1,%r8b
+ movzbl %r13b,%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r13,1),%r13d
+ movzbl (%rdi,%r8,1),%r9d
+ xorb (%rdx),%r13b
+ leaq 1(%rdx),%rdx
+ movb %r13b,(%rcx)
+ leaq 1(%rcx),%rcx
+ subq $1,%rsi
jnz .Lcloop1
jmp .Lexit
.align 16
.Lexit:
- subb $1,%r10b
- movl %r10d,-8(%rdi)
- movl %ecx,-4(%rdi)
+ subb $1,%r8b
+ movl %r8d,-8(%rdi)
+ movl %r12d,-4(%rdi)
movq (%rsp),%r13
movq 8(%rsp),%r12
@@ -526,10 +330,11 @@ RC4: orq %rsi,%rsi
.Lepilogue:
.byte 0xf3,0xc3
.size RC4,.-RC4
-.globl private_RC4_set_key
-.type private_RC4_set_key,@function
+
+.globl RC4_set_key
+.type RC4_set_key,@function
.align 16
-private_RC4_set_key:
+RC4_set_key:
leaq 8(%rdi),%rdi
leaq (%rdx,%rsi,1),%rdx
negq %rsi
@@ -541,8 +346,11 @@ private_RC4_set_key:
movl OPENSSL_ia32cap_P(%rip),%r8d
btl $20,%r8d
- jc .Lc1stloop
- jmp .Lw1stloop
+ jnc .Lw1stloop
+ btl $30,%r8d
+ setc %r9b
+ movl %r9d,260(%rdi)
+ jmp .Lc1stloop
.align 16
.Lw1stloop:
@@ -596,7 +404,7 @@ private_RC4_set_key:
movl %eax,-8(%rdi)
movl %eax,-4(%rdi)
.byte 0xf3,0xc3
-.size private_RC4_set_key,.-private_RC4_set_key
+.size RC4_set_key,.-RC4_set_key
.globl RC4_options
.type RC4_options,@function
@@ -605,20 +413,18 @@ RC4_options:
leaq .Lopts(%rip),%rax
movl OPENSSL_ia32cap_P(%rip),%edx
btl $20,%edx
- jc .L8xchar
- btl $30,%edx
jnc .Ldone
- addq $25,%rax
- .byte 0xf3,0xc3
-.L8xchar:
addq $12,%rax
+ btl $30,%edx
+ jnc .Ldone
+ addq $13,%rax
.Ldone:
.byte 0xf3,0xc3
.align 64
.Lopts:
.byte 114,99,52,40,56,120,44,105,110,116,41,0
.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
-.byte 114,99,52,40,49,54,120,44,105,110,116,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
.size RC4_options,.-RC4_options
diff --git a/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s b/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
index c11c6f650b..208c2cdd26 100644
--- a/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
+++ b/deps/openssl/asm/x64-elf-gas/sha/sha1-x86_64.s
@@ -1,23 +1,12 @@
.text
-
-
.globl sha1_block_data_order
.type sha1_block_data_order,@function
.align 16
sha1_block_data_order:
- movl OPENSSL_ia32cap_P+0(%rip),%r9d
- movl OPENSSL_ia32cap_P+4(%rip),%r8d
- testl $512,%r8d
- jz .Lialu
- jmp _ssse3_shortcut
-
-.align 16
-.Lialu:
pushq %rbx
pushq %rbp
pushq %r12
- pushq %r13
movq %rsp,%r11
movq %rdi,%r8
subq $72,%rsp
@@ -27,2466 +16,1268 @@ sha1_block_data_order:
movq %r11,64(%rsp)
.Lprologue:
- movl 0(%r8),%esi
- movl 4(%r8),%edi
- movl 8(%r8),%r11d
- movl 12(%r8),%r12d
- movl 16(%r8),%r13d
- jmp .Lloop
-
-.align 16
+ movl 0(%r8),%edx
+ movl 4(%r8),%esi
+ movl 8(%r8),%edi
+ movl 12(%r8),%ebp
+ movl 16(%r8),%r11d
+.align 4
.Lloop:
- movl 0(%r9),%edx
- bswapl %edx
- movl %edx,0(%rsp)
- movl %r11d,%eax
- movl 4(%r9),%ebp
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r13,1),%r13d
- andl %edi,%eax
- movl %ebp,4(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 8(%r9),%edx
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r12,1),%r12d
- andl %esi,%eax
- movl %edx,8(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 12(%r9),%ebp
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r11,1),%r11d
- andl %r13d,%eax
- movl %ebp,12(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 16(%r9),%edx
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rdi,1),%edi
- andl %r12d,%eax
- movl %edx,16(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 20(%r9),%ebp
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rsi,1),%esi
- andl %r11d,%eax
- movl %ebp,20(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl %r11d,%eax
- movl 24(%r9),%edx
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r13,1),%r13d
- andl %edi,%eax
- movl %edx,24(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 28(%r9),%ebp
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r12,1),%r12d
- andl %esi,%eax
- movl %ebp,28(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 32(%r9),%edx
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r11,1),%r11d
- andl %r13d,%eax
- movl %edx,32(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 36(%r9),%ebp
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rdi,1),%edi
- andl %r12d,%eax
- movl %ebp,36(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 40(%r9),%edx
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rsi,1),%esi
- andl %r11d,%eax
- movl %edx,40(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl %r11d,%eax
- movl 44(%r9),%ebp
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r13,1),%r13d
- andl %edi,%eax
- movl %ebp,44(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 48(%r9),%edx
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r12,1),%r12d
- andl %esi,%eax
- movl %edx,48(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 52(%r9),%ebp
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r11,1),%r11d
- andl %r13d,%eax
- movl %ebp,52(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 56(%r9),%edx
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rdi,1),%edi
- andl %r12d,%eax
- movl %edx,56(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 60(%r9),%ebp
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rsi,1),%esi
- andl %r11d,%eax
- movl %ebp,60(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl 0(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 32(%rsp),%edx
- andl %edi,%eax
- leal 1518500249(%rbp,%r13,1),%r13d
- xorl 52(%rsp),%edx
- xorl %r12d,%eax
- roll $1,%edx
- addl %ecx,%r13d
- roll $30,%edi
- movl %edx,0(%rsp)
- addl %eax,%r13d
- movl 4(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 36(%rsp),%ebp
- andl %esi,%eax
- leal 1518500249(%rdx,%r12,1),%r12d
- xorl 56(%rsp),%ebp
- xorl %r11d,%eax
- roll $1,%ebp
- addl %ecx,%r12d
- roll $30,%esi
- movl %ebp,4(%rsp)
- addl %eax,%r12d
- movl 8(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- xorl 40(%rsp),%edx
- andl %r13d,%eax
- leal 1518500249(%rbp,%r11,1),%r11d
- xorl 60(%rsp),%edx
- xorl %edi,%eax
- roll $1,%edx
- addl %ecx,%r11d
- roll $30,%r13d
- movl %edx,8(%rsp)
- addl %eax,%r11d
- movl 12(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- xorl 44(%rsp),%ebp
- andl %r12d,%eax
- leal 1518500249(%rdx,%rdi,1),%edi
- xorl 0(%rsp),%ebp
- xorl %esi,%eax
- roll $1,%ebp
- addl %ecx,%edi
- roll $30,%r12d
- movl %ebp,12(%rsp)
- addl %eax,%edi
- movl 16(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 48(%rsp),%edx
- andl %r11d,%eax
- leal 1518500249(%rbp,%rsi,1),%esi
- xorl 4(%rsp),%edx
- xorl %r13d,%eax
- roll $1,%edx
- addl %ecx,%esi
- roll $30,%r11d
- movl %edx,16(%rsp)
- addl %eax,%esi
- movl 20(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 28(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r13,1),%r13d
- xorl 52(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 8(%rsp),%ebp
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 32(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r12,1),%r12d
- xorl 56(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 12(%rsp),%edx
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 36(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r11,1),%r11d
- xorl 60(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 16(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 40(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rdi,1),%edi
- xorl 0(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 20(%rsp),%edx
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 44(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl 4(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 24(%rsp),%ebp
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,36(%rsp)
- movl 40(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 48(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r13,1),%r13d
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 28(%rsp),%edx
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,40(%rsp)
- movl 44(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 52(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 32(%rsp),%ebp
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,44(%rsp)
- movl 48(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 56(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 36(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,48(%rsp)
- movl 52(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 60(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rdi,1),%edi
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 40(%rsp),%ebp
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,52(%rsp)
- movl 56(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 0(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rsi,1),%esi
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 44(%rsp),%edx
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,56(%rsp)
- movl 60(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 4(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r13,1),%r13d
- xorl 28(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 48(%rsp),%ebp
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,60(%rsp)
- movl 0(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 8(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r12,1),%r12d
- xorl 32(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 52(%rsp),%edx
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,0(%rsp)
- movl 4(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 12(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r11,1),%r11d
- xorl 36(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 56(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,4(%rsp)
- movl 8(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 16(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rdi,1),%edi
- xorl 40(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 60(%rsp),%edx
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,8(%rsp)
- movl 12(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 20(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl 44(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 0(%rsp),%ebp
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,12(%rsp)
- movl 16(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 24(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r13,1),%r13d
- xorl 48(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 4(%rsp),%edx
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,16(%rsp)
- movl 20(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 28(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl 52(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 8(%rsp),%ebp
+ movl 0(%r9),%eax
+ bswapl %eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 4(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,4(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 32(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl 56(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 12(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 36(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rdi,1),%edi
- xorl 60(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 16(%rsp),%ebp
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 8(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,8(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 12(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,12(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 40(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rsi,1),%esi
- xorl 0(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 20(%rsp),%edx
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 16(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,16(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %r11d,%eax
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
movl %r11d,%ebx
- xorl 44(%rsp),%ebp
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 4(%rsp),%ebp
+ movl 20(%r9),%eax
+ movl %edi,%edx
xorl %r12d,%ebx
- leal -1894007588(%rdx,%r13,1),%r13d
- roll $5,%ecx
- xorl 24(%rsp),%ebp
- addl %eax,%r13d
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,20(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 24(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
andl %edi,%ebx
- roll $1,%ebp
- addl %ebx,%r13d
+ movl %eax,24(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
roll $30,%edi
- movl %ebp,36(%rsp)
- addl %ecx,%r13d
- movl 40(%rsp),%edx
- movl %edi,%eax
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
movl %edi,%ebx
- xorl 48(%rsp),%edx
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 8(%rsp),%edx
- xorl %r11d,%ebx
- leal -1894007588(%rbp,%r12,1),%r12d
- roll $5,%ecx
- xorl 28(%rsp),%edx
- addl %eax,%r12d
+ movl 28(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
andl %esi,%ebx
- roll $1,%edx
- addl %ebx,%r12d
+ movl %eax,28(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- movl %edx,40(%rsp)
- addl %ecx,%r12d
- movl 44(%rsp),%ebp
- movl %esi,%eax
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
movl %esi,%ebx
- xorl 52(%rsp),%ebp
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 12(%rsp),%ebp
+ movl 32(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,32(%rsp)
+ addl %ebp,%r11d
xorl %edi,%ebx
- leal -1894007588(%rdx,%r11,1),%r11d
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%ebp
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %ebp,44(%rsp)
- addl %ecx,%r11d
- movl 48(%rsp),%edx
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 56(%rsp),%edx
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 16(%rsp),%edx
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 36(%r9),%eax
+ movl %r11d,%edi
xorl %esi,%ebx
- leal -1894007588(%rbp,%rdi,1),%edi
- roll $5,%ecx
- xorl 36(%rsp),%edx
- addl %eax,%edi
+ bswapl %eax
+ roll $5,%edi
andl %r12d,%ebx
- roll $1,%edx
- addl %ebx,%edi
+ movl %eax,36(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- movl %edx,48(%rsp)
- addl %ecx,%edi
- movl 52(%rsp),%ebp
- movl %r12d,%eax
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
movl %r12d,%ebx
- xorl 60(%rsp),%ebp
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 20(%rsp),%ebp
- xorl %r13d,%ebx
- leal -1894007588(%rdx,%rsi,1),%esi
- roll $5,%ecx
- xorl 40(%rsp),%ebp
- addl %eax,%esi
+ movl 40(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
andl %r11d,%ebx
- roll $1,%ebp
- addl %ebx,%esi
+ movl %eax,40(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
roll $30,%r11d
- movl %ebp,52(%rsp)
- addl %ecx,%esi
- movl 56(%rsp),%edx
- movl %r11d,%eax
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
movl %r11d,%ebx
- xorl 0(%rsp),%edx
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 24(%rsp),%edx
+ movl 44(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,44(%rsp)
+ addl %edx,%esi
xorl %r12d,%ebx
- leal -1894007588(%rbp,%r13,1),%r13d
- roll $5,%ecx
- xorl 44(%rsp),%edx
- addl %eax,%r13d
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 48(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
andl %edi,%ebx
- roll $1,%edx
- addl %ebx,%r13d
+ movl %eax,48(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
roll $30,%edi
- movl %edx,56(%rsp)
- addl %ecx,%r13d
- movl 60(%rsp),%ebp
- movl %edi,%eax
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
movl %edi,%ebx
- xorl 4(%rsp),%ebp
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 28(%rsp),%ebp
- xorl %r11d,%ebx
- leal -1894007588(%rdx,%r12,1),%r12d
- roll $5,%ecx
- xorl 48(%rsp),%ebp
- addl %eax,%r12d
+ movl 52(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
andl %esi,%ebx
- roll $1,%ebp
- addl %ebx,%r12d
+ movl %eax,52(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- movl %ebp,60(%rsp)
- addl %ecx,%r12d
- movl 0(%rsp),%edx
- movl %esi,%eax
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
movl %esi,%ebx
- xorl 8(%rsp),%edx
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 32(%rsp),%edx
+ movl 56(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,56(%rsp)
+ addl %ebp,%r11d
xorl %edi,%ebx
- leal -1894007588(%rbp,%r11,1),%r11d
- roll $5,%ecx
- xorl 52(%rsp),%edx
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%edx
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %edx,0(%rsp)
- addl %ecx,%r11d
- movl 4(%rsp),%ebp
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 12(%rsp),%ebp
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 36(%rsp),%ebp
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 60(%r9),%eax
+ movl %r11d,%edi
xorl %esi,%ebx
- leal -1894007588(%rdx,%rdi,1),%edi
- roll $5,%ecx
- xorl 56(%rsp),%ebp
- addl %eax,%edi
+ bswapl %eax
+ roll $5,%edi
andl %r12d,%ebx
- roll $1,%ebp
- addl %ebx,%edi
+ movl %eax,60(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- movl %ebp,4(%rsp)
- addl %ecx,%edi
- movl 8(%rsp),%edx
- movl %r12d,%eax
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
movl %r12d,%ebx
- xorl 16(%rsp),%edx
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 40(%rsp),%edx
- xorl %r13d,%ebx
- leal -1894007588(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl 60(%rsp),%edx
- addl %eax,%esi
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
andl %r11d,%ebx
- roll $1,%edx
- addl %ebx,%esi
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ xorl %edx,%ebx
roll $30,%r11d
- movl %edx,8(%rsp)
- addl %ecx,%esi
- movl 12(%rsp),%ebp
- movl %r11d,%eax
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
movl %r11d,%ebx
- xorl 20(%rsp),%ebp
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 44(%rsp),%ebp
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ andl %ebp,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
xorl %r12d,%ebx
- leal -1894007588(%rdx,%r13,1),%r13d
- roll $5,%ecx
- xorl 0(%rsp),%ebp
- addl %eax,%r13d
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1518500249(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
andl %edi,%ebx
- roll $1,%ebp
- addl %ebx,%r13d
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ xorl %r11d,%ebx
roll $30,%edi
- movl %ebp,12(%rsp)
- addl %ecx,%r13d
- movl 16(%rsp),%edx
- movl %edi,%eax
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
movl %edi,%ebx
- xorl 24(%rsp),%edx
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 48(%rsp),%edx
- xorl %r11d,%ebx
- leal -1894007588(%rbp,%r12,1),%r12d
- roll $5,%ecx
- xorl 4(%rsp),%edx
- addl %eax,%r12d
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
andl %esi,%ebx
- roll $1,%edx
- addl %ebx,%r12d
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ xorl %ebp,%ebx
roll $30,%esi
- movl %edx,16(%rsp)
- addl %ecx,%r12d
- movl 20(%rsp),%ebp
- movl %esi,%eax
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
movl %esi,%ebx
- xorl 28(%rsp),%ebp
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 52(%rsp),%ebp
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ andl %edx,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
xorl %edi,%ebx
- leal -1894007588(%rdx,%r11,1),%r11d
- roll $5,%ecx
- xorl 8(%rsp),%ebp
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%ebp
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %ebp,20(%rsp)
- addl %ecx,%r11d
- movl 24(%rsp),%edx
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 32(%rsp),%edx
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 56(%rsp),%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
xorl %esi,%ebx
- leal -1894007588(%rbp,%rdi,1),%edi
- roll $5,%ecx
- xorl 12(%rsp),%edx
- addl %eax,%edi
- andl %r12d,%ebx
- roll $1,%edx
- addl %ebx,%edi
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
roll $30,%r12d
- movl %edx,24(%rsp)
- addl %ecx,%edi
- movl 28(%rsp),%ebp
- movl %r12d,%eax
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
movl %r12d,%ebx
- xorl 36(%rsp),%ebp
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 60(%rsp),%ebp
- xorl %r13d,%ebx
- leal -1894007588(%rdx,%rsi,1),%esi
- roll $5,%ecx
- xorl 16(%rsp),%ebp
- addl %eax,%esi
- andl %r11d,%ebx
- roll $1,%ebp
- addl %ebx,%esi
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
roll $30,%r11d
- movl %ebp,28(%rsp)
- addl %ecx,%esi
- movl 32(%rsp),%edx
- movl %r11d,%eax
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
movl %r11d,%ebx
- xorl 40(%rsp),%edx
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 0(%rsp),%edx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
xorl %r12d,%ebx
- leal -1894007588(%rbp,%r13,1),%r13d
- roll $5,%ecx
- xorl 20(%rsp),%edx
- addl %eax,%r13d
- andl %edi,%ebx
- roll $1,%edx
- addl %ebx,%r13d
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
roll $30,%edi
- movl %edx,32(%rsp)
- addl %ecx,%r13d
- movl 36(%rsp),%ebp
- movl %edi,%eax
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
movl %edi,%ebx
- xorl 44(%rsp),%ebp
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r11d,%ebx
- leal -1894007588(%rdx,%r12,1),%r12d
- roll $5,%ecx
- xorl 24(%rsp),%ebp
- addl %eax,%r12d
- andl %esi,%ebx
- roll $1,%ebp
- addl %ebx,%r12d
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
roll $30,%esi
- movl %ebp,36(%rsp)
- addl %ecx,%r12d
- movl 40(%rsp),%edx
- movl %esi,%eax
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
movl %esi,%ebx
- xorl 48(%rsp),%edx
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 8(%rsp),%edx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
xorl %edi,%ebx
- leal -1894007588(%rbp,%r11,1),%r11d
- roll $5,%ecx
- xorl 28(%rsp),%edx
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%edx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %edx,40(%rsp)
- addl %ecx,%r11d
- movl 44(%rsp),%ebp
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 52(%rsp),%ebp
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 12(%rsp),%ebp
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
xorl %esi,%ebx
- leal -1894007588(%rdx,%rdi,1),%edi
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- addl %eax,%edi
- andl %r12d,%ebx
- roll $1,%ebp
- addl %ebx,%edi
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
roll $30,%r12d
- movl %ebp,44(%rsp)
- addl %ecx,%edi
- movl 48(%rsp),%edx
- movl %r12d,%eax
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
movl %r12d,%ebx
- xorl 56(%rsp),%edx
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 16(%rsp),%edx
- xorl %r13d,%ebx
- leal -1894007588(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl 36(%rsp),%edx
- addl %eax,%esi
- andl %r11d,%ebx
- roll $1,%edx
- addl %ebx,%esi
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
roll $30,%r11d
- movl %edx,48(%rsp)
- addl %ecx,%esi
- movl 52(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 60(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r13,1),%r13d
- xorl 20(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 40(%rsp),%ebp
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,52(%rsp)
- movl 56(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 0(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r12,1),%r12d
- xorl 24(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 44(%rsp),%edx
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,56(%rsp)
- movl 60(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r11,1),%r11d
- xorl 28(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 48(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,60(%rsp)
- movl 0(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rdi,1),%edi
- xorl 32(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 52(%rsp),%edx
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 0(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 52(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 4(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 36(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 56(%rsp),%eax
roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,0(%rsp)
- movl 4(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rsi,1),%esi
- xorl 36(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 56(%rsp),%ebp
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 8(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 40(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 60(%rsp),%eax
roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,4(%rsp)
- movl 8(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r13,1),%r13d
- xorl 40(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 60(%rsp),%edx
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 12(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 44(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 0(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 16(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 48(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 4(%rsp),%eax
roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,8(%rsp)
- movl 12(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r12,1),%r12d
- xorl 44(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 0(%rsp),%ebp
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 20(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 52(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 8(%rsp),%eax
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,12(%rsp)
- movl 16(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r11,1),%r11d
- xorl 48(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 4(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,16(%rsp)
- movl 20(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 28(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rdi,1),%edi
- xorl 52(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 8(%rsp),%ebp
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 24(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 56(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 12(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 28(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 16(%rsp),%eax
roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 32(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rsi,1),%esi
- xorl 56(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 12(%rsp),%edx
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 32(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 20(%rsp),%eax
roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 36(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r13,1),%r13d
- xorl 60(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 16(%rsp),%ebp
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 36(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 44(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 4(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 24(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,36(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 40(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 48(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 8(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 28(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 40(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r12,1),%r12d
- xorl 0(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 20(%rsp),%edx
+ movl %eax,40(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 44(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 52(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 12(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 32(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %esi,%eax
+ movl %eax,44(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 48(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 56(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 16(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 36(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,48(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 52(%rsp),%eax
+ movl %r12d,%ebx
movl %r12d,%ecx
- xorl 44(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r11,1),%r11d
- xorl 4(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 24(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,36(%rsp)
- movl 40(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 48(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rdi,1),%edi
- xorl 8(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 28(%rsp),%edx
+ xorl 60(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 20(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 40(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,40(%rsp)
- movl 44(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 52(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rsi,1),%esi
- xorl 12(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 32(%rsp),%ebp
+ movl %eax,52(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 56(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 0(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 24(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 44(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,44(%rsp)
- movl 48(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 56(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r13,1),%r13d
- xorl 16(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 36(%rsp),%edx
+ movl %eax,56(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 60(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 4(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 28(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 48(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,60(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 0(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 8(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 32(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 52(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,48(%rsp)
- movl 52(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 60(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r12,1),%r12d
- xorl 20(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 40(%rsp),%ebp
+ movl %eax,0(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 4(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 12(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 36(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 56(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl 56(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 0(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r11,1),%r11d
- xorl 24(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 44(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl 60(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rdi,1),%edi
- xorl 28(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 48(%rsp),%ebp
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl %r11d,%eax
- leal -899497514(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl %r13d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- addl 0(%r8),%esi
- addl 4(%r8),%edi
- addl 8(%r8),%r11d
- addl 12(%r8),%r12d
- addl 16(%r8),%r13d
- movl %esi,0(%r8)
- movl %edi,4(%r8)
- movl %r11d,8(%r8)
- movl %r12d,12(%r8)
- movl %r13d,16(%r8)
-
- subq $1,%r10
- leaq 64(%r9),%r9
- jnz .Lloop
-
- movq 64(%rsp),%rsi
- movq (%rsi),%r13
- movq 8(%rsi),%r12
- movq 16(%rsi),%rbp
- movq 24(%rsi),%rbx
- leaq 32(%rsi),%rsp
-.Lepilogue:
- .byte 0xf3,0xc3
-.size sha1_block_data_order,.-sha1_block_data_order
-.type sha1_block_data_order_ssse3,@function
-.align 16
-sha1_block_data_order_ssse3:
-_ssse3_shortcut:
- pushq %rbx
- pushq %rbp
- pushq %r12
- leaq -64(%rsp),%rsp
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
-
- shlq $6,%r10
- addq %r9,%r10
- leaq K_XX_XX(%rip),%r11
-
- movl 0(%r8),%eax
- movl 4(%r8),%ebx
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl %ebx,%esi
- movl 16(%r8),%ebp
-
- movdqa 64(%r11),%xmm6
- movdqa 0(%r11),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
-.byte 102,15,56,0,198
- addq $64,%r9
-.byte 102,15,56,0,206
-.byte 102,15,56,0,214
-.byte 102,15,56,0,222
- paddd %xmm9,%xmm0
- paddd %xmm9,%xmm1
- paddd %xmm9,%xmm2
- movdqa %xmm0,0(%rsp)
- psubd %xmm9,%xmm0
- movdqa %xmm1,16(%rsp)
- psubd %xmm9,%xmm1
- movdqa %xmm2,32(%rsp)
- psubd %xmm9,%xmm2
- jmp .Loop_ssse3
-.align 16
-.Loop_ssse3:
- movdqa %xmm1,%xmm4
- addl 0(%rsp),%ebp
- xorl %edx,%ecx
- movdqa %xmm3,%xmm8
-.byte 102,15,58,15,224,8
- movl %eax,%edi
- roll $5,%eax
- paddd %xmm3,%xmm9
- andl %ecx,%esi
- xorl %edx,%ecx
- psrldq $4,%xmm8
- xorl %edx,%esi
- addl %eax,%ebp
- pxor %xmm0,%xmm4
- rorl $2,%ebx
- addl %esi,%ebp
- pxor %xmm2,%xmm8
- addl 4(%rsp),%edx
- xorl %ecx,%ebx
- movl %ebp,%esi
+ movl %eax,4(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 8(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 16(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 40(%rsp),%eax
+ orl %esi,%ecx
roll $5,%ebp
- pxor %xmm8,%xmm4
- andl %ebx,%edi
- xorl %ecx,%ebx
- movdqa %xmm9,48(%rsp)
- xorl %ecx,%edi
- addl %ebp,%edx
- movdqa %xmm4,%xmm10
- movdqa %xmm4,%xmm8
- rorl $7,%eax
- addl %edi,%edx
- addl 8(%rsp),%ecx
- xorl %ebx,%eax
- pslldq $12,%xmm10
- paddd %xmm4,%xmm4
- movl %edx,%edi
- roll $5,%edx
- andl %eax,%esi
- xorl %ebx,%eax
- psrld $31,%xmm8
- xorl %ebx,%esi
- addl %edx,%ecx
- movdqa %xmm10,%xmm9
- rorl $7,%ebp
- addl %esi,%ecx
- psrld $30,%xmm10
- por %xmm8,%xmm4
- addl 12(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%esi
- roll $5,%ecx
- pslld $2,%xmm9
- pxor %xmm10,%xmm4
- andl %ebp,%edi
- xorl %eax,%ebp
- movdqa 0(%r11),%xmm10
- xorl %eax,%edi
- addl %ecx,%ebx
- pxor %xmm9,%xmm4
- rorl $7,%edx
- addl %edi,%ebx
- movdqa %xmm2,%xmm5
- addl 16(%rsp),%eax
- xorl %ebp,%edx
- movdqa %xmm4,%xmm9
-.byte 102,15,58,15,233,8
- movl %ebx,%edi
- roll $5,%ebx
- paddd %xmm4,%xmm10
- andl %edx,%esi
- xorl %ebp,%edx
- psrldq $4,%xmm9
- xorl %ebp,%esi
- addl %ebx,%eax
- pxor %xmm1,%xmm5
- rorl $7,%ecx
- addl %esi,%eax
- pxor %xmm3,%xmm9
- addl 20(%rsp),%ebp
- xorl %edx,%ecx
- movl %eax,%esi
- roll $5,%eax
- pxor %xmm9,%xmm5
- andl %ecx,%edi
- xorl %edx,%ecx
- movdqa %xmm10,0(%rsp)
- xorl %edx,%edi
- addl %eax,%ebp
- movdqa %xmm5,%xmm8
- movdqa %xmm5,%xmm9
- rorl $7,%ebx
+ xorl 60(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,8(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 12(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 20(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 44(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 0(%rsp),%eax
+ andl %esi,%ecx
addl %edi,%ebp
- addl 24(%rsp),%edx
- xorl %ecx,%ebx
- pslldq $12,%xmm8
- paddd %xmm5,%xmm5
- movl %ebp,%edi
- roll $5,%ebp
- andl %ebx,%esi
- xorl %ecx,%ebx
- psrld $31,%xmm9
- xorl %ecx,%esi
- addl %ebp,%edx
- movdqa %xmm8,%xmm10
- rorl $7,%eax
- addl %esi,%edx
- psrld $30,%xmm8
- por %xmm9,%xmm5
- addl 28(%rsp),%ecx
- xorl %ebx,%eax
- movl %edx,%esi
- roll $5,%edx
- pslld $2,%xmm10
- pxor %xmm8,%xmm5
- andl %eax,%edi
- xorl %ebx,%eax
- movdqa 16(%r11),%xmm8
- xorl %ebx,%edi
- addl %edx,%ecx
- pxor %xmm10,%xmm5
- rorl $7,%ebp
- addl %edi,%ecx
- movdqa %xmm3,%xmm6
- addl 32(%rsp),%ebx
- xorl %eax,%ebp
- movdqa %xmm5,%xmm10
-.byte 102,15,58,15,242,8
- movl %ecx,%edi
- roll $5,%ecx
- paddd %xmm5,%xmm8
- andl %ebp,%esi
- xorl %eax,%ebp
- psrldq $4,%xmm10
- xorl %eax,%esi
- addl %ecx,%ebx
- pxor %xmm2,%xmm6
- rorl $7,%edx
- addl %esi,%ebx
- pxor %xmm4,%xmm10
- addl 36(%rsp),%eax
- xorl %ebp,%edx
- movl %ebx,%esi
- roll $5,%ebx
- pxor %xmm10,%xmm6
- andl %edx,%edi
- xorl %ebp,%edx
- movdqa %xmm8,16(%rsp)
- xorl %ebp,%edi
- addl %ebx,%eax
- movdqa %xmm6,%xmm9
- movdqa %xmm6,%xmm10
- rorl $7,%ecx
- addl %edi,%eax
- addl 40(%rsp),%ebp
- xorl %edx,%ecx
- pslldq $12,%xmm9
- paddd %xmm6,%xmm6
- movl %eax,%edi
- roll $5,%eax
- andl %ecx,%esi
- xorl %edx,%ecx
- psrld $31,%xmm10
- xorl %edx,%esi
- addl %eax,%ebp
- movdqa %xmm9,%xmm8
- rorl $7,%ebx
- addl %esi,%ebp
- psrld $30,%xmm9
- por %xmm10,%xmm6
- addl 44(%rsp),%edx
- xorl %ecx,%ebx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,12(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 16(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 24(%rsp),%eax
movl %ebp,%esi
- roll $5,%ebp
- pslld $2,%xmm8
- pxor %xmm9,%xmm6
- andl %ebx,%edi
- xorl %ecx,%ebx
- movdqa 16(%r11),%xmm9
- xorl %ecx,%edi
- addl %ebp,%edx
- pxor %xmm8,%xmm6
- rorl $7,%eax
- addl %edi,%edx
- movdqa %xmm4,%xmm7
- addl 48(%rsp),%ecx
- xorl %ebx,%eax
- movdqa %xmm6,%xmm8
-.byte 102,15,58,15,251,8
- movl %edx,%edi
- roll $5,%edx
- paddd %xmm6,%xmm9
- andl %eax,%esi
- xorl %ebx,%eax
- psrldq $4,%xmm8
- xorl %ebx,%esi
- addl %edx,%ecx
- pxor %xmm3,%xmm7
- rorl $7,%ebp
- addl %esi,%ecx
- pxor %xmm5,%xmm8
- addl 52(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%esi
- roll $5,%ecx
- pxor %xmm8,%xmm7
- andl %ebp,%edi
- xorl %eax,%ebp
- movdqa %xmm9,32(%rsp)
- xorl %eax,%edi
- addl %ecx,%ebx
- movdqa %xmm7,%xmm10
- movdqa %xmm7,%xmm8
- rorl $7,%edx
- addl %edi,%ebx
- addl 56(%rsp),%eax
- xorl %ebp,%edx
- pslldq $12,%xmm10
- paddd %xmm7,%xmm7
- movl %ebx,%edi
- roll $5,%ebx
- andl %edx,%esi
- xorl %ebp,%edx
- psrld $31,%xmm8
- xorl %ebp,%esi
- addl %ebx,%eax
- movdqa %xmm10,%xmm9
- rorl $7,%ecx
- addl %esi,%eax
- psrld $30,%xmm10
- por %xmm8,%xmm7
- addl 60(%rsp),%ebp
- xorl %edx,%ecx
- movl %eax,%esi
- roll $5,%eax
- pslld $2,%xmm9
- pxor %xmm10,%xmm7
- andl %ecx,%edi
- xorl %edx,%ecx
- movdqa 16(%r11),%xmm10
- xorl %edx,%edi
- addl %eax,%ebp
- pxor %xmm9,%xmm7
- rorl $7,%ebx
- addl %edi,%ebp
- movdqa %xmm7,%xmm9
- addl 0(%rsp),%edx
- pxor %xmm4,%xmm0
-.byte 102,68,15,58,15,206,8
- xorl %ecx,%ebx
- movl %ebp,%edi
- roll $5,%ebp
- pxor %xmm1,%xmm0
- andl %ebx,%esi
- xorl %ecx,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm7,%xmm10
- xorl %ecx,%esi
- addl %ebp,%edx
- pxor %xmm9,%xmm0
- rorl $7,%eax
- addl %esi,%edx
- addl 4(%rsp),%ecx
- xorl %ebx,%eax
- movdqa %xmm0,%xmm9
- movdqa %xmm10,48(%rsp)
- movl %edx,%esi
+ andl %r12d,%ebx
+ xorl 48(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 4(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,16(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 20(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 28(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 52(%rsp),%eax
+ orl %r11d,%ecx
roll $5,%edx
- andl %eax,%edi
- xorl %ebx,%eax
- pslld $2,%xmm0
- xorl %ebx,%edi
- addl %edx,%ecx
- psrld $30,%xmm9
- rorl $7,%ebp
- addl %edi,%ecx
- addl 8(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%edi
- roll $5,%ecx
- por %xmm9,%xmm0
- andl %ebp,%esi
- xorl %eax,%ebp
- movdqa %xmm0,%xmm10
- xorl %eax,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edx
- movl %ebx,%esi
- roll $5,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- xorl %ebp,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 16(%rsp),%ebp
- pxor %xmm5,%xmm1
-.byte 102,68,15,58,15,215,8
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm2,%xmm1
- xorl %ecx,%esi
- addl %eax,%ebp
- movdqa %xmm8,%xmm9
- paddd %xmm0,%xmm8
- rorl $7,%ebx
- addl %esi,%ebp
- pxor %xmm10,%xmm1
- addl 20(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,20(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 24(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 32(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 56(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 12(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,24(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 28(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 36(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 60(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 16(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,28(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 32(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 40(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 0(%rsp),%eax
+ orl %esi,%ecx
roll $5,%ebp
- movdqa %xmm1,%xmm10
- movdqa %xmm8,0(%rsp)
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- pslld $2,%xmm1
- addl 24(%rsp),%ecx
- xorl %ebx,%esi
- psrld $30,%xmm10
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- por %xmm10,%xmm1
- addl 28(%rsp),%ebx
- xorl %eax,%edi
- movdqa %xmm1,%xmm8
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 32(%rsp),%eax
- pxor %xmm6,%xmm2
-.byte 102,68,15,58,15,192,8
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- pxor %xmm3,%xmm2
- xorl %edx,%esi
- addl %ebx,%eax
- movdqa 32(%r11),%xmm10
- paddd %xmm1,%xmm9
- rorl $7,%ecx
- addl %esi,%eax
- pxor %xmm8,%xmm2
- addl 36(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- movdqa %xmm2,%xmm8
- movdqa %xmm9,16(%rsp)
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
+ xorl 20(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,32(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 36(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 44(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 4(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 24(%rsp),%eax
+ andl %esi,%ecx
addl %edi,%ebp
- pslld $2,%xmm2
- addl 40(%rsp),%edx
- xorl %ecx,%esi
- psrld $30,%xmm8
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- por %xmm8,%xmm2
- addl 44(%rsp),%ecx
- xorl %ebx,%edi
- movdqa %xmm2,%xmm9
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 48(%rsp),%ebx
- pxor %xmm7,%xmm3
-.byte 102,68,15,58,15,201,8
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- pxor %xmm4,%xmm3
- xorl %ebp,%esi
- addl %ecx,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm2,%xmm10
- rorl $7,%edx
- addl %esi,%ebx
- pxor %xmm9,%xmm3
- addl 52(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- movdqa %xmm3,%xmm9
- movdqa %xmm10,32(%rsp)
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- pslld $2,%xmm3
- addl 56(%rsp),%ebp
- xorl %edx,%esi
- psrld $30,%xmm9
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- por %xmm9,%xmm3
- addl 60(%rsp),%edx
- xorl %ecx,%edi
- movdqa %xmm3,%xmm10
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,36(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 40(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 48(%rsp),%eax
movl %ebp,%esi
- roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 0(%rsp),%ecx
- pxor %xmm0,%xmm4
-.byte 102,68,15,58,15,210,8
- xorl %ebx,%esi
- movl %edx,%edi
+ andl %r12d,%ebx
+ xorl 8(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 28(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,40(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 44(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 52(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 12(%rsp),%eax
+ orl %r11d,%ecx
roll $5,%edx
- pxor %xmm5,%xmm4
- xorl %eax,%esi
- addl %edx,%ecx
- movdqa %xmm8,%xmm9
- paddd %xmm3,%xmm8
- rorl $7,%ebp
- addl %esi,%ecx
- pxor %xmm10,%xmm4
- addl 4(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- movdqa %xmm4,%xmm10
- movdqa %xmm8,48(%rsp)
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- pslld $2,%xmm4
- addl 8(%rsp),%eax
- xorl %ebp,%esi
- psrld $30,%xmm10
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- por %xmm10,%xmm4
- addl 12(%rsp),%ebp
- xorl %edx,%edi
- movdqa %xmm4,%xmm8
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 16(%rsp),%edx
- pxor %xmm1,%xmm5
-.byte 102,68,15,58,15,195,8
- xorl %ecx,%esi
- movl %ebp,%edi
+ xorl 32(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,44(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 48(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 56(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 16(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 36(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,48(%rsp)
+ addl %ebx,%edx
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 52(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 40(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 56(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- pxor %xmm6,%xmm5
- xorl %ebx,%esi
- addl %ebp,%edx
- movdqa %xmm9,%xmm10
- paddd %xmm4,%xmm9
- rorl $7,%eax
- addl %esi,%edx
- pxor %xmm8,%xmm5
- addl 20(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- movdqa %xmm5,%xmm8
- movdqa %xmm9,0(%rsp)
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- pslld $2,%xmm5
- addl 24(%rsp),%ebx
- xorl %eax,%esi
- psrld $30,%xmm8
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- por %xmm8,%xmm5
- addl 28(%rsp),%eax
- xorl %ebp,%edi
- movdqa %xmm5,%xmm9
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- movl %ecx,%edi
- pxor %xmm2,%xmm6
-.byte 102,68,15,58,15,204,8
- xorl %edx,%ecx
- addl 32(%rsp),%ebp
- andl %edx,%edi
- pxor %xmm7,%xmm6
- andl %ecx,%esi
- rorl $7,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm5,%xmm10
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 44(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 60(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 4(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- movl %eax,%edi
- pxor %xmm9,%xmm6
- roll $5,%eax
- addl %esi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movdqa %xmm6,%xmm9
- movdqa %xmm10,16(%rsp)
- movl %ebx,%esi
- xorl %ecx,%ebx
- addl 36(%rsp),%edx
- andl %ecx,%esi
- pslld $2,%xmm6
- andl %ebx,%edi
- rorl $7,%eax
- psrld $30,%xmm9
- addl %esi,%edx
+ xorl 48(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- por %xmm9,%xmm6
- movl %eax,%edi
- xorl %ebx,%eax
- movdqa %xmm6,%xmm10
- addl 40(%rsp),%ecx
- andl %ebx,%edi
- andl %eax,%esi
- rorl $7,%ebp
- addl %edi,%ecx
- movl %edx,%edi
+ xorl 8(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- addl %esi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movl %ebp,%esi
- xorl %eax,%ebp
- addl 44(%rsp),%ebx
- andl %eax,%esi
- andl %ebp,%edi
- rorl $7,%edx
- addl %esi,%ebx
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movl %edx,%edi
- pxor %xmm3,%xmm7
-.byte 102,68,15,58,15,213,8
- xorl %ebp,%edx
- addl 48(%rsp),%eax
- andl %ebp,%edi
- pxor %xmm0,%xmm7
- andl %edx,%esi
- rorl $7,%ecx
- movdqa 48(%r11),%xmm9
- paddd %xmm6,%xmm8
- addl %edi,%eax
- movl %ebx,%edi
- pxor %xmm10,%xmm7
- roll $5,%ebx
- addl %esi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- movdqa %xmm7,%xmm10
- movdqa %xmm8,32(%rsp)
- movl %ecx,%esi
- xorl %edx,%ecx
- addl 52(%rsp),%ebp
- andl %edx,%esi
- pslld $2,%xmm7
- andl %ecx,%edi
- rorl $7,%ebx
- psrld $30,%xmm10
- addl %esi,%ebp
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- por %xmm10,%xmm7
- movl %ebx,%edi
- xorl %ecx,%ebx
- movdqa %xmm7,%xmm8
- addl 56(%rsp),%edx
- andl %ecx,%edi
- andl %ebx,%esi
- rorl $7,%eax
- addl %edi,%edx
- movl %ebp,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- addl %esi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movl %eax,%esi
- xorl %ebx,%eax
- addl 60(%rsp),%ecx
- andl %ebx,%esi
- andl %eax,%edi
- rorl $7,%ebp
- addl %esi,%ecx
- movl %edx,%esi
- roll $5,%edx
- addl %edi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movl %ebp,%edi
- pxor %xmm4,%xmm0
-.byte 102,68,15,58,15,198,8
- xorl %eax,%ebp
- addl 0(%rsp),%ebx
- andl %eax,%edi
- pxor %xmm1,%xmm0
- andl %ebp,%esi
- rorl $7,%edx
- movdqa %xmm9,%xmm10
- paddd %xmm7,%xmm9
- addl %edi,%ebx
- movl %ecx,%edi
- pxor %xmm8,%xmm0
- roll $5,%ecx
- addl %esi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movdqa %xmm0,%xmm8
- movdqa %xmm9,48(%rsp)
- movl %edx,%esi
- xorl %ebp,%edx
- addl 4(%rsp),%eax
- andl %ebp,%esi
- pslld $2,%xmm0
- andl %edx,%edi
- rorl $7,%ecx
- psrld $30,%xmm8
- addl %esi,%eax
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- por %xmm8,%xmm0
- movl %ecx,%edi
- xorl %edx,%ecx
- movdqa %xmm0,%xmm9
- addl 8(%rsp),%ebp
- andl %edx,%edi
- andl %ecx,%esi
- rorl $7,%ebx
+ xorl 48(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- movl %eax,%edi
- roll $5,%eax
- addl %esi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movl %ebx,%esi
- xorl %ecx,%ebx
- addl 12(%rsp),%edx
- andl %ecx,%esi
- andl %ebx,%edi
- rorl $7,%eax
- addl %esi,%edx
- movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movl %eax,%edi
- pxor %xmm5,%xmm1
-.byte 102,68,15,58,15,207,8
- xorl %ebx,%eax
- addl 16(%rsp),%ecx
- andl %ebx,%edi
- pxor %xmm2,%xmm1
- andl %eax,%esi
- rorl $7,%ebp
- movdqa %xmm10,%xmm8
- paddd %xmm0,%xmm10
- addl %edi,%ecx
- movl %edx,%edi
- pxor %xmm9,%xmm1
- roll $5,%edx
- addl %esi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movdqa %xmm1,%xmm9
- movdqa %xmm10,0(%rsp)
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- xorl %eax,%ebp
- addl 20(%rsp),%ebx
- andl %eax,%esi
- pslld $2,%xmm1
- andl %ebp,%edi
- rorl $7,%edx
- psrld $30,%xmm9
- addl %esi,%ebx
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- por %xmm9,%xmm1
- movl %edx,%edi
- xorl %ebp,%edx
- movdqa %xmm1,%xmm10
- addl 24(%rsp),%eax
- andl %ebp,%edi
- andl %edx,%esi
- rorl $7,%ecx
- addl %edi,%eax
- movl %ebx,%edi
- roll $5,%ebx
- addl %esi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- movl %ecx,%esi
- xorl %edx,%ecx
- addl 28(%rsp),%ebp
- andl %edx,%esi
- andl %ecx,%edi
- rorl $7,%ebx
- addl %esi,%ebp
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movl %ebx,%edi
- pxor %xmm6,%xmm2
-.byte 102,68,15,58,15,208,8
- xorl %ecx,%ebx
- addl 32(%rsp),%edx
- andl %ecx,%edi
- pxor %xmm3,%xmm2
- andl %ebx,%esi
- rorl $7,%eax
- movdqa %xmm8,%xmm9
- paddd %xmm1,%xmm8
- addl %edi,%edx
- movl %ebp,%edi
- pxor %xmm10,%xmm2
- roll $5,%ebp
- addl %esi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movdqa %xmm2,%xmm10
- movdqa %xmm8,16(%rsp)
- movl %eax,%esi
- xorl %ebx,%eax
- addl 36(%rsp),%ecx
- andl %ebx,%esi
- pslld $2,%xmm2
- andl %eax,%edi
- rorl $7,%ebp
- psrld $30,%xmm10
- addl %esi,%ecx
- movl %edx,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- addl %edi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- por %xmm10,%xmm2
- movl %ebp,%edi
- xorl %eax,%ebp
- movdqa %xmm2,%xmm8
- addl 40(%rsp),%ebx
- andl %eax,%edi
- andl %ebp,%esi
- rorl $7,%edx
- addl %edi,%ebx
- movl %ecx,%edi
- roll $5,%ecx
- addl %esi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movl %edx,%esi
- xorl %ebp,%edx
- addl 44(%rsp),%eax
- andl %ebp,%esi
- andl %edx,%edi
- rorl $7,%ecx
- addl %esi,%eax
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- addl 48(%rsp),%ebp
- pxor %xmm7,%xmm3
-.byte 102,68,15,58,15,193,8
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm4,%xmm3
- xorl %ecx,%esi
- addl %eax,%ebp
- movdqa %xmm9,%xmm10
- paddd %xmm2,%xmm9
- rorl $7,%ebx
- addl %esi,%ebp
- pxor %xmm8,%xmm3
- addl 52(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- movdqa %xmm3,%xmm8
- movdqa %xmm9,32(%rsp)
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- pslld $2,%xmm3
- addl 56(%rsp),%ecx
- xorl %ebx,%esi
- psrld $30,%xmm8
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- por %xmm8,%xmm3
- addl 60(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 0(%rsp),%eax
- paddd %xmm3,%xmm10
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- movdqa %xmm10,48(%rsp)
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 4(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- addl 8(%rsp),%edx
- xorl %ecx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- addl 12(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- cmpq %r10,%r9
- je .Ldone_ssse3
- movdqa 64(%r11),%xmm6
- movdqa 0(%r11),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
-.byte 102,15,56,0,198
- addq $64,%r9
- addl 16(%rsp),%ebx
- xorl %eax,%esi
-.byte 102,15,56,0,206
- movl %ecx,%edi
- roll $5,%ecx
- paddd %xmm9,%xmm0
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- movdqa %xmm0,0(%rsp)
- addl 20(%rsp),%eax
- xorl %ebp,%edi
- psubd %xmm9,%xmm0
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 24(%rsp),%ebp
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- addl 28(%rsp),%edx
- xorl %ecx,%edi
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 32(%rsp),%ecx
- xorl %ebx,%esi
-.byte 102,15,56,0,214
- movl %edx,%edi
- roll $5,%edx
- paddd %xmm9,%xmm1
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- movdqa %xmm1,16(%rsp)
- addl 36(%rsp),%ebx
- xorl %eax,%edi
- psubd %xmm9,%xmm1
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 40(%rsp),%eax
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 44(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 48(%rsp),%edx
- xorl %ecx,%esi
-.byte 102,15,56,0,222
- movl %ebp,%edi
- roll $5,%ebp
- paddd %xmm9,%xmm2
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- movdqa %xmm2,32(%rsp)
- addl 52(%rsp),%ecx
- xorl %ebx,%edi
- psubd %xmm9,%xmm2
- movl %edx,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 56(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 60(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- addl 12(%r8),%edx
- movl %eax,0(%r8)
- addl 16(%r8),%ebp
- movl %esi,4(%r8)
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ leal -899497514(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ leal -899497514(%rax,%rbp,1),%r11d
movl %esi,%ebx
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- jmp .Loop_ssse3
-
-.align 16
-.Ldone_ssse3:
- addl 16(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 20(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 24(%rsp),%ebp
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- addl 28(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ movl %r12d,%ebp
+ xorl %edx,%ebx
roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 32(%rsp),%ecx
- xorl %ebx,%esi
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- addl 36(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 40(%rsp),%eax
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 44(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 48(%rsp),%edx
- xorl %ecx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- addl 52(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 56(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 60(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- movl %eax,0(%r8)
- addl 12(%r8),%edx
- movl %esi,4(%r8)
- addl 16(%r8),%ebp
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- leaq 64(%rsp),%rsi
- movq 0(%rsi),%r12
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ roll $30,%edx
+ addl %ebx,%r11d
+ addl 0(%r8),%r11d
+ addl 4(%r8),%r12d
+ addl 8(%r8),%edx
+ addl 12(%r8),%esi
+ addl 16(%r8),%edi
+ movl %r11d,0(%r8)
+ movl %r12d,4(%r8)
+ movl %edx,8(%r8)
+ movl %esi,12(%r8)
+ movl %edi,16(%r8)
+
+ xchgl %r11d,%edx
+ xchgl %r12d,%esi
+ xchgl %r11d,%edi
+ xchgl %r12d,%ebp
+
+ leaq 64(%r9),%r9
+ subq $1,%r10
+ jnz .Lloop
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r12
movq 8(%rsi),%rbp
movq 16(%rsi),%rbx
leaq 24(%rsi),%rsp
-.Lepilogue_ssse3:
+.Lepilogue:
.byte 0xf3,0xc3
-.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3
-.align 64
-K_XX_XX:
-.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
-
-.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
-
-.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
-
-.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
-
-.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
-
+.size sha1_block_data_order,.-sha1_block_data_order
.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.align 64
+.align 16
diff --git a/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s b/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
index 576d7d8bfd..ddf7b907a9 100644
--- a/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
+++ b/deps/openssl/asm/x64-elf-gas/sha/sha512-x86_64.s
@@ -38,1688 +38,1880 @@ sha256_block_data_order:
.Lloop:
xorq %rdi,%rdi
movl 0(%rsi),%r12d
- movl %r8d,%r13d
- movl %eax,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,0(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,0(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 4(%rsi),%r12d
- movl %edx,%r13d
- movl %r11d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %edx,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,4(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,4(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 8(%rsi),%r12d
- movl %ecx,%r13d
- movl %r10d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,8(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,8(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 12(%rsi),%r12d
- movl %ebx,%r13d
- movl %r9d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,12(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,12(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 16(%rsi),%r12d
- movl %eax,%r13d
- movl %r8d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %eax,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,16(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,16(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 20(%rsi),%r12d
- movl %r11d,%r13d
- movl %edx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,20(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,20(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 24(%rsi),%r12d
- movl %r10d,%r13d
- movl %ecx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,24(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,24(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 28(%rsi),%r12d
- movl %r9d,%r13d
- movl %ebx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,28(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,28(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
movl 32(%rsi),%r12d
- movl %r8d,%r13d
- movl %eax,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,32(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,32(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 36(%rsi),%r12d
- movl %edx,%r13d
- movl %r11d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %edx,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,36(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,36(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 40(%rsi),%r12d
- movl %ecx,%r13d
- movl %r10d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,40(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,40(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 44(%rsi),%r12d
- movl %ebx,%r13d
- movl %r9d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,44(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,44(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 48(%rsi),%r12d
- movl %eax,%r13d
- movl %r8d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %eax,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,48(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,48(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 52(%rsi),%r12d
- movl %r11d,%r13d
- movl %edx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,52(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,52(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 56(%rsi),%r12d
- movl %r10d,%r13d
- movl %ecx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,56(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,56(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 60(%rsi),%r12d
- movl %r9d,%r13d
- movl %ebx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,60(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,60(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
jmp .Lrounds_16_xx
.align 16
.Lrounds_16_xx:
movl 4(%rsp),%r13d
- movl 56(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 56(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 36(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 36(%rsp),%r12d
addl 0(%rsp),%r12d
movl %r8d,%r13d
- addl %r14d,%r12d
- movl %eax,%r14d
- rorl $14,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,0(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,0(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 8(%rsp),%r13d
- movl 60(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 60(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 40(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 40(%rsp),%r12d
addl 4(%rsp),%r12d
movl %edx,%r13d
- addl %r14d,%r12d
- movl %r11d,%r14d
- rorl $14,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,4(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,4(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 12(%rsp),%r13d
- movl 0(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 0(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 44(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 44(%rsp),%r12d
addl 8(%rsp),%r12d
movl %ecx,%r13d
- addl %r14d,%r12d
- movl %r10d,%r14d
- rorl $14,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,8(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,8(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 16(%rsp),%r13d
- movl 4(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 4(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 48(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 48(%rsp),%r12d
addl 12(%rsp),%r12d
movl %ebx,%r13d
- addl %r14d,%r12d
- movl %r9d,%r14d
- rorl $14,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,12(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,12(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 20(%rsp),%r13d
- movl 8(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 8(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 52(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 52(%rsp),%r12d
addl 16(%rsp),%r12d
movl %eax,%r13d
- addl %r14d,%r12d
- movl %r8d,%r14d
- rorl $14,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,16(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,16(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 24(%rsp),%r13d
- movl 12(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 12(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 56(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 56(%rsp),%r12d
addl 20(%rsp),%r12d
movl %r11d,%r13d
- addl %r14d,%r12d
- movl %edx,%r14d
- rorl $14,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,20(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,20(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 28(%rsp),%r13d
- movl 16(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 16(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 60(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 60(%rsp),%r12d
addl 24(%rsp),%r12d
movl %r10d,%r13d
- addl %r14d,%r12d
- movl %ecx,%r14d
- rorl $14,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,24(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,24(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 32(%rsp),%r13d
- movl 20(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 20(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 0(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 0(%rsp),%r12d
addl 28(%rsp),%r12d
movl %r9d,%r13d
- addl %r14d,%r12d
- movl %ebx,%r14d
- rorl $14,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,28(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,28(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
movl 36(%rsp),%r13d
- movl 24(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 24(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 4(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 4(%rsp),%r12d
addl 32(%rsp),%r12d
movl %r8d,%r13d
- addl %r14d,%r12d
- movl %eax,%r14d
- rorl $14,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,32(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,32(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 40(%rsp),%r13d
- movl 28(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 28(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 8(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 8(%rsp),%r12d
addl 36(%rsp),%r12d
movl %edx,%r13d
- addl %r14d,%r12d
- movl %r11d,%r14d
- rorl $14,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,36(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,36(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 44(%rsp),%r13d
- movl 32(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 32(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 12(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 12(%rsp),%r12d
addl 40(%rsp),%r12d
movl %ecx,%r13d
- addl %r14d,%r12d
- movl %r10d,%r14d
- rorl $14,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,40(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,40(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 48(%rsp),%r13d
- movl 36(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 36(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 16(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 16(%rsp),%r12d
addl 44(%rsp),%r12d
movl %ebx,%r13d
- addl %r14d,%r12d
- movl %r9d,%r14d
- rorl $14,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,44(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,44(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 52(%rsp),%r13d
- movl 40(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 40(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 20(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 20(%rsp),%r12d
addl 48(%rsp),%r12d
movl %eax,%r13d
- addl %r14d,%r12d
- movl %r8d,%r14d
- rorl $14,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,48(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,48(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 56(%rsp),%r13d
- movl 44(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 44(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 24(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 24(%rsp),%r12d
addl 52(%rsp),%r12d
movl %r11d,%r13d
- addl %r14d,%r12d
- movl %edx,%r14d
- rorl $14,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,52(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,52(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 60(%rsp),%r13d
- movl 48(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 48(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 28(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 28(%rsp),%r12d
addl 56(%rsp),%r12d
movl %r10d,%r13d
- addl %r14d,%r12d
- movl %ecx,%r14d
- rorl $14,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,56(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,56(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 0(%rsp),%r13d
- movl 52(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 52(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 32(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 32(%rsp),%r12d
addl 60(%rsp),%r12d
movl %r9d,%r13d
- addl %r14d,%r12d
- movl %ebx,%r14d
- rorl $14,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,60(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,60(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
cmpq $64,%rdi
jb .Lrounds_16_xx
diff --git a/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s b/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
index e0a8287085..0a565a989b 100644
--- a/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
+++ b/deps/openssl/asm/x64-elf-gas/x86_64cpuid.s
@@ -1,11 +1,7 @@
-.hidden OPENSSL_cpuid_setup
.section .init
call OPENSSL_cpuid_setup
-.hidden OPENSSL_ia32cap_P
-.comm OPENSSL_ia32cap_P,8,4
-
.text
@@ -71,15 +67,7 @@ OPENSSL_ia32_cpuid:
movl $2147483648,%eax
cpuid
- cmpl $2147483649,%eax
- jb .Lintel
- movl %eax,%r10d
- movl $2147483649,%eax
- cpuid
- orl %ecx,%r9d
- andl $2049,%r9d
-
- cmpl $2147483656,%r10d
+ cmpl $2147483656,%eax
jb .Lintel
movl $2147483656,%eax
@@ -90,12 +78,12 @@ OPENSSL_ia32_cpuid:
movl $1,%eax
cpuid
btl $28,%edx
- jnc .Lgeneric
+ jnc .Ldone
shrl $16,%ebx
cmpb %r10b,%bl
- ja .Lgeneric
+ ja .Ldone
andl $4026531839,%edx
- jmp .Lgeneric
+ jmp .Ldone
.Lintel:
cmpl $4,%r11d
@@ -112,48 +100,30 @@ OPENSSL_ia32_cpuid:
.Lnocacheinfo:
movl $1,%eax
cpuid
- andl $3220176895,%edx
cmpl $0,%r9d
jne .Lnotintel
- orl $1073741824,%edx
+ orl $1048576,%edx
andb $15,%ah
cmpb $15,%ah
- jne .Lnotintel
- orl $1048576,%edx
+ je .Lnotintel
+ orl $1073741824,%edx
.Lnotintel:
btl $28,%edx
- jnc .Lgeneric
+ jnc .Ldone
andl $4026531839,%edx
cmpl $0,%r10d
- je .Lgeneric
+ je .Ldone
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
- ja .Lgeneric
+ ja .Ldone
andl $4026531839,%edx
-.Lgeneric:
- andl $2048,%r9d
- andl $4294965247,%ecx
- orl %ecx,%r9d
-
- movl %edx,%r10d
- btl $27,%r9d
- jnc .Lclear_avx
- xorl %ecx,%ecx
-.byte 0x0f,0x01,0xd0
-
- andl $6,%eax
- cmpl $6,%eax
- je .Ldone
-.Lclear_avx:
- movl $4026525695,%eax
- andl %eax,%r9d
.Ldone:
- shlq $32,%r9
- movl %r10d,%eax
+ shlq $32,%rcx
+ movl %edx,%eax
movq %r8,%rbx
- orq %r9,%rax
+ orq %rcx,%rax
.byte 0xf3,0xc3
.size OPENSSL_ia32_cpuid,.-OPENSSL_ia32_cpuid
@@ -222,17 +192,3 @@ OPENSSL_wipe_cpu:
leaq 8(%rsp),%rax
.byte 0xf3,0xc3
.size OPENSSL_wipe_cpu,.-OPENSSL_wipe_cpu
-.globl OPENSSL_ia32_rdrand
-.type OPENSSL_ia32_rdrand,@function
-.align 16
-OPENSSL_ia32_rdrand:
- movl $8,%ecx
-.Loop_rdrand:
-.byte 72,15,199,240
- jc .Lbreak_rdrand
- loop .Loop_rdrand
-.Lbreak_rdrand:
- cmpq $0,%rax
- cmoveq %rcx,%rax
- .byte 0xf3,0xc3
-.size OPENSSL_ia32_rdrand,.-OPENSSL_ia32_rdrand
diff --git a/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s b/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
index 88120a1898..d42e1ea79a 100644
--- a/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
+++ b/deps/openssl/asm/x64-macosx-gas/aes/aes-x86_64.s
@@ -333,9 +333,6 @@ L$enc_compact_done:
.globl _AES_encrypt
.p2align 4
-.globl _asm_AES_encrypt
-.private_extern _asm_AES_encrypt
-_asm_AES_encrypt:
_AES_encrypt:
pushq %rbx
pushq %rbp
@@ -783,9 +780,6 @@ L$dec_compact_done:
.globl _AES_decrypt
.p2align 4
-.globl _asm_AES_decrypt
-.private_extern _asm_AES_decrypt
-_asm_AES_decrypt:
_AES_decrypt:
pushq %rbx
pushq %rbp
@@ -849,10 +843,10 @@ L$dec_prologue:
L$dec_epilogue:
.byte 0xf3,0xc3
-.globl _private_AES_set_encrypt_key
+.globl _AES_set_encrypt_key
.p2align 4
-_private_AES_set_encrypt_key:
+_AES_set_encrypt_key:
pushq %rbx
pushq %rbp
pushq %r12
@@ -1115,10 +1109,10 @@ L$exit:
.byte 0xf3,0xc3
-.globl _private_AES_set_decrypt_key
+.globl _AES_set_decrypt_key
.p2align 4
-_private_AES_set_decrypt_key:
+_AES_set_decrypt_key:
pushq %rbx
pushq %rbp
pushq %r12
@@ -1306,9 +1300,6 @@ L$dec_key_epilogue:
.p2align 4
-.globl _asm_AES_cbc_encrypt
-.private_extern _asm_AES_cbc_encrypt
-_asm_AES_cbc_encrypt:
_AES_cbc_encrypt:
cmpq $0,%rdx
je L$cbc_epilogue
diff --git a/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s b/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
index ece106c498..23292a0716 100644
--- a/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
+++ b/deps/openssl/asm/x64-macosx-gas/bn/x86_64-mont.s
@@ -5,16 +5,6 @@
.p2align 4
_bn_mul_mont:
- testl $3,%r9d
- jnz L$mul_enter
- cmpl $8,%r9d
- jb L$mul_enter
- cmpq %rsi,%rdx
- jne L$mul4x_enter
- jmp L$sqr4x_enter
-
-.p2align 4
-L$mul_enter:
pushq %rbx
pushq %rbp
pushq %r12
@@ -30,63 +20,48 @@ L$mul_enter:
andq $-1024,%rsp
movq %r11,8(%rsp,%r9,8)
-L$mul_body:
+L$prologue:
movq %rdx,%r12
+
movq (%r8),%r8
- movq (%r12),%rbx
- movq (%rsi),%rax
xorq %r14,%r14
xorq %r15,%r15
- movq %r8,%rbp
+ movq (%r12),%rbx
+ movq (%rsi),%rax
mulq %rbx
movq %rax,%r10
- movq (%rcx),%rax
-
- imulq %r10,%rbp
movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx)
+ addq %r10,%rax
adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
- jmp L$1st_enter
-
-.p2align 4
L$1st:
- addq %rax,%r13
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%r13
- movq %r10,%r11
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
-L$1st_enter:
mulq %rbx
- addq %rax,%r11
- movq (%rcx,%r15,8),%rax
+ addq %r11,%rax
adcq $0,%rdx
- leaq 1(%r15),%r15
- movq %rdx,%r10
+ movq %rax,%r10
+ movq (%rcx,%r15,8),%rax
+ movq %rdx,%r11
mulq %rbp
- cmpq %r9,%r15
- jne L$1st
-
- addq %rax,%r13
- movq (%rsi),%rax
+ addq %r13,%rax
+ leaq 1(%r15),%r15
adcq $0,%rdx
- addq %r11,%r13
+ addq %r10,%rax
adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
+ movq %rax,-16(%rsp,%r15,8)
+ cmpq %r9,%r15
movq %rdx,%r13
- movq %r10,%r11
+ jl L$1st
xorq %rdx,%rdx
addq %r11,%r13
@@ -95,64 +70,50 @@ L$1st_enter:
movq %rdx,(%rsp,%r9,8)
leaq 1(%r14),%r14
- jmp L$outer
-.p2align 4
+.p2align 2
L$outer:
- movq (%r12,%r14,8),%rbx
xorq %r15,%r15
- movq %r8,%rbp
- movq (%rsp),%r10
+
+ movq (%r12,%r14,8),%rbx
+ movq (%rsi),%rax
mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
+ addq (%rsp),%rax
adcq $0,%rdx
-
- imulq %r10,%rbp
+ movq %rax,%r10
movq %rdx,%r11
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
+ imulq %r8,%rax
+ movq %rax,%rbp
+
+ mulq (%rcx,%r15,8)
+ addq %r10,%rax
movq 8(%rsp),%r10
+ adcq $0,%rdx
movq %rdx,%r13
leaq 1(%r15),%r15
- jmp L$inner_enter
-
-.p2align 4
+.p2align 2
L$inner:
- addq %rax,%r13
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
-L$inner_enter:
mulq %rbx
- addq %rax,%r11
+ addq %r11,%rax
+ adcq $0,%rdx
+ addq %rax,%r10
movq (%rcx,%r15,8),%rax
adcq $0,%rdx
- addq %r11,%r10
movq %rdx,%r11
- adcq $0,%r11
- leaq 1(%r15),%r15
mulq %rbp
- cmpq %r9,%r15
- jne L$inner
-
- addq %rax,%r13
- movq (%rsi),%rax
+ addq %r13,%rax
+ leaq 1(%r15),%r15
adcq $0,%rdx
- addq %r10,%r13
- movq (%rsp,%r15,8),%r10
+ addq %r10,%rax
adcq $0,%rdx
- movq %r13,-16(%rsp,%r15,8)
+ movq (%rsp,%r15,8),%r10
+ cmpq %r9,%r15
+ movq %rax,-16(%rsp,%r15,8)
movq %rdx,%r13
+ jl L$inner
xorq %rdx,%rdx
addq %r11,%r13
@@ -166,434 +127,35 @@ L$inner_enter:
cmpq %r9,%r14
jl L$outer
- xorq %r14,%r14
- movq (%rsp),%rax
leaq (%rsp),%rsi
- movq %r9,%r15
+ leaq -1(%r9),%r15
+
+ movq (%rsi),%rax
+ xorq %r14,%r14
jmp L$sub
.p2align 4
L$sub: sbbq (%rcx,%r14,8),%rax
movq %rax,(%rdi,%r14,8)
+ decq %r15
movq 8(%rsi,%r14,8),%rax
leaq 1(%r14),%r14
- decq %r15
- jnz L$sub
+ jge L$sub
sbbq $0,%rax
- xorq %r14,%r14
andq %rax,%rsi
notq %rax
movq %rdi,%rcx
andq %rax,%rcx
- movq %r9,%r15
+ leaq -1(%r9),%r15
orq %rcx,%rsi
.p2align 4
L$copy:
- movq (%rsi,%r14,8),%rax
- movq %r14,(%rsp,%r14,8)
- movq %rax,(%rdi,%r14,8)
- leaq 1(%r14),%r14
- subq $1,%r15
- jnz L$copy
-
- movq 8(%rsp,%r9,8),%rsi
- movq $1,%rax
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
-L$mul_epilogue:
- .byte 0xf3,0xc3
-
-
-.p2align 4
-bn_mul4x_mont:
-L$mul4x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
-
- movl %r9d,%r9d
- leaq 4(%r9),%r10
- movq %rsp,%r11
- negq %r10
- leaq (%rsp,%r10,8),%rsp
- andq $-1024,%rsp
-
- movq %r11,8(%rsp,%r9,8)
-L$mul4x_body:
- movq %rdi,16(%rsp,%r9,8)
- movq %rdx,%r12
- movq (%r8),%r8
- movq (%r12),%rbx
- movq (%rsi),%rax
-
- xorq %r14,%r14
- xorq %r15,%r15
-
- movq %r8,%rbp
- mulq %rbx
- movq %rax,%r10
- movq (%rcx),%rax
-
- imulq %r10,%rbp
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp L$1st4x
-.p2align 4
-L$1st4x:
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jl L$1st4x
-
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
-
- leaq 1(%r14),%r14
-.p2align 2
-L$outer4x:
- movq (%r12,%r14,8),%rbx
- xorq %r15,%r15
- movq (%rsp),%r10
- movq %r8,%rbp
- mulq %rbx
- addq %rax,%r10
- movq (%rcx),%rax
- adcq $0,%rdx
-
- imulq %r10,%rbp
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r10
- movq 8(%rsi),%rax
- adcq $0,%rdx
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx),%rax
- adcq $0,%rdx
- addq 8(%rsp),%r11
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq 16(%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- leaq 4(%r15),%r15
- adcq $0,%rdx
- movq %rdi,(%rsp)
- movq %rdx,%r13
- jmp L$inner4x
-.p2align 4
-L$inner4x:
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
movq (%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- mulq %rbx
- addq %rax,%r10
- movq (%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq (%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq 8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-8(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq 8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq 8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 4(%r15),%r15
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq -16(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-32(%rsp,%r15,8)
- movq %rdx,%r13
- cmpq %r9,%r15
- jl L$inner4x
-
- mulq %rbx
- addq %rax,%r10
- movq -16(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -16(%rsp,%r15,8),%r10
- adcq $0,%rdx
- movq %rdx,%r11
-
- mulq %rbp
- addq %rax,%r13
- movq -8(%rsi,%r15,8),%rax
- adcq $0,%rdx
- addq %r10,%r13
- adcq $0,%rdx
- movq %r13,-24(%rsp,%r15,8)
- movq %rdx,%rdi
-
- mulq %rbx
- addq %rax,%r11
- movq -8(%rcx,%r15,8),%rax
- adcq $0,%rdx
- addq -8(%rsp,%r15,8),%r11
- adcq $0,%rdx
- leaq 1(%r14),%r14
- movq %rdx,%r10
-
- mulq %rbp
- addq %rax,%rdi
- movq (%rsi),%rax
- adcq $0,%rdx
- addq %r11,%rdi
- adcq $0,%rdx
- movq %rdi,-16(%rsp,%r15,8)
- movq %rdx,%r13
-
- xorq %rdi,%rdi
- addq %r10,%r13
- adcq $0,%rdi
- addq (%rsp,%r9,8),%r13
- adcq $0,%rdi
- movq %r13,-8(%rsp,%r15,8)
- movq %rdi,(%rsp,%r15,8)
-
- cmpq %r9,%r14
- jl L$outer4x
- movq 16(%rsp,%r9,8),%rdi
- movq 0(%rsp),%rax
- pxor %xmm0,%xmm0
- movq 8(%rsp),%rdx
- shrq $2,%r9
- leaq (%rsp),%rsi
- xorq %r14,%r14
-
- subq 0(%rcx),%rax
- movq 16(%rsi),%rbx
- movq 24(%rsi),%rbp
- sbbq 8(%rcx),%rdx
- leaq -1(%r9),%r15
- jmp L$sub4x
-.p2align 4
-L$sub4x:
- movq %rax,0(%rdi,%r14,8)
- movq %rdx,8(%rdi,%r14,8)
- sbbq 16(%rcx,%r14,8),%rbx
- movq 32(%rsi,%r14,8),%rax
- movq 40(%rsi,%r14,8),%rdx
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
- movq %rbp,24(%rdi,%r14,8)
- sbbq 32(%rcx,%r14,8),%rax
- movq 48(%rsi,%r14,8),%rbx
- movq 56(%rsi,%r14,8),%rbp
- sbbq 40(%rcx,%r14,8),%rdx
- leaq 4(%r14),%r14
+ movq %rax,(%rdi,%r15,8)
+ movq %r14,(%rsp,%r15,8)
decq %r15
- jnz L$sub4x
+ jge L$copy
- movq %rax,0(%rdi,%r14,8)
- movq 32(%rsi,%r14,8),%rax
- sbbq 16(%rcx,%r14,8),%rbx
- movq %rdx,8(%rdi,%r14,8)
- sbbq 24(%rcx,%r14,8),%rbp
- movq %rbx,16(%rdi,%r14,8)
-
- sbbq $0,%rax
- movq %rbp,24(%rdi,%r14,8)
- xorq %r14,%r14
- andq %rax,%rsi
- notq %rax
- movq %rdi,%rcx
- andq %rax,%rcx
- leaq -1(%r9),%r15
- orq %rcx,%rsi
-
- movdqu (%rsi),%xmm1
- movdqa %xmm0,(%rsp)
- movdqu %xmm1,(%rdi)
- jmp L$copy4x
-.p2align 4
-L$copy4x:
- movdqu 16(%rsi,%r14,1),%xmm2
- movdqu 32(%rsi,%r14,1),%xmm1
- movdqa %xmm0,16(%rsp,%r14,1)
- movdqu %xmm2,16(%rdi,%r14,1)
- movdqa %xmm0,32(%rsp,%r14,1)
- movdqu %xmm1,32(%rdi,%r14,1)
- leaq 32(%r14),%r14
- decq %r15
- jnz L$copy4x
-
- shlq $2,%r9
- movdqu 16(%rsi,%r14,1),%xmm2
- movdqa %xmm0,16(%rsp,%r14,1)
- movdqu %xmm2,16(%rdi,%r14,1)
movq 8(%rsp,%r9,8),%rsi
movq $1,%rax
movq (%rsi),%r15
@@ -603,772 +165,7 @@ L$copy4x:
movq 32(%rsi),%rbp
movq 40(%rsi),%rbx
leaq 48(%rsi),%rsp
-L$mul4x_epilogue:
- .byte 0xf3,0xc3
-
-
-.p2align 4
-bn_sqr4x_mont:
-L$sqr4x_enter:
- pushq %rbx
- pushq %rbp
- pushq %r12
- pushq %r13
- pushq %r14
- pushq %r15
-
- shll $3,%r9d
- xorq %r10,%r10
- movq %rsp,%r11
- subq %r9,%r10
- movq (%r8),%r8
- leaq -72(%rsp,%r10,2),%rsp
- andq $-1024,%rsp
-
-
-
-
-
-
-
-
-
-
-
- movq %rdi,32(%rsp)
- movq %rcx,40(%rsp)
- movq %r8,48(%rsp)
- movq %r11,56(%rsp)
-L$sqr4x_body:
-
-
-
-
-
-
-
- leaq 32(%r10),%rbp
- leaq (%rsi,%r9,1),%rsi
-
- movq %r9,%rcx
-
-
- movq -32(%rsi,%rbp,1),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi,%rbp,1),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi,%rbp,1),%rbx
- movq %rax,%r15
-
- mulq %r14
- movq %rax,%r10
- movq %rbx,%rax
- movq %rdx,%r11
- movq %r10,-24(%rdi,%rbp,1)
-
- xorq %r10,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi,%rbp,1)
-
- leaq -16(%rbp),%rcx
-
-
- movq 8(%rsi,%rcx,1),%rbx
- mulq %r15
- movq %rax,%r12
- movq %rbx,%rax
- movq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 16(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
- jmp L$sqr4x_1st
-
-.p2align 4
-L$sqr4x_1st:
- movq (%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,(%rdi,%rcx,1)
-
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,8(%rdi,%rcx,1)
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,16(%rdi,%rcx,1)
-
-
- movq 24(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 32(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
-
- cmpq $0,%rcx
- jne L$sqr4x_1st
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- leaq 16(%rbp),%rbp
- movq %r12,8(%rdi)
- jmp L$sqr4x_outer
-
-.p2align 4
-L$sqr4x_outer:
- movq -32(%rsi,%rbp,1),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi,%rbp,1),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi,%rbp,1),%rbx
- movq %rax,%r15
-
- movq -24(%rdi,%rbp,1),%r10
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-24(%rdi,%rbp,1)
-
- xorq %r10,%r10
- addq -16(%rdi,%rbp,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi,%rbp,1)
-
- leaq -16(%rbp),%rcx
- xorq %r12,%r12
-
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq 8(%rdi,%rcx,1),%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,8(%rdi,%rcx,1)
-
- leaq 16(%rcx),%rcx
- jmp L$sqr4x_inner
-
-.p2align 4
-L$sqr4x_inner:
- movq (%rsi,%rcx,1),%rbx
- xorq %r12,%r12
- addq (%rdi,%rcx,1),%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,(%rdi,%rcx,1)
-
- movq 8(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq 8(%rdi,%rcx,1),%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
-
- xorq %r11,%r11
- addq %r12,%r10
- leaq 16(%rcx),%rcx
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi,%rcx,1)
-
- cmpq $0,%rcx
- jne L$sqr4x_inner
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- movq %r12,8(%rdi)
-
- addq $16,%rbp
- jnz L$sqr4x_outer
-
-
- movq -32(%rsi),%r14
- leaq 64(%rsp,%r9,2),%rdi
- movq -24(%rsi),%rax
- leaq -32(%rdi,%rbp,1),%rdi
- movq -16(%rsi),%rbx
- movq %rax,%r15
-
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-24(%rdi)
-
- xorq %r10,%r10
- addq %r13,%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- movq %r11,-16(%rdi)
-
- movq -8(%rsi),%rbx
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq $0,%rdx
-
- xorq %r11,%r11
- addq %r12,%r10
- movq %rdx,%r13
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %rbx,%rax
- adcq %rdx,%r11
- movq %r10,-8(%rdi)
-
- xorq %r12,%r12
- addq %r11,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq -16(%rsi),%rax
- adcq %rdx,%r12
-
- movq %r13,(%rdi)
- movq %r12,8(%rdi)
-
- mulq %rbx
- addq $16,%rbp
- xorq %r14,%r14
- subq %r9,%rbp
- xorq %r15,%r15
-
- addq %r12,%rax
- adcq $0,%rdx
- movq %rax,8(%rdi)
- movq %rdx,16(%rdi)
- movq %r15,24(%rdi)
-
- movq -16(%rsi,%rbp,1),%rax
- leaq 64(%rsp,%r9,2),%rdi
- xorq %r10,%r10
- movq -24(%rdi,%rbp,2),%r11
-
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq -8(%rsi,%rbp,1),%rax
- movq %r12,-32(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 0(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 8(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 0(%rsi,%rbp,1),%rax
- movq %rbx,-16(%rdi,%rbp,2)
- adcq %rdx,%r8
- leaq 16(%rbp),%rbp
- movq %r8,-40(%rdi,%rbp,2)
- sbbq %r15,%r15
- jmp L$sqr4x_shift_n_add
-
-.p2align 4
-L$sqr4x_shift_n_add:
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq -8(%rsi,%rbp,1),%rax
- movq %r12,-32(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 0(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 8(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 0(%rsi,%rbp,1),%rax
- movq %rbx,-16(%rdi,%rbp,2)
- adcq %rdx,%r8
-
- leaq (%r14,%r10,2),%r12
- movq %r8,-8(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq 16(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 24(%rdi,%rbp,2),%r11
- adcq %rax,%r12
- movq 8(%rsi,%rbp,1),%rax
- movq %r12,0(%rdi,%rbp,2)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,8(%rdi,%rbp,2)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- movq 32(%rdi,%rbp,2),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq 40(%rdi,%rbp,2),%r11
- adcq %rax,%rbx
- movq 16(%rsi,%rbp,1),%rax
- movq %rbx,16(%rdi,%rbp,2)
- adcq %rdx,%r8
- movq %r8,24(%rdi,%rbp,2)
- sbbq %r15,%r15
- addq $32,%rbp
- jnz L$sqr4x_shift_n_add
-
- leaq (%r14,%r10,2),%r12
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r13
- shrq $63,%r11
- orq %r10,%r13
- movq -16(%rdi),%r10
- movq %r11,%r14
- mulq %rax
- negq %r15
- movq -8(%rdi),%r11
- adcq %rax,%r12
- movq -8(%rsi),%rax
- movq %r12,-32(%rdi)
- adcq %rdx,%r13
-
- leaq (%r14,%r10,2),%rbx
- movq %r13,-24(%rdi)
- sbbq %r15,%r15
- shrq $63,%r10
- leaq (%rcx,%r11,2),%r8
- shrq $63,%r11
- orq %r10,%r8
- mulq %rax
- negq %r15
- adcq %rax,%rbx
- adcq %rdx,%r8
- movq %rbx,-16(%rdi)
- movq %r8,-8(%rdi)
- movq 40(%rsp),%rsi
- movq 48(%rsp),%r8
- xorq %rcx,%rcx
- movq %r9,0(%rsp)
- subq %r9,%rcx
- movq 64(%rsp),%r10
- movq %r8,%r14
- leaq 64(%rsp,%r9,2),%rax
- leaq 64(%rsp,%r9,1),%rdi
- movq %rax,8(%rsp)
- leaq (%rsi,%r9,1),%rsi
- xorq %rbp,%rbp
-
- movq 0(%rsi,%rcx,1),%rax
- movq 8(%rsi,%rcx,1),%r9
- imulq %r10,%r14
- movq %rax,%rbx
- jmp L$sqr4x_mont_outer
-
-.p2align 4
-L$sqr4x_mont_outer:
- xorq %r11,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
- movq %r8,%r15
-
- xorq %r10,%r10
- addq 8(%rdi,%rcx,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
-
- imulq %r11,%r15
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq 16(%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 24(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,16(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 24(%rdi,%rcx,1),%r11
- leaq 32(%rcx),%rcx
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- jmp L$sqr4x_mont_inner
-
-.p2align 4
-L$sqr4x_mont_inner:
- movq (%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,-8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq (%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 8(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 8(%rdi,%rcx,1),%r11
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
-
-
- movq 16(%rsi,%rcx,1),%rbx
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %rbx,%rax
- adcq %rdx,%r13
- movq %r12,8(%rdi,%rcx,1)
-
- xorq %r11,%r11
- addq 16(%rdi,%rcx,1),%r10
- adcq $0,%r11
- mulq %r14
- addq %rax,%r10
- movq %r9,%rax
- adcq %rdx,%r11
-
- movq 24(%rsi,%rcx,1),%r9
- xorq %r12,%r12
- addq %r10,%r13
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %r9,%rax
- adcq %rdx,%r12
- movq %r13,16(%rdi,%rcx,1)
-
- xorq %r10,%r10
- addq 24(%rdi,%rcx,1),%r11
- leaq 32(%rcx),%rcx
- adcq $0,%r10
- mulq %r14
- addq %rax,%r11
- movq %rbx,%rax
- adcq %rdx,%r10
- cmpq $0,%rcx
- jne L$sqr4x_mont_inner
-
- subq 0(%rsp),%rcx
- movq %r8,%r14
-
- xorq %r13,%r13
- addq %r11,%r12
- adcq $0,%r13
- mulq %r15
- addq %rax,%r12
- movq %r9,%rax
- adcq %rdx,%r13
- movq %r12,-8(%rdi)
-
- xorq %r11,%r11
- addq (%rdi),%r10
- adcq $0,%r11
- movq 0(%rsi,%rcx,1),%rbx
- addq %rbp,%r10
- adcq $0,%r11
-
- imulq 16(%rdi,%rcx,1),%r14
- xorq %r12,%r12
- movq 8(%rsi,%rcx,1),%r9
- addq %r10,%r13
- movq 16(%rdi,%rcx,1),%r10
- adcq $0,%r12
- mulq %r15
- addq %rax,%r13
- movq %rbx,%rax
- adcq %rdx,%r12
- movq %r13,(%rdi)
-
- xorq %rbp,%rbp
- addq 8(%rdi),%r12
- adcq %rbp,%rbp
- addq %r11,%r12
- leaq 16(%rdi),%rdi
- adcq $0,%rbp
- movq %r12,-8(%rdi)
- cmpq 8(%rsp),%rdi
- jb L$sqr4x_mont_outer
-
- movq 0(%rsp),%r9
- movq %rbp,(%rdi)
- movq 64(%rsp,%r9,1),%rax
- leaq 64(%rsp,%r9,1),%rbx
- movq 40(%rsp),%rsi
- shrq $5,%r9
- movq 8(%rbx),%rdx
- xorq %rbp,%rbp
-
- movq 32(%rsp),%rdi
- subq 0(%rsi),%rax
- movq 16(%rbx),%r10
- movq 24(%rbx),%r11
- sbbq 8(%rsi),%rdx
- leaq -1(%r9),%rcx
- jmp L$sqr4x_sub
-.p2align 4
-L$sqr4x_sub:
- movq %rax,0(%rdi,%rbp,8)
- movq %rdx,8(%rdi,%rbp,8)
- sbbq 16(%rsi,%rbp,8),%r10
- movq 32(%rbx,%rbp,8),%rax
- movq 40(%rbx,%rbp,8),%rdx
- sbbq 24(%rsi,%rbp,8),%r11
- movq %r10,16(%rdi,%rbp,8)
- movq %r11,24(%rdi,%rbp,8)
- sbbq 32(%rsi,%rbp,8),%rax
- movq 48(%rbx,%rbp,8),%r10
- movq 56(%rbx,%rbp,8),%r11
- sbbq 40(%rsi,%rbp,8),%rdx
- leaq 4(%rbp),%rbp
- decq %rcx
- jnz L$sqr4x_sub
-
- movq %rax,0(%rdi,%rbp,8)
- movq 32(%rbx,%rbp,8),%rax
- sbbq 16(%rsi,%rbp,8),%r10
- movq %rdx,8(%rdi,%rbp,8)
- sbbq 24(%rsi,%rbp,8),%r11
- movq %r10,16(%rdi,%rbp,8)
-
- sbbq $0,%rax
- movq %r11,24(%rdi,%rbp,8)
- xorq %rbp,%rbp
- andq %rax,%rbx
- notq %rax
- movq %rdi,%rsi
- andq %rax,%rsi
- leaq -1(%r9),%rcx
- orq %rsi,%rbx
-
- pxor %xmm0,%xmm0
- leaq 64(%rsp,%r9,8),%rsi
- movdqu (%rbx),%xmm1
- leaq (%rsi,%r9,8),%rsi
- movdqa %xmm0,64(%rsp)
- movdqa %xmm0,(%rsi)
- movdqu %xmm1,(%rdi)
- jmp L$sqr4x_copy
-.p2align 4
-L$sqr4x_copy:
- movdqu 16(%rbx,%rbp,1),%xmm2
- movdqu 32(%rbx,%rbp,1),%xmm1
- movdqa %xmm0,80(%rsp,%rbp,1)
- movdqa %xmm0,96(%rsp,%rbp,1)
- movdqa %xmm0,16(%rsi,%rbp,1)
- movdqa %xmm0,32(%rsi,%rbp,1)
- movdqu %xmm2,16(%rdi,%rbp,1)
- movdqu %xmm1,32(%rdi,%rbp,1)
- leaq 32(%rbp),%rbp
- decq %rcx
- jnz L$sqr4x_copy
-
- movdqu 16(%rbx,%rbp,1),%xmm2
- movdqa %xmm0,80(%rsp,%rbp,1)
- movdqa %xmm0,16(%rsi,%rbp,1)
- movdqu %xmm2,16(%rdi,%rbp,1)
- movq 56(%rsp),%rsi
- movq $1,%rax
- movq 0(%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
-L$sqr4x_epilogue:
+L$epilogue:
.byte 0xf3,0xc3
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s b/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
index cdecac7b4c..96f6ea16ce 100644
--- a/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
+++ b/deps/openssl/asm/x64-macosx-gas/md5/md5-x86_64.s
@@ -668,3 +668,4 @@ L$end:
addq $40,%rsp
L$epilogue:
.byte 0xf3,0xc3
+
diff --git a/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s b/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
index 8c4f29ecbb..41183cebec 100644
--- a/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
+++ b/deps/openssl/asm/x64-macosx-gas/rc4/rc4-x86_64.s
@@ -1,7 +1,6 @@
.text
-
.globl _RC4
.p2align 4
@@ -13,511 +12,316 @@ L$entry:
pushq %r12
pushq %r13
L$prologue:
- movq %rsi,%r11
- movq %rdx,%r12
- movq %rcx,%r13
- xorq %r10,%r10
- xorq %rcx,%rcx
- leaq 8(%rdi),%rdi
- movb -8(%rdi),%r10b
- movb -4(%rdi),%cl
+ addq $8,%rdi
+ movl -8(%rdi),%r8d
+ movl -4(%rdi),%r12d
cmpl $-1,256(%rdi)
je L$RC4_CHAR
- movl _OPENSSL_ia32cap_P(%rip),%r8d
- xorq %rbx,%rbx
- incb %r10b
- subq %r10,%rbx
- subq %r12,%r13
- movl (%rdi,%r10,4),%eax
- testq $-16,%r11
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ testq $-8,%rsi
jz L$loop1
- btl $30,%r8d
- jc L$intel
- andq $7,%rbx
- leaq 1(%r10),%rsi
- jz L$oop8
- subq %rbx,%r11
-L$oop8_warmup:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
- incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %rbx
- jnz L$oop8_warmup
-
- leaq 1(%r10),%rsi
- jmp L$oop8
-.p2align 4
-L$oop8:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 0(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,0(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 4(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,4(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 8(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,8(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 12(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,12(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 16(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,16(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl 20(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,20(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl 24(%rdi,%rsi,4),%ebx
- rorq $8,%r8
- movl %edx,24(%rdi,%r10,4)
- addb %al,%dl
- movb (%rdi,%rdx,4),%r8b
- addb $8,%sil
- addb %bl,%cl
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- movl -4(%rdi,%rsi,4),%eax
- rorq $8,%r8
- movl %edx,28(%rdi,%r10,4)
- addb %bl,%dl
- movb (%rdi,%rdx,4),%r8b
- addb $8,%r10b
- rorq $8,%r8
- subq $8,%r11
-
- xorq (%r12),%r8
- movq %r8,(%r13,%r12,1)
- leaq 8(%r12),%r12
-
- testq $-8,%r11
- jnz L$oop8
- cmpq $0,%r11
- jne L$loop1
- jmp L$exit
-
+ jmp L$loop8
.p2align 4
-L$intel:
- testq $-32,%r11
- jz L$loop1
- andq $15,%rbx
- jz L$oop16_is_hot
- subq %rbx,%r11
-L$oop16_warmup:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
+L$loop8:
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %rbx
- jnz L$oop16_warmup
-
- movq %rcx,%rbx
- xorq %rcx,%rcx
- movb %bl,%cl
-
-L$oop16_is_hot:
- leaq (%rdi,%r10,4),%rsi
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- pxor %xmm0,%xmm0
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 4(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,0(%rsi)
- addb %bl,%cl
- pinsrw $0,(%rdi,%rax,4),%xmm0
- jmp L$oop16_enter
-.p2align 4
-L$oop16:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- pxor %xmm0,%xmm2
- psllq $8,%xmm1
- pxor %xmm0,%xmm0
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 4(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,0(%rsi)
- pxor %xmm1,%xmm2
- addb %bl,%cl
- pinsrw $0,(%rdi,%rax,4),%xmm0
- movdqu %xmm2,(%r13,%r12,1)
- leaq 16(%r12),%r12
-L$oop16_enter:
- movl (%rdi,%rcx,4),%edx
- pxor %xmm1,%xmm1
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 8(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,4(%rsi)
- addb %al,%cl
- pinsrw $0,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 12(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,8(%rsi)
- addb %bl,%cl
- pinsrw $1,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 16(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,12(%rsi)
- addb %al,%cl
- pinsrw $1,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 20(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,16(%rsi)
- addb %bl,%cl
- pinsrw $2,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 24(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,20(%rsi)
- addb %al,%cl
- pinsrw $2,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 28(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,24(%rsi)
- addb %bl,%cl
- pinsrw $3,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 32(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,28(%rsi)
- addb %al,%cl
- pinsrw $3,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 36(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,32(%rsi)
- addb %bl,%cl
- pinsrw $4,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 40(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,36(%rsi)
- addb %al,%cl
- pinsrw $4,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 44(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,40(%rsi)
- addb %bl,%cl
- pinsrw $5,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 48(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,44(%rsi)
- addb %al,%cl
- pinsrw $5,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 52(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,48(%rsi)
- addb %bl,%cl
- pinsrw $6,(%rdi,%rax,4),%xmm0
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movl 56(%rsi),%eax
- movzbl %bl,%ebx
- movl %edx,52(%rsi)
- addb %al,%cl
- pinsrw $6,(%rdi,%rbx,4),%xmm1
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- addb %dl,%al
- movl 60(%rsi),%ebx
- movzbl %al,%eax
- movl %edx,56(%rsi)
- addb %bl,%cl
- pinsrw $7,(%rdi,%rax,4),%xmm0
- addb $16,%r10b
- movdqu (%r12),%xmm2
- movl (%rdi,%rcx,4),%edx
- movl %ebx,(%rdi,%rcx,4)
- addb %dl,%bl
- movzbl %bl,%ebx
- movl %edx,60(%rsi)
- leaq (%rdi,%r10,4),%rsi
- pinsrw $7,(%rdi,%rbx,4),%xmm1
- movl (%rsi),%eax
- movq %rcx,%rbx
- xorq %rcx,%rcx
- subq $16,%r11
- movb %bl,%cl
- testq $-16,%r11
- jnz L$oop16
-
- psllq $8,%xmm1
- pxor %xmm0,%xmm2
- pxor %xmm1,%xmm2
- movdqu %xmm2,(%r13,%r12,1)
- leaq 16(%r12),%r12
-
- cmpq $0,%r11
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r9b,%r12b
+ movq %r8,%r10
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r10b
+ movl (%rdi,%r10,4),%r11d
+ cmpq %r10,%r12
+ movl %r9d,(%rdi,%r12,4)
+ cmoveq %r9,%r11
+ movl %r13d,(%rdi,%r8,4)
+ addb %r9b,%r13b
+ movb (%rdi,%r13,4),%al
+ addb %r11b,%r12b
+ movq %r10,%r8
+ movl (%rdi,%r12,4),%r13d
+ rorq $8,%rax
+ incb %r8b
+ movl (%rdi,%r8,4),%r9d
+ cmpq %r8,%r12
+ movl %r11d,(%rdi,%r12,4)
+ cmoveq %r11,%r9
+ movl %r13d,(%rdi,%r10,4)
+ addb %r11b,%r13b
+ movb (%rdi,%r13,4),%al
+ rorq $8,%rax
+ subq $8,%rsi
+
+ xorq (%rdx),%rax
+ addq $8,%rdx
+ movq %rax,(%rcx)
+ addq $8,%rcx
+
+ testq $-8,%rsi
+ jnz L$loop8
+ cmpq $0,%rsi
jne L$loop1
jmp L$exit
.p2align 4
L$loop1:
- addb %al,%cl
- movl (%rdi,%rcx,4),%edx
- movl %eax,(%rdi,%rcx,4)
- movl %edx,(%rdi,%r10,4)
- addb %dl,%al
- incb %r10b
- movl (%rdi,%rax,4),%edx
- movl (%rdi,%r10,4),%eax
- xorb (%r12),%dl
- movb %dl,(%r13,%r12,1)
- leaq 1(%r12),%r12
- decq %r11
+ addb %r9b,%r12b
+ movl (%rdi,%r12,4),%r13d
+ movl %r9d,(%rdi,%r12,4)
+ movl %r13d,(%rdi,%r8,4)
+ addb %r13b,%r9b
+ incb %r8b
+ movl (%rdi,%r9,4),%r13d
+ movl (%rdi,%r8,4),%r9d
+ xorb (%rdx),%r13b
+ incq %rdx
+ movb %r13b,(%rcx)
+ incq %rcx
+ decq %rsi
jnz L$loop1
jmp L$exit
.p2align 4
L$RC4_CHAR:
- addb $1,%r10b
- movzbl (%rdi,%r10,1),%eax
- testq $-8,%r11
+ addb $1,%r8b
+ movzbl (%rdi,%r8,1),%r9d
+ testq $-8,%rsi
jz L$cloop1
+ cmpl $0,260(%rdi)
+ jnz L$cloop1
jmp L$cloop8
.p2align 4
L$cloop8:
- movl (%r12),%r8d
- movl 4(%r12),%r9d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ movl (%rdx),%eax
+ movl 4(%rdx),%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne L$cmov0
- movq %rax,%rbx
+ movq %r9,%r11
L$cmov0:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne L$cmov1
- movq %rbx,%rax
+ movq %r11,%r9
L$cmov1:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne L$cmov2
- movq %rax,%rbx
+ movq %r9,%r11
L$cmov2:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne L$cmov3
- movq %rbx,%rax
+ movq %r11,%r9
L$cmov3:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r8b
- rorl $8,%r8d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%al
+ rorl $8,%eax
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne L$cmov4
- movq %rax,%rbx
+ movq %r9,%r11
L$cmov4:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne L$cmov5
- movq %rbx,%rax
+ movq %r11,%r9
L$cmov5:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %al,%cl
- leaq 1(%r10),%rsi
- movzbl (%rdi,%rcx,1),%edx
- movzbl %sil,%esi
- movzbl (%rdi,%rsi,1),%ebx
- movb %al,(%rdi,%rcx,1)
- cmpq %rsi,%rcx
- movb %dl,(%rdi,%r10,1)
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r9b,%r12b
+ leaq 1(%r8),%r10
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r10b,%r10d
+ movzbl (%rdi,%r10,1),%r11d
+ movb %r9b,(%rdi,%r12,1)
+ cmpq %r10,%r12
+ movb %r13b,(%rdi,%r8,1)
jne L$cmov6
- movq %rax,%rbx
+ movq %r9,%r11
L$cmov6:
- addb %al,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- addb %bl,%cl
- leaq 1(%rsi),%r10
- movzbl (%rdi,%rcx,1),%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%r10,1),%eax
- movb %bl,(%rdi,%rcx,1)
- cmpq %r10,%rcx
- movb %dl,(%rdi,%rsi,1)
+ addb %r9b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ addb %r11b,%r12b
+ leaq 1(%r10),%r8
+ movzbl (%rdi,%r12,1),%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r8,1),%r9d
+ movb %r11b,(%rdi,%r12,1)
+ cmpq %r8,%r12
+ movb %r13b,(%rdi,%r10,1)
jne L$cmov7
- movq %rbx,%rax
+ movq %r11,%r9
L$cmov7:
- addb %bl,%dl
- xorb (%rdi,%rdx,1),%r9b
- rorl $8,%r9d
- leaq -8(%r11),%r11
- movl %r8d,(%r13)
- leaq 8(%r12),%r12
- movl %r9d,4(%r13)
- leaq 8(%r13),%r13
-
- testq $-8,%r11
+ addb %r11b,%r13b
+ xorb (%rdi,%r13,1),%bl
+ rorl $8,%ebx
+ leaq -8(%rsi),%rsi
+ movl %eax,(%rcx)
+ leaq 8(%rdx),%rdx
+ movl %ebx,4(%rcx)
+ leaq 8(%rcx),%rcx
+
+ testq $-8,%rsi
jnz L$cloop8
- cmpq $0,%r11
+ cmpq $0,%rsi
jne L$cloop1
jmp L$exit
.p2align 4
L$cloop1:
- addb %al,%cl
- movzbl %cl,%ecx
- movzbl (%rdi,%rcx,1),%edx
- movb %al,(%rdi,%rcx,1)
- movb %dl,(%rdi,%r10,1)
- addb %al,%dl
- addb $1,%r10b
- movzbl %dl,%edx
- movzbl %r10b,%r10d
- movzbl (%rdi,%rdx,1),%edx
- movzbl (%rdi,%r10,1),%eax
- xorb (%r12),%dl
- leaq 1(%r12),%r12
- movb %dl,(%r13)
- leaq 1(%r13),%r13
- subq $1,%r11
+ addb %r9b,%r12b
+ movzbl (%rdi,%r12,1),%r13d
+ movb %r9b,(%rdi,%r12,1)
+ movb %r13b,(%rdi,%r8,1)
+ addb %r9b,%r13b
+ addb $1,%r8b
+ movzbl %r13b,%r13d
+ movzbl %r8b,%r8d
+ movzbl (%rdi,%r13,1),%r13d
+ movzbl (%rdi,%r8,1),%r9d
+ xorb (%rdx),%r13b
+ leaq 1(%rdx),%rdx
+ movb %r13b,(%rcx)
+ leaq 1(%rcx),%rcx
+ subq $1,%rsi
jnz L$cloop1
jmp L$exit
.p2align 4
L$exit:
- subb $1,%r10b
- movl %r10d,-8(%rdi)
- movl %ecx,-4(%rdi)
+ subb $1,%r8b
+ movl %r8d,-8(%rdi)
+ movl %r12d,-4(%rdi)
movq (%rsp),%r13
movq 8(%rsp),%r12
@@ -526,10 +330,11 @@ L$exit:
L$epilogue:
.byte 0xf3,0xc3
-.globl _private_RC4_set_key
+
+.globl _RC4_set_key
.p2align 4
-_private_RC4_set_key:
+_RC4_set_key:
leaq 8(%rdi),%rdi
leaq (%rdx,%rsi,1),%rdx
negq %rsi
@@ -541,8 +346,11 @@ _private_RC4_set_key:
movl _OPENSSL_ia32cap_P(%rip),%r8d
btl $20,%r8d
- jc L$c1stloop
- jmp L$w1stloop
+ jnc L$w1stloop
+ btl $30,%r8d
+ setc %r9b
+ movl %r9d,260(%rdi)
+ jmp L$c1stloop
.p2align 4
L$w1stloop:
@@ -605,19 +413,18 @@ _RC4_options:
leaq L$opts(%rip),%rax
movl _OPENSSL_ia32cap_P(%rip),%edx
btl $20,%edx
- jc L$8xchar
- btl $30,%edx
jnc L$done
- addq $25,%rax
- .byte 0xf3,0xc3
-L$8xchar:
addq $12,%rax
+ btl $30,%edx
+ jnc L$done
+ addq $13,%rax
L$done:
.byte 0xf3,0xc3
.p2align 6
L$opts:
.byte 114,99,52,40,56,120,44,105,110,116,41,0
.byte 114,99,52,40,56,120,44,99,104,97,114,41,0
-.byte 114,99,52,40,49,54,120,44,105,110,116,41,0
+.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6
+
diff --git a/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s b/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
index 9bb9bf0f22..f9dc2568e5 100644
--- a/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
+++ b/deps/openssl/asm/x64-macosx-gas/sha/sha1-x86_64.s
@@ -1,23 +1,12 @@
.text
-
-
.globl _sha1_block_data_order
.p2align 4
_sha1_block_data_order:
- movl _OPENSSL_ia32cap_P+0(%rip),%r9d
- movl _OPENSSL_ia32cap_P+4(%rip),%r8d
- testl $512,%r8d
- jz L$ialu
- jmp _ssse3_shortcut
-
-.p2align 4
-L$ialu:
pushq %rbx
pushq %rbp
pushq %r12
- pushq %r13
movq %rsp,%r11
movq %rdi,%r8
subq $72,%rsp
@@ -27,2466 +16,1268 @@ L$ialu:
movq %r11,64(%rsp)
L$prologue:
- movl 0(%r8),%esi
- movl 4(%r8),%edi
- movl 8(%r8),%r11d
- movl 12(%r8),%r12d
- movl 16(%r8),%r13d
- jmp L$loop
-
-.p2align 4
+ movl 0(%r8),%edx
+ movl 4(%r8),%esi
+ movl 8(%r8),%edi
+ movl 12(%r8),%ebp
+ movl 16(%r8),%r11d
+.p2align 2
L$loop:
- movl 0(%r9),%edx
- bswapl %edx
- movl %edx,0(%rsp)
- movl %r11d,%eax
- movl 4(%r9),%ebp
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r13,1),%r13d
- andl %edi,%eax
- movl %ebp,4(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 8(%r9),%edx
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r12,1),%r12d
- andl %esi,%eax
- movl %edx,8(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 12(%r9),%ebp
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r11,1),%r11d
- andl %r13d,%eax
- movl %ebp,12(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 16(%r9),%edx
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rdi,1),%edi
- andl %r12d,%eax
- movl %edx,16(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 20(%r9),%ebp
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rsi,1),%esi
- andl %r11d,%eax
- movl %ebp,20(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl %r11d,%eax
- movl 24(%r9),%edx
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r13,1),%r13d
- andl %edi,%eax
- movl %edx,24(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 28(%r9),%ebp
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r12,1),%r12d
- andl %esi,%eax
- movl %ebp,28(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 32(%r9),%edx
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r11,1),%r11d
- andl %r13d,%eax
- movl %edx,32(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 36(%r9),%ebp
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rdi,1),%edi
- andl %r12d,%eax
- movl %ebp,36(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 40(%r9),%edx
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rsi,1),%esi
- andl %r11d,%eax
- movl %edx,40(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl %r11d,%eax
- movl 44(%r9),%ebp
- movl %esi,%ecx
- xorl %r12d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r13,1),%r13d
- andl %edi,%eax
- movl %ebp,44(%rsp)
- addl %ecx,%r13d
- xorl %r12d,%eax
- roll $30,%edi
- addl %eax,%r13d
- movl %edi,%eax
- movl 48(%r9),%edx
- movl %r13d,%ecx
- xorl %r11d,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%r12,1),%r12d
- andl %esi,%eax
- movl %edx,48(%rsp)
- addl %ecx,%r12d
- xorl %r11d,%eax
- roll $30,%esi
- addl %eax,%r12d
- movl %esi,%eax
- movl 52(%r9),%ebp
- movl %r12d,%ecx
- xorl %edi,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%r11,1),%r11d
- andl %r13d,%eax
- movl %ebp,52(%rsp)
- addl %ecx,%r11d
- xorl %edi,%eax
- roll $30,%r13d
- addl %eax,%r11d
- movl %r13d,%eax
- movl 56(%r9),%edx
- movl %r11d,%ecx
- xorl %esi,%eax
- bswapl %edx
- roll $5,%ecx
- leal 1518500249(%rbp,%rdi,1),%edi
- andl %r12d,%eax
- movl %edx,56(%rsp)
- addl %ecx,%edi
- xorl %esi,%eax
- roll $30,%r12d
- addl %eax,%edi
- movl %r12d,%eax
- movl 60(%r9),%ebp
- movl %edi,%ecx
- xorl %r13d,%eax
- bswapl %ebp
- roll $5,%ecx
- leal 1518500249(%rdx,%rsi,1),%esi
- andl %r11d,%eax
- movl %ebp,60(%rsp)
- addl %ecx,%esi
- xorl %r13d,%eax
- roll $30,%r11d
- addl %eax,%esi
- movl 0(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- xorl 32(%rsp),%edx
- andl %edi,%eax
- leal 1518500249(%rbp,%r13,1),%r13d
- xorl 52(%rsp),%edx
- xorl %r12d,%eax
- roll $1,%edx
- addl %ecx,%r13d
- roll $30,%edi
- movl %edx,0(%rsp)
- addl %eax,%r13d
- movl 4(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- xorl 36(%rsp),%ebp
- andl %esi,%eax
- leal 1518500249(%rdx,%r12,1),%r12d
- xorl 56(%rsp),%ebp
- xorl %r11d,%eax
- roll $1,%ebp
- addl %ecx,%r12d
- roll $30,%esi
- movl %ebp,4(%rsp)
- addl %eax,%r12d
- movl 8(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- xorl 40(%rsp),%edx
- andl %r13d,%eax
- leal 1518500249(%rbp,%r11,1),%r11d
- xorl 60(%rsp),%edx
- xorl %edi,%eax
- roll $1,%edx
- addl %ecx,%r11d
- roll $30,%r13d
- movl %edx,8(%rsp)
- addl %eax,%r11d
- movl 12(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- xorl 44(%rsp),%ebp
- andl %r12d,%eax
- leal 1518500249(%rdx,%rdi,1),%edi
- xorl 0(%rsp),%ebp
- xorl %esi,%eax
- roll $1,%ebp
- addl %ecx,%edi
- roll $30,%r12d
- movl %ebp,12(%rsp)
- addl %eax,%edi
- movl 16(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- xorl 48(%rsp),%edx
- andl %r11d,%eax
- leal 1518500249(%rbp,%rsi,1),%esi
- xorl 4(%rsp),%edx
- xorl %r13d,%eax
- roll $1,%edx
- addl %ecx,%esi
- roll $30,%r11d
- movl %edx,16(%rsp)
- addl %eax,%esi
- movl 20(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 28(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r13,1),%r13d
- xorl 52(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 8(%rsp),%ebp
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 32(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r12,1),%r12d
- xorl 56(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 12(%rsp),%edx
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 36(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r11,1),%r11d
- xorl 60(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 16(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 40(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rdi,1),%edi
- xorl 0(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 20(%rsp),%edx
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 44(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl 4(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 24(%rsp),%ebp
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,36(%rsp)
- movl 40(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 48(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r13,1),%r13d
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 28(%rsp),%edx
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,40(%rsp)
- movl 44(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 52(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 32(%rsp),%ebp
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,44(%rsp)
- movl 48(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 56(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 36(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,48(%rsp)
- movl 52(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 60(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rdi,1),%edi
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 40(%rsp),%ebp
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,52(%rsp)
- movl 56(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 0(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rsi,1),%esi
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 44(%rsp),%edx
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,56(%rsp)
- movl 60(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 4(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r13,1),%r13d
- xorl 28(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 48(%rsp),%ebp
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,60(%rsp)
- movl 0(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 8(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r12,1),%r12d
- xorl 32(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 52(%rsp),%edx
- roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,0(%rsp)
- movl 4(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 12(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r11,1),%r11d
- xorl 36(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 56(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,4(%rsp)
- movl 8(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 16(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rdi,1),%edi
- xorl 40(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 60(%rsp),%edx
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,8(%rsp)
- movl 12(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 20(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rsi,1),%esi
- xorl 44(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 0(%rsp),%ebp
- roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,12(%rsp)
- movl 16(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 24(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r13,1),%r13d
- xorl 48(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 4(%rsp),%edx
- roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,16(%rsp)
- movl 20(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 28(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%r12,1),%r12d
- xorl 52(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 8(%rsp),%ebp
+ movl 0(%r9),%eax
+ bswapl %eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl %edi,%ebx
+ movl 4(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
+ andl %esi,%ebx
+ movl %eax,4(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 32(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%r11,1),%r11d
- xorl 56(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 12(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 36(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal 1859775393(%rdx,%rdi,1),%edi
- xorl 60(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 16(%rsp),%ebp
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl %esi,%ebx
+ movl 8(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,8(%rsp)
+ addl %ebp,%r11d
+ xorl %edi,%ebx
+ roll $30,%edx
+ addl %ebx,%r11d
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 12(%r9),%eax
+ movl %r11d,%edi
+ xorl %esi,%ebx
+ bswapl %eax
+ roll $5,%edi
+ andl %r12d,%ebx
+ movl %eax,12(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 40(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal 1859775393(%rbp,%rsi,1),%esi
- xorl 0(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 20(%rsp),%edx
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl %r12d,%ebx
+ movl 16(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
+ andl %r11d,%ebx
+ movl %eax,16(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %r11d,%eax
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
movl %r11d,%ebx
- xorl 44(%rsp),%ebp
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 4(%rsp),%ebp
+ movl 20(%r9),%eax
+ movl %edi,%edx
xorl %r12d,%ebx
- leal -1894007588(%rdx,%r13,1),%r13d
- roll $5,%ecx
- xorl 24(%rsp),%ebp
- addl %eax,%r13d
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,20(%rsp)
+ addl %edx,%esi
+ xorl %r12d,%ebx
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 24(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
andl %edi,%ebx
- roll $1,%ebp
- addl %ebx,%r13d
+ movl %eax,24(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
roll $30,%edi
- movl %ebp,36(%rsp)
- addl %ecx,%r13d
- movl 40(%rsp),%edx
- movl %edi,%eax
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
movl %edi,%ebx
- xorl 48(%rsp),%edx
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 8(%rsp),%edx
- xorl %r11d,%ebx
- leal -1894007588(%rbp,%r12,1),%r12d
- roll $5,%ecx
- xorl 28(%rsp),%edx
- addl %eax,%r12d
+ movl 28(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
andl %esi,%ebx
- roll $1,%edx
- addl %ebx,%r12d
+ movl %eax,28(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- movl %edx,40(%rsp)
- addl %ecx,%r12d
- movl 44(%rsp),%ebp
- movl %esi,%eax
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
movl %esi,%ebx
- xorl 52(%rsp),%ebp
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 12(%rsp),%ebp
+ movl 32(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,32(%rsp)
+ addl %ebp,%r11d
xorl %edi,%ebx
- leal -1894007588(%rdx,%r11,1),%r11d
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%ebp
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %ebp,44(%rsp)
- addl %ecx,%r11d
- movl 48(%rsp),%edx
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 56(%rsp),%edx
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 16(%rsp),%edx
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 36(%r9),%eax
+ movl %r11d,%edi
xorl %esi,%ebx
- leal -1894007588(%rbp,%rdi,1),%edi
- roll $5,%ecx
- xorl 36(%rsp),%edx
- addl %eax,%edi
+ bswapl %eax
+ roll $5,%edi
andl %r12d,%ebx
- roll $1,%edx
- addl %ebx,%edi
+ movl %eax,36(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- movl %edx,48(%rsp)
- addl %ecx,%edi
- movl 52(%rsp),%ebp
- movl %r12d,%eax
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
movl %r12d,%ebx
- xorl 60(%rsp),%ebp
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 20(%rsp),%ebp
- xorl %r13d,%ebx
- leal -1894007588(%rdx,%rsi,1),%esi
- roll $5,%ecx
- xorl 40(%rsp),%ebp
- addl %eax,%esi
+ movl 40(%r9),%eax
+ movl %ebp,%esi
+ xorl %edx,%ebx
+ bswapl %eax
+ roll $5,%esi
andl %r11d,%ebx
- roll $1,%ebp
- addl %ebx,%esi
+ movl %eax,40(%rsp)
+ addl %esi,%edi
+ xorl %edx,%ebx
roll $30,%r11d
- movl %ebp,52(%rsp)
- addl %ecx,%esi
- movl 56(%rsp),%edx
- movl %r11d,%eax
+ addl %ebx,%edi
+ leal 1518500249(%rax,%rdx,1),%esi
movl %r11d,%ebx
- xorl 0(%rsp),%edx
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 24(%rsp),%edx
+ movl 44(%r9),%eax
+ movl %edi,%edx
+ xorl %r12d,%ebx
+ bswapl %eax
+ roll $5,%edx
+ andl %ebp,%ebx
+ movl %eax,44(%rsp)
+ addl %edx,%esi
xorl %r12d,%ebx
- leal -1894007588(%rbp,%r13,1),%r13d
- roll $5,%ecx
- xorl 44(%rsp),%edx
- addl %eax,%r13d
+ roll $30,%ebp
+ addl %ebx,%esi
+ leal 1518500249(%rax,%r12,1),%edx
+ movl %ebp,%ebx
+ movl 48(%r9),%eax
+ movl %esi,%r12d
+ xorl %r11d,%ebx
+ bswapl %eax
+ roll $5,%r12d
andl %edi,%ebx
- roll $1,%edx
- addl %ebx,%r13d
+ movl %eax,48(%rsp)
+ addl %r12d,%edx
+ xorl %r11d,%ebx
roll $30,%edi
- movl %edx,56(%rsp)
- addl %ecx,%r13d
- movl 60(%rsp),%ebp
- movl %edi,%eax
+ addl %ebx,%edx
+ leal 1518500249(%rax,%r11,1),%r12d
movl %edi,%ebx
- xorl 4(%rsp),%ebp
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 28(%rsp),%ebp
- xorl %r11d,%ebx
- leal -1894007588(%rdx,%r12,1),%r12d
- roll $5,%ecx
- xorl 48(%rsp),%ebp
- addl %eax,%r12d
+ movl 52(%r9),%eax
+ movl %edx,%r11d
+ xorl %ebp,%ebx
+ bswapl %eax
+ roll $5,%r11d
andl %esi,%ebx
- roll $1,%ebp
- addl %ebx,%r12d
+ movl %eax,52(%rsp)
+ addl %r11d,%r12d
+ xorl %ebp,%ebx
roll $30,%esi
- movl %ebp,60(%rsp)
- addl %ecx,%r12d
- movl 0(%rsp),%edx
- movl %esi,%eax
+ addl %ebx,%r12d
+ leal 1518500249(%rax,%rbp,1),%r11d
movl %esi,%ebx
- xorl 8(%rsp),%edx
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 32(%rsp),%edx
+ movl 56(%r9),%eax
+ movl %r12d,%ebp
+ xorl %edi,%ebx
+ bswapl %eax
+ roll $5,%ebp
+ andl %edx,%ebx
+ movl %eax,56(%rsp)
+ addl %ebp,%r11d
xorl %edi,%ebx
- leal -1894007588(%rbp,%r11,1),%r11d
- roll $5,%ecx
- xorl 52(%rsp),%edx
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%edx
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %edx,0(%rsp)
- addl %ecx,%r11d
- movl 4(%rsp),%ebp
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 12(%rsp),%ebp
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 36(%rsp),%ebp
+ leal 1518500249(%rax,%rdi,1),%ebp
+ movl %edx,%ebx
+ movl 60(%r9),%eax
+ movl %r11d,%edi
xorl %esi,%ebx
- leal -1894007588(%rdx,%rdi,1),%edi
- roll $5,%ecx
- xorl 56(%rsp),%ebp
- addl %eax,%edi
+ bswapl %eax
+ roll $5,%edi
andl %r12d,%ebx
- roll $1,%ebp
- addl %ebx,%edi
+ movl %eax,60(%rsp)
+ addl %edi,%ebp
+ xorl %esi,%ebx
roll $30,%r12d
- movl %ebp,4(%rsp)
- addl %ecx,%edi
- movl 8(%rsp),%edx
- movl %r12d,%eax
+ addl %ebx,%ebp
+ leal 1518500249(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
movl %r12d,%ebx
- xorl 16(%rsp),%edx
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 40(%rsp),%edx
- xorl %r13d,%ebx
- leal -1894007588(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl 60(%rsp),%edx
- addl %eax,%esi
+ movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
andl %r11d,%ebx
- roll $1,%edx
- addl %ebx,%esi
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ xorl %edx,%ebx
roll $30,%r11d
- movl %edx,8(%rsp)
- addl %ecx,%esi
- movl 12(%rsp),%ebp
- movl %r11d,%eax
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1518500249(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
movl %r11d,%ebx
- xorl 20(%rsp),%ebp
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 44(%rsp),%ebp
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edx
+ xorl 36(%rsp),%eax
+ andl %ebp,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
xorl %r12d,%ebx
- leal -1894007588(%rdx,%r13,1),%r13d
- roll $5,%ecx
- xorl 0(%rsp),%ebp
- addl %eax,%r13d
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1518500249(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
andl %edi,%ebx
- roll $1,%ebp
- addl %ebx,%r13d
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ xorl %r11d,%ebx
roll $30,%edi
- movl %ebp,12(%rsp)
- addl %ecx,%r13d
- movl 16(%rsp),%edx
- movl %edi,%eax
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1518500249(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
movl %edi,%ebx
- xorl 24(%rsp),%edx
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 48(%rsp),%edx
- xorl %r11d,%ebx
- leal -1894007588(%rbp,%r12,1),%r12d
- roll $5,%ecx
- xorl 4(%rsp),%edx
- addl %eax,%r12d
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
andl %esi,%ebx
- roll $1,%edx
- addl %ebx,%r12d
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ xorl %ebp,%ebx
roll $30,%esi
- movl %edx,16(%rsp)
- addl %ecx,%r12d
- movl 20(%rsp),%ebp
- movl %esi,%eax
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1518500249(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
movl %esi,%ebx
- xorl 28(%rsp),%ebp
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 52(%rsp),%ebp
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%ebp
+ xorl 48(%rsp),%eax
+ andl %edx,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
xorl %edi,%ebx
- leal -1894007588(%rdx,%r11,1),%r11d
- roll $5,%ecx
- xorl 8(%rsp),%ebp
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%ebp
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %ebp,20(%rsp)
- addl %ecx,%r11d
- movl 24(%rsp),%edx
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 32(%rsp),%edx
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 56(%rsp),%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
xorl %esi,%ebx
- leal -1894007588(%rbp,%rdi,1),%edi
- roll $5,%ecx
- xorl 12(%rsp),%edx
- addl %eax,%edi
- andl %r12d,%ebx
- roll $1,%edx
- addl %ebx,%edi
+ addl %edi,%ebp
+ xorl 8(%rsp),%eax
roll $30,%r12d
- movl %edx,24(%rsp)
- addl %ecx,%edi
- movl 28(%rsp),%ebp
- movl %r12d,%eax
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
movl %r12d,%ebx
- xorl 36(%rsp),%ebp
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 60(%rsp),%ebp
- xorl %r13d,%ebx
- leal -1894007588(%rdx,%rsi,1),%esi
- roll $5,%ecx
- xorl 16(%rsp),%ebp
- addl %eax,%esi
- andl %r11d,%ebx
- roll $1,%ebp
- addl %ebx,%esi
+ movl %ebp,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
roll $30,%r11d
- movl %ebp,28(%rsp)
- addl %ecx,%esi
- movl 32(%rsp),%edx
- movl %r11d,%eax
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
movl %r11d,%ebx
- xorl 40(%rsp),%edx
- andl %r12d,%eax
- movl %esi,%ecx
- xorl 0(%rsp),%edx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 60(%rsp),%eax
xorl %r12d,%ebx
- leal -1894007588(%rbp,%r13,1),%r13d
- roll $5,%ecx
- xorl 20(%rsp),%edx
- addl %eax,%r13d
- andl %edi,%ebx
- roll $1,%edx
- addl %ebx,%r13d
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
roll $30,%edi
- movl %edx,32(%rsp)
- addl %ecx,%r13d
- movl 36(%rsp),%ebp
- movl %edi,%eax
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
movl %edi,%ebx
- xorl 44(%rsp),%ebp
- andl %r11d,%eax
- movl %r13d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r11d,%ebx
- leal -1894007588(%rdx,%r12,1),%r12d
- roll $5,%ecx
- xorl 24(%rsp),%ebp
- addl %eax,%r12d
- andl %esi,%ebx
- roll $1,%ebp
- addl %ebx,%r12d
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
roll $30,%esi
- movl %ebp,36(%rsp)
- addl %ecx,%r12d
- movl 40(%rsp),%edx
- movl %esi,%eax
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
movl %esi,%ebx
- xorl 48(%rsp),%edx
- andl %edi,%eax
- movl %r12d,%ecx
- xorl 8(%rsp),%edx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 8(%rsp),%eax
xorl %edi,%ebx
- leal -1894007588(%rbp,%r11,1),%r11d
- roll $5,%ecx
- xorl 28(%rsp),%edx
- addl %eax,%r11d
- andl %r13d,%ebx
- roll $1,%edx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
addl %ebx,%r11d
- roll $30,%r13d
- movl %edx,40(%rsp)
- addl %ecx,%r11d
- movl 44(%rsp),%ebp
- movl %r13d,%eax
- movl %r13d,%ebx
- xorl 52(%rsp),%ebp
- andl %esi,%eax
- movl %r11d,%ecx
- xorl 12(%rsp),%ebp
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
xorl %esi,%ebx
- leal -1894007588(%rdx,%rdi,1),%edi
- roll $5,%ecx
- xorl 32(%rsp),%ebp
- addl %eax,%edi
- andl %r12d,%ebx
- roll $1,%ebp
- addl %ebx,%edi
+ addl %edi,%ebp
+ xorl 32(%rsp),%eax
roll $30,%r12d
- movl %ebp,44(%rsp)
- addl %ecx,%edi
- movl 48(%rsp),%edx
- movl %r12d,%eax
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
movl %r12d,%ebx
- xorl 56(%rsp),%edx
- andl %r13d,%eax
- movl %edi,%ecx
- xorl 16(%rsp),%edx
- xorl %r13d,%ebx
- leal -1894007588(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl 36(%rsp),%edx
- addl %eax,%esi
- andl %r11d,%ebx
- roll $1,%edx
- addl %ebx,%esi
+ movl %ebp,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
roll $30,%r11d
- movl %edx,48(%rsp)
- addl %ecx,%esi
- movl 52(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 60(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r13,1),%r13d
- xorl 20(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 40(%rsp),%ebp
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,52(%rsp)
- movl 56(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 0(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r12,1),%r12d
- xorl 24(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 44(%rsp),%edx
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,56(%rsp)
- movl 60(%rsp),%ebp
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r11,1),%r11d
- xorl 28(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 48(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,60(%rsp)
- movl 0(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 8(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rdi,1),%edi
- xorl 32(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 52(%rsp),%edx
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 0(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 8(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 52(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 4(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 12(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 36(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 56(%rsp),%eax
roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,0(%rsp)
- movl 4(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 12(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rsi,1),%esi
- xorl 36(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 56(%rsp),%ebp
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 8(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 16(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 40(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 60(%rsp),%eax
roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,4(%rsp)
- movl 8(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 16(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r13,1),%r13d
- xorl 40(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 60(%rsp),%edx
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal 1859775393(%rax,%rdx,1),%esi
+ movl 12(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ roll $5,%edx
+ xorl 44(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 0(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal 1859775393(%rax,%r12,1),%edx
+ movl 16(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 48(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 4(%rsp),%eax
roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,8(%rsp)
- movl 12(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 20(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r12,1),%r12d
- xorl 44(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 0(%rsp),%ebp
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal 1859775393(%rax,%r11,1),%r12d
+ movl 20(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 52(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 8(%rsp),%eax
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl %ebp,12(%rsp)
- movl 16(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 24(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r11,1),%r11d
- xorl 48(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 4(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl %edx,16(%rsp)
- movl 20(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 28(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rdi,1),%edi
- xorl 52(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 8(%rsp),%ebp
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal 1859775393(%rax,%rbp,1),%r11d
+ movl 24(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ roll $5,%ebp
+ xorl 56(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 12(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal 1859775393(%rax,%rdi,1),%ebp
+ movl 28(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ addl %edi,%ebp
+ xorl 16(%rsp),%eax
roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %ebp,20(%rsp)
- movl 24(%rsp),%edx
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 32(%rsp),%edx
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rsi,1),%esi
- xorl 56(%rsp),%edx
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 12(%rsp),%edx
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal 1859775393(%rax,%rsi,1),%edi
+ movl 32(%rsp),%eax
+ movl %r12d,%ebx
+ movl %ebp,%esi
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 20(%rsp),%eax
roll $30,%r11d
- addl %eax,%esi
- roll $1,%edx
- movl %edx,24(%rsp)
- movl 28(%rsp),%ebp
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 36(%rsp),%ebp
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r13,1),%r13d
- xorl 60(%rsp),%ebp
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 16(%rsp),%ebp
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 36(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 44(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 4(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 24(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,36(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 40(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 48(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 8(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 28(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%edi
- addl %eax,%r13d
- roll $1,%ebp
- movl %ebp,28(%rsp)
- movl 32(%rsp),%edx
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 40(%rsp),%edx
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r12,1),%r12d
- xorl 0(%rsp),%edx
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 20(%rsp),%edx
+ movl %eax,40(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 44(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 52(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 12(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 32(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%edx
- movl %edx,32(%rsp)
- movl 36(%rsp),%ebp
- movl %esi,%eax
+ movl %eax,44(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 48(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 56(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 16(%rsp),%eax
+ orl %esi,%ecx
+ roll $5,%ebp
+ xorl 36(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,48(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 52(%rsp),%eax
+ movl %r12d,%ebx
movl %r12d,%ecx
- xorl 44(%rsp),%ebp
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r11,1),%r11d
- xorl 4(%rsp),%ebp
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 24(%rsp),%ebp
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%ebp
- movl %ebp,36(%rsp)
- movl 40(%rsp),%edx
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 48(%rsp),%edx
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%rdi,1),%edi
- xorl 8(%rsp),%edx
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 28(%rsp),%edx
+ xorl 60(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 20(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 40(%rsp),%eax
+ andl %esi,%ecx
+ addl %edi,%ebp
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%r12d
- addl %eax,%edi
- roll $1,%edx
- movl %edx,40(%rsp)
- movl 44(%rsp),%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl 52(%rsp),%ebp
- xorl %r11d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rsi,1),%esi
- xorl 12(%rsp),%ebp
- xorl %r13d,%eax
- addl %ecx,%esi
- xorl 32(%rsp),%ebp
+ movl %eax,52(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 56(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 0(%rsp),%eax
+ movl %ebp,%esi
+ andl %r12d,%ebx
+ xorl 24(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 44(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%r11d
- addl %eax,%esi
- roll $1,%ebp
- movl %ebp,44(%rsp)
- movl 48(%rsp),%edx
- movl %r11d,%eax
- movl %esi,%ecx
- xorl 56(%rsp),%edx
- xorl %edi,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r13,1),%r13d
- xorl 16(%rsp),%edx
- xorl %r12d,%eax
- addl %ecx,%r13d
- xorl 36(%rsp),%edx
+ movl %eax,56(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 60(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 4(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 28(%rsp),%eax
+ orl %r11d,%ecx
+ roll $5,%edx
+ xorl 48(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,60(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 0(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 8(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 32(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 52(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%edi
- addl %eax,%r13d
- roll $1,%edx
- movl %edx,48(%rsp)
- movl 52(%rsp),%ebp
- movl %edi,%eax
- movl %r13d,%ecx
- xorl 60(%rsp),%ebp
- xorl %esi,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%r12,1),%r12d
- xorl 20(%rsp),%ebp
- xorl %r11d,%eax
- addl %ecx,%r12d
- xorl 40(%rsp),%ebp
+ movl %eax,0(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 4(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 12(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 36(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 56(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
roll $30,%esi
- addl %eax,%r12d
- roll $1,%ebp
- movl 56(%rsp),%edx
- movl %esi,%eax
- movl %r12d,%ecx
- xorl 0(%rsp),%edx
- xorl %r13d,%eax
- roll $5,%ecx
- leal -899497514(%rbp,%r11,1),%r11d
- xorl 24(%rsp),%edx
- xorl %edi,%eax
- addl %ecx,%r11d
- xorl 44(%rsp),%edx
- roll $30,%r13d
- addl %eax,%r11d
- roll $1,%edx
- movl 60(%rsp),%ebp
- movl %r13d,%eax
- movl %r11d,%ecx
- xorl 4(%rsp),%ebp
- xorl %r12d,%eax
- roll $5,%ecx
- leal -899497514(%rdx,%rdi,1),%edi
- xorl 28(%rsp),%ebp
- xorl %esi,%eax
- addl %ecx,%edi
- xorl 48(%rsp),%ebp
- roll $30,%r12d
- addl %eax,%edi
- roll $1,%ebp
- movl %r12d,%eax
- movl %edi,%ecx
- xorl %r11d,%eax
- leal -899497514(%rbp,%rsi,1),%esi
- roll $5,%ecx
- xorl %r13d,%eax
- addl %ecx,%esi
- roll $30,%r11d
- addl %eax,%esi
- addl 0(%r8),%esi
- addl 4(%r8),%edi
- addl 8(%r8),%r11d
- addl 12(%r8),%r12d
- addl 16(%r8),%r13d
- movl %esi,0(%r8)
- movl %edi,4(%r8)
- movl %r11d,8(%r8)
- movl %r12d,12(%r8)
- movl %r13d,16(%r8)
-
- subq $1,%r10
- leaq 64(%r9),%r9
- jnz L$loop
-
- movq 64(%rsp),%rsi
- movq (%rsi),%r13
- movq 8(%rsi),%r12
- movq 16(%rsi),%rbp
- movq 24(%rsi),%rbx
- leaq 32(%rsi),%rsp
-L$epilogue:
- .byte 0xf3,0xc3
-
-
-.p2align 4
-sha1_block_data_order_ssse3:
-_ssse3_shortcut:
- pushq %rbx
- pushq %rbp
- pushq %r12
- leaq -64(%rsp),%rsp
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
-
- shlq $6,%r10
- addq %r9,%r10
- leaq K_XX_XX(%rip),%r11
-
- movl 0(%r8),%eax
- movl 4(%r8),%ebx
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl %ebx,%esi
- movl 16(%r8),%ebp
-
- movdqa 64(%r11),%xmm6
- movdqa 0(%r11),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
-.byte 102,15,56,0,198
- addq $64,%r9
-.byte 102,15,56,0,206
-.byte 102,15,56,0,214
-.byte 102,15,56,0,222
- paddd %xmm9,%xmm0
- paddd %xmm9,%xmm1
- paddd %xmm9,%xmm2
- movdqa %xmm0,0(%rsp)
- psubd %xmm9,%xmm0
- movdqa %xmm1,16(%rsp)
- psubd %xmm9,%xmm1
- movdqa %xmm2,32(%rsp)
- psubd %xmm9,%xmm2
- jmp L$oop_ssse3
-.p2align 4
-L$oop_ssse3:
- movdqa %xmm1,%xmm4
- addl 0(%rsp),%ebp
- xorl %edx,%ecx
- movdqa %xmm3,%xmm8
-.byte 102,15,58,15,224,8
- movl %eax,%edi
- roll $5,%eax
- paddd %xmm3,%xmm9
- andl %ecx,%esi
- xorl %edx,%ecx
- psrldq $4,%xmm8
- xorl %edx,%esi
- addl %eax,%ebp
- pxor %xmm0,%xmm4
- rorl $2,%ebx
- addl %esi,%ebp
- pxor %xmm2,%xmm8
- addl 4(%rsp),%edx
- xorl %ecx,%ebx
- movl %ebp,%esi
+ movl %eax,4(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 8(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 16(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 40(%rsp),%eax
+ orl %esi,%ecx
roll $5,%ebp
- pxor %xmm8,%xmm4
- andl %ebx,%edi
- xorl %ecx,%ebx
- movdqa %xmm9,48(%rsp)
- xorl %ecx,%edi
- addl %ebp,%edx
- movdqa %xmm4,%xmm10
- movdqa %xmm4,%xmm8
- rorl $7,%eax
- addl %edi,%edx
- addl 8(%rsp),%ecx
- xorl %ebx,%eax
- pslldq $12,%xmm10
- paddd %xmm4,%xmm4
- movl %edx,%edi
- roll $5,%edx
- andl %eax,%esi
- xorl %ebx,%eax
- psrld $31,%xmm8
- xorl %ebx,%esi
- addl %edx,%ecx
- movdqa %xmm10,%xmm9
- rorl $7,%ebp
- addl %esi,%ecx
- psrld $30,%xmm10
- por %xmm8,%xmm4
- addl 12(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%esi
- roll $5,%ecx
- pslld $2,%xmm9
- pxor %xmm10,%xmm4
- andl %ebp,%edi
- xorl %eax,%ebp
- movdqa 0(%r11),%xmm10
- xorl %eax,%edi
- addl %ecx,%ebx
- pxor %xmm9,%xmm4
- rorl $7,%edx
- addl %edi,%ebx
- movdqa %xmm2,%xmm5
- addl 16(%rsp),%eax
- xorl %ebp,%edx
- movdqa %xmm4,%xmm9
-.byte 102,15,58,15,233,8
- movl %ebx,%edi
- roll $5,%ebx
- paddd %xmm4,%xmm10
- andl %edx,%esi
- xorl %ebp,%edx
- psrldq $4,%xmm9
- xorl %ebp,%esi
- addl %ebx,%eax
- pxor %xmm1,%xmm5
- rorl $7,%ecx
- addl %esi,%eax
- pxor %xmm3,%xmm9
- addl 20(%rsp),%ebp
- xorl %edx,%ecx
- movl %eax,%esi
- roll $5,%eax
- pxor %xmm9,%xmm5
- andl %ecx,%edi
- xorl %edx,%ecx
- movdqa %xmm10,0(%rsp)
- xorl %edx,%edi
- addl %eax,%ebp
- movdqa %xmm5,%xmm8
- movdqa %xmm5,%xmm9
- rorl $7,%ebx
+ xorl 60(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,8(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 12(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 20(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 44(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 0(%rsp),%eax
+ andl %esi,%ecx
addl %edi,%ebp
- addl 24(%rsp),%edx
- xorl %ecx,%ebx
- pslldq $12,%xmm8
- paddd %xmm5,%xmm5
- movl %ebp,%edi
- roll $5,%ebp
- andl %ebx,%esi
- xorl %ecx,%ebx
- psrld $31,%xmm9
- xorl %ecx,%esi
- addl %ebp,%edx
- movdqa %xmm8,%xmm10
- rorl $7,%eax
- addl %esi,%edx
- psrld $30,%xmm8
- por %xmm9,%xmm5
- addl 28(%rsp),%ecx
- xorl %ebx,%eax
- movl %edx,%esi
- roll $5,%edx
- pslld $2,%xmm10
- pxor %xmm8,%xmm5
- andl %eax,%edi
- xorl %ebx,%eax
- movdqa 16(%r11),%xmm8
- xorl %ebx,%edi
- addl %edx,%ecx
- pxor %xmm10,%xmm5
- rorl $7,%ebp
- addl %edi,%ecx
- movdqa %xmm3,%xmm6
- addl 32(%rsp),%ebx
- xorl %eax,%ebp
- movdqa %xmm5,%xmm10
-.byte 102,15,58,15,242,8
- movl %ecx,%edi
- roll $5,%ecx
- paddd %xmm5,%xmm8
- andl %ebp,%esi
- xorl %eax,%ebp
- psrldq $4,%xmm10
- xorl %eax,%esi
- addl %ecx,%ebx
- pxor %xmm2,%xmm6
- rorl $7,%edx
- addl %esi,%ebx
- pxor %xmm4,%xmm10
- addl 36(%rsp),%eax
- xorl %ebp,%edx
- movl %ebx,%esi
- roll $5,%ebx
- pxor %xmm10,%xmm6
- andl %edx,%edi
- xorl %ebp,%edx
- movdqa %xmm8,16(%rsp)
- xorl %ebp,%edi
- addl %ebx,%eax
- movdqa %xmm6,%xmm9
- movdqa %xmm6,%xmm10
- rorl $7,%ecx
- addl %edi,%eax
- addl 40(%rsp),%ebp
- xorl %edx,%ecx
- pslldq $12,%xmm9
- paddd %xmm6,%xmm6
- movl %eax,%edi
- roll $5,%eax
- andl %ecx,%esi
- xorl %edx,%ecx
- psrld $31,%xmm10
- xorl %edx,%esi
- addl %eax,%ebp
- movdqa %xmm9,%xmm8
- rorl $7,%ebx
- addl %esi,%ebp
- psrld $30,%xmm9
- por %xmm10,%xmm6
- addl 44(%rsp),%edx
- xorl %ecx,%ebx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,12(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 16(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 24(%rsp),%eax
movl %ebp,%esi
- roll $5,%ebp
- pslld $2,%xmm8
- pxor %xmm9,%xmm6
- andl %ebx,%edi
- xorl %ecx,%ebx
- movdqa 16(%r11),%xmm9
- xorl %ecx,%edi
- addl %ebp,%edx
- pxor %xmm8,%xmm6
- rorl $7,%eax
- addl %edi,%edx
- movdqa %xmm4,%xmm7
- addl 48(%rsp),%ecx
- xorl %ebx,%eax
- movdqa %xmm6,%xmm8
-.byte 102,15,58,15,251,8
- movl %edx,%edi
- roll $5,%edx
- paddd %xmm6,%xmm9
- andl %eax,%esi
- xorl %ebx,%eax
- psrldq $4,%xmm8
- xorl %ebx,%esi
- addl %edx,%ecx
- pxor %xmm3,%xmm7
- rorl $7,%ebp
- addl %esi,%ecx
- pxor %xmm5,%xmm8
- addl 52(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%esi
- roll $5,%ecx
- pxor %xmm8,%xmm7
- andl %ebp,%edi
- xorl %eax,%ebp
- movdqa %xmm9,32(%rsp)
- xorl %eax,%edi
- addl %ecx,%ebx
- movdqa %xmm7,%xmm10
- movdqa %xmm7,%xmm8
- rorl $7,%edx
- addl %edi,%ebx
- addl 56(%rsp),%eax
- xorl %ebp,%edx
- pslldq $12,%xmm10
- paddd %xmm7,%xmm7
- movl %ebx,%edi
- roll $5,%ebx
- andl %edx,%esi
- xorl %ebp,%edx
- psrld $31,%xmm8
- xorl %ebp,%esi
- addl %ebx,%eax
- movdqa %xmm10,%xmm9
- rorl $7,%ecx
- addl %esi,%eax
- psrld $30,%xmm10
- por %xmm8,%xmm7
- addl 60(%rsp),%ebp
- xorl %edx,%ecx
- movl %eax,%esi
- roll $5,%eax
- pslld $2,%xmm9
- pxor %xmm10,%xmm7
- andl %ecx,%edi
- xorl %edx,%ecx
- movdqa 16(%r11),%xmm10
- xorl %edx,%edi
- addl %eax,%ebp
- pxor %xmm9,%xmm7
- rorl $7,%ebx
- addl %edi,%ebp
- movdqa %xmm7,%xmm9
- addl 0(%rsp),%edx
- pxor %xmm4,%xmm0
-.byte 102,68,15,58,15,206,8
- xorl %ecx,%ebx
- movl %ebp,%edi
- roll $5,%ebp
- pxor %xmm1,%xmm0
- andl %ebx,%esi
- xorl %ecx,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm7,%xmm10
- xorl %ecx,%esi
- addl %ebp,%edx
- pxor %xmm9,%xmm0
- rorl $7,%eax
- addl %esi,%edx
- addl 4(%rsp),%ecx
- xorl %ebx,%eax
- movdqa %xmm0,%xmm9
- movdqa %xmm10,48(%rsp)
- movl %edx,%esi
+ andl %r12d,%ebx
+ xorl 48(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 4(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,16(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 20(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 28(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 52(%rsp),%eax
+ orl %r11d,%ecx
roll $5,%edx
- andl %eax,%edi
- xorl %ebx,%eax
- pslld $2,%xmm0
- xorl %ebx,%edi
- addl %edx,%ecx
- psrld $30,%xmm9
- rorl $7,%ebp
- addl %edi,%ecx
- addl 8(%rsp),%ebx
- xorl %eax,%ebp
- movl %ecx,%edi
- roll $5,%ecx
- por %xmm9,%xmm0
- andl %ebp,%esi
- xorl %eax,%ebp
- movdqa %xmm0,%xmm10
- xorl %eax,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edx
- movl %ebx,%esi
- roll $5,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- xorl %ebp,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 16(%rsp),%ebp
- pxor %xmm5,%xmm1
-.byte 102,68,15,58,15,215,8
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm2,%xmm1
- xorl %ecx,%esi
- addl %eax,%ebp
- movdqa %xmm8,%xmm9
- paddd %xmm0,%xmm8
- rorl $7,%ebx
- addl %esi,%ebp
- pxor %xmm10,%xmm1
- addl 20(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ xorl 8(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,20(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 24(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 32(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 56(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 12(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,24(%rsp)
+ addl %ebx,%edx
+ leal -1894007588(%rax,%r11,1),%r12d
+ movl 28(%rsp),%eax
+ movl %esi,%ebx
+ movl %esi,%ecx
+ xorl 36(%rsp),%eax
+ movl %edx,%r11d
+ andl %edi,%ebx
+ xorl 60(%rsp),%eax
+ orl %edi,%ecx
+ roll $5,%r11d
+ xorl 16(%rsp),%eax
+ andl %ebp,%ecx
+ addl %r11d,%r12d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%esi
+ movl %eax,28(%rsp)
+ addl %ebx,%r12d
+ leal -1894007588(%rax,%rbp,1),%r11d
+ movl 32(%rsp),%eax
+ movl %edx,%ebx
+ movl %edx,%ecx
+ xorl 40(%rsp),%eax
+ movl %r12d,%ebp
+ andl %esi,%ebx
+ xorl 0(%rsp),%eax
+ orl %esi,%ecx
roll $5,%ebp
- movdqa %xmm1,%xmm10
- movdqa %xmm8,0(%rsp)
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- pslld $2,%xmm1
- addl 24(%rsp),%ecx
- xorl %ebx,%esi
- psrld $30,%xmm10
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- por %xmm10,%xmm1
- addl 28(%rsp),%ebx
- xorl %eax,%edi
- movdqa %xmm1,%xmm8
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 32(%rsp),%eax
- pxor %xmm6,%xmm2
-.byte 102,68,15,58,15,192,8
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- pxor %xmm3,%xmm2
- xorl %edx,%esi
- addl %ebx,%eax
- movdqa 32(%r11),%xmm10
- paddd %xmm1,%xmm9
- rorl $7,%ecx
- addl %esi,%eax
- pxor %xmm8,%xmm2
- addl 36(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- movdqa %xmm2,%xmm8
- movdqa %xmm9,16(%rsp)
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
+ xorl 20(%rsp),%eax
+ andl %edi,%ecx
+ addl %ebp,%r11d
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edx
+ movl %eax,32(%rsp)
+ addl %ebx,%r11d
+ leal -1894007588(%rax,%rdi,1),%ebp
+ movl 36(%rsp),%eax
+ movl %r12d,%ebx
+ movl %r12d,%ecx
+ xorl 44(%rsp),%eax
+ movl %r11d,%edi
+ andl %edx,%ebx
+ xorl 4(%rsp),%eax
+ orl %edx,%ecx
+ roll $5,%edi
+ xorl 24(%rsp),%eax
+ andl %esi,%ecx
addl %edi,%ebp
- pslld $2,%xmm2
- addl 40(%rsp),%edx
- xorl %ecx,%esi
- psrld $30,%xmm8
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- por %xmm8,%xmm2
- addl 44(%rsp),%ecx
- xorl %ebx,%edi
- movdqa %xmm2,%xmm9
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 48(%rsp),%ebx
- pxor %xmm7,%xmm3
-.byte 102,68,15,58,15,201,8
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- pxor %xmm4,%xmm3
- xorl %ebp,%esi
- addl %ecx,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm2,%xmm10
- rorl $7,%edx
- addl %esi,%ebx
- pxor %xmm9,%xmm3
- addl 52(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- movdqa %xmm3,%xmm9
- movdqa %xmm10,32(%rsp)
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- pslld $2,%xmm3
- addl 56(%rsp),%ebp
- xorl %edx,%esi
- psrld $30,%xmm9
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- por %xmm9,%xmm3
- addl 60(%rsp),%edx
- xorl %ecx,%edi
- movdqa %xmm3,%xmm10
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r12d
+ movl %eax,36(%rsp)
+ addl %ebx,%ebp
+ leal -1894007588(%rax,%rsi,1),%edi
+ movl 40(%rsp),%eax
+ movl %r11d,%ebx
+ movl %r11d,%ecx
+ xorl 48(%rsp),%eax
movl %ebp,%esi
- roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 0(%rsp),%ecx
- pxor %xmm0,%xmm4
-.byte 102,68,15,58,15,210,8
- xorl %ebx,%esi
- movl %edx,%edi
+ andl %r12d,%ebx
+ xorl 8(%rsp),%eax
+ orl %r12d,%ecx
+ roll $5,%esi
+ xorl 28(%rsp),%eax
+ andl %edx,%ecx
+ addl %esi,%edi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%r11d
+ movl %eax,40(%rsp)
+ addl %ebx,%edi
+ leal -1894007588(%rax,%rdx,1),%esi
+ movl 44(%rsp),%eax
+ movl %ebp,%ebx
+ movl %ebp,%ecx
+ xorl 52(%rsp),%eax
+ movl %edi,%edx
+ andl %r11d,%ebx
+ xorl 12(%rsp),%eax
+ orl %r11d,%ecx
roll $5,%edx
- pxor %xmm5,%xmm4
- xorl %eax,%esi
- addl %edx,%ecx
- movdqa %xmm8,%xmm9
- paddd %xmm3,%xmm8
- rorl $7,%ebp
- addl %esi,%ecx
- pxor %xmm10,%xmm4
- addl 4(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- movdqa %xmm4,%xmm10
- movdqa %xmm8,48(%rsp)
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- pslld $2,%xmm4
- addl 8(%rsp),%eax
- xorl %ebp,%esi
- psrld $30,%xmm10
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- por %xmm10,%xmm4
- addl 12(%rsp),%ebp
- xorl %edx,%edi
- movdqa %xmm4,%xmm8
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 16(%rsp),%edx
- pxor %xmm1,%xmm5
-.byte 102,68,15,58,15,195,8
- xorl %ecx,%esi
- movl %ebp,%edi
+ xorl 32(%rsp),%eax
+ andl %r12d,%ecx
+ addl %edx,%esi
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%ebp
+ movl %eax,44(%rsp)
+ addl %ebx,%esi
+ leal -1894007588(%rax,%r12,1),%edx
+ movl 48(%rsp),%eax
+ movl %edi,%ebx
+ movl %edi,%ecx
+ xorl 56(%rsp),%eax
+ movl %esi,%r12d
+ andl %ebp,%ebx
+ xorl 16(%rsp),%eax
+ orl %ebp,%ecx
+ roll $5,%r12d
+ xorl 36(%rsp),%eax
+ andl %r11d,%ecx
+ addl %r12d,%edx
+ roll $1,%eax
+ orl %ecx,%ebx
+ roll $30,%edi
+ movl %eax,48(%rsp)
+ addl %ebx,%edx
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 52(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 60(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 20(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 40(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,52(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 56(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 0(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- pxor %xmm6,%xmm5
- xorl %ebx,%esi
- addl %ebp,%edx
- movdqa %xmm9,%xmm10
- paddd %xmm4,%xmm9
- rorl $7,%eax
- addl %esi,%edx
- pxor %xmm8,%xmm5
- addl 20(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- movdqa %xmm5,%xmm8
- movdqa %xmm9,0(%rsp)
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- pslld $2,%xmm5
- addl 24(%rsp),%ebx
- xorl %eax,%esi
- psrld $30,%xmm8
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- por %xmm8,%xmm5
- addl 28(%rsp),%eax
- xorl %ebp,%edi
- movdqa %xmm5,%xmm9
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- movl %ecx,%edi
- pxor %xmm2,%xmm6
-.byte 102,68,15,58,15,204,8
- xorl %edx,%ecx
- addl 32(%rsp),%ebp
- andl %edx,%edi
- pxor %xmm7,%xmm6
- andl %ecx,%esi
- rorl $7,%ebx
- movdqa %xmm10,%xmm8
- paddd %xmm5,%xmm10
+ xorl 24(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 44(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,56(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 60(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 4(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 28(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- movl %eax,%edi
- pxor %xmm9,%xmm6
- roll $5,%eax
- addl %esi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movdqa %xmm6,%xmm9
- movdqa %xmm10,16(%rsp)
- movl %ebx,%esi
- xorl %ecx,%ebx
- addl 36(%rsp),%edx
- andl %ecx,%esi
- pslld $2,%xmm6
- andl %ebx,%edi
- rorl $7,%eax
- psrld $30,%xmm9
- addl %esi,%edx
+ xorl 48(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,60(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 0(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- por %xmm9,%xmm6
- movl %eax,%edi
- xorl %ebx,%eax
- movdqa %xmm6,%xmm10
- addl 40(%rsp),%ecx
- andl %ebx,%edi
- andl %eax,%esi
- rorl $7,%ebp
- addl %edi,%ecx
- movl %edx,%edi
+ xorl 8(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 32(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 52(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,0(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 4(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 12(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- addl %esi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movl %ebp,%esi
- xorl %eax,%ebp
- addl 44(%rsp),%ebx
- andl %eax,%esi
- andl %ebp,%edi
- rorl $7,%edx
- addl %esi,%ebx
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movl %edx,%edi
- pxor %xmm3,%xmm7
-.byte 102,68,15,58,15,213,8
- xorl %ebp,%edx
- addl 48(%rsp),%eax
- andl %ebp,%edi
- pxor %xmm0,%xmm7
- andl %edx,%esi
- rorl $7,%ecx
- movdqa 48(%r11),%xmm9
- paddd %xmm6,%xmm8
- addl %edi,%eax
- movl %ebx,%edi
- pxor %xmm10,%xmm7
- roll $5,%ebx
- addl %esi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- movdqa %xmm7,%xmm10
- movdqa %xmm8,32(%rsp)
- movl %ecx,%esi
- xorl %edx,%ecx
- addl 52(%rsp),%ebp
- andl %edx,%esi
- pslld $2,%xmm7
- andl %ecx,%edi
- rorl $7,%ebx
- psrld $30,%xmm10
- addl %esi,%ebp
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- por %xmm10,%xmm7
- movl %ebx,%edi
- xorl %ecx,%ebx
- movdqa %xmm7,%xmm8
- addl 56(%rsp),%edx
- andl %ecx,%edi
- andl %ebx,%esi
- rorl $7,%eax
- addl %edi,%edx
- movl %ebp,%edi
+ xorl 36(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 56(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,4(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 8(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 16(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 40(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 60(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,8(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 12(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 20(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 44(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 0(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,12(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 16(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 24(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- addl %esi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movl %eax,%esi
- xorl %ebx,%eax
- addl 60(%rsp),%ecx
- andl %ebx,%esi
- andl %eax,%edi
- rorl $7,%ebp
- addl %esi,%ecx
- movl %edx,%esi
- roll $5,%edx
- addl %edi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movl %ebp,%edi
- pxor %xmm4,%xmm0
-.byte 102,68,15,58,15,198,8
- xorl %eax,%ebp
- addl 0(%rsp),%ebx
- andl %eax,%edi
- pxor %xmm1,%xmm0
- andl %ebp,%esi
- rorl $7,%edx
- movdqa %xmm9,%xmm10
- paddd %xmm7,%xmm9
- addl %edi,%ebx
- movl %ecx,%edi
- pxor %xmm8,%xmm0
- roll $5,%ecx
- addl %esi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movdqa %xmm0,%xmm8
- movdqa %xmm9,48(%rsp)
- movl %edx,%esi
- xorl %ebp,%edx
- addl 4(%rsp),%eax
- andl %ebp,%esi
- pslld $2,%xmm0
- andl %edx,%edi
- rorl $7,%ecx
- psrld $30,%xmm8
- addl %esi,%eax
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- por %xmm8,%xmm0
- movl %ecx,%edi
- xorl %edx,%ecx
- movdqa %xmm0,%xmm9
- addl 8(%rsp),%ebp
- andl %edx,%edi
- andl %ecx,%esi
- rorl $7,%ebx
+ xorl 48(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 4(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,16(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 20(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 28(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 52(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- movl %eax,%edi
- roll $5,%eax
- addl %esi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movl %ebx,%esi
- xorl %ecx,%ebx
- addl 12(%rsp),%edx
- andl %ecx,%esi
- andl %ebx,%edi
- rorl $7,%eax
- addl %esi,%edx
- movl %ebp,%esi
- roll $5,%ebp
- addl %edi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movl %eax,%edi
- pxor %xmm5,%xmm1
-.byte 102,68,15,58,15,207,8
- xorl %ebx,%eax
- addl 16(%rsp),%ecx
- andl %ebx,%edi
- pxor %xmm2,%xmm1
- andl %eax,%esi
- rorl $7,%ebp
- movdqa %xmm10,%xmm8
- paddd %xmm0,%xmm10
- addl %edi,%ecx
- movl %edx,%edi
- pxor %xmm9,%xmm1
- roll $5,%edx
- addl %esi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- movdqa %xmm1,%xmm9
- movdqa %xmm10,0(%rsp)
+ xorl 8(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,20(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 24(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- xorl %eax,%ebp
- addl 20(%rsp),%ebx
- andl %eax,%esi
- pslld $2,%xmm1
- andl %ebp,%edi
- rorl $7,%edx
- psrld $30,%xmm9
- addl %esi,%ebx
- movl %ecx,%esi
- roll $5,%ecx
- addl %edi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- por %xmm9,%xmm1
- movl %edx,%edi
- xorl %ebp,%edx
- movdqa %xmm1,%xmm10
- addl 24(%rsp),%eax
- andl %ebp,%edi
- andl %edx,%esi
- rorl $7,%ecx
- addl %edi,%eax
- movl %ebx,%edi
- roll $5,%ebx
- addl %esi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- movl %ecx,%esi
- xorl %edx,%ecx
- addl 28(%rsp),%ebp
- andl %edx,%esi
- andl %ecx,%edi
- rorl $7,%ebx
- addl %esi,%ebp
- movl %eax,%esi
- roll $5,%eax
- addl %edi,%ebp
- xorl %edx,%ecx
- addl %eax,%ebp
- movl %ebx,%edi
- pxor %xmm6,%xmm2
-.byte 102,68,15,58,15,208,8
- xorl %ecx,%ebx
- addl 32(%rsp),%edx
- andl %ecx,%edi
- pxor %xmm3,%xmm2
- andl %ebx,%esi
- rorl $7,%eax
- movdqa %xmm8,%xmm9
- paddd %xmm1,%xmm8
- addl %edi,%edx
- movl %ebp,%edi
- pxor %xmm10,%xmm2
- roll $5,%ebp
- addl %esi,%edx
- xorl %ecx,%ebx
- addl %ebp,%edx
- movdqa %xmm2,%xmm10
- movdqa %xmm8,16(%rsp)
- movl %eax,%esi
- xorl %ebx,%eax
- addl 36(%rsp),%ecx
- andl %ebx,%esi
- pslld $2,%xmm2
- andl %eax,%edi
- rorl $7,%ebp
- psrld $30,%xmm10
- addl %esi,%ecx
- movl %edx,%esi
+ xorl 32(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 56(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 12(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,24(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 28(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 36(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- addl %edi,%ecx
- xorl %ebx,%eax
- addl %edx,%ecx
- por %xmm10,%xmm2
- movl %ebp,%edi
- xorl %eax,%ebp
- movdqa %xmm2,%xmm8
- addl 40(%rsp),%ebx
- andl %eax,%edi
- andl %ebp,%esi
- rorl $7,%edx
- addl %edi,%ebx
- movl %ecx,%edi
- roll $5,%ecx
- addl %esi,%ebx
- xorl %eax,%ebp
- addl %ecx,%ebx
- movl %edx,%esi
- xorl %ebp,%edx
- addl 44(%rsp),%eax
- andl %ebp,%esi
- andl %edx,%edi
- rorl $7,%ecx
- addl %esi,%eax
- movl %ebx,%esi
- roll $5,%ebx
- addl %edi,%eax
- xorl %ebp,%edx
- addl %ebx,%eax
- addl 48(%rsp),%ebp
- pxor %xmm7,%xmm3
-.byte 102,68,15,58,15,193,8
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- pxor %xmm4,%xmm3
- xorl %ecx,%esi
- addl %eax,%ebp
- movdqa %xmm9,%xmm10
- paddd %xmm2,%xmm9
- rorl $7,%ebx
- addl %esi,%ebp
- pxor %xmm8,%xmm3
- addl 52(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ xorl 60(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 16(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ movl %eax,28(%rsp)
+ leal -899497514(%rax,%r12,1),%edx
+ movl 32(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 40(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 0(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 20(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ movl %eax,32(%rsp)
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 36(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 44(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 4(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 24(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ movl %eax,36(%rsp)
+ leal -899497514(%rax,%rbp,1),%r11d
+ movl 40(%rsp),%eax
+ movl %esi,%ebx
+ movl %r12d,%ebp
+ xorl 48(%rsp),%eax
+ xorl %edx,%ebx
roll $5,%ebp
- movdqa %xmm3,%xmm8
- movdqa %xmm9,32(%rsp)
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- pslld $2,%xmm3
- addl 56(%rsp),%ecx
- xorl %ebx,%esi
- psrld $30,%xmm8
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- por %xmm8,%xmm3
- addl 60(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 0(%rsp),%eax
- paddd %xmm3,%xmm10
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- movdqa %xmm10,48(%rsp)
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 4(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
+ xorl 8(%rsp),%eax
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ xorl 28(%rsp),%eax
+ roll $30,%edx
+ addl %ebx,%r11d
+ roll $1,%eax
+ movl %eax,40(%rsp)
+ leal -899497514(%rax,%rdi,1),%ebp
+ movl 44(%rsp),%eax
+ movl %edx,%ebx
+ movl %r11d,%edi
+ xorl 52(%rsp),%eax
+ xorl %r12d,%ebx
+ roll $5,%edi
+ xorl 12(%rsp),%eax
+ xorl %esi,%ebx
addl %edi,%ebp
- addl 8(%rsp),%edx
- xorl %ecx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- addl 12(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- cmpq %r10,%r9
- je L$done_ssse3
- movdqa 64(%r11),%xmm6
- movdqa 0(%r11),%xmm9
- movdqu 0(%r9),%xmm0
- movdqu 16(%r9),%xmm1
- movdqu 32(%r9),%xmm2
- movdqu 48(%r9),%xmm3
-.byte 102,15,56,0,198
- addq $64,%r9
- addl 16(%rsp),%ebx
- xorl %eax,%esi
-.byte 102,15,56,0,206
- movl %ecx,%edi
- roll $5,%ecx
- paddd %xmm9,%xmm0
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- movdqa %xmm0,0(%rsp)
- addl 20(%rsp),%eax
- xorl %ebp,%edi
- psubd %xmm9,%xmm0
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 24(%rsp),%ebp
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- addl 28(%rsp),%edx
- xorl %ecx,%edi
+ xorl 32(%rsp),%eax
+ roll $30,%r12d
+ addl %ebx,%ebp
+ roll $1,%eax
+ movl %eax,44(%rsp)
+ leal -899497514(%rax,%rsi,1),%edi
+ movl 48(%rsp),%eax
+ movl %r12d,%ebx
movl %ebp,%esi
- roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 32(%rsp),%ecx
- xorl %ebx,%esi
-.byte 102,15,56,0,214
- movl %edx,%edi
- roll $5,%edx
- paddd %xmm9,%xmm1
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- movdqa %xmm1,16(%rsp)
- addl 36(%rsp),%ebx
- xorl %eax,%edi
- psubd %xmm9,%xmm1
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 40(%rsp),%eax
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 44(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 48(%rsp),%edx
- xorl %ecx,%esi
-.byte 102,15,56,0,222
- movl %ebp,%edi
- roll $5,%ebp
- paddd %xmm9,%xmm2
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- movdqa %xmm2,32(%rsp)
- addl 52(%rsp),%ecx
- xorl %ebx,%edi
- psubd %xmm9,%xmm2
- movl %edx,%esi
+ xorl 56(%rsp),%eax
+ xorl %r11d,%ebx
+ roll $5,%esi
+ xorl 16(%rsp),%eax
+ xorl %edx,%ebx
+ addl %esi,%edi
+ xorl 36(%rsp),%eax
+ roll $30,%r11d
+ addl %ebx,%edi
+ roll $1,%eax
+ movl %eax,48(%rsp)
+ leal -899497514(%rax,%rdx,1),%esi
+ movl 52(%rsp),%eax
+ movl %r11d,%ebx
+ movl %edi,%edx
+ xorl 60(%rsp),%eax
+ xorl %ebp,%ebx
roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 56(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 60(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- addl 12(%r8),%edx
- movl %eax,0(%r8)
- addl 16(%r8),%ebp
- movl %esi,4(%r8)
+ xorl 20(%rsp),%eax
+ xorl %r12d,%ebx
+ addl %edx,%esi
+ xorl 40(%rsp),%eax
+ roll $30,%ebp
+ addl %ebx,%esi
+ roll $1,%eax
+ leal -899497514(%rax,%r12,1),%edx
+ movl 56(%rsp),%eax
+ movl %ebp,%ebx
+ movl %esi,%r12d
+ xorl 0(%rsp),%eax
+ xorl %edi,%ebx
+ roll $5,%r12d
+ xorl 24(%rsp),%eax
+ xorl %r11d,%ebx
+ addl %r12d,%edx
+ xorl 44(%rsp),%eax
+ roll $30,%edi
+ addl %ebx,%edx
+ roll $1,%eax
+ leal -899497514(%rax,%r11,1),%r12d
+ movl 60(%rsp),%eax
+ movl %edi,%ebx
+ movl %edx,%r11d
+ xorl 4(%rsp),%eax
+ xorl %esi,%ebx
+ roll $5,%r11d
+ xorl 28(%rsp),%eax
+ xorl %ebp,%ebx
+ addl %r11d,%r12d
+ xorl 48(%rsp),%eax
+ roll $30,%esi
+ addl %ebx,%r12d
+ roll $1,%eax
+ leal -899497514(%rax,%rbp,1),%r11d
movl %esi,%ebx
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- jmp L$oop_ssse3
-
-.p2align 4
-L$done_ssse3:
- addl 16(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 20(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 24(%rsp),%ebp
- xorl %edx,%esi
- movl %eax,%edi
- roll $5,%eax
- xorl %ecx,%esi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %esi,%ebp
- addl 28(%rsp),%edx
- xorl %ecx,%edi
- movl %ebp,%esi
+ movl %r12d,%ebp
+ xorl %edx,%ebx
roll $5,%ebp
- xorl %ebx,%edi
- addl %ebp,%edx
- rorl $7,%eax
- addl %edi,%edx
- addl 32(%rsp),%ecx
- xorl %ebx,%esi
- movl %edx,%edi
- roll $5,%edx
- xorl %eax,%esi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %esi,%ecx
- addl 36(%rsp),%ebx
- xorl %eax,%edi
- movl %ecx,%esi
- roll $5,%ecx
- xorl %ebp,%edi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %edi,%ebx
- addl 40(%rsp),%eax
- xorl %ebp,%esi
- movl %ebx,%edi
- roll $5,%ebx
- xorl %edx,%esi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %esi,%eax
- addl 44(%rsp),%ebp
- xorl %edx,%edi
- movl %eax,%esi
- roll $5,%eax
- xorl %ecx,%edi
- addl %eax,%ebp
- rorl $7,%ebx
- addl %edi,%ebp
- addl 48(%rsp),%edx
- xorl %ecx,%esi
- movl %ebp,%edi
- roll $5,%ebp
- xorl %ebx,%esi
- addl %ebp,%edx
- rorl $7,%eax
- addl %esi,%edx
- addl 52(%rsp),%ecx
- xorl %ebx,%edi
- movl %edx,%esi
- roll $5,%edx
- xorl %eax,%edi
- addl %edx,%ecx
- rorl $7,%ebp
- addl %edi,%ecx
- addl 56(%rsp),%ebx
- xorl %eax,%esi
- movl %ecx,%edi
- roll $5,%ecx
- xorl %ebp,%esi
- addl %ecx,%ebx
- rorl $7,%edx
- addl %esi,%ebx
- addl 60(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- roll $5,%ebx
- xorl %edx,%edi
- addl %ebx,%eax
- rorl $7,%ecx
- addl %edi,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- movl %eax,0(%r8)
- addl 12(%r8),%edx
- movl %esi,4(%r8)
- addl 16(%r8),%ebp
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- leaq 64(%rsp),%rsi
- movq 0(%rsi),%r12
+ xorl %edi,%ebx
+ addl %ebp,%r11d
+ roll $30,%edx
+ addl %ebx,%r11d
+ addl 0(%r8),%r11d
+ addl 4(%r8),%r12d
+ addl 8(%r8),%edx
+ addl 12(%r8),%esi
+ addl 16(%r8),%edi
+ movl %r11d,0(%r8)
+ movl %r12d,4(%r8)
+ movl %edx,8(%r8)
+ movl %esi,12(%r8)
+ movl %edi,16(%r8)
+
+ xchgl %r11d,%edx
+ xchgl %r12d,%esi
+ xchgl %r11d,%edi
+ xchgl %r12d,%ebp
+
+ leaq 64(%r9),%r9
+ subq $1,%r10
+ jnz L$loop
+ movq 64(%rsp),%rsi
+ movq (%rsi),%r12
movq 8(%rsi),%rbp
movq 16(%rsi),%rbx
leaq 24(%rsi),%rsp
-L$epilogue_ssse3:
+L$epilogue:
.byte 0xf3,0xc3
-.p2align 6
-K_XX_XX:
-.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
-
-.long 0x6ed9eba1,0x6ed9eba1,0x6ed9eba1,0x6ed9eba1
-
-.long 0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc,0x8f1bbcdc
-
-.long 0xca62c1d6,0xca62c1d6,0xca62c1d6,0xca62c1d6
-
-.long 0x00010203,0x04050607,0x08090a0b,0x0c0d0e0f
-
.byte 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.p2align 6
+.p2align 4
diff --git a/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s b/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
index dda5a96e9d..73c4990304 100644
--- a/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
+++ b/deps/openssl/asm/x64-macosx-gas/sha/sha512-x86_64.s
@@ -38,1688 +38,1880 @@ L$prologue:
L$loop:
xorq %rdi,%rdi
movl 0(%rsi),%r12d
- movl %r8d,%r13d
- movl %eax,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,0(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,0(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 4(%rsi),%r12d
- movl %edx,%r13d
- movl %r11d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %edx,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,4(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,4(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 8(%rsi),%r12d
- movl %ecx,%r13d
- movl %r10d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,8(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,8(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 12(%rsi),%r12d
- movl %ebx,%r13d
- movl %r9d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,12(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,12(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 16(%rsi),%r12d
- movl %eax,%r13d
- movl %r8d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %eax,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,16(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,16(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 20(%rsi),%r12d
- movl %r11d,%r13d
- movl %edx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,20(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,20(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 24(%rsi),%r12d
- movl %r10d,%r13d
- movl %ecx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,24(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,24(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 28(%rsi),%r12d
- movl %r9d,%r13d
- movl %ebx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,28(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,28(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
movl 32(%rsi),%r12d
- movl %r8d,%r13d
- movl %eax,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,32(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,32(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 36(%rsi),%r12d
- movl %edx,%r13d
- movl %r11d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %edx,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,36(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,36(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 40(%rsi),%r12d
- movl %ecx,%r13d
- movl %r10d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,40(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,40(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 44(%rsi),%r12d
- movl %ebx,%r13d
- movl %r9d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,44(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,44(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 48(%rsi),%r12d
- movl %eax,%r13d
- movl %r8d,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %eax,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,48(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,48(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 52(%rsi),%r12d
- movl %r11d,%r13d
- movl %edx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,52(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,52(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 56(%rsi),%r12d
- movl %r10d,%r13d
- movl %ecx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,56(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,56(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 60(%rsi),%r12d
- movl %r9d,%r13d
- movl %ebx,%r14d
bswapl %r12d
- rorl $14,%r13d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,60(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,60(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
jmp L$rounds_16_xx
.p2align 4
L$rounds_16_xx:
movl 4(%rsp),%r13d
- movl 56(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 56(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 36(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 36(%rsp),%r12d
addl 0(%rsp),%r12d
movl %r8d,%r13d
- addl %r14d,%r12d
- movl %eax,%r14d
- rorl $14,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,0(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,0(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 8(%rsp),%r13d
- movl 60(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 60(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 40(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 40(%rsp),%r12d
addl 4(%rsp),%r12d
movl %edx,%r13d
- addl %r14d,%r12d
- movl %r11d,%r14d
- rorl $14,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,4(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,4(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 12(%rsp),%r13d
- movl 0(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 0(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 44(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 44(%rsp),%r12d
addl 8(%rsp),%r12d
movl %ecx,%r13d
- addl %r14d,%r12d
- movl %r10d,%r14d
- rorl $14,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,8(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,8(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 16(%rsp),%r13d
- movl 4(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 4(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 48(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 48(%rsp),%r12d
addl 12(%rsp),%r12d
movl %ebx,%r13d
- addl %r14d,%r12d
- movl %r9d,%r14d
- rorl $14,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,12(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,12(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 20(%rsp),%r13d
- movl 8(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 8(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 52(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 52(%rsp),%r12d
addl 16(%rsp),%r12d
movl %eax,%r13d
- addl %r14d,%r12d
- movl %r8d,%r14d
- rorl $14,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,16(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,16(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 24(%rsp),%r13d
- movl 12(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 12(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 56(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 56(%rsp),%r12d
addl 20(%rsp),%r12d
movl %r11d,%r13d
- addl %r14d,%r12d
- movl %edx,%r14d
- rorl $14,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,20(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,20(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 28(%rsp),%r13d
- movl 16(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 16(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 60(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 60(%rsp),%r12d
addl 24(%rsp),%r12d
movl %r10d,%r13d
- addl %r14d,%r12d
- movl %ecx,%r14d
- rorl $14,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,24(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,24(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 32(%rsp),%r13d
- movl 20(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 20(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 0(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 0(%rsp),%r12d
addl 28(%rsp),%r12d
movl %r9d,%r13d
- addl %r14d,%r12d
- movl %ebx,%r14d
- rorl $14,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,28(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,28(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
movl 36(%rsp),%r13d
- movl 24(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 24(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 4(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 4(%rsp),%r12d
addl 32(%rsp),%r12d
movl %r8d,%r13d
- addl %r14d,%r12d
- movl %eax,%r14d
- rorl $14,%r13d
+ movl %r8d,%r14d
movl %r9d,%r15d
- movl %r12d,32(%rsp)
- rorl $9,%r14d
- xorl %r8d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r10d,%r15d
- rorl $5,%r13d
- addl %r11d,%r12d
- xorl %eax,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r8d,%r15d
- movl %ebx,%r11d
+ movl %r12d,32(%rsp)
- rorl $11,%r14d
- xorl %r8d,%r13d
+ xorl %r14d,%r13d
xorl %r10d,%r15d
+ addl %r11d,%r12d
+
+ movl %eax,%r11d
+ addl %r13d,%r12d
- xorl %ecx,%r11d
- xorl %eax,%r14d
addl %r15d,%r12d
- movl %ebx,%r15d
+ movl %eax,%r13d
+ movl %eax,%r14d
- rorl $6,%r13d
- andl %eax,%r11d
- andl %ecx,%r15d
+ rorl $2,%r11d
+ rorl $13,%r13d
+ movl %eax,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r11d
+ xorl %r13d,%r11d
+ rorl $9,%r13d
+ orl %ecx,%r14d
+ xorl %r13d,%r11d
+ andl %ecx,%r15d
addl %r12d,%edx
+
+ andl %ebx,%r14d
addl %r12d,%r11d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r11d
+ addl %r14d,%r11d
movl 40(%rsp),%r13d
- movl 28(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 28(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 8(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 8(%rsp),%r12d
addl 36(%rsp),%r12d
movl %edx,%r13d
- addl %r14d,%r12d
- movl %r11d,%r14d
- rorl $14,%r13d
+ movl %edx,%r14d
movl %r8d,%r15d
- movl %r12d,36(%rsp)
- rorl $9,%r14d
- xorl %edx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r9d,%r15d
- rorl $5,%r13d
- addl %r10d,%r12d
- xorl %r11d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %edx,%r15d
- movl %eax,%r10d
+ movl %r12d,36(%rsp)
- rorl $11,%r14d
- xorl %edx,%r13d
+ xorl %r14d,%r13d
xorl %r9d,%r15d
+ addl %r10d,%r12d
+
+ movl %r11d,%r10d
+ addl %r13d,%r12d
- xorl %ebx,%r10d
- xorl %r11d,%r14d
addl %r15d,%r12d
- movl %eax,%r15d
+ movl %r11d,%r13d
+ movl %r11d,%r14d
- rorl $6,%r13d
- andl %r11d,%r10d
- andl %ebx,%r15d
+ rorl $2,%r10d
+ rorl $13,%r13d
+ movl %r11d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r10d
+ xorl %r13d,%r10d
+ rorl $9,%r13d
+ orl %ebx,%r14d
+ xorl %r13d,%r10d
+ andl %ebx,%r15d
addl %r12d,%ecx
+
+ andl %eax,%r14d
addl %r12d,%r10d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r10d
+ addl %r14d,%r10d
movl 44(%rsp),%r13d
- movl 32(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 32(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 12(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 12(%rsp),%r12d
addl 40(%rsp),%r12d
movl %ecx,%r13d
- addl %r14d,%r12d
- movl %r10d,%r14d
- rorl $14,%r13d
+ movl %ecx,%r14d
movl %edx,%r15d
- movl %r12d,40(%rsp)
- rorl $9,%r14d
- xorl %ecx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r8d,%r15d
- rorl $5,%r13d
- addl %r9d,%r12d
- xorl %r10d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ecx,%r15d
- movl %r11d,%r9d
+ movl %r12d,40(%rsp)
- rorl $11,%r14d
- xorl %ecx,%r13d
+ xorl %r14d,%r13d
xorl %r8d,%r15d
+ addl %r9d,%r12d
+
+ movl %r10d,%r9d
+ addl %r13d,%r12d
- xorl %eax,%r9d
- xorl %r10d,%r14d
addl %r15d,%r12d
- movl %r11d,%r15d
+ movl %r10d,%r13d
+ movl %r10d,%r14d
- rorl $6,%r13d
- andl %r10d,%r9d
- andl %eax,%r15d
+ rorl $2,%r9d
+ rorl $13,%r13d
+ movl %r10d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r9d
+ xorl %r13d,%r9d
+ rorl $9,%r13d
+ orl %eax,%r14d
+ xorl %r13d,%r9d
+ andl %eax,%r15d
addl %r12d,%ebx
+
+ andl %r11d,%r14d
addl %r12d,%r9d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r9d
+ addl %r14d,%r9d
movl 48(%rsp),%r13d
- movl 36(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 36(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 16(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 16(%rsp),%r12d
addl 44(%rsp),%r12d
movl %ebx,%r13d
- addl %r14d,%r12d
- movl %r9d,%r14d
- rorl $14,%r13d
+ movl %ebx,%r14d
movl %ecx,%r15d
- movl %r12d,44(%rsp)
- rorl $9,%r14d
- xorl %ebx,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %edx,%r15d
- rorl $5,%r13d
- addl %r8d,%r12d
- xorl %r9d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %ebx,%r15d
- movl %r10d,%r8d
+ movl %r12d,44(%rsp)
- rorl $11,%r14d
- xorl %ebx,%r13d
+ xorl %r14d,%r13d
xorl %edx,%r15d
+ addl %r8d,%r12d
+
+ movl %r9d,%r8d
+ addl %r13d,%r12d
- xorl %r11d,%r8d
- xorl %r9d,%r14d
addl %r15d,%r12d
- movl %r10d,%r15d
+ movl %r9d,%r13d
+ movl %r9d,%r14d
- rorl $6,%r13d
- andl %r9d,%r8d
- andl %r11d,%r15d
+ rorl $2,%r8d
+ rorl $13,%r13d
+ movl %r9d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%r8d
+ xorl %r13d,%r8d
+ rorl $9,%r13d
+ orl %r11d,%r14d
+ xorl %r13d,%r8d
+ andl %r11d,%r15d
addl %r12d,%eax
+
+ andl %r10d,%r14d
addl %r12d,%r8d
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%r8d
+ addl %r14d,%r8d
movl 52(%rsp),%r13d
- movl 40(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 40(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 20(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 20(%rsp),%r12d
addl 48(%rsp),%r12d
movl %eax,%r13d
- addl %r14d,%r12d
- movl %r8d,%r14d
- rorl $14,%r13d
+ movl %eax,%r14d
movl %ebx,%r15d
- movl %r12d,48(%rsp)
- rorl $9,%r14d
- xorl %eax,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ecx,%r15d
- rorl $5,%r13d
- addl %edx,%r12d
- xorl %r8d,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %eax,%r15d
- movl %r9d,%edx
+ movl %r12d,48(%rsp)
- rorl $11,%r14d
- xorl %eax,%r13d
+ xorl %r14d,%r13d
xorl %ecx,%r15d
+ addl %edx,%r12d
+
+ movl %r8d,%edx
+ addl %r13d,%r12d
- xorl %r10d,%edx
- xorl %r8d,%r14d
addl %r15d,%r12d
- movl %r9d,%r15d
+ movl %r8d,%r13d
+ movl %r8d,%r14d
- rorl $6,%r13d
- andl %r8d,%edx
- andl %r10d,%r15d
+ rorl $2,%edx
+ rorl $13,%r13d
+ movl %r8d,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%edx
+ xorl %r13d,%edx
+ rorl $9,%r13d
+ orl %r10d,%r14d
+ xorl %r13d,%edx
+ andl %r10d,%r15d
addl %r12d,%r11d
+
+ andl %r9d,%r14d
addl %r12d,%edx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%edx
+ addl %r14d,%edx
movl 56(%rsp),%r13d
- movl 44(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 44(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 24(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 24(%rsp),%r12d
addl 52(%rsp),%r12d
movl %r11d,%r13d
- addl %r14d,%r12d
- movl %edx,%r14d
- rorl $14,%r13d
+ movl %r11d,%r14d
movl %eax,%r15d
- movl %r12d,52(%rsp)
- rorl $9,%r14d
- xorl %r11d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %ebx,%r15d
- rorl $5,%r13d
- addl %ecx,%r12d
- xorl %edx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r11d,%r15d
- movl %r8d,%ecx
+ movl %r12d,52(%rsp)
- rorl $11,%r14d
- xorl %r11d,%r13d
+ xorl %r14d,%r13d
xorl %ebx,%r15d
+ addl %ecx,%r12d
+
+ movl %edx,%ecx
+ addl %r13d,%r12d
- xorl %r9d,%ecx
- xorl %edx,%r14d
addl %r15d,%r12d
- movl %r8d,%r15d
+ movl %edx,%r13d
+ movl %edx,%r14d
- rorl $6,%r13d
- andl %edx,%ecx
- andl %r9d,%r15d
+ rorl $2,%ecx
+ rorl $13,%r13d
+ movl %edx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ecx
+ xorl %r13d,%ecx
+ rorl $9,%r13d
+ orl %r9d,%r14d
+ xorl %r13d,%ecx
+ andl %r9d,%r15d
addl %r12d,%r10d
+
+ andl %r8d,%r14d
addl %r12d,%ecx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ecx
+ addl %r14d,%ecx
movl 60(%rsp),%r13d
- movl 48(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 48(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 28(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
+
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 28(%rsp),%r12d
addl 56(%rsp),%r12d
movl %r10d,%r13d
- addl %r14d,%r12d
- movl %ecx,%r14d
- rorl $14,%r13d
+ movl %r10d,%r14d
movl %r11d,%r15d
- movl %r12d,56(%rsp)
- rorl $9,%r14d
- xorl %r10d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %eax,%r15d
- rorl $5,%r13d
- addl %ebx,%r12d
- xorl %ecx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r10d,%r15d
- movl %edx,%ebx
+ movl %r12d,56(%rsp)
- rorl $11,%r14d
- xorl %r10d,%r13d
+ xorl %r14d,%r13d
xorl %eax,%r15d
+ addl %ebx,%r12d
+
+ movl %ecx,%ebx
+ addl %r13d,%r12d
- xorl %r8d,%ebx
- xorl %ecx,%r14d
addl %r15d,%r12d
- movl %edx,%r15d
+ movl %ecx,%r13d
+ movl %ecx,%r14d
- rorl $6,%r13d
- andl %ecx,%ebx
- andl %r8d,%r15d
+ rorl $2,%ebx
+ rorl $13,%r13d
+ movl %ecx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%ebx
+ xorl %r13d,%ebx
+ rorl $9,%r13d
+ orl %r8d,%r14d
+ xorl %r13d,%ebx
+ andl %r8d,%r15d
addl %r12d,%r9d
+
+ andl %edx,%r14d
addl %r12d,%ebx
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%ebx
+ addl %r14d,%ebx
movl 0(%rsp),%r13d
- movl 52(%rsp),%r14d
- movl %r13d,%r12d
- movl %r14d,%r15d
+ movl 52(%rsp),%r12d
+
+ movl %r13d,%r15d
- rorl $11,%r12d
- xorl %r13d,%r12d
shrl $3,%r13d
+ rorl $7,%r15d
- rorl $7,%r12d
- xorl %r12d,%r13d
- movl 32(%rsp),%r12d
+ xorl %r15d,%r13d
+ rorl $11,%r15d
+
+ xorl %r15d,%r13d
+ movl %r12d,%r14d
- rorl $2,%r15d
- xorl %r14d,%r15d
- shrl $10,%r14d
+ shrl $10,%r12d
+ rorl $17,%r14d
+
+ xorl %r14d,%r12d
+ rorl $2,%r14d
+
+ xorl %r14d,%r12d
- rorl $17,%r15d
addl %r13d,%r12d
- xorl %r15d,%r14d
+
+ addl 32(%rsp),%r12d
addl 60(%rsp),%r12d
movl %r9d,%r13d
- addl %r14d,%r12d
- movl %ebx,%r14d
- rorl $14,%r13d
+ movl %r9d,%r14d
movl %r10d,%r15d
- movl %r12d,60(%rsp)
- rorl $9,%r14d
- xorl %r9d,%r13d
+ rorl $6,%r13d
+ rorl $11,%r14d
xorl %r11d,%r15d
- rorl $5,%r13d
- addl %eax,%r12d
- xorl %ebx,%r14d
-
- addl (%rbp,%rdi,4),%r12d
+ xorl %r14d,%r13d
+ rorl $14,%r14d
andl %r9d,%r15d
- movl %ecx,%eax
+ movl %r12d,60(%rsp)
- rorl $11,%r14d
- xorl %r9d,%r13d
+ xorl %r14d,%r13d
xorl %r11d,%r15d
+ addl %eax,%r12d
+
+ movl %ebx,%eax
+ addl %r13d,%r12d
- xorl %edx,%eax
- xorl %ebx,%r14d
addl %r15d,%r12d
- movl %ecx,%r15d
+ movl %ebx,%r13d
+ movl %ebx,%r14d
- rorl $6,%r13d
- andl %ebx,%eax
- andl %edx,%r15d
+ rorl $2,%eax
+ rorl $13,%r13d
+ movl %ebx,%r15d
+ addl (%rbp,%rdi,4),%r12d
- rorl $2,%r14d
- addl %r13d,%r12d
- addl %r15d,%eax
+ xorl %r13d,%eax
+ rorl $9,%r13d
+ orl %edx,%r14d
+ xorl %r13d,%eax
+ andl %edx,%r15d
addl %r12d,%r8d
+
+ andl %ecx,%r14d
addl %r12d,%eax
+
+ orl %r15d,%r14d
leaq 1(%rdi),%rdi
- addl %r14d,%eax
+ addl %r14d,%eax
cmpq $64,%rdi
jb L$rounds_16_xx
diff --git a/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s b/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
index 21e8a8fc2e..a1670e38e8 100644
--- a/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
+++ b/deps/openssl/asm/x64-macosx-gas/x86_64cpuid.s
@@ -1,12 +1,8 @@
-.private_extern _OPENSSL_cpuid_setup
.mod_init_func
.p2align 3
.quad _OPENSSL_cpuid_setup
-.private_extern _OPENSSL_ia32cap_P
-.comm _OPENSSL_ia32cap_P,8,2
-
.text
@@ -72,15 +68,7 @@ _OPENSSL_ia32_cpuid:
movl $2147483648,%eax
cpuid
- cmpl $2147483649,%eax
- jb L$intel
- movl %eax,%r10d
- movl $2147483649,%eax
- cpuid
- orl %ecx,%r9d
- andl $2049,%r9d
-
- cmpl $2147483656,%r10d
+ cmpl $2147483656,%eax
jb L$intel
movl $2147483656,%eax
@@ -91,12 +79,12 @@ _OPENSSL_ia32_cpuid:
movl $1,%eax
cpuid
btl $28,%edx
- jnc L$generic
+ jnc L$done
shrl $16,%ebx
cmpb %r10b,%bl
- ja L$generic
+ ja L$done
andl $4026531839,%edx
- jmp L$generic
+ jmp L$done
L$intel:
cmpl $4,%r11d
@@ -113,48 +101,30 @@ L$intel:
L$nocacheinfo:
movl $1,%eax
cpuid
- andl $3220176895,%edx
cmpl $0,%r9d
jne L$notintel
- orl $1073741824,%edx
+ orl $1048576,%edx
andb $15,%ah
cmpb $15,%ah
- jne L$notintel
- orl $1048576,%edx
+ je L$notintel
+ orl $1073741824,%edx
L$notintel:
btl $28,%edx
- jnc L$generic
+ jnc L$done
andl $4026531839,%edx
cmpl $0,%r10d
- je L$generic
+ je L$done
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
- ja L$generic
+ ja L$done
andl $4026531839,%edx
-L$generic:
- andl $2048,%r9d
- andl $4294965247,%ecx
- orl %ecx,%r9d
-
- movl %edx,%r10d
- btl $27,%r9d
- jnc L$clear_avx
- xorl %ecx,%ecx
-.byte 0x0f,0x01,0xd0
-
- andl $6,%eax
- cmpl $6,%eax
- je L$done
-L$clear_avx:
- movl $4026525695,%eax
- andl %eax,%r9d
L$done:
- shlq $32,%r9
- movl %r10d,%eax
+ shlq $32,%rcx
+ movl %edx,%eax
movq %r8,%rbx
- orq %r9,%rax
+ orq %rcx,%rax
.byte 0xf3,0xc3
@@ -223,16 +193,3 @@ _OPENSSL_wipe_cpu:
leaq 8(%rsp),%rax
.byte 0xf3,0xc3
-.globl _OPENSSL_ia32_rdrand
-
-.p2align 4
-_OPENSSL_ia32_rdrand:
- movl $8,%ecx
-L$oop_rdrand:
-.byte 72,15,199,240
- jc L$break_rdrand
- loop L$oop_rdrand
-L$break_rdrand:
- cmpq $0,%rax
- cmoveq %rcx,%rax
- .byte 0xf3,0xc3
diff --git a/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm b/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
index b9f6fd081b..2c590b94f4 100644
--- a/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/aes/aes-x86_64.asm
@@ -333,9 +333,6 @@ _x86_64_AES_encrypt_compact ENDP
PUBLIC AES_encrypt
ALIGN 16
-PUBLIC asm_AES_encrypt
-
-asm_AES_encrypt::
AES_encrypt PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
@@ -795,9 +792,6 @@ _x86_64_AES_decrypt_compact ENDP
PUBLIC AES_decrypt
ALIGN 16
-PUBLIC asm_AES_decrypt
-
-asm_AES_decrypt::
AES_decrypt PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
@@ -873,14 +867,14 @@ $L$dec_epilogue::
DB 0F3h,0C3h ;repret
$L$SEH_end_AES_decrypt::
AES_decrypt ENDP
-PUBLIC private_AES_set_encrypt_key
+PUBLIC AES_set_encrypt_key
ALIGN 16
-private_AES_set_encrypt_key PROC PUBLIC
+AES_set_encrypt_key PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
-$L$SEH_begin_private_AES_set_encrypt_key::
+$L$SEH_begin_AES_set_encrypt_key::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
@@ -908,8 +902,8 @@ $L$enc_key_epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
-$L$SEH_end_private_AES_set_encrypt_key::
-private_AES_set_encrypt_key ENDP
+$L$SEH_end_AES_set_encrypt_key::
+AES_set_encrypt_key ENDP
ALIGN 16
@@ -1151,14 +1145,14 @@ $L$exit::
DB 0f3h,0c3h
_x86_64_AES_set_encrypt_key ENDP
-PUBLIC private_AES_set_decrypt_key
+PUBLIC AES_set_decrypt_key
ALIGN 16
-private_AES_set_decrypt_key PROC PUBLIC
+AES_set_decrypt_key PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
-$L$SEH_begin_private_AES_set_decrypt_key::
+$L$SEH_begin_AES_set_decrypt_key::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
@@ -1348,15 +1342,12 @@ $L$dec_key_epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
-$L$SEH_end_private_AES_set_decrypt_key::
-private_AES_set_decrypt_key ENDP
+$L$SEH_end_AES_set_decrypt_key::
+AES_set_decrypt_key ENDP
PUBLIC AES_cbc_encrypt
ALIGN 16
EXTERN OPENSSL_ia32cap_P:NEAR
-PUBLIC asm_AES_cbc_encrypt
-
-asm_AES_cbc_encrypt::
AES_cbc_encrypt PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
@@ -2851,13 +2842,13 @@ ALIGN 4
DD imagerel $L$SEH_end_AES_decrypt
DD imagerel $L$SEH_info_AES_decrypt
- DD imagerel $L$SEH_begin_private_AES_set_encrypt_key
- DD imagerel $L$SEH_end_private_AES_set_encrypt_key
- DD imagerel $L$SEH_info_private_AES_set_encrypt_key
+ DD imagerel $L$SEH_begin_AES_set_encrypt_key
+ DD imagerel $L$SEH_end_AES_set_encrypt_key
+ DD imagerel $L$SEH_info_AES_set_encrypt_key
- DD imagerel $L$SEH_begin_private_AES_set_decrypt_key
- DD imagerel $L$SEH_end_private_AES_set_decrypt_key
- DD imagerel $L$SEH_info_private_AES_set_decrypt_key
+ DD imagerel $L$SEH_begin_AES_set_decrypt_key
+ DD imagerel $L$SEH_end_AES_set_decrypt_key
+ DD imagerel $L$SEH_info_AES_set_decrypt_key
DD imagerel $L$SEH_begin_AES_cbc_encrypt
DD imagerel $L$SEH_end_AES_cbc_encrypt
@@ -2876,12 +2867,12 @@ DB 9,0,0,0
DD imagerel block_se_handler
DD imagerel $L$dec_prologue,imagerel $L$dec_epilogue
-$L$SEH_info_private_AES_set_encrypt_key::
+$L$SEH_info_AES_set_encrypt_key::
DB 9,0,0,0
DD imagerel key_se_handler
DD imagerel $L$enc_key_prologue,imagerel $L$enc_key_epilogue
-$L$SEH_info_private_AES_set_decrypt_key::
+$L$SEH_info_AES_set_decrypt_key::
DB 9,0,0,0
DD imagerel key_se_handler
DD imagerel $L$dec_key_prologue,imagerel $L$dec_key_epilogue
diff --git a/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm b/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
index f4518aa3bd..9e54d88953 100644
--- a/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
+++ b/deps/openssl/asm/x64-win32-masm/bn/x86_64-mont.asm
@@ -17,16 +17,6 @@ $L$SEH_begin_bn_mul_mont::
mov r9,QWORD PTR[48+rsp]
- test r9d,3
- jnz $L$mul_enter
- cmp r9d,8
- jb $L$mul_enter
- cmp rdx,rsi
- jne $L$mul4x_enter
- jmp $L$sqr4x_enter
-
-ALIGN 16
-$L$mul_enter::
push rbx
push rbp
push r12
@@ -42,63 +32,48 @@ $L$mul_enter::
and rsp,-1024
mov QWORD PTR[8+r9*8+rsp],r11
-$L$mul_body::
+$L$prologue::
mov r12,rdx
+
mov r8,QWORD PTR[r8]
- mov rbx,QWORD PTR[r12]
- mov rax,QWORD PTR[rsi]
xor r14,r14
xor r15,r15
- mov rbp,r8
+ mov rbx,QWORD PTR[r12]
+ mov rax,QWORD PTR[rsi]
mul rbx
mov r10,rax
- mov rax,QWORD PTR[rcx]
-
- imul rbp,r10
mov r11,rdx
- mul rbp
- add r10,rax
- mov rax,QWORD PTR[8+rsi]
+ imul rax,r8
+ mov rbp,rax
+
+ mul QWORD PTR[rcx]
+ add rax,r10
adc rdx,0
mov r13,rdx
lea r15,QWORD PTR[1+r15]
- jmp $L$1st_enter
-
-ALIGN 16
$L$1st::
- add r13,rax
mov rax,QWORD PTR[r15*8+rsi]
- adc rdx,0
- add r13,r11
- mov r11,r10
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],r13
- mov r13,rdx
-
-$L$1st_enter::
mul rbx
- add r11,rax
- mov rax,QWORD PTR[r15*8+rcx]
+ add rax,r11
adc rdx,0
- lea r15,QWORD PTR[1+r15]
- mov r10,rdx
+ mov r10,rax
+ mov rax,QWORD PTR[r15*8+rcx]
+ mov r11,rdx
mul rbp
- cmp r15,r9
- jne $L$1st
-
- add r13,rax
- mov rax,QWORD PTR[rsi]
+ add rax,r13
+ lea r15,QWORD PTR[1+r15]
adc rdx,0
- add r13,r11
+ add rax,r10
adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],r13
+ mov QWORD PTR[((-16))+r15*8+rsp],rax
+ cmp r15,r9
mov r13,rdx
- mov r11,r10
+ jl $L$1st
xor rdx,rdx
add r13,r11
@@ -107,64 +82,50 @@ $L$1st_enter::
mov QWORD PTR[r9*8+rsp],rdx
lea r14,QWORD PTR[1+r14]
- jmp $L$outer
-ALIGN 16
+ALIGN 4
$L$outer::
- mov rbx,QWORD PTR[r14*8+r12]
xor r15,r15
- mov rbp,r8
- mov r10,QWORD PTR[rsp]
+
+ mov rbx,QWORD PTR[r14*8+r12]
+ mov rax,QWORD PTR[rsi]
mul rbx
- add r10,rax
- mov rax,QWORD PTR[rcx]
+ add rax,QWORD PTR[rsp]
adc rdx,0
-
- imul rbp,r10
+ mov r10,rax
mov r11,rdx
- mul rbp
- add r10,rax
- mov rax,QWORD PTR[8+rsi]
- adc rdx,0
+ imul rax,r8
+ mov rbp,rax
+
+ mul QWORD PTR[r15*8+rcx]
+ add rax,r10
mov r10,QWORD PTR[8+rsp]
+ adc rdx,0
mov r13,rdx
lea r15,QWORD PTR[1+r15]
- jmp $L$inner_enter
-
-ALIGN 16
+ALIGN 4
$L$inner::
- add r13,rax
mov rax,QWORD PTR[r15*8+rsi]
- adc rdx,0
- add r13,r10
- mov r10,QWORD PTR[r15*8+rsp]
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],r13
- mov r13,rdx
-
-$L$inner_enter::
mul rbx
- add r11,rax
+ add rax,r11
+ adc rdx,0
+ add r10,rax
mov rax,QWORD PTR[r15*8+rcx]
adc rdx,0
- add r10,r11
mov r11,rdx
- adc r11,0
- lea r15,QWORD PTR[1+r15]
mul rbp
- cmp r15,r9
- jne $L$inner
-
- add r13,rax
- mov rax,QWORD PTR[rsi]
+ add rax,r13
+ lea r15,QWORD PTR[1+r15]
adc rdx,0
- add r13,r10
- mov r10,QWORD PTR[r15*8+rsp]
+ add rax,r10
adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],r13
+ mov r10,QWORD PTR[r15*8+rsp]
+ cmp r15,r9
+ mov QWORD PTR[((-16))+r15*8+rsp],rax
mov r13,rdx
+ jl $L$inner
xor rdx,rdx
add r13,r11
@@ -178,449 +139,35 @@ $L$inner_enter::
cmp r14,r9
jl $L$outer
- xor r14,r14
- mov rax,QWORD PTR[rsp]
lea rsi,QWORD PTR[rsp]
- mov r15,r9
+ lea r15,QWORD PTR[((-1))+r9]
+
+ mov rax,QWORD PTR[rsi]
+ xor r14,r14
jmp $L$sub
ALIGN 16
$L$sub:: sbb rax,QWORD PTR[r14*8+rcx]
mov QWORD PTR[r14*8+rdi],rax
+ dec r15
mov rax,QWORD PTR[8+r14*8+rsi]
lea r14,QWORD PTR[1+r14]
- dec r15
- jnz $L$sub
+ jge $L$sub
sbb rax,0
- xor r14,r14
and rsi,rax
not rax
mov rcx,rdi
and rcx,rax
- mov r15,r9
+ lea r15,QWORD PTR[((-1))+r9]
or rsi,rcx
ALIGN 16
$L$copy::
- mov rax,QWORD PTR[r14*8+rsi]
- mov QWORD PTR[r14*8+rsp],r14
- mov QWORD PTR[r14*8+rdi],rax
- lea r14,QWORD PTR[1+r14]
- sub r15,1
- jnz $L$copy
-
- mov rsi,QWORD PTR[8+r9*8+rsp]
- mov rax,1
- mov r15,QWORD PTR[rsi]
- mov r14,QWORD PTR[8+rsi]
- mov r13,QWORD PTR[16+rsi]
- mov r12,QWORD PTR[24+rsi]
- mov rbp,QWORD PTR[32+rsi]
- mov rbx,QWORD PTR[40+rsi]
- lea rsp,QWORD PTR[48+rsi]
-$L$mul_epilogue::
- mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
- mov rsi,QWORD PTR[16+rsp]
- DB 0F3h,0C3h ;repret
-$L$SEH_end_bn_mul_mont::
-bn_mul_mont ENDP
-
-ALIGN 16
-bn_mul4x_mont PROC PRIVATE
- mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
- mov QWORD PTR[16+rsp],rsi
- mov rax,rsp
-$L$SEH_begin_bn_mul4x_mont::
- mov rdi,rcx
- mov rsi,rdx
- mov rdx,r8
- mov rcx,r9
- mov r8,QWORD PTR[40+rsp]
- mov r9,QWORD PTR[48+rsp]
-
-
-$L$mul4x_enter::
- push rbx
- push rbp
- push r12
- push r13
- push r14
- push r15
-
- mov r9d,r9d
- lea r10,QWORD PTR[4+r9]
- mov r11,rsp
- neg r10
- lea rsp,QWORD PTR[r10*8+rsp]
- and rsp,-1024
-
- mov QWORD PTR[8+r9*8+rsp],r11
-$L$mul4x_body::
- mov QWORD PTR[16+r9*8+rsp],rdi
- mov r12,rdx
- mov r8,QWORD PTR[r8]
- mov rbx,QWORD PTR[r12]
- mov rax,QWORD PTR[rsi]
-
- xor r14,r14
- xor r15,r15
-
- mov rbp,r8
- mul rbx
- mov r10,rax
- mov rax,QWORD PTR[rcx]
-
- imul rbp,r10
- mov r11,rdx
-
- mul rbp
- add r10,rax
- mov rax,QWORD PTR[8+rsi]
- adc rdx,0
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[8+rcx]
- adc rdx,0
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[16+rsi]
- adc rdx,0
- add rdi,r11
- lea r15,QWORD PTR[4+r15]
- adc rdx,0
- mov QWORD PTR[rsp],rdi
- mov r13,rdx
- jmp $L$1st4x
-ALIGN 16
-$L$1st4x::
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[((-16))+r15*8+rcx]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[((-8))+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-24))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[((-8))+r15*8+rcx]
- adc rdx,0
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[r15*8+rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],rdi
- mov r13,rdx
-
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[r15*8+rcx]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[8+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-8))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[8+r15*8+rcx]
- adc rdx,0
- lea r15,QWORD PTR[4+r15]
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[((-16))+r15*8+rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-32))+r15*8+rsp],rdi
- mov r13,rdx
- cmp r15,r9
- jl $L$1st4x
-
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[((-16))+r15*8+rcx]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[((-8))+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-24))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[((-8))+r15*8+rcx]
- adc rdx,0
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],rdi
- mov r13,rdx
-
- xor rdi,rdi
- add r13,r10
- adc rdi,0
- mov QWORD PTR[((-8))+r15*8+rsp],r13
- mov QWORD PTR[r15*8+rsp],rdi
-
- lea r14,QWORD PTR[1+r14]
-ALIGN 4
-$L$outer4x::
- mov rbx,QWORD PTR[r14*8+r12]
- xor r15,r15
- mov r10,QWORD PTR[rsp]
- mov rbp,r8
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[rcx]
- adc rdx,0
-
- imul rbp,r10
- mov r11,rdx
-
- mul rbp
- add r10,rax
- mov rax,QWORD PTR[8+rsi]
- adc rdx,0
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[8+rcx]
- adc rdx,0
- add r11,QWORD PTR[8+rsp]
- adc rdx,0
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[16+rsi]
- adc rdx,0
- add rdi,r11
- lea r15,QWORD PTR[4+r15]
- adc rdx,0
- mov QWORD PTR[rsp],rdi
- mov r13,rdx
- jmp $L$inner4x
-ALIGN 16
-$L$inner4x::
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[((-16))+r15*8+rcx]
- adc rdx,0
- add r10,QWORD PTR[((-16))+r15*8+rsp]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[((-8))+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-24))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[((-8))+r15*8+rcx]
- adc rdx,0
- add r11,QWORD PTR[((-8))+r15*8+rsp]
- adc rdx,0
- mov r10,rdx
-
- mul rbp
- add rdi,rax
mov rax,QWORD PTR[r15*8+rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],rdi
- mov r13,rdx
-
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[r15*8+rcx]
- adc rdx,0
- add r10,QWORD PTR[r15*8+rsp]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[8+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-8))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[8+r15*8+rcx]
- adc rdx,0
- add r11,QWORD PTR[8+r15*8+rsp]
- adc rdx,0
- lea r15,QWORD PTR[4+r15]
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[((-16))+r15*8+rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-32))+r15*8+rsp],rdi
- mov r13,rdx
- cmp r15,r9
- jl $L$inner4x
-
- mul rbx
- add r10,rax
- mov rax,QWORD PTR[((-16))+r15*8+rcx]
- adc rdx,0
- add r10,QWORD PTR[((-16))+r15*8+rsp]
- adc rdx,0
- mov r11,rdx
-
- mul rbp
- add r13,rax
- mov rax,QWORD PTR[((-8))+r15*8+rsi]
- adc rdx,0
- add r13,r10
- adc rdx,0
- mov QWORD PTR[((-24))+r15*8+rsp],r13
- mov rdi,rdx
-
- mul rbx
- add r11,rax
- mov rax,QWORD PTR[((-8))+r15*8+rcx]
- adc rdx,0
- add r11,QWORD PTR[((-8))+r15*8+rsp]
- adc rdx,0
- lea r14,QWORD PTR[1+r14]
- mov r10,rdx
-
- mul rbp
- add rdi,rax
- mov rax,QWORD PTR[rsi]
- adc rdx,0
- add rdi,r11
- adc rdx,0
- mov QWORD PTR[((-16))+r15*8+rsp],rdi
- mov r13,rdx
-
- xor rdi,rdi
- add r13,r10
- adc rdi,0
- add r13,QWORD PTR[r9*8+rsp]
- adc rdi,0
- mov QWORD PTR[((-8))+r15*8+rsp],r13
- mov QWORD PTR[r15*8+rsp],rdi
-
- cmp r14,r9
- jl $L$outer4x
- mov rdi,QWORD PTR[16+r9*8+rsp]
- mov rax,QWORD PTR[rsp]
- pxor xmm0,xmm0
- mov rdx,QWORD PTR[8+rsp]
- shr r9,2
- lea rsi,QWORD PTR[rsp]
- xor r14,r14
-
- sub rax,QWORD PTR[rcx]
- mov rbx,QWORD PTR[16+rsi]
- mov rbp,QWORD PTR[24+rsi]
- sbb rdx,QWORD PTR[8+rcx]
- lea r15,QWORD PTR[((-1))+r9]
- jmp $L$sub4x
-ALIGN 16
-$L$sub4x::
- mov QWORD PTR[r14*8+rdi],rax
- mov QWORD PTR[8+r14*8+rdi],rdx
- sbb rbx,QWORD PTR[16+r14*8+rcx]
- mov rax,QWORD PTR[32+r14*8+rsi]
- mov rdx,QWORD PTR[40+r14*8+rsi]
- sbb rbp,QWORD PTR[24+r14*8+rcx]
- mov QWORD PTR[16+r14*8+rdi],rbx
- mov QWORD PTR[24+r14*8+rdi],rbp
- sbb rax,QWORD PTR[32+r14*8+rcx]
- mov rbx,QWORD PTR[48+r14*8+rsi]
- mov rbp,QWORD PTR[56+r14*8+rsi]
- sbb rdx,QWORD PTR[40+r14*8+rcx]
- lea r14,QWORD PTR[4+r14]
+ mov QWORD PTR[r15*8+rdi],rax
+ mov QWORD PTR[r15*8+rsp],r14
dec r15
- jnz $L$sub4x
-
- mov QWORD PTR[r14*8+rdi],rax
- mov rax,QWORD PTR[32+r14*8+rsi]
- sbb rbx,QWORD PTR[16+r14*8+rcx]
- mov QWORD PTR[8+r14*8+rdi],rdx
- sbb rbp,QWORD PTR[24+r14*8+rcx]
- mov QWORD PTR[16+r14*8+rdi],rbx
+ jge $L$copy
- sbb rax,0
- mov QWORD PTR[24+r14*8+rdi],rbp
- xor r14,r14
- and rsi,rax
- not rax
- mov rcx,rdi
- and rcx,rax
- lea r15,QWORD PTR[((-1))+r9]
- or rsi,rcx
-
- movdqu xmm1,XMMWORD PTR[rsi]
- movdqa XMMWORD PTR[rsp],xmm0
- movdqu XMMWORD PTR[rdi],xmm1
- jmp $L$copy4x
-ALIGN 16
-$L$copy4x::
- movdqu xmm2,XMMWORD PTR[16+r14*1+rsi]
- movdqu xmm1,XMMWORD PTR[32+r14*1+rsi]
- movdqa XMMWORD PTR[16+r14*1+rsp],xmm0
- movdqu XMMWORD PTR[16+r14*1+rdi],xmm2
- movdqa XMMWORD PTR[32+r14*1+rsp],xmm0
- movdqu XMMWORD PTR[32+r14*1+rdi],xmm1
- lea r14,QWORD PTR[32+r14]
- dec r15
- jnz $L$copy4x
-
- shl r9,2
- movdqu xmm2,XMMWORD PTR[16+r14*1+rsi]
- movdqa XMMWORD PTR[16+r14*1+rsp],xmm0
- movdqu XMMWORD PTR[16+r14*1+rdi],xmm2
mov rsi,QWORD PTR[8+r9*8+rsp]
mov rax,1
mov r15,QWORD PTR[rsi]
@@ -630,792 +177,12 @@ $L$copy4x::
mov rbp,QWORD PTR[32+rsi]
mov rbx,QWORD PTR[40+rsi]
lea rsp,QWORD PTR[48+rsi]
-$L$mul4x_epilogue::
- mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
- mov rsi,QWORD PTR[16+rsp]
- DB 0F3h,0C3h ;repret
-$L$SEH_end_bn_mul4x_mont::
-bn_mul4x_mont ENDP
-
-ALIGN 16
-bn_sqr4x_mont PROC PRIVATE
- mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
- mov QWORD PTR[16+rsp],rsi
- mov rax,rsp
-$L$SEH_begin_bn_sqr4x_mont::
- mov rdi,rcx
- mov rsi,rdx
- mov rdx,r8
- mov rcx,r9
- mov r8,QWORD PTR[40+rsp]
- mov r9,QWORD PTR[48+rsp]
-
-
-$L$sqr4x_enter::
- push rbx
- push rbp
- push r12
- push r13
- push r14
- push r15
-
- shl r9d,3
- xor r10,r10
- mov r11,rsp
- sub r10,r9
- mov r8,QWORD PTR[r8]
- lea rsp,QWORD PTR[((-72))+r10*2+rsp]
- and rsp,-1024
-
-
-
-
-
-
-
-
-
-
-
- mov QWORD PTR[32+rsp],rdi
- mov QWORD PTR[40+rsp],rcx
- mov QWORD PTR[48+rsp],r8
- mov QWORD PTR[56+rsp],r11
-$L$sqr4x_body::
-
-
-
-
-
-
-
- lea rbp,QWORD PTR[32+r10]
- lea rsi,QWORD PTR[r9*1+rsi]
-
- mov rcx,r9
-
-
- mov r14,QWORD PTR[((-32))+rbp*1+rsi]
- lea rdi,QWORD PTR[64+r9*2+rsp]
- mov rax,QWORD PTR[((-24))+rbp*1+rsi]
- lea rdi,QWORD PTR[((-32))+rbp*1+rdi]
- mov rbx,QWORD PTR[((-16))+rbp*1+rsi]
- mov r15,rax
-
- mul r14
- mov r10,rax
- mov rax,rbx
- mov r11,rdx
- mov QWORD PTR[((-24))+rbp*1+rdi],r10
-
- xor r10,r10
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[((-16))+rbp*1+rdi],r11
-
- lea rcx,QWORD PTR[((-16))+rbp]
-
-
- mov rbx,QWORD PTR[8+rcx*1+rsi]
- mul r15
- mov r12,rax
- mov rax,rbx
- mov r13,rdx
-
- xor r11,r11
- add r10,r12
- lea rcx,QWORD PTR[16+rcx]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-8))+rcx*1+rdi],r10
- jmp $L$sqr4x_1st
-
-ALIGN 16
-$L$sqr4x_1st::
- mov rbx,QWORD PTR[rcx*1+rsi]
- xor r12,r12
- mul r15
- add r13,rax
- mov rax,rbx
- adc r12,rdx
-
- xor r10,r10
- add r11,r13
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[rcx*1+rdi],r11
-
-
- mov rbx,QWORD PTR[8+rcx*1+rsi]
- xor r13,r13
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
-
- xor r11,r11
- add r10,r12
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[8+rcx*1+rdi],r10
-
- mov rbx,QWORD PTR[16+rcx*1+rsi]
- xor r12,r12
- mul r15
- add r13,rax
- mov rax,rbx
- adc r12,rdx
-
- xor r10,r10
- add r11,r13
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[16+rcx*1+rdi],r11
-
-
- mov rbx,QWORD PTR[24+rcx*1+rsi]
- xor r13,r13
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
-
- xor r11,r11
- add r10,r12
- lea rcx,QWORD PTR[32+rcx]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-8))+rcx*1+rdi],r10
-
- cmp rcx,0
- jne $L$sqr4x_1st
-
- xor r12,r12
- add r13,r11
- adc r12,0
- mul r15
- add r13,rax
- adc r12,rdx
-
- mov QWORD PTR[rdi],r13
- lea rbp,QWORD PTR[16+rbp]
- mov QWORD PTR[8+rdi],r12
- jmp $L$sqr4x_outer
-
-ALIGN 16
-$L$sqr4x_outer::
- mov r14,QWORD PTR[((-32))+rbp*1+rsi]
- lea rdi,QWORD PTR[64+r9*2+rsp]
- mov rax,QWORD PTR[((-24))+rbp*1+rsi]
- lea rdi,QWORD PTR[((-32))+rbp*1+rdi]
- mov rbx,QWORD PTR[((-16))+rbp*1+rsi]
- mov r15,rax
-
- mov r10,QWORD PTR[((-24))+rbp*1+rdi]
- xor r11,r11
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-24))+rbp*1+rdi],r10
-
- xor r10,r10
- add r11,QWORD PTR[((-16))+rbp*1+rdi]
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[((-16))+rbp*1+rdi],r11
-
- lea rcx,QWORD PTR[((-16))+rbp]
- xor r12,r12
-
-
- mov rbx,QWORD PTR[8+rcx*1+rsi]
- xor r13,r13
- add r12,QWORD PTR[8+rcx*1+rdi]
- adc r13,0
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
-
- xor r11,r11
- add r10,r12
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[8+rcx*1+rdi],r10
-
- lea rcx,QWORD PTR[16+rcx]
- jmp $L$sqr4x_inner
-
-ALIGN 16
-$L$sqr4x_inner::
- mov rbx,QWORD PTR[rcx*1+rsi]
- xor r12,r12
- add r13,QWORD PTR[rcx*1+rdi]
- adc r12,0
- mul r15
- add r13,rax
- mov rax,rbx
- adc r12,rdx
-
- xor r10,r10
- add r11,r13
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[rcx*1+rdi],r11
-
- mov rbx,QWORD PTR[8+rcx*1+rsi]
- xor r13,r13
- add r12,QWORD PTR[8+rcx*1+rdi]
- adc r13,0
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
-
- xor r11,r11
- add r10,r12
- lea rcx,QWORD PTR[16+rcx]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-8))+rcx*1+rdi],r10
-
- cmp rcx,0
- jne $L$sqr4x_inner
-
- xor r12,r12
- add r13,r11
- adc r12,0
- mul r15
- add r13,rax
- adc r12,rdx
-
- mov QWORD PTR[rdi],r13
- mov QWORD PTR[8+rdi],r12
-
- add rbp,16
- jnz $L$sqr4x_outer
-
-
- mov r14,QWORD PTR[((-32))+rsi]
- lea rdi,QWORD PTR[64+r9*2+rsp]
- mov rax,QWORD PTR[((-24))+rsi]
- lea rdi,QWORD PTR[((-32))+rbp*1+rdi]
- mov rbx,QWORD PTR[((-16))+rsi]
- mov r15,rax
-
- xor r11,r11
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-24))+rdi],r10
-
- xor r10,r10
- add r11,r13
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- mov QWORD PTR[((-16))+rdi],r11
-
- mov rbx,QWORD PTR[((-8))+rsi]
- mul r15
- add r12,rax
- mov rax,rbx
- adc rdx,0
-
- xor r11,r11
- add r10,r12
- mov r13,rdx
- adc r11,0
- mul r14
- add r10,rax
- mov rax,rbx
- adc r11,rdx
- mov QWORD PTR[((-8))+rdi],r10
-
- xor r12,r12
- add r13,r11
- adc r12,0
- mul r15
- add r13,rax
- mov rax,QWORD PTR[((-16))+rsi]
- adc r12,rdx
-
- mov QWORD PTR[rdi],r13
- mov QWORD PTR[8+rdi],r12
-
- mul rbx
- add rbp,16
- xor r14,r14
- sub rbp,r9
- xor r15,r15
-
- add rax,r12
- adc rdx,0
- mov QWORD PTR[8+rdi],rax
- mov QWORD PTR[16+rdi],rdx
- mov QWORD PTR[24+rdi],r15
-
- mov rax,QWORD PTR[((-16))+rbp*1+rsi]
- lea rdi,QWORD PTR[64+r9*2+rsp]
- xor r10,r10
- mov r11,QWORD PTR[((-24))+rbp*2+rdi]
-
- lea r12,QWORD PTR[r10*2+r14]
- shr r10,63
- lea r13,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r13,r10
- mov r10,QWORD PTR[((-16))+rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[((-8))+rbp*2+rdi]
- adc r12,rax
- mov rax,QWORD PTR[((-8))+rbp*1+rsi]
- mov QWORD PTR[((-32))+rbp*2+rdi],r12
- adc r13,rdx
-
- lea rbx,QWORD PTR[r10*2+r14]
- mov QWORD PTR[((-24))+rbp*2+rdi],r13
- sbb r15,r15
- shr r10,63
- lea r8,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r8,r10
- mov r10,QWORD PTR[rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[8+rbp*2+rdi]
- adc rbx,rax
- mov rax,QWORD PTR[rbp*1+rsi]
- mov QWORD PTR[((-16))+rbp*2+rdi],rbx
- adc r8,rdx
- lea rbp,QWORD PTR[16+rbp]
- mov QWORD PTR[((-40))+rbp*2+rdi],r8
- sbb r15,r15
- jmp $L$sqr4x_shift_n_add
-
-ALIGN 16
-$L$sqr4x_shift_n_add::
- lea r12,QWORD PTR[r10*2+r14]
- shr r10,63
- lea r13,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r13,r10
- mov r10,QWORD PTR[((-16))+rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[((-8))+rbp*2+rdi]
- adc r12,rax
- mov rax,QWORD PTR[((-8))+rbp*1+rsi]
- mov QWORD PTR[((-32))+rbp*2+rdi],r12
- adc r13,rdx
-
- lea rbx,QWORD PTR[r10*2+r14]
- mov QWORD PTR[((-24))+rbp*2+rdi],r13
- sbb r15,r15
- shr r10,63
- lea r8,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r8,r10
- mov r10,QWORD PTR[rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[8+rbp*2+rdi]
- adc rbx,rax
- mov rax,QWORD PTR[rbp*1+rsi]
- mov QWORD PTR[((-16))+rbp*2+rdi],rbx
- adc r8,rdx
-
- lea r12,QWORD PTR[r10*2+r14]
- mov QWORD PTR[((-8))+rbp*2+rdi],r8
- sbb r15,r15
- shr r10,63
- lea r13,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r13,r10
- mov r10,QWORD PTR[16+rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[24+rbp*2+rdi]
- adc r12,rax
- mov rax,QWORD PTR[8+rbp*1+rsi]
- mov QWORD PTR[rbp*2+rdi],r12
- adc r13,rdx
-
- lea rbx,QWORD PTR[r10*2+r14]
- mov QWORD PTR[8+rbp*2+rdi],r13
- sbb r15,r15
- shr r10,63
- lea r8,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r8,r10
- mov r10,QWORD PTR[32+rbp*2+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[40+rbp*2+rdi]
- adc rbx,rax
- mov rax,QWORD PTR[16+rbp*1+rsi]
- mov QWORD PTR[16+rbp*2+rdi],rbx
- adc r8,rdx
- mov QWORD PTR[24+rbp*2+rdi],r8
- sbb r15,r15
- add rbp,32
- jnz $L$sqr4x_shift_n_add
-
- lea r12,QWORD PTR[r10*2+r14]
- shr r10,63
- lea r13,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r13,r10
- mov r10,QWORD PTR[((-16))+rdi]
- mov r14,r11
- mul rax
- neg r15
- mov r11,QWORD PTR[((-8))+rdi]
- adc r12,rax
- mov rax,QWORD PTR[((-8))+rsi]
- mov QWORD PTR[((-32))+rdi],r12
- adc r13,rdx
-
- lea rbx,QWORD PTR[r10*2+r14]
- mov QWORD PTR[((-24))+rdi],r13
- sbb r15,r15
- shr r10,63
- lea r8,QWORD PTR[r11*2+rcx]
- shr r11,63
- or r8,r10
- mul rax
- neg r15
- adc rbx,rax
- adc r8,rdx
- mov QWORD PTR[((-16))+rdi],rbx
- mov QWORD PTR[((-8))+rdi],r8
- mov rsi,QWORD PTR[40+rsp]
- mov r8,QWORD PTR[48+rsp]
- xor rcx,rcx
- mov QWORD PTR[rsp],r9
- sub rcx,r9
- mov r10,QWORD PTR[64+rsp]
- mov r14,r8
- lea rax,QWORD PTR[64+r9*2+rsp]
- lea rdi,QWORD PTR[64+r9*1+rsp]
- mov QWORD PTR[8+rsp],rax
- lea rsi,QWORD PTR[r9*1+rsi]
- xor rbp,rbp
-
- mov rax,QWORD PTR[rcx*1+rsi]
- mov r9,QWORD PTR[8+rcx*1+rsi]
- imul r14,r10
- mov rbx,rax
- jmp $L$sqr4x_mont_outer
-
-ALIGN 16
-$L$sqr4x_mont_outer::
- xor r11,r11
- mul r14
- add r10,rax
- mov rax,r9
- adc r11,rdx
- mov r15,r8
-
- xor r10,r10
- add r11,QWORD PTR[8+rcx*1+rdi]
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
-
- imul r15,r11
-
- mov rbx,QWORD PTR[16+rcx*1+rsi]
- xor r13,r13
- add r12,r11
- adc r13,0
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
- mov QWORD PTR[8+rcx*1+rdi],r12
-
- xor r11,r11
- add r10,QWORD PTR[16+rcx*1+rdi]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,r9
- adc r11,rdx
-
- mov r9,QWORD PTR[24+rcx*1+rsi]
- xor r12,r12
- add r13,r10
- adc r12,0
- mul r15
- add r13,rax
- mov rax,r9
- adc r12,rdx
- mov QWORD PTR[16+rcx*1+rdi],r13
-
- xor r10,r10
- add r11,QWORD PTR[24+rcx*1+rdi]
- lea rcx,QWORD PTR[32+rcx]
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- jmp $L$sqr4x_mont_inner
-
-ALIGN 16
-$L$sqr4x_mont_inner::
- mov rbx,QWORD PTR[rcx*1+rsi]
- xor r13,r13
- add r12,r11
- adc r13,0
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
- mov QWORD PTR[((-8))+rcx*1+rdi],r12
-
- xor r11,r11
- add r10,QWORD PTR[rcx*1+rdi]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,r9
- adc r11,rdx
-
- mov r9,QWORD PTR[8+rcx*1+rsi]
- xor r12,r12
- add r13,r10
- adc r12,0
- mul r15
- add r13,rax
- mov rax,r9
- adc r12,rdx
- mov QWORD PTR[rcx*1+rdi],r13
-
- xor r10,r10
- add r11,QWORD PTR[8+rcx*1+rdi]
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
-
-
- mov rbx,QWORD PTR[16+rcx*1+rsi]
- xor r13,r13
- add r12,r11
- adc r13,0
- mul r15
- add r12,rax
- mov rax,rbx
- adc r13,rdx
- mov QWORD PTR[8+rcx*1+rdi],r12
-
- xor r11,r11
- add r10,QWORD PTR[16+rcx*1+rdi]
- adc r11,0
- mul r14
- add r10,rax
- mov rax,r9
- adc r11,rdx
-
- mov r9,QWORD PTR[24+rcx*1+rsi]
- xor r12,r12
- add r13,r10
- adc r12,0
- mul r15
- add r13,rax
- mov rax,r9
- adc r12,rdx
- mov QWORD PTR[16+rcx*1+rdi],r13
-
- xor r10,r10
- add r11,QWORD PTR[24+rcx*1+rdi]
- lea rcx,QWORD PTR[32+rcx]
- adc r10,0
- mul r14
- add r11,rax
- mov rax,rbx
- adc r10,rdx
- cmp rcx,0
- jne $L$sqr4x_mont_inner
-
- sub rcx,QWORD PTR[rsp]
- mov r14,r8
-
- xor r13,r13
- add r12,r11
- adc r13,0
- mul r15
- add r12,rax
- mov rax,r9
- adc r13,rdx
- mov QWORD PTR[((-8))+rdi],r12
-
- xor r11,r11
- add r10,QWORD PTR[rdi]
- adc r11,0
- mov rbx,QWORD PTR[rcx*1+rsi]
- add r10,rbp
- adc r11,0
-
- imul r14,QWORD PTR[16+rcx*1+rdi]
- xor r12,r12
- mov r9,QWORD PTR[8+rcx*1+rsi]
- add r13,r10
- mov r10,QWORD PTR[16+rcx*1+rdi]
- adc r12,0
- mul r15
- add r13,rax
- mov rax,rbx
- adc r12,rdx
- mov QWORD PTR[rdi],r13
-
- xor rbp,rbp
- add r12,QWORD PTR[8+rdi]
- adc rbp,rbp
- add r12,r11
- lea rdi,QWORD PTR[16+rdi]
- adc rbp,0
- mov QWORD PTR[((-8))+rdi],r12
- cmp rdi,QWORD PTR[8+rsp]
- jb $L$sqr4x_mont_outer
-
- mov r9,QWORD PTR[rsp]
- mov QWORD PTR[rdi],rbp
- mov rax,QWORD PTR[64+r9*1+rsp]
- lea rbx,QWORD PTR[64+r9*1+rsp]
- mov rsi,QWORD PTR[40+rsp]
- shr r9,5
- mov rdx,QWORD PTR[8+rbx]
- xor rbp,rbp
-
- mov rdi,QWORD PTR[32+rsp]
- sub rax,QWORD PTR[rsi]
- mov r10,QWORD PTR[16+rbx]
- mov r11,QWORD PTR[24+rbx]
- sbb rdx,QWORD PTR[8+rsi]
- lea rcx,QWORD PTR[((-1))+r9]
- jmp $L$sqr4x_sub
-ALIGN 16
-$L$sqr4x_sub::
- mov QWORD PTR[rbp*8+rdi],rax
- mov QWORD PTR[8+rbp*8+rdi],rdx
- sbb r10,QWORD PTR[16+rbp*8+rsi]
- mov rax,QWORD PTR[32+rbp*8+rbx]
- mov rdx,QWORD PTR[40+rbp*8+rbx]
- sbb r11,QWORD PTR[24+rbp*8+rsi]
- mov QWORD PTR[16+rbp*8+rdi],r10
- mov QWORD PTR[24+rbp*8+rdi],r11
- sbb rax,QWORD PTR[32+rbp*8+rsi]
- mov r10,QWORD PTR[48+rbp*8+rbx]
- mov r11,QWORD PTR[56+rbp*8+rbx]
- sbb rdx,QWORD PTR[40+rbp*8+rsi]
- lea rbp,QWORD PTR[4+rbp]
- dec rcx
- jnz $L$sqr4x_sub
-
- mov QWORD PTR[rbp*8+rdi],rax
- mov rax,QWORD PTR[32+rbp*8+rbx]
- sbb r10,QWORD PTR[16+rbp*8+rsi]
- mov QWORD PTR[8+rbp*8+rdi],rdx
- sbb r11,QWORD PTR[24+rbp*8+rsi]
- mov QWORD PTR[16+rbp*8+rdi],r10
-
- sbb rax,0
- mov QWORD PTR[24+rbp*8+rdi],r11
- xor rbp,rbp
- and rbx,rax
- not rax
- mov rsi,rdi
- and rsi,rax
- lea rcx,QWORD PTR[((-1))+r9]
- or rbx,rsi
-
- pxor xmm0,xmm0
- lea rsi,QWORD PTR[64+r9*8+rsp]
- movdqu xmm1,XMMWORD PTR[rbx]
- lea rsi,QWORD PTR[r9*8+rsi]
- movdqa XMMWORD PTR[64+rsp],xmm0
- movdqa XMMWORD PTR[rsi],xmm0
- movdqu XMMWORD PTR[rdi],xmm1
- jmp $L$sqr4x_copy
-ALIGN 16
-$L$sqr4x_copy::
- movdqu xmm2,XMMWORD PTR[16+rbp*1+rbx]
- movdqu xmm1,XMMWORD PTR[32+rbp*1+rbx]
- movdqa XMMWORD PTR[80+rbp*1+rsp],xmm0
- movdqa XMMWORD PTR[96+rbp*1+rsp],xmm0
- movdqa XMMWORD PTR[16+rbp*1+rsi],xmm0
- movdqa XMMWORD PTR[32+rbp*1+rsi],xmm0
- movdqu XMMWORD PTR[16+rbp*1+rdi],xmm2
- movdqu XMMWORD PTR[32+rbp*1+rdi],xmm1
- lea rbp,QWORD PTR[32+rbp]
- dec rcx
- jnz $L$sqr4x_copy
-
- movdqu xmm2,XMMWORD PTR[16+rbp*1+rbx]
- movdqa XMMWORD PTR[80+rbp*1+rsp],xmm0
- movdqa XMMWORD PTR[16+rbp*1+rsi],xmm0
- movdqu XMMWORD PTR[16+rbp*1+rdi],xmm2
- mov rsi,QWORD PTR[56+rsp]
- mov rax,1
- mov r15,QWORD PTR[rsi]
- mov r14,QWORD PTR[8+rsi]
- mov r13,QWORD PTR[16+rsi]
- mov r12,QWORD PTR[24+rsi]
- mov rbp,QWORD PTR[32+rsi]
- mov rbx,QWORD PTR[40+rsi]
- lea rsp,QWORD PTR[48+rsi]
-$L$sqr4x_epilogue::
+$L$epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
-$L$SEH_end_bn_sqr4x_mont::
-bn_sqr4x_mont ENDP
+$L$SEH_end_bn_mul_mont::
+bn_mul_mont ENDP
DB 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105
DB 112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56
DB 54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83
@@ -1425,7 +192,7 @@ ALIGN 16
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
-mul_handler PROC PRIVATE
+se_handler PROC PRIVATE
push rsi
push rdi
push rbx
@@ -1440,20 +207,15 @@ mul_handler PROC PRIVATE
mov rax,QWORD PTR[120+r8]
mov rbx,QWORD PTR[248+r8]
- mov rsi,QWORD PTR[8+r9]
- mov r11,QWORD PTR[56+r9]
-
- mov r10d,DWORD PTR[r11]
- lea r10,QWORD PTR[r10*1+rsi]
+ lea r10,QWORD PTR[$L$prologue]
cmp rbx,r10
- jb $L$common_seh_tail
+ jb $L$in_prologue
mov rax,QWORD PTR[152+r8]
- mov r10d,DWORD PTR[4+r11]
- lea r10,QWORD PTR[r10*1+rsi]
+ lea r10,QWORD PTR[$L$epilogue]
cmp rbx,r10
- jae $L$common_seh_tail
+ jae $L$in_prologue
mov r10,QWORD PTR[192+r8]
mov rax,QWORD PTR[8+r10*8+rax]
@@ -1472,53 +234,7 @@ mul_handler PROC PRIVATE
mov QWORD PTR[232+r8],r14
mov QWORD PTR[240+r8],r15
- jmp $L$common_seh_tail
-mul_handler ENDP
-
-
-ALIGN 16
-sqr_handler PROC PRIVATE
- push rsi
- push rdi
- push rbx
- push rbp
- push r12
- push r13
- push r14
- push r15
- pushfq
- sub rsp,64
-
- mov rax,QWORD PTR[120+r8]
- mov rbx,QWORD PTR[248+r8]
-
- lea r10,QWORD PTR[$L$sqr4x_body]
- cmp rbx,r10
- jb $L$common_seh_tail
-
- mov rax,QWORD PTR[152+r8]
-
- lea r10,QWORD PTR[$L$sqr4x_epilogue]
- cmp rbx,r10
- jae $L$common_seh_tail
-
- mov rax,QWORD PTR[56+rax]
- lea rax,QWORD PTR[48+rax]
-
- mov rbx,QWORD PTR[((-8))+rax]
- mov rbp,QWORD PTR[((-16))+rax]
- mov r12,QWORD PTR[((-24))+rax]
- mov r13,QWORD PTR[((-32))+rax]
- mov r14,QWORD PTR[((-40))+rax]
- mov r15,QWORD PTR[((-48))+rax]
- mov QWORD PTR[144+r8],rbx
- mov QWORD PTR[160+r8],rbp
- mov QWORD PTR[216+r8],r12
- mov QWORD PTR[224+r8],r13
- mov QWORD PTR[232+r8],r14
- mov QWORD PTR[240+r8],r15
-
-$L$common_seh_tail::
+$L$in_prologue::
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax
@@ -1557,7 +273,7 @@ $L$common_seh_tail::
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
-sqr_handler ENDP
+se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
@@ -1566,30 +282,12 @@ ALIGN 4
DD imagerel $L$SEH_end_bn_mul_mont
DD imagerel $L$SEH_info_bn_mul_mont
- DD imagerel $L$SEH_begin_bn_mul4x_mont
- DD imagerel $L$SEH_end_bn_mul4x_mont
- DD imagerel $L$SEH_info_bn_mul4x_mont
-
- DD imagerel $L$SEH_begin_bn_sqr4x_mont
- DD imagerel $L$SEH_end_bn_sqr4x_mont
- DD imagerel $L$SEH_info_bn_sqr4x_mont
-
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_bn_mul_mont::
DB 9,0,0,0
- DD imagerel mul_handler
- DD imagerel $L$mul_body,imagerel $L$mul_epilogue
-
-$L$SEH_info_bn_mul4x_mont::
-DB 9,0,0,0
- DD imagerel mul_handler
- DD imagerel $L$mul4x_body,imagerel $L$mul4x_epilogue
-
-$L$SEH_info_bn_sqr4x_mont::
-DB 9,0,0,0
- DD imagerel sqr_handler
+ DD imagerel se_handler
.xdata ENDS
END
diff --git a/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm b/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
index 0ea789b6ae..a5913da92e 100644
--- a/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/camellia/cmll-x86_64.asm
@@ -250,7 +250,7 @@ $L$eloop::
xor r8d,ecx
xor r9d,ecx
xor r9d,edx
- lea r14,QWORD PTR[64+r14]
+ lea r14,QWORD PTR[((16*4))+r14]
cmp r14,r15
mov edx,DWORD PTR[8+r14]
mov ecx,DWORD PTR[12+r14]
@@ -533,7 +533,7 @@ $L$dloop::
xor r8d,ecx
xor r9d,ecx
xor r9d,edx
- lea r14,QWORD PTR[((-64))+r14]
+ lea r14,QWORD PTR[((-16*4))+r14]
cmp r14,r15
mov edx,DWORD PTR[r14]
mov ecx,DWORD PTR[4+r14]
diff --git a/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm b/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
index 8ddad41c84..34305c687c 100644
--- a/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/md5/md5-x86_64.asm
@@ -27,10 +27,10 @@ $L$prologue::
mov rbp,rdi
shl rdx,6
lea rdi,QWORD PTR[rdx*1+rsi]
- mov eax,DWORD PTR[rbp]
- mov ebx,DWORD PTR[4+rbp]
- mov ecx,DWORD PTR[8+rbp]
- mov edx,DWORD PTR[12+rbp]
+ mov eax,DWORD PTR[((0*4))+rbp]
+ mov ebx,DWORD PTR[((1*4))+rbp]
+ mov ecx,DWORD PTR[((2*4))+rbp]
+ mov edx,DWORD PTR[((3*4))+rbp]
@@ -48,160 +48,160 @@ $L$loop::
mov r9d,ebx
mov r14d,ecx
mov r15d,edx
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,edx
xor r11d,ecx
- lea eax,DWORD PTR[((-680876936))+r10*1+rax]
+ lea eax,DWORD PTR[0d76aa478h+r10*1+rax]
and r11d,ebx
xor r11d,edx
- mov r10d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[((1*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
- lea edx,DWORD PTR[((-389564586))+r10*1+rdx]
+ lea edx,DWORD PTR[0e8c7b756h+r10*1+rdx]
and r11d,eax
xor r11d,ecx
- mov r10d,DWORD PTR[8+rsi]
+ mov r10d,DWORD PTR[((2*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
- lea ecx,DWORD PTR[606105819+r10*1+rcx]
+ lea ecx,DWORD PTR[0242070dbh+r10*1+rcx]
and r11d,edx
xor r11d,ebx
- mov r10d,DWORD PTR[12+rsi]
+ mov r10d,DWORD PTR[((3*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
- lea ebx,DWORD PTR[((-1044525330))+r10*1+rbx]
+ lea ebx,DWORD PTR[0c1bdceeeh+r10*1+rbx]
and r11d,ecx
xor r11d,eax
- mov r10d,DWORD PTR[16+rsi]
+ mov r10d,DWORD PTR[((4*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
- lea eax,DWORD PTR[((-176418897))+r10*1+rax]
+ lea eax,DWORD PTR[0f57c0fafh+r10*1+rax]
and r11d,ebx
xor r11d,edx
- mov r10d,DWORD PTR[20+rsi]
+ mov r10d,DWORD PTR[((5*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
- lea edx,DWORD PTR[1200080426+r10*1+rdx]
+ lea edx,DWORD PTR[04787c62ah+r10*1+rdx]
and r11d,eax
xor r11d,ecx
- mov r10d,DWORD PTR[24+rsi]
+ mov r10d,DWORD PTR[((6*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
- lea ecx,DWORD PTR[((-1473231341))+r10*1+rcx]
+ lea ecx,DWORD PTR[0a8304613h+r10*1+rcx]
and r11d,edx
xor r11d,ebx
- mov r10d,DWORD PTR[28+rsi]
+ mov r10d,DWORD PTR[((7*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
- lea ebx,DWORD PTR[((-45705983))+r10*1+rbx]
+ lea ebx,DWORD PTR[0fd469501h+r10*1+rbx]
and r11d,ecx
xor r11d,eax
- mov r10d,DWORD PTR[32+rsi]
+ mov r10d,DWORD PTR[((8*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
- lea eax,DWORD PTR[1770035416+r10*1+rax]
+ lea eax,DWORD PTR[0698098d8h+r10*1+rax]
and r11d,ebx
xor r11d,edx
- mov r10d,DWORD PTR[36+rsi]
+ mov r10d,DWORD PTR[((9*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
- lea edx,DWORD PTR[((-1958414417))+r10*1+rdx]
+ lea edx,DWORD PTR[08b44f7afh+r10*1+rdx]
and r11d,eax
xor r11d,ecx
- mov r10d,DWORD PTR[40+rsi]
+ mov r10d,DWORD PTR[((10*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
- lea ecx,DWORD PTR[((-42063))+r10*1+rcx]
+ lea ecx,DWORD PTR[0ffff5bb1h+r10*1+rcx]
and r11d,edx
xor r11d,ebx
- mov r10d,DWORD PTR[44+rsi]
+ mov r10d,DWORD PTR[((11*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
- lea ebx,DWORD PTR[((-1990404162))+r10*1+rbx]
+ lea ebx,DWORD PTR[0895cd7beh+r10*1+rbx]
and r11d,ecx
xor r11d,eax
- mov r10d,DWORD PTR[48+rsi]
+ mov r10d,DWORD PTR[((12*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
xor r11d,ecx
- lea eax,DWORD PTR[1804603682+r10*1+rax]
+ lea eax,DWORD PTR[06b901122h+r10*1+rax]
and r11d,ebx
xor r11d,edx
- mov r10d,DWORD PTR[52+rsi]
+ mov r10d,DWORD PTR[((13*4))+rsi]
add eax,r11d
rol eax,7
mov r11d,ecx
add eax,ebx
xor r11d,ebx
- lea edx,DWORD PTR[((-40341101))+r10*1+rdx]
+ lea edx,DWORD PTR[0fd987193h+r10*1+rdx]
and r11d,eax
xor r11d,ecx
- mov r10d,DWORD PTR[56+rsi]
+ mov r10d,DWORD PTR[((14*4))+rsi]
add edx,r11d
rol edx,12
mov r11d,ebx
add edx,eax
xor r11d,eax
- lea ecx,DWORD PTR[((-1502002290))+r10*1+rcx]
+ lea ecx,DWORD PTR[0a679438eh+r10*1+rcx]
and r11d,edx
xor r11d,ebx
- mov r10d,DWORD PTR[60+rsi]
+ mov r10d,DWORD PTR[((15*4))+rsi]
add ecx,r11d
rol ecx,17
mov r11d,eax
add ecx,edx
xor r11d,edx
- lea ebx,DWORD PTR[1236535329+r10*1+rbx]
+ lea ebx,DWORD PTR[049b40821h+r10*1+rbx]
and r11d,ecx
xor r11d,eax
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
add ebx,r11d
rol ebx,22
mov r11d,edx
add ebx,ecx
- mov r10d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[((1*4))+rsi]
mov r11d,edx
mov r12d,edx
not r11d
- lea eax,DWORD PTR[((-165796510))+r10*1+rax]
+ lea eax,DWORD PTR[0f61e2562h+r10*1+rax]
and r12d,ebx
and r11d,ecx
- mov r10d,DWORD PTR[24+rsi]
+ mov r10d,DWORD PTR[((6*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
@@ -209,10 +209,10 @@ $L$loop::
rol eax,5
add eax,ebx
not r11d
- lea edx,DWORD PTR[((-1069501632))+r10*1+rdx]
+ lea edx,DWORD PTR[0c040b340h+r10*1+rdx]
and r12d,eax
and r11d,ebx
- mov r10d,DWORD PTR[44+rsi]
+ mov r10d,DWORD PTR[((11*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
@@ -220,10 +220,10 @@ $L$loop::
rol edx,9
add edx,eax
not r11d
- lea ecx,DWORD PTR[643717713+r10*1+rcx]
+ lea ecx,DWORD PTR[0265e5a51h+r10*1+rcx]
and r12d,edx
and r11d,eax
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
@@ -231,10 +231,10 @@ $L$loop::
rol ecx,14
add ecx,edx
not r11d
- lea ebx,DWORD PTR[((-373897302))+r10*1+rbx]
+ lea ebx,DWORD PTR[0e9b6c7aah+r10*1+rbx]
and r12d,ecx
and r11d,edx
- mov r10d,DWORD PTR[20+rsi]
+ mov r10d,DWORD PTR[((5*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
@@ -242,10 +242,10 @@ $L$loop::
rol ebx,20
add ebx,ecx
not r11d
- lea eax,DWORD PTR[((-701558691))+r10*1+rax]
+ lea eax,DWORD PTR[0d62f105dh+r10*1+rax]
and r12d,ebx
and r11d,ecx
- mov r10d,DWORD PTR[40+rsi]
+ mov r10d,DWORD PTR[((10*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
@@ -253,10 +253,10 @@ $L$loop::
rol eax,5
add eax,ebx
not r11d
- lea edx,DWORD PTR[38016083+r10*1+rdx]
+ lea edx,DWORD PTR[02441453h+r10*1+rdx]
and r12d,eax
and r11d,ebx
- mov r10d,DWORD PTR[60+rsi]
+ mov r10d,DWORD PTR[((15*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
@@ -264,10 +264,10 @@ $L$loop::
rol edx,9
add edx,eax
not r11d
- lea ecx,DWORD PTR[((-660478335))+r10*1+rcx]
+ lea ecx,DWORD PTR[0d8a1e681h+r10*1+rcx]
and r12d,edx
and r11d,eax
- mov r10d,DWORD PTR[16+rsi]
+ mov r10d,DWORD PTR[((4*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
@@ -275,10 +275,10 @@ $L$loop::
rol ecx,14
add ecx,edx
not r11d
- lea ebx,DWORD PTR[((-405537848))+r10*1+rbx]
+ lea ebx,DWORD PTR[0e7d3fbc8h+r10*1+rbx]
and r12d,ecx
and r11d,edx
- mov r10d,DWORD PTR[36+rsi]
+ mov r10d,DWORD PTR[((9*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
@@ -286,10 +286,10 @@ $L$loop::
rol ebx,20
add ebx,ecx
not r11d
- lea eax,DWORD PTR[568446438+r10*1+rax]
+ lea eax,DWORD PTR[021e1cde6h+r10*1+rax]
and r12d,ebx
and r11d,ecx
- mov r10d,DWORD PTR[56+rsi]
+ mov r10d,DWORD PTR[((14*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
@@ -297,10 +297,10 @@ $L$loop::
rol eax,5
add eax,ebx
not r11d
- lea edx,DWORD PTR[((-1019803690))+r10*1+rdx]
+ lea edx,DWORD PTR[0c33707d6h+r10*1+rdx]
and r12d,eax
and r11d,ebx
- mov r10d,DWORD PTR[12+rsi]
+ mov r10d,DWORD PTR[((3*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
@@ -308,10 +308,10 @@ $L$loop::
rol edx,9
add edx,eax
not r11d
- lea ecx,DWORD PTR[((-187363961))+r10*1+rcx]
+ lea ecx,DWORD PTR[0f4d50d87h+r10*1+rcx]
and r12d,edx
and r11d,eax
- mov r10d,DWORD PTR[32+rsi]
+ mov r10d,DWORD PTR[((8*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
@@ -319,10 +319,10 @@ $L$loop::
rol ecx,14
add ecx,edx
not r11d
- lea ebx,DWORD PTR[1163531501+r10*1+rbx]
+ lea ebx,DWORD PTR[0455a14edh+r10*1+rbx]
and r12d,ecx
and r11d,edx
- mov r10d,DWORD PTR[52+rsi]
+ mov r10d,DWORD PTR[((13*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
@@ -330,10 +330,10 @@ $L$loop::
rol ebx,20
add ebx,ecx
not r11d
- lea eax,DWORD PTR[((-1444681467))+r10*1+rax]
+ lea eax,DWORD PTR[0a9e3e905h+r10*1+rax]
and r12d,ebx
and r11d,ecx
- mov r10d,DWORD PTR[8+rsi]
+ mov r10d,DWORD PTR[((2*4))+rsi]
or r12d,r11d
mov r11d,ecx
add eax,r12d
@@ -341,10 +341,10 @@ $L$loop::
rol eax,5
add eax,ebx
not r11d
- lea edx,DWORD PTR[((-51403784))+r10*1+rdx]
+ lea edx,DWORD PTR[0fcefa3f8h+r10*1+rdx]
and r12d,eax
and r11d,ebx
- mov r10d,DWORD PTR[28+rsi]
+ mov r10d,DWORD PTR[((7*4))+rsi]
or r12d,r11d
mov r11d,ebx
add edx,r12d
@@ -352,10 +352,10 @@ $L$loop::
rol edx,9
add edx,eax
not r11d
- lea ecx,DWORD PTR[1735328473+r10*1+rcx]
+ lea ecx,DWORD PTR[0676f02d9h+r10*1+rcx]
and r12d,edx
and r11d,eax
- mov r10d,DWORD PTR[48+rsi]
+ mov r10d,DWORD PTR[((12*4))+rsi]
or r12d,r11d
mov r11d,eax
add ecx,r12d
@@ -363,289 +363,289 @@ $L$loop::
rol ecx,14
add ecx,edx
not r11d
- lea ebx,DWORD PTR[((-1926607734))+r10*1+rbx]
+ lea ebx,DWORD PTR[08d2a4c8ah+r10*1+rbx]
and r12d,ecx
and r11d,edx
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
or r12d,r11d
mov r11d,edx
add ebx,r12d
mov r12d,edx
rol ebx,20
add ebx,ecx
- mov r10d,DWORD PTR[20+rsi]
+ mov r10d,DWORD PTR[((5*4))+rsi]
mov r11d,ecx
- lea eax,DWORD PTR[((-378558))+r10*1+rax]
- mov r10d,DWORD PTR[32+rsi]
+ lea eax,DWORD PTR[0fffa3942h+r10*1+rax]
+ mov r10d,DWORD PTR[((8*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
- lea edx,DWORD PTR[((-2022574463))+r10*1+rdx]
- mov r10d,DWORD PTR[44+rsi]
+ lea edx,DWORD PTR[08771f681h+r10*1+rdx]
+ mov r10d,DWORD PTR[((11*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
- lea ecx,DWORD PTR[1839030562+r10*1+rcx]
- mov r10d,DWORD PTR[56+rsi]
+ lea ecx,DWORD PTR[06d9d6122h+r10*1+rcx]
+ mov r10d,DWORD PTR[((14*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
- lea ebx,DWORD PTR[((-35309556))+r10*1+rbx]
- mov r10d,DWORD PTR[4+rsi]
+ lea ebx,DWORD PTR[0fde5380ch+r10*1+rbx]
+ mov r10d,DWORD PTR[((1*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
- lea eax,DWORD PTR[((-1530992060))+r10*1+rax]
- mov r10d,DWORD PTR[16+rsi]
+ lea eax,DWORD PTR[0a4beea44h+r10*1+rax]
+ mov r10d,DWORD PTR[((4*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
- lea edx,DWORD PTR[1272893353+r10*1+rdx]
- mov r10d,DWORD PTR[28+rsi]
+ lea edx,DWORD PTR[04bdecfa9h+r10*1+rdx]
+ mov r10d,DWORD PTR[((7*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
- lea ecx,DWORD PTR[((-155497632))+r10*1+rcx]
- mov r10d,DWORD PTR[40+rsi]
+ lea ecx,DWORD PTR[0f6bb4b60h+r10*1+rcx]
+ mov r10d,DWORD PTR[((10*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
- lea ebx,DWORD PTR[((-1094730640))+r10*1+rbx]
- mov r10d,DWORD PTR[52+rsi]
+ lea ebx,DWORD PTR[0bebfbc70h+r10*1+rbx]
+ mov r10d,DWORD PTR[((13*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
- lea eax,DWORD PTR[681279174+r10*1+rax]
- mov r10d,DWORD PTR[rsi]
+ lea eax,DWORD PTR[0289b7ec6h+r10*1+rax]
+ mov r10d,DWORD PTR[((0*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
- lea edx,DWORD PTR[((-358537222))+r10*1+rdx]
- mov r10d,DWORD PTR[12+rsi]
+ lea edx,DWORD PTR[0eaa127fah+r10*1+rdx]
+ mov r10d,DWORD PTR[((3*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
- lea ecx,DWORD PTR[((-722521979))+r10*1+rcx]
- mov r10d,DWORD PTR[24+rsi]
+ lea ecx,DWORD PTR[0d4ef3085h+r10*1+rcx]
+ mov r10d,DWORD PTR[((6*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
- lea ebx,DWORD PTR[76029189+r10*1+rbx]
- mov r10d,DWORD PTR[36+rsi]
+ lea ebx,DWORD PTR[04881d05h+r10*1+rbx]
+ mov r10d,DWORD PTR[((9*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
- lea eax,DWORD PTR[((-640364487))+r10*1+rax]
- mov r10d,DWORD PTR[48+rsi]
+ lea eax,DWORD PTR[0d9d4d039h+r10*1+rax]
+ mov r10d,DWORD PTR[((12*4))+rsi]
xor r11d,edx
xor r11d,ebx
add eax,r11d
rol eax,4
mov r11d,ebx
add eax,ebx
- lea edx,DWORD PTR[((-421815835))+r10*1+rdx]
- mov r10d,DWORD PTR[60+rsi]
+ lea edx,DWORD PTR[0e6db99e5h+r10*1+rdx]
+ mov r10d,DWORD PTR[((15*4))+rsi]
xor r11d,ecx
xor r11d,eax
add edx,r11d
rol edx,11
mov r11d,eax
add edx,eax
- lea ecx,DWORD PTR[530742520+r10*1+rcx]
- mov r10d,DWORD PTR[8+rsi]
+ lea ecx,DWORD PTR[01fa27cf8h+r10*1+rcx]
+ mov r10d,DWORD PTR[((2*4))+rsi]
xor r11d,ebx
xor r11d,edx
add ecx,r11d
rol ecx,16
mov r11d,edx
add ecx,edx
- lea ebx,DWORD PTR[((-995338651))+r10*1+rbx]
- mov r10d,DWORD PTR[rsi]
+ lea ebx,DWORD PTR[0c4ac5665h+r10*1+rbx]
+ mov r10d,DWORD PTR[((0*4))+rsi]
xor r11d,eax
xor r11d,ecx
add ebx,r11d
rol ebx,23
mov r11d,ecx
add ebx,ecx
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,0ffffffffh
xor r11d,edx
- lea eax,DWORD PTR[((-198630844))+r10*1+rax]
+ lea eax,DWORD PTR[0f4292244h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
- mov r10d,DWORD PTR[28+rsi]
+ mov r10d,DWORD PTR[((7*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
- lea edx,DWORD PTR[1126891415+r10*1+rdx]
+ lea edx,DWORD PTR[0432aff97h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
- mov r10d,DWORD PTR[56+rsi]
+ mov r10d,DWORD PTR[((14*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
- lea ecx,DWORD PTR[((-1416354905))+r10*1+rcx]
+ lea ecx,DWORD PTR[0ab9423a7h+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
- mov r10d,DWORD PTR[20+rsi]
+ mov r10d,DWORD PTR[((5*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
- lea ebx,DWORD PTR[((-57434055))+r10*1+rbx]
+ lea ebx,DWORD PTR[0fc93a039h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
- mov r10d,DWORD PTR[48+rsi]
+ mov r10d,DWORD PTR[((12*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
- lea eax,DWORD PTR[1700485571+r10*1+rax]
+ lea eax,DWORD PTR[0655b59c3h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
- mov r10d,DWORD PTR[12+rsi]
+ mov r10d,DWORD PTR[((3*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
- lea edx,DWORD PTR[((-1894986606))+r10*1+rdx]
+ lea edx,DWORD PTR[08f0ccc92h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
- mov r10d,DWORD PTR[40+rsi]
+ mov r10d,DWORD PTR[((10*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
- lea ecx,DWORD PTR[((-1051523))+r10*1+rcx]
+ lea ecx,DWORD PTR[0ffeff47dh+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
- mov r10d,DWORD PTR[4+rsi]
+ mov r10d,DWORD PTR[((1*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
- lea ebx,DWORD PTR[((-2054922799))+r10*1+rbx]
+ lea ebx,DWORD PTR[085845dd1h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
- mov r10d,DWORD PTR[32+rsi]
+ mov r10d,DWORD PTR[((8*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
- lea eax,DWORD PTR[1873313359+r10*1+rax]
+ lea eax,DWORD PTR[06fa87e4fh+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
- mov r10d,DWORD PTR[60+rsi]
+ mov r10d,DWORD PTR[((15*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
- lea edx,DWORD PTR[((-30611744))+r10*1+rdx]
+ lea edx,DWORD PTR[0fe2ce6e0h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
- mov r10d,DWORD PTR[24+rsi]
+ mov r10d,DWORD PTR[((6*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
- lea ecx,DWORD PTR[((-1560198380))+r10*1+rcx]
+ lea ecx,DWORD PTR[0a3014314h+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
- mov r10d,DWORD PTR[52+rsi]
+ mov r10d,DWORD PTR[((13*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
- lea ebx,DWORD PTR[1309151649+r10*1+rbx]
+ lea ebx,DWORD PTR[04e0811a1h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
- mov r10d,DWORD PTR[16+rsi]
+ mov r10d,DWORD PTR[((4*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
add ebx,ecx
- lea eax,DWORD PTR[((-145523070))+r10*1+rax]
+ lea eax,DWORD PTR[0f7537e82h+r10*1+rax]
or r11d,ebx
xor r11d,ecx
add eax,r11d
- mov r10d,DWORD PTR[44+rsi]
+ mov r10d,DWORD PTR[((11*4))+rsi]
mov r11d,0ffffffffh
rol eax,6
xor r11d,ecx
add eax,ebx
- lea edx,DWORD PTR[((-1120210379))+r10*1+rdx]
+ lea edx,DWORD PTR[0bd3af235h+r10*1+rdx]
or r11d,eax
xor r11d,ebx
add edx,r11d
- mov r10d,DWORD PTR[8+rsi]
+ mov r10d,DWORD PTR[((2*4))+rsi]
mov r11d,0ffffffffh
rol edx,10
xor r11d,ebx
add edx,eax
- lea ecx,DWORD PTR[718787259+r10*1+rcx]
+ lea ecx,DWORD PTR[02ad7d2bbh+r10*1+rcx]
or r11d,edx
xor r11d,eax
add ecx,r11d
- mov r10d,DWORD PTR[36+rsi]
+ mov r10d,DWORD PTR[((9*4))+rsi]
mov r11d,0ffffffffh
rol ecx,15
xor r11d,eax
add ecx,edx
- lea ebx,DWORD PTR[((-343485551))+r10*1+rbx]
+ lea ebx,DWORD PTR[0eb86d391h+r10*1+rbx]
or r11d,ecx
xor r11d,edx
add ebx,r11d
- mov r10d,DWORD PTR[rsi]
+ mov r10d,DWORD PTR[((0*4))+rsi]
mov r11d,0ffffffffh
rol ebx,21
xor r11d,edx
@@ -664,10 +664,10 @@ $L$loop::
$L$end::
- mov DWORD PTR[rbp],eax
- mov DWORD PTR[4+rbp],ebx
- mov DWORD PTR[8+rbp],ecx
- mov DWORD PTR[12+rbp],edx
+ mov DWORD PTR[((0*4))+rbp],eax
+ mov DWORD PTR[((1*4))+rbp],ebx
+ mov DWORD PTR[((2*4))+rbp],ecx
+ mov DWORD PTR[((3*4))+rbp],edx
mov r15,QWORD PTR[rsp]
mov r14,QWORD PTR[8+rsp]
diff --git a/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm b/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
index aea304fbad..f508fa6679 100644
--- a/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/rc4/rc4-x86_64.asm
@@ -1,6 +1,5 @@
OPTION DOTNAME
.text$ SEGMENT ALIGN(64) 'CODE'
-EXTERN OPENSSL_ia32cap_P:NEAR
PUBLIC RC4
@@ -25,511 +24,316 @@ $L$entry::
push r12
push r13
$L$prologue::
- mov r11,rsi
- mov r12,rdx
- mov r13,rcx
- xor r10,r10
- xor rcx,rcx
- lea rdi,QWORD PTR[8+rdi]
- mov r10b,BYTE PTR[((-8))+rdi]
- mov cl,BYTE PTR[((-4))+rdi]
+ add rdi,8
+ mov r8d,DWORD PTR[((-8))+rdi]
+ mov r12d,DWORD PTR[((-4))+rdi]
cmp DWORD PTR[256+rdi],-1
je $L$RC4_CHAR
- mov r8d,DWORD PTR[OPENSSL_ia32cap_P]
- xor rbx,rbx
- inc r10b
- sub rbx,r10
- sub r13,r12
- mov eax,DWORD PTR[r10*4+rdi]
- test r11,-16
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ test rsi,-8
jz $L$loop1
- bt r8d,30
- jc $L$intel
- and rbx,7
- lea rsi,QWORD PTR[1+r10]
- jz $L$oop8
- sub r11,rbx
-$L$oop8_warmup::
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov DWORD PTR[r10*4+rdi],edx
- add al,dl
- inc r10b
- mov edx,DWORD PTR[rax*4+rdi]
- mov eax,DWORD PTR[r10*4+rdi]
- xor dl,BYTE PTR[r12]
- mov BYTE PTR[r12*1+r13],dl
- lea r12,QWORD PTR[1+r12]
- dec rbx
- jnz $L$oop8_warmup
-
- lea rsi,QWORD PTR[1+r10]
- jmp $L$oop8
+ jmp $L$loop8
ALIGN 16
-$L$oop8::
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov ebx,DWORD PTR[rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[r10*4+rdi],edx
- add dl,al
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,bl
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- mov eax,DWORD PTR[4+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[4+r10*4+rdi],edx
- add dl,bl
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov ebx,DWORD PTR[8+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[8+r10*4+rdi],edx
- add dl,al
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,bl
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- mov eax,DWORD PTR[12+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[12+r10*4+rdi],edx
- add dl,bl
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov ebx,DWORD PTR[16+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[16+r10*4+rdi],edx
- add dl,al
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,bl
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- mov eax,DWORD PTR[20+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[20+r10*4+rdi],edx
- add dl,bl
- mov r8b,BYTE PTR[rdx*4+rdi]
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov ebx,DWORD PTR[24+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[24+r10*4+rdi],edx
- add dl,al
- mov r8b,BYTE PTR[rdx*4+rdi]
- add sil,8
- add cl,bl
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- mov eax,DWORD PTR[((-4))+rsi*4+rdi]
- ror r8,8
- mov DWORD PTR[28+r10*4+rdi],edx
- add dl,bl
- mov r8b,BYTE PTR[rdx*4+rdi]
- add r10b,8
- ror r8,8
- sub r11,8
-
- xor r8,QWORD PTR[r12]
- mov QWORD PTR[r12*1+r13],r8
- lea r12,QWORD PTR[8+r12]
-
- test r11,-8
- jnz $L$oop8
- cmp r11,0
- jne $L$loop1
- jmp $L$exit
-
-ALIGN 16
-$L$intel::
- test r11,-32
- jz $L$loop1
- and rbx,15
- jz $L$oop16_is_hot
- sub r11,rbx
-$L$oop16_warmup::
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov DWORD PTR[r10*4+rdi],edx
- add al,dl
+$L$loop8::
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
inc r10b
- mov edx,DWORD PTR[rax*4+rdi]
- mov eax,DWORD PTR[r10*4+rdi]
- xor dl,BYTE PTR[r12]
- mov BYTE PTR[r12*1+r13],dl
- lea r12,QWORD PTR[1+r12]
- dec rbx
- jnz $L$oop16_warmup
-
- mov rbx,rcx
- xor rcx,rcx
- mov cl,bl
-
-$L$oop16_is_hot::
- lea rsi,QWORD PTR[r10*4+rdi]
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- pxor xmm0,xmm0
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[4+rsi]
- movzx eax,al
- mov DWORD PTR[rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],0
- jmp $L$oop16_enter
-ALIGN 16
-$L$oop16::
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- pxor xmm2,xmm0
- psllq xmm1,8
- pxor xmm0,xmm0
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[4+rsi]
- movzx eax,al
- mov DWORD PTR[rsi],edx
- pxor xmm2,xmm1
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],0
- movdqu XMMWORD PTR[r12*1+r13],xmm2
- lea r12,QWORD PTR[16+r12]
-$L$oop16_enter::
- mov edx,DWORD PTR[rcx*4+rdi]
- pxor xmm1,xmm1
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[8+rsi]
- movzx ebx,bl
- mov DWORD PTR[4+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],0
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[12+rsi]
- movzx eax,al
- mov DWORD PTR[8+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],1
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[16+rsi]
- movzx ebx,bl
- mov DWORD PTR[12+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],1
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[20+rsi]
- movzx eax,al
- mov DWORD PTR[16+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],2
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[24+rsi]
- movzx ebx,bl
- mov DWORD PTR[20+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],2
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[28+rsi]
- movzx eax,al
- mov DWORD PTR[24+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],3
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[32+rsi]
- movzx ebx,bl
- mov DWORD PTR[28+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],3
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[36+rsi]
- movzx eax,al
- mov DWORD PTR[32+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],4
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[40+rsi]
- movzx ebx,bl
- mov DWORD PTR[36+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],4
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[44+rsi]
- movzx eax,al
- mov DWORD PTR[40+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],5
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[48+rsi]
- movzx ebx,bl
- mov DWORD PTR[44+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],5
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[52+rsi]
- movzx eax,al
- mov DWORD PTR[48+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],6
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- mov eax,DWORD PTR[56+rsi]
- movzx ebx,bl
- mov DWORD PTR[52+rsi],edx
- add cl,al
- pinsrw xmm1,WORD PTR[rbx*4+rdi],6
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- add al,dl
- mov ebx,DWORD PTR[60+rsi]
- movzx eax,al
- mov DWORD PTR[56+rsi],edx
- add cl,bl
- pinsrw xmm0,WORD PTR[rax*4+rdi],7
- add r10b,16
- movdqu xmm2,XMMWORD PTR[r12]
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],ebx
- add bl,dl
- movzx ebx,bl
- mov DWORD PTR[60+rsi],edx
- lea rsi,QWORD PTR[r10*4+rdi]
- pinsrw xmm1,WORD PTR[rbx*4+rdi],7
- mov eax,DWORD PTR[rsi]
- mov rbx,rcx
- xor rcx,rcx
- sub r11,16
- mov cl,bl
- test r11,-16
- jnz $L$oop16
-
- psllq xmm1,8
- pxor xmm2,xmm0
- pxor xmm2,xmm1
- movdqu XMMWORD PTR[r12*1+r13],xmm2
- lea r12,QWORD PTR[16+r12]
-
- cmp r11,0
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r9b
+ mov r10,r8
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r10b
+ mov r11d,DWORD PTR[r10*4+rdi]
+ cmp r12,r10
+ mov DWORD PTR[r12*4+rdi],r9d
+ cmove r11,r9
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r13b,r9b
+ mov al,BYTE PTR[r13*4+rdi]
+ add r12b,r11b
+ mov r8,r10
+ mov r13d,DWORD PTR[r12*4+rdi]
+ ror rax,8
+ inc r8b
+ mov r9d,DWORD PTR[r8*4+rdi]
+ cmp r12,r8
+ mov DWORD PTR[r12*4+rdi],r11d
+ cmove r9,r11
+ mov DWORD PTR[r10*4+rdi],r13d
+ add r13b,r11b
+ mov al,BYTE PTR[r13*4+rdi]
+ ror rax,8
+ sub rsi,8
+
+ xor rax,QWORD PTR[rdx]
+ add rdx,8
+ mov QWORD PTR[rcx],rax
+ add rcx,8
+
+ test rsi,-8
+ jnz $L$loop8
+ cmp rsi,0
jne $L$loop1
jmp $L$exit
ALIGN 16
$L$loop1::
- add cl,al
- mov edx,DWORD PTR[rcx*4+rdi]
- mov DWORD PTR[rcx*4+rdi],eax
- mov DWORD PTR[r10*4+rdi],edx
- add al,dl
- inc r10b
- mov edx,DWORD PTR[rax*4+rdi]
- mov eax,DWORD PTR[r10*4+rdi]
- xor dl,BYTE PTR[r12]
- mov BYTE PTR[r12*1+r13],dl
- lea r12,QWORD PTR[1+r12]
- dec r11
+ add r12b,r9b
+ mov r13d,DWORD PTR[r12*4+rdi]
+ mov DWORD PTR[r12*4+rdi],r9d
+ mov DWORD PTR[r8*4+rdi],r13d
+ add r9b,r13b
+ inc r8b
+ mov r13d,DWORD PTR[r9*4+rdi]
+ mov r9d,DWORD PTR[r8*4+rdi]
+ xor r13b,BYTE PTR[rdx]
+ inc rdx
+ mov BYTE PTR[rcx],r13b
+ inc rcx
+ dec rsi
jnz $L$loop1
jmp $L$exit
ALIGN 16
$L$RC4_CHAR::
- add r10b,1
- movzx eax,BYTE PTR[r10*1+rdi]
- test r11,-8
+ add r8b,1
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ test rsi,-8
jz $L$cloop1
+ cmp DWORD PTR[260+rdi],0
+ jnz $L$cloop1
jmp $L$cloop8
ALIGN 16
$L$cloop8::
- mov r8d,DWORD PTR[r12]
- mov r9d,DWORD PTR[4+r12]
- add cl,al
- lea rsi,QWORD PTR[1+r10]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx esi,sil
- movzx ebx,BYTE PTR[rsi*1+rdi]
- mov BYTE PTR[rcx*1+rdi],al
- cmp rcx,rsi
- mov BYTE PTR[r10*1+rdi],dl
+ mov eax,DWORD PTR[rdx]
+ mov ebx,DWORD PTR[4+rdx]
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov0
- mov rbx,rax
+ mov r11,r9
$L$cmov0::
- add dl,al
- xor r8b,BYTE PTR[rdx*1+rdi]
- ror r8d,8
- add cl,bl
- lea r10,QWORD PTR[1+rsi]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx r10d,r10b
- movzx eax,BYTE PTR[r10*1+rdi]
- mov BYTE PTR[rcx*1+rdi],bl
- cmp rcx,r10
- mov BYTE PTR[rsi*1+rdi],dl
+ add r13b,r9b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov1
- mov rax,rbx
+ mov r9,r11
$L$cmov1::
- add dl,bl
- xor r8b,BYTE PTR[rdx*1+rdi]
- ror r8d,8
- add cl,al
- lea rsi,QWORD PTR[1+r10]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx esi,sil
- movzx ebx,BYTE PTR[rsi*1+rdi]
- mov BYTE PTR[rcx*1+rdi],al
- cmp rcx,rsi
- mov BYTE PTR[r10*1+rdi],dl
+ add r13b,r11b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov2
- mov rbx,rax
+ mov r11,r9
$L$cmov2::
- add dl,al
- xor r8b,BYTE PTR[rdx*1+rdi]
- ror r8d,8
- add cl,bl
- lea r10,QWORD PTR[1+rsi]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx r10d,r10b
- movzx eax,BYTE PTR[r10*1+rdi]
- mov BYTE PTR[rcx*1+rdi],bl
- cmp rcx,r10
- mov BYTE PTR[rsi*1+rdi],dl
+ add r13b,r9b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov3
- mov rax,rbx
+ mov r9,r11
$L$cmov3::
- add dl,bl
- xor r8b,BYTE PTR[rdx*1+rdi]
- ror r8d,8
- add cl,al
- lea rsi,QWORD PTR[1+r10]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx esi,sil
- movzx ebx,BYTE PTR[rsi*1+rdi]
- mov BYTE PTR[rcx*1+rdi],al
- cmp rcx,rsi
- mov BYTE PTR[r10*1+rdi],dl
+ add r13b,r11b
+ xor al,BYTE PTR[r13*1+rdi]
+ ror eax,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov4
- mov rbx,rax
+ mov r11,r9
$L$cmov4::
- add dl,al
- xor r9b,BYTE PTR[rdx*1+rdi]
- ror r9d,8
- add cl,bl
- lea r10,QWORD PTR[1+rsi]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx r10d,r10b
- movzx eax,BYTE PTR[r10*1+rdi]
- mov BYTE PTR[rcx*1+rdi],bl
- cmp rcx,r10
- mov BYTE PTR[rsi*1+rdi],dl
+ add r13b,r9b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov5
- mov rax,rbx
+ mov r9,r11
$L$cmov5::
- add dl,bl
- xor r9b,BYTE PTR[rdx*1+rdi]
- ror r9d,8
- add cl,al
- lea rsi,QWORD PTR[1+r10]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx esi,sil
- movzx ebx,BYTE PTR[rsi*1+rdi]
- mov BYTE PTR[rcx*1+rdi],al
- cmp rcx,rsi
- mov BYTE PTR[r10*1+rdi],dl
+ add r13b,r11b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r9b
+ lea r10,QWORD PTR[1+r8]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r10d,r10b
+ movzx r11d,BYTE PTR[r10*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ cmp r12,r10
+ mov BYTE PTR[r8*1+rdi],r13b
jne $L$cmov6
- mov rbx,rax
+ mov r11,r9
$L$cmov6::
- add dl,al
- xor r9b,BYTE PTR[rdx*1+rdi]
- ror r9d,8
- add cl,bl
- lea r10,QWORD PTR[1+rsi]
- movzx edx,BYTE PTR[rcx*1+rdi]
- movzx r10d,r10b
- movzx eax,BYTE PTR[r10*1+rdi]
- mov BYTE PTR[rcx*1+rdi],bl
- cmp rcx,r10
- mov BYTE PTR[rsi*1+rdi],dl
+ add r13b,r9b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ add r12b,r11b
+ lea r8,QWORD PTR[1+r10]
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ movzx r8d,r8b
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r11b
+ cmp r12,r8
+ mov BYTE PTR[r10*1+rdi],r13b
jne $L$cmov7
- mov rax,rbx
+ mov r9,r11
$L$cmov7::
- add dl,bl
- xor r9b,BYTE PTR[rdx*1+rdi]
- ror r9d,8
- lea r11,QWORD PTR[((-8))+r11]
- mov DWORD PTR[r13],r8d
- lea r12,QWORD PTR[8+r12]
- mov DWORD PTR[4+r13],r9d
- lea r13,QWORD PTR[8+r13]
-
- test r11,-8
+ add r13b,r11b
+ xor bl,BYTE PTR[r13*1+rdi]
+ ror ebx,8
+ lea rsi,QWORD PTR[((-8))+rsi]
+ mov DWORD PTR[rcx],eax
+ lea rdx,QWORD PTR[8+rdx]
+ mov DWORD PTR[4+rcx],ebx
+ lea rcx,QWORD PTR[8+rcx]
+
+ test rsi,-8
jnz $L$cloop8
- cmp r11,0
+ cmp rsi,0
jne $L$cloop1
jmp $L$exit
ALIGN 16
$L$cloop1::
- add cl,al
- movzx ecx,cl
- movzx edx,BYTE PTR[rcx*1+rdi]
- mov BYTE PTR[rcx*1+rdi],al
- mov BYTE PTR[r10*1+rdi],dl
- add dl,al
- add r10b,1
- movzx edx,dl
- movzx r10d,r10b
- movzx edx,BYTE PTR[rdx*1+rdi]
- movzx eax,BYTE PTR[r10*1+rdi]
- xor dl,BYTE PTR[r12]
- lea r12,QWORD PTR[1+r12]
- mov BYTE PTR[r13],dl
- lea r13,QWORD PTR[1+r13]
- sub r11,1
+ add r12b,r9b
+ movzx r13d,BYTE PTR[r12*1+rdi]
+ mov BYTE PTR[r12*1+rdi],r9b
+ mov BYTE PTR[r8*1+rdi],r13b
+ add r13b,r9b
+ add r8b,1
+ movzx r13d,r13b
+ movzx r8d,r8b
+ movzx r13d,BYTE PTR[r13*1+rdi]
+ movzx r9d,BYTE PTR[r8*1+rdi]
+ xor r13b,BYTE PTR[rdx]
+ lea rdx,QWORD PTR[1+rdx]
+ mov BYTE PTR[rcx],r13b
+ lea rcx,QWORD PTR[1+rcx]
+ sub rsi,1
jnz $L$cloop1
jmp $L$exit
ALIGN 16
$L$exit::
- sub r10b,1
- mov DWORD PTR[((-8))+rdi],r10d
- mov DWORD PTR[((-4))+rdi],ecx
+ sub r8b,1
+ mov DWORD PTR[((-8))+rdi],r8d
+ mov DWORD PTR[((-4))+rdi],r12d
mov r13,QWORD PTR[rsp]
mov r12,QWORD PTR[8+rsp]
@@ -541,14 +345,15 @@ $L$epilogue::
DB 0F3h,0C3h ;repret
$L$SEH_end_RC4::
RC4 ENDP
-PUBLIC private_RC4_set_key
+EXTERN OPENSSL_ia32cap_P:NEAR
+PUBLIC RC4_set_key
ALIGN 16
-private_RC4_set_key PROC PUBLIC
+RC4_set_key PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
-$L$SEH_begin_private_RC4_set_key::
+$L$SEH_begin_RC4_set_key::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
@@ -565,8 +370,11 @@ $L$SEH_begin_private_RC4_set_key::
mov r8d,DWORD PTR[OPENSSL_ia32cap_P]
bt r8d,20
- jc $L$c1stloop
- jmp $L$w1stloop
+ jnc $L$w1stloop
+ bt r8d,30
+ setc r9b
+ mov DWORD PTR[260+rdi],r9d
+ jmp $L$c1stloop
ALIGN 16
$L$w1stloop::
@@ -622,8 +430,8 @@ $L$exit_key::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
-$L$SEH_end_private_RC4_set_key::
-private_RC4_set_key ENDP
+$L$SEH_end_RC4_set_key::
+RC4_set_key ENDP
PUBLIC RC4_options
@@ -632,20 +440,18 @@ RC4_options PROC PUBLIC
lea rax,QWORD PTR[$L$opts]
mov edx,DWORD PTR[OPENSSL_ia32cap_P]
bt edx,20
- jc $L$8xchar
- bt edx,30
jnc $L$done
- add rax,25
- DB 0F3h,0C3h ;repret
-$L$8xchar::
add rax,12
+ bt edx,30
+ jnc $L$done
+ add rax,13
$L$done::
DB 0F3h,0C3h ;repret
ALIGN 64
$L$opts::
DB 114,99,52,40,56,120,44,105,110,116,41,0
DB 114,99,52,40,56,120,44,99,104,97,114,41,0
-DB 114,99,52,40,49,54,120,44,105,110,116,41,0
+DB 114,99,52,40,49,120,44,99,104,97,114,41,0
DB 82,67,52,32,102,111,114,32,120,56,54,95,54,52,44,32
DB 67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97
DB 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
@@ -762,9 +568,9 @@ ALIGN 4
DD imagerel $L$SEH_end_RC4
DD imagerel $L$SEH_info_RC4
- DD imagerel $L$SEH_begin_private_RC4_set_key
- DD imagerel $L$SEH_end_private_RC4_set_key
- DD imagerel $L$SEH_info_private_RC4_set_key
+ DD imagerel $L$SEH_begin_RC4_set_key
+ DD imagerel $L$SEH_end_RC4_set_key
+ DD imagerel $L$SEH_info_RC4_set_key
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
@@ -772,7 +578,7 @@ ALIGN 8
$L$SEH_info_RC4::
DB 9,0,0,0
DD imagerel stream_se_handler
-$L$SEH_info_private_RC4_set_key::
+$L$SEH_info_RC4_set_key::
DB 9,0,0,0
DD imagerel key_se_handler
diff --git a/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm b/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
index 9589f7fa08..9323f2b26a 100644
--- a/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/sha/sha1-x86_64.asm
@@ -1,7 +1,5 @@
OPTION DOTNAME
.text$ SEGMENT ALIGN(64) 'CODE'
-EXTERN OPENSSL_ia32cap_P:NEAR
-
PUBLIC sha1_block_data_order
ALIGN 16
@@ -15,18 +13,9 @@ $L$SEH_begin_sha1_block_data_order::
mov rdx,r8
- mov r9d,DWORD PTR[((OPENSSL_ia32cap_P+0))]
- mov r8d,DWORD PTR[((OPENSSL_ia32cap_P+4))]
- test r8d,512
- jz $L$ialu
- jmp _ssse3_shortcut
-
-ALIGN 16
-$L$ialu::
push rbx
push rbp
push r12
- push r13
mov r11,rsp
mov r8,rdi
sub rsp,72
@@ -36,2499 +25,1278 @@ $L$ialu::
mov QWORD PTR[64+rsp],r11
$L$prologue::
- mov esi,DWORD PTR[r8]
- mov edi,DWORD PTR[4+r8]
- mov r11d,DWORD PTR[8+r8]
- mov r12d,DWORD PTR[12+r8]
- mov r13d,DWORD PTR[16+r8]
- jmp $L$loop
-
-ALIGN 16
+ mov edx,DWORD PTR[r8]
+ mov esi,DWORD PTR[4+r8]
+ mov edi,DWORD PTR[8+r8]
+ mov ebp,DWORD PTR[12+r8]
+ mov r11d,DWORD PTR[16+r8]
+ALIGN 4
$L$loop::
- mov edx,DWORD PTR[r9]
- bswap edx
- mov DWORD PTR[rsp],edx
- mov eax,r11d
- mov ebp,DWORD PTR[4+r9]
- mov ecx,esi
- xor eax,r12d
- bswap ebp
- rol ecx,5
- lea r13d,DWORD PTR[1518500249+r13*1+rdx]
- and eax,edi
- mov DWORD PTR[4+rsp],ebp
- add r13d,ecx
- xor eax,r12d
- rol edi,30
- add r13d,eax
- mov eax,edi
- mov edx,DWORD PTR[8+r9]
- mov ecx,r13d
- xor eax,r11d
- bswap edx
- rol ecx,5
- lea r12d,DWORD PTR[1518500249+r12*1+rbp]
- and eax,esi
- mov DWORD PTR[8+rsp],edx
- add r12d,ecx
- xor eax,r11d
- rol esi,30
- add r12d,eax
- mov eax,esi
- mov ebp,DWORD PTR[12+r9]
- mov ecx,r12d
- xor eax,edi
- bswap ebp
- rol ecx,5
- lea r11d,DWORD PTR[1518500249+r11*1+rdx]
- and eax,r13d
- mov DWORD PTR[12+rsp],ebp
- add r11d,ecx
- xor eax,edi
- rol r13d,30
- add r11d,eax
- mov eax,r13d
- mov edx,DWORD PTR[16+r9]
- mov ecx,r11d
- xor eax,esi
- bswap edx
- rol ecx,5
- lea edi,DWORD PTR[1518500249+rdi*1+rbp]
- and eax,r12d
- mov DWORD PTR[16+rsp],edx
- add edi,ecx
- xor eax,esi
- rol r12d,30
- add edi,eax
- mov eax,r12d
- mov ebp,DWORD PTR[20+r9]
- mov ecx,edi
- xor eax,r13d
- bswap ebp
- rol ecx,5
- lea esi,DWORD PTR[1518500249+rsi*1+rdx]
- and eax,r11d
- mov DWORD PTR[20+rsp],ebp
- add esi,ecx
- xor eax,r13d
- rol r11d,30
- add esi,eax
- mov eax,r11d
- mov edx,DWORD PTR[24+r9]
- mov ecx,esi
- xor eax,r12d
- bswap edx
- rol ecx,5
- lea r13d,DWORD PTR[1518500249+r13*1+rbp]
- and eax,edi
- mov DWORD PTR[24+rsp],edx
- add r13d,ecx
- xor eax,r12d
- rol edi,30
- add r13d,eax
- mov eax,edi
- mov ebp,DWORD PTR[28+r9]
- mov ecx,r13d
- xor eax,r11d
- bswap ebp
- rol ecx,5
- lea r12d,DWORD PTR[1518500249+r12*1+rdx]
- and eax,esi
- mov DWORD PTR[28+rsp],ebp
- add r12d,ecx
- xor eax,r11d
- rol esi,30
- add r12d,eax
- mov eax,esi
- mov edx,DWORD PTR[32+r9]
- mov ecx,r12d
- xor eax,edi
- bswap edx
- rol ecx,5
- lea r11d,DWORD PTR[1518500249+r11*1+rbp]
- and eax,r13d
- mov DWORD PTR[32+rsp],edx
- add r11d,ecx
- xor eax,edi
- rol r13d,30
- add r11d,eax
- mov eax,r13d
- mov ebp,DWORD PTR[36+r9]
- mov ecx,r11d
- xor eax,esi
- bswap ebp
- rol ecx,5
- lea edi,DWORD PTR[1518500249+rdi*1+rdx]
- and eax,r12d
- mov DWORD PTR[36+rsp],ebp
- add edi,ecx
- xor eax,esi
- rol r12d,30
- add edi,eax
- mov eax,r12d
- mov edx,DWORD PTR[40+r9]
- mov ecx,edi
- xor eax,r13d
- bswap edx
- rol ecx,5
- lea esi,DWORD PTR[1518500249+rsi*1+rbp]
- and eax,r11d
- mov DWORD PTR[40+rsp],edx
- add esi,ecx
- xor eax,r13d
- rol r11d,30
- add esi,eax
- mov eax,r11d
- mov ebp,DWORD PTR[44+r9]
- mov ecx,esi
- xor eax,r12d
- bswap ebp
- rol ecx,5
- lea r13d,DWORD PTR[1518500249+r13*1+rdx]
- and eax,edi
- mov DWORD PTR[44+rsp],ebp
- add r13d,ecx
- xor eax,r12d
- rol edi,30
- add r13d,eax
- mov eax,edi
- mov edx,DWORD PTR[48+r9]
- mov ecx,r13d
- xor eax,r11d
- bswap edx
- rol ecx,5
- lea r12d,DWORD PTR[1518500249+r12*1+rbp]
- and eax,esi
- mov DWORD PTR[48+rsp],edx
- add r12d,ecx
- xor eax,r11d
- rol esi,30
- add r12d,eax
- mov eax,esi
- mov ebp,DWORD PTR[52+r9]
- mov ecx,r12d
- xor eax,edi
- bswap ebp
- rol ecx,5
- lea r11d,DWORD PTR[1518500249+r11*1+rdx]
- and eax,r13d
- mov DWORD PTR[52+rsp],ebp
- add r11d,ecx
- xor eax,edi
- rol r13d,30
- add r11d,eax
- mov eax,r13d
- mov edx,DWORD PTR[56+r9]
- mov ecx,r11d
- xor eax,esi
- bswap edx
- rol ecx,5
- lea edi,DWORD PTR[1518500249+rdi*1+rbp]
- and eax,r12d
- mov DWORD PTR[56+rsp],edx
- add edi,ecx
- xor eax,esi
- rol r12d,30
- add edi,eax
- mov eax,r12d
- mov ebp,DWORD PTR[60+r9]
- mov ecx,edi
- xor eax,r13d
- bswap ebp
- rol ecx,5
- lea esi,DWORD PTR[1518500249+rsi*1+rdx]
- and eax,r11d
- mov DWORD PTR[60+rsp],ebp
- add esi,ecx
- xor eax,r13d
- rol r11d,30
- add esi,eax
- mov edx,DWORD PTR[rsp]
- mov eax,r11d
- mov ecx,esi
- xor edx,DWORD PTR[8+rsp]
- xor eax,r12d
- rol ecx,5
- xor edx,DWORD PTR[32+rsp]
- and eax,edi
- lea r13d,DWORD PTR[1518500249+r13*1+rbp]
- xor edx,DWORD PTR[52+rsp]
- xor eax,r12d
- rol edx,1
- add r13d,ecx
- rol edi,30
- mov DWORD PTR[rsp],edx
- add r13d,eax
- mov ebp,DWORD PTR[4+rsp]
- mov eax,edi
- mov ecx,r13d
- xor ebp,DWORD PTR[12+rsp]
- xor eax,r11d
- rol ecx,5
- xor ebp,DWORD PTR[36+rsp]
- and eax,esi
- lea r12d,DWORD PTR[1518500249+r12*1+rdx]
- xor ebp,DWORD PTR[56+rsp]
- xor eax,r11d
- rol ebp,1
- add r12d,ecx
- rol esi,30
- mov DWORD PTR[4+rsp],ebp
- add r12d,eax
- mov edx,DWORD PTR[8+rsp]
- mov eax,esi
- mov ecx,r12d
- xor edx,DWORD PTR[16+rsp]
- xor eax,edi
- rol ecx,5
- xor edx,DWORD PTR[40+rsp]
- and eax,r13d
- lea r11d,DWORD PTR[1518500249+r11*1+rbp]
- xor edx,DWORD PTR[60+rsp]
- xor eax,edi
- rol edx,1
- add r11d,ecx
- rol r13d,30
- mov DWORD PTR[8+rsp],edx
- add r11d,eax
- mov ebp,DWORD PTR[12+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor ebp,DWORD PTR[20+rsp]
- xor eax,esi
- rol ecx,5
- xor ebp,DWORD PTR[44+rsp]
- and eax,r12d
- lea edi,DWORD PTR[1518500249+rdi*1+rdx]
- xor ebp,DWORD PTR[rsp]
- xor eax,esi
- rol ebp,1
- add edi,ecx
- rol r12d,30
- mov DWORD PTR[12+rsp],ebp
- add edi,eax
- mov edx,DWORD PTR[16+rsp]
- mov eax,r12d
- mov ecx,edi
- xor edx,DWORD PTR[24+rsp]
- xor eax,r13d
- rol ecx,5
- xor edx,DWORD PTR[48+rsp]
- and eax,r11d
- lea esi,DWORD PTR[1518500249+rsi*1+rbp]
- xor edx,DWORD PTR[4+rsp]
- xor eax,r13d
- rol edx,1
- add esi,ecx
- rol r11d,30
- mov DWORD PTR[16+rsp],edx
- add esi,eax
- mov ebp,DWORD PTR[20+rsp]
- mov eax,r11d
- mov ecx,esi
- xor ebp,DWORD PTR[28+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[1859775393+r13*1+rdx]
- xor ebp,DWORD PTR[52+rsp]
- xor eax,r12d
- add r13d,ecx
- xor ebp,DWORD PTR[8+rsp]
- rol edi,30
- add r13d,eax
- rol ebp,1
- mov DWORD PTR[20+rsp],ebp
- mov edx,DWORD PTR[24+rsp]
- mov eax,edi
- mov ecx,r13d
- xor edx,DWORD PTR[32+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[1859775393+r12*1+rbp]
- xor edx,DWORD PTR[56+rsp]
- xor eax,r11d
- add r12d,ecx
- xor edx,DWORD PTR[12+rsp]
- rol esi,30
- add r12d,eax
- rol edx,1
- mov DWORD PTR[24+rsp],edx
- mov ebp,DWORD PTR[28+rsp]
- mov eax,esi
- mov ecx,r12d
- xor ebp,DWORD PTR[36+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[1859775393+r11*1+rdx]
- xor ebp,DWORD PTR[60+rsp]
- xor eax,edi
- add r11d,ecx
- xor ebp,DWORD PTR[16+rsp]
- rol r13d,30
- add r11d,eax
- rol ebp,1
- mov DWORD PTR[28+rsp],ebp
- mov edx,DWORD PTR[32+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor edx,DWORD PTR[40+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[1859775393+rdi*1+rbp]
- xor edx,DWORD PTR[rsp]
- xor eax,esi
- add edi,ecx
- xor edx,DWORD PTR[20+rsp]
- rol r12d,30
- add edi,eax
- rol edx,1
- mov DWORD PTR[32+rsp],edx
- mov ebp,DWORD PTR[36+rsp]
- mov eax,r12d
- mov ecx,edi
- xor ebp,DWORD PTR[44+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[1859775393+rsi*1+rdx]
- xor ebp,DWORD PTR[4+rsp]
- xor eax,r13d
- add esi,ecx
- xor ebp,DWORD PTR[24+rsp]
- rol r11d,30
- add esi,eax
- rol ebp,1
- mov DWORD PTR[36+rsp],ebp
- mov edx,DWORD PTR[40+rsp]
- mov eax,r11d
- mov ecx,esi
- xor edx,DWORD PTR[48+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[1859775393+r13*1+rbp]
- xor edx,DWORD PTR[8+rsp]
- xor eax,r12d
- add r13d,ecx
- xor edx,DWORD PTR[28+rsp]
- rol edi,30
- add r13d,eax
- rol edx,1
- mov DWORD PTR[40+rsp],edx
- mov ebp,DWORD PTR[44+rsp]
- mov eax,edi
- mov ecx,r13d
- xor ebp,DWORD PTR[52+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[1859775393+r12*1+rdx]
- xor ebp,DWORD PTR[12+rsp]
- xor eax,r11d
- add r12d,ecx
- xor ebp,DWORD PTR[32+rsp]
- rol esi,30
- add r12d,eax
- rol ebp,1
- mov DWORD PTR[44+rsp],ebp
- mov edx,DWORD PTR[48+rsp]
- mov eax,esi
- mov ecx,r12d
- xor edx,DWORD PTR[56+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[1859775393+r11*1+rbp]
- xor edx,DWORD PTR[16+rsp]
- xor eax,edi
- add r11d,ecx
- xor edx,DWORD PTR[36+rsp]
- rol r13d,30
- add r11d,eax
- rol edx,1
- mov DWORD PTR[48+rsp],edx
- mov ebp,DWORD PTR[52+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor ebp,DWORD PTR[60+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[1859775393+rdi*1+rdx]
- xor ebp,DWORD PTR[20+rsp]
- xor eax,esi
- add edi,ecx
- xor ebp,DWORD PTR[40+rsp]
- rol r12d,30
- add edi,eax
- rol ebp,1
- mov DWORD PTR[52+rsp],ebp
- mov edx,DWORD PTR[56+rsp]
- mov eax,r12d
- mov ecx,edi
- xor edx,DWORD PTR[rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[1859775393+rsi*1+rbp]
- xor edx,DWORD PTR[24+rsp]
- xor eax,r13d
- add esi,ecx
- xor edx,DWORD PTR[44+rsp]
- rol r11d,30
- add esi,eax
- rol edx,1
- mov DWORD PTR[56+rsp],edx
- mov ebp,DWORD PTR[60+rsp]
- mov eax,r11d
- mov ecx,esi
- xor ebp,DWORD PTR[4+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[1859775393+r13*1+rdx]
- xor ebp,DWORD PTR[28+rsp]
- xor eax,r12d
- add r13d,ecx
- xor ebp,DWORD PTR[48+rsp]
- rol edi,30
- add r13d,eax
- rol ebp,1
- mov DWORD PTR[60+rsp],ebp
- mov edx,DWORD PTR[rsp]
- mov eax,edi
- mov ecx,r13d
- xor edx,DWORD PTR[8+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[1859775393+r12*1+rbp]
- xor edx,DWORD PTR[32+rsp]
- xor eax,r11d
- add r12d,ecx
- xor edx,DWORD PTR[52+rsp]
- rol esi,30
- add r12d,eax
- rol edx,1
- mov DWORD PTR[rsp],edx
- mov ebp,DWORD PTR[4+rsp]
- mov eax,esi
- mov ecx,r12d
- xor ebp,DWORD PTR[12+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[1859775393+r11*1+rdx]
- xor ebp,DWORD PTR[36+rsp]
- xor eax,edi
- add r11d,ecx
- xor ebp,DWORD PTR[56+rsp]
- rol r13d,30
- add r11d,eax
- rol ebp,1
- mov DWORD PTR[4+rsp],ebp
- mov edx,DWORD PTR[8+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor edx,DWORD PTR[16+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[1859775393+rdi*1+rbp]
- xor edx,DWORD PTR[40+rsp]
- xor eax,esi
- add edi,ecx
- xor edx,DWORD PTR[60+rsp]
- rol r12d,30
- add edi,eax
- rol edx,1
- mov DWORD PTR[8+rsp],edx
- mov ebp,DWORD PTR[12+rsp]
- mov eax,r12d
- mov ecx,edi
- xor ebp,DWORD PTR[20+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[1859775393+rsi*1+rdx]
- xor ebp,DWORD PTR[44+rsp]
- xor eax,r13d
- add esi,ecx
- xor ebp,DWORD PTR[rsp]
- rol r11d,30
- add esi,eax
- rol ebp,1
- mov DWORD PTR[12+rsp],ebp
- mov edx,DWORD PTR[16+rsp]
- mov eax,r11d
- mov ecx,esi
- xor edx,DWORD PTR[24+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[1859775393+r13*1+rbp]
- xor edx,DWORD PTR[48+rsp]
- xor eax,r12d
- add r13d,ecx
- xor edx,DWORD PTR[4+rsp]
- rol edi,30
- add r13d,eax
- rol edx,1
- mov DWORD PTR[16+rsp],edx
- mov ebp,DWORD PTR[20+rsp]
- mov eax,edi
- mov ecx,r13d
- xor ebp,DWORD PTR[28+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[1859775393+r12*1+rdx]
- xor ebp,DWORD PTR[52+rsp]
- xor eax,r11d
- add r12d,ecx
- xor ebp,DWORD PTR[8+rsp]
+ mov eax,DWORD PTR[r9]
+ bswap eax
+ mov DWORD PTR[rsp],eax
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov ebx,edi
+ mov eax,DWORD PTR[4+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
+ and ebx,esi
+ mov DWORD PTR[4+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
rol esi,30
- add r12d,eax
- rol ebp,1
- mov DWORD PTR[20+rsp],ebp
- mov edx,DWORD PTR[24+rsp]
- mov eax,esi
- mov ecx,r12d
- xor edx,DWORD PTR[32+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[1859775393+r11*1+rbp]
- xor edx,DWORD PTR[56+rsp]
- xor eax,edi
- add r11d,ecx
- xor edx,DWORD PTR[12+rsp]
- rol r13d,30
- add r11d,eax
- rol edx,1
- mov DWORD PTR[24+rsp],edx
- mov ebp,DWORD PTR[28+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor ebp,DWORD PTR[36+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[1859775393+rdi*1+rdx]
- xor ebp,DWORD PTR[60+rsp]
- xor eax,esi
- add edi,ecx
- xor ebp,DWORD PTR[16+rsp]
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov ebx,esi
+ mov eax,DWORD PTR[8+r9]
+ mov ebp,r12d
+ xor ebx,edi
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[8+rsp],eax
+ add r11d,ebp
+ xor ebx,edi
+ rol edx,30
+ add r11d,ebx
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[12+r9]
+ mov edi,r11d
+ xor ebx,esi
+ bswap eax
+ rol edi,5
+ and ebx,r12d
+ mov DWORD PTR[12+rsp],eax
+ add ebp,edi
+ xor ebx,esi
rol r12d,30
- add edi,eax
- rol ebp,1
- mov DWORD PTR[28+rsp],ebp
- mov edx,DWORD PTR[32+rsp]
- mov eax,r12d
- mov ecx,edi
- xor edx,DWORD PTR[40+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[1859775393+rsi*1+rbp]
- xor edx,DWORD PTR[rsp]
- xor eax,r13d
- add esi,ecx
- xor edx,DWORD PTR[20+rsp]
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
+ mov ebx,r12d
+ mov eax,DWORD PTR[16+r9]
+ mov esi,ebp
+ xor ebx,edx
+ bswap eax
+ rol esi,5
+ and ebx,r11d
+ mov DWORD PTR[16+rsp],eax
+ add edi,esi
+ xor ebx,edx
rol r11d,30
- add esi,eax
- rol edx,1
- mov DWORD PTR[32+rsp],edx
- mov ebp,DWORD PTR[36+rsp]
- mov eax,r11d
+ add edi,ebx
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
mov ebx,r11d
- xor ebp,DWORD PTR[44+rsp]
- and eax,r12d
- mov ecx,esi
- xor ebp,DWORD PTR[4+rsp]
+ mov eax,DWORD PTR[20+r9]
+ mov edx,edi
+ xor ebx,r12d
+ bswap eax
+ rol edx,5
+ and ebx,ebp
+ mov DWORD PTR[20+rsp],eax
+ add esi,edx
xor ebx,r12d
- lea r13d,DWORD PTR[((-1894007588))+r13*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[24+rsp]
- add r13d,eax
+ rol ebp,30
+ add esi,ebx
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov ebx,ebp
+ mov eax,DWORD PTR[24+r9]
+ mov r12d,esi
+ xor ebx,r11d
+ bswap eax
+ rol r12d,5
and ebx,edi
- rol ebp,1
- add r13d,ebx
+ mov DWORD PTR[24+rsp],eax
+ add edx,r12d
+ xor ebx,r11d
rol edi,30
- mov DWORD PTR[36+rsp],ebp
- add r13d,ecx
- mov edx,DWORD PTR[40+rsp]
- mov eax,edi
+ add edx,ebx
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
mov ebx,edi
- xor edx,DWORD PTR[48+rsp]
- and eax,r11d
- mov ecx,r13d
- xor edx,DWORD PTR[8+rsp]
- xor ebx,r11d
- lea r12d,DWORD PTR[((-1894007588))+r12*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[28+rsp]
- add r12d,eax
+ mov eax,DWORD PTR[28+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
and ebx,esi
- rol edx,1
- add r12d,ebx
+ mov DWORD PTR[28+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
rol esi,30
- mov DWORD PTR[40+rsp],edx
- add r12d,ecx
- mov ebp,DWORD PTR[44+rsp]
- mov eax,esi
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
mov ebx,esi
- xor ebp,DWORD PTR[52+rsp]
- and eax,edi
- mov ecx,r12d
- xor ebp,DWORD PTR[12+rsp]
+ mov eax,DWORD PTR[32+r9]
+ mov ebp,r12d
+ xor ebx,edi
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[32+rsp],eax
+ add r11d,ebp
xor ebx,edi
- lea r11d,DWORD PTR[((-1894007588))+r11*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[32+rsp]
- add r11d,eax
- and ebx,r13d
- rol ebp,1
+ rol edx,30
add r11d,ebx
- rol r13d,30
- mov DWORD PTR[44+rsp],ebp
- add r11d,ecx
- mov edx,DWORD PTR[48+rsp]
- mov eax,r13d
- mov ebx,r13d
- xor edx,DWORD PTR[56+rsp]
- and eax,esi
- mov ecx,r11d
- xor edx,DWORD PTR[16+rsp]
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[36+r9]
+ mov edi,r11d
xor ebx,esi
- lea edi,DWORD PTR[((-1894007588))+rdi*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[36+rsp]
- add edi,eax
+ bswap eax
+ rol edi,5
and ebx,r12d
- rol edx,1
- add edi,ebx
+ mov DWORD PTR[36+rsp],eax
+ add ebp,edi
+ xor ebx,esi
rol r12d,30
- mov DWORD PTR[48+rsp],edx
- add edi,ecx
- mov ebp,DWORD PTR[52+rsp]
- mov eax,r12d
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
mov ebx,r12d
- xor ebp,DWORD PTR[60+rsp]
- and eax,r13d
- mov ecx,edi
- xor ebp,DWORD PTR[20+rsp]
- xor ebx,r13d
- lea esi,DWORD PTR[((-1894007588))+rsi*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[40+rsp]
- add esi,eax
+ mov eax,DWORD PTR[40+r9]
+ mov esi,ebp
+ xor ebx,edx
+ bswap eax
+ rol esi,5
and ebx,r11d
- rol ebp,1
- add esi,ebx
+ mov DWORD PTR[40+rsp],eax
+ add edi,esi
+ xor ebx,edx
rol r11d,30
- mov DWORD PTR[52+rsp],ebp
- add esi,ecx
- mov edx,DWORD PTR[56+rsp]
- mov eax,r11d
+ add edi,ebx
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
mov ebx,r11d
- xor edx,DWORD PTR[rsp]
- and eax,r12d
- mov ecx,esi
- xor edx,DWORD PTR[24+rsp]
+ mov eax,DWORD PTR[44+r9]
+ mov edx,edi
+ xor ebx,r12d
+ bswap eax
+ rol edx,5
+ and ebx,ebp
+ mov DWORD PTR[44+rsp],eax
+ add esi,edx
xor ebx,r12d
- lea r13d,DWORD PTR[((-1894007588))+r13*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[44+rsp]
- add r13d,eax
+ rol ebp,30
+ add esi,ebx
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov ebx,ebp
+ mov eax,DWORD PTR[48+r9]
+ mov r12d,esi
+ xor ebx,r11d
+ bswap eax
+ rol r12d,5
and ebx,edi
- rol edx,1
- add r13d,ebx
+ mov DWORD PTR[48+rsp],eax
+ add edx,r12d
+ xor ebx,r11d
rol edi,30
- mov DWORD PTR[56+rsp],edx
- add r13d,ecx
- mov ebp,DWORD PTR[60+rsp]
- mov eax,edi
+ add edx,ebx
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
mov ebx,edi
- xor ebp,DWORD PTR[4+rsp]
- and eax,r11d
- mov ecx,r13d
- xor ebp,DWORD PTR[28+rsp]
- xor ebx,r11d
- lea r12d,DWORD PTR[((-1894007588))+r12*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[48+rsp]
- add r12d,eax
+ mov eax,DWORD PTR[52+r9]
+ mov r11d,edx
+ xor ebx,ebp
+ bswap eax
+ rol r11d,5
and ebx,esi
- rol ebp,1
- add r12d,ebx
+ mov DWORD PTR[52+rsp],eax
+ add r12d,r11d
+ xor ebx,ebp
rol esi,30
- mov DWORD PTR[60+rsp],ebp
- add r12d,ecx
- mov edx,DWORD PTR[rsp]
- mov eax,esi
+ add r12d,ebx
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
mov ebx,esi
- xor edx,DWORD PTR[8+rsp]
- and eax,edi
- mov ecx,r12d
- xor edx,DWORD PTR[32+rsp]
+ mov eax,DWORD PTR[56+r9]
+ mov ebp,r12d
xor ebx,edi
- lea r11d,DWORD PTR[((-1894007588))+r11*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[52+rsp]
- add r11d,eax
- and ebx,r13d
- rol edx,1
+ bswap eax
+ rol ebp,5
+ and ebx,edx
+ mov DWORD PTR[56+rsp],eax
+ add r11d,ebp
+ xor ebx,edi
+ rol edx,30
add r11d,ebx
- rol r13d,30
- mov DWORD PTR[rsp],edx
- add r11d,ecx
- mov ebp,DWORD PTR[4+rsp]
- mov eax,r13d
- mov ebx,r13d
- xor ebp,DWORD PTR[12+rsp]
- and eax,esi
- mov ecx,r11d
- xor ebp,DWORD PTR[36+rsp]
+ lea ebp,DWORD PTR[05a827999h+rdi*1+rax]
+ mov ebx,edx
+ mov eax,DWORD PTR[60+r9]
+ mov edi,r11d
xor ebx,esi
- lea edi,DWORD PTR[((-1894007588))+rdi*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[56+rsp]
- add edi,eax
+ bswap eax
+ rol edi,5
and ebx,r12d
- rol ebp,1
- add edi,ebx
+ mov DWORD PTR[60+rsp],eax
+ add ebp,edi
+ xor ebx,esi
rol r12d,30
- mov DWORD PTR[4+rsp],ebp
- add edi,ecx
- mov edx,DWORD PTR[8+rsp]
- mov eax,r12d
+ add ebp,ebx
+ lea edi,DWORD PTR[05a827999h+rsi*1+rax]
+ mov eax,DWORD PTR[rsp]
mov ebx,r12d
- xor edx,DWORD PTR[16+rsp]
- and eax,r13d
- mov ecx,edi
- xor edx,DWORD PTR[40+rsp]
- xor ebx,r13d
- lea esi,DWORD PTR[((-1894007588))+rsi*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[60+rsp]
- add esi,eax
+ mov esi,ebp
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edx
+ rol esi,5
+ xor eax,DWORD PTR[32+rsp]
and ebx,r11d
- rol edx,1
- add esi,ebx
+ add edi,esi
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,edx
rol r11d,30
- mov DWORD PTR[8+rsp],edx
- add esi,ecx
- mov ebp,DWORD PTR[12+rsp]
- mov eax,r11d
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea esi,DWORD PTR[05a827999h+rdx*1+rax]
+ mov eax,DWORD PTR[4+rsp]
mov ebx,r11d
- xor ebp,DWORD PTR[20+rsp]
- and eax,r12d
- mov ecx,esi
- xor ebp,DWORD PTR[44+rsp]
+ mov edx,edi
+ xor eax,DWORD PTR[12+rsp]
xor ebx,r12d
- lea r13d,DWORD PTR[((-1894007588))+r13*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[rsp]
- add r13d,eax
+ rol edx,5
+ xor eax,DWORD PTR[36+rsp]
+ and ebx,ebp
+ add esi,edx
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r12d
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edx,DWORD PTR[05a827999h+r12*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,r11d
+ rol r12d,5
+ xor eax,DWORD PTR[40+rsp]
and ebx,edi
- rol ebp,1
- add r13d,ebx
+ add edx,r12d
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,r11d
rol edi,30
- mov DWORD PTR[12+rsp],ebp
- add r13d,ecx
- mov edx,DWORD PTR[16+rsp]
- mov eax,edi
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea r12d,DWORD PTR[05a827999h+r11*1+rax]
+ mov eax,DWORD PTR[12+rsp]
mov ebx,edi
- xor edx,DWORD PTR[24+rsp]
- and eax,r11d
- mov ecx,r13d
- xor edx,DWORD PTR[48+rsp]
- xor ebx,r11d
- lea r12d,DWORD PTR[((-1894007588))+r12*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[4+rsp]
- add r12d,eax
+ mov r11d,edx
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ rol r11d,5
+ xor eax,DWORD PTR[44+rsp]
and ebx,esi
- rol edx,1
- add r12d,ebx
+ add r12d,r11d
+ xor eax,DWORD PTR[rsp]
+ xor ebx,ebp
rol esi,30
- mov DWORD PTR[16+rsp],edx
- add r12d,ecx
- mov ebp,DWORD PTR[20+rsp]
- mov eax,esi
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea r11d,DWORD PTR[05a827999h+rbp*1+rax]
+ mov eax,DWORD PTR[16+rsp]
mov ebx,esi
- xor ebp,DWORD PTR[28+rsp]
- and eax,edi
- mov ecx,r12d
- xor ebp,DWORD PTR[52+rsp]
+ mov ebp,r12d
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ rol ebp,5
+ xor eax,DWORD PTR[48+rsp]
+ and ebx,edx
+ add r11d,ebp
+ xor eax,DWORD PTR[4+rsp]
xor ebx,edi
- lea r11d,DWORD PTR[((-1894007588))+r11*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[8+rsp]
- add r11d,eax
- and ebx,r13d
- rol ebp,1
+ rol edx,30
add r11d,ebx
- rol r13d,30
- mov DWORD PTR[20+rsp],ebp
- add r11d,ecx
- mov edx,DWORD PTR[24+rsp]
- mov eax,r13d
- mov ebx,r13d
- xor edx,DWORD PTR[32+rsp]
- and eax,esi
- mov ecx,r11d
- xor edx,DWORD PTR[56+rsp]
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[52+rsp]
xor ebx,esi
- lea edi,DWORD PTR[((-1894007588))+rdi*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[12+rsp]
- add edi,eax
- and ebx,r12d
- rol edx,1
- add edi,ebx
+ add ebp,edi
+ xor eax,DWORD PTR[8+rsp]
rol r12d,30
- mov DWORD PTR[24+rsp],edx
- add edi,ecx
- mov ebp,DWORD PTR[28+rsp]
- mov eax,r12d
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[24+rsp]
mov ebx,r12d
- xor ebp,DWORD PTR[36+rsp]
- and eax,r13d
- mov ecx,edi
- xor ebp,DWORD PTR[60+rsp]
- xor ebx,r13d
- lea esi,DWORD PTR[((-1894007588))+rsi*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[16+rsp]
- add esi,eax
- and ebx,r11d
- rol ebp,1
- add esi,ebx
+ mov esi,ebp
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[12+rsp]
rol r11d,30
- mov DWORD PTR[28+rsp],ebp
- add esi,ecx
- mov edx,DWORD PTR[32+rsp]
- mov eax,r11d
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[28+rsp]
mov ebx,r11d
- xor edx,DWORD PTR[40+rsp]
- and eax,r12d
- mov ecx,esi
- xor edx,DWORD PTR[rsp]
+ mov edx,edi
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[60+rsp]
xor ebx,r12d
- lea r13d,DWORD PTR[((-1894007588))+r13*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[20+rsp]
- add r13d,eax
- and ebx,edi
- rol edx,1
- add r13d,ebx
+ add esi,edx
+ xor eax,DWORD PTR[16+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[20+rsp]
rol edi,30
- mov DWORD PTR[32+rsp],edx
- add r13d,ecx
- mov ebp,DWORD PTR[36+rsp]
- mov eax,edi
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[36+rsp]
mov ebx,edi
- xor ebp,DWORD PTR[44+rsp]
- and eax,r11d
- mov ecx,r13d
- xor ebp,DWORD PTR[4+rsp]
- xor ebx,r11d
- lea r12d,DWORD PTR[((-1894007588))+r12*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[24+rsp]
- add r12d,eax
- and ebx,esi
- rol ebp,1
- add r12d,ebx
+ mov r11d,edx
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[24+rsp]
rol esi,30
- mov DWORD PTR[36+rsp],ebp
- add r12d,ecx
- mov edx,DWORD PTR[40+rsp]
- mov eax,esi
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[36+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[40+rsp]
mov ebx,esi
- xor edx,DWORD PTR[48+rsp]
- and eax,edi
- mov ecx,r12d
- xor edx,DWORD PTR[8+rsp]
+ mov ebp,r12d
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[8+rsp]
xor ebx,edi
- lea r11d,DWORD PTR[((-1894007588))+r11*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[28+rsp]
- add r11d,eax
- and ebx,r13d
- rol edx,1
+ add r11d,ebp
+ xor eax,DWORD PTR[28+rsp]
+ rol edx,30
add r11d,ebx
- rol r13d,30
- mov DWORD PTR[40+rsp],edx
- add r11d,ecx
- mov ebp,DWORD PTR[44+rsp]
- mov eax,r13d
- mov ebx,r13d
- xor ebp,DWORD PTR[52+rsp]
- and eax,esi
- mov ecx,r11d
- xor ebp,DWORD PTR[12+rsp]
+ rol eax,1
+ mov DWORD PTR[40+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[12+rsp]
xor ebx,esi
- lea edi,DWORD PTR[((-1894007588))+rdi*1+rdx]
- rol ecx,5
- xor ebp,DWORD PTR[32+rsp]
- add edi,eax
- and ebx,r12d
- rol ebp,1
- add edi,ebx
+ add ebp,edi
+ xor eax,DWORD PTR[32+rsp]
rol r12d,30
- mov DWORD PTR[44+rsp],ebp
- add edi,ecx
- mov edx,DWORD PTR[48+rsp]
- mov eax,r12d
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[44+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[48+rsp]
mov ebx,r12d
- xor edx,DWORD PTR[56+rsp]
- and eax,r13d
- mov ecx,edi
- xor edx,DWORD PTR[16+rsp]
- xor ebx,r13d
- lea esi,DWORD PTR[((-1894007588))+rsi*1+rbp]
- rol ecx,5
- xor edx,DWORD PTR[36+rsp]
- add esi,eax
- and ebx,r11d
- rol edx,1
- add esi,ebx
+ mov esi,ebp
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[36+rsp]
rol r11d,30
- mov DWORD PTR[48+rsp],edx
- add esi,ecx
- mov ebp,DWORD PTR[52+rsp]
- mov eax,r11d
- mov ecx,esi
- xor ebp,DWORD PTR[60+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[((-899497514))+r13*1+rdx]
- xor ebp,DWORD PTR[20+rsp]
- xor eax,r12d
- add r13d,ecx
- xor ebp,DWORD PTR[40+rsp]
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[48+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[40+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[52+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[44+rsp]
rol edi,30
- add r13d,eax
- rol ebp,1
- mov DWORD PTR[52+rsp],ebp
- mov edx,DWORD PTR[56+rsp]
- mov eax,edi
- mov ecx,r13d
- xor edx,DWORD PTR[rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[((-899497514))+r12*1+rbp]
- xor edx,DWORD PTR[24+rsp]
- xor eax,r11d
- add r12d,ecx
- xor edx,DWORD PTR[44+rsp]
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[56+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[48+rsp]
rol esi,30
- add r12d,eax
- rol edx,1
- mov DWORD PTR[56+rsp],edx
- mov ebp,DWORD PTR[60+rsp]
- mov eax,esi
- mov ecx,r12d
- xor ebp,DWORD PTR[4+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[((-899497514))+r11*1+rdx]
- xor ebp,DWORD PTR[28+rsp]
- xor eax,edi
- add r11d,ecx
- xor ebp,DWORD PTR[48+rsp]
- rol r13d,30
- add r11d,eax
- rol ebp,1
- mov DWORD PTR[60+rsp],ebp
- mov edx,DWORD PTR[rsp]
- mov eax,r13d
- mov ecx,r11d
- xor edx,DWORD PTR[8+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[((-899497514))+rdi*1+rbp]
- xor edx,DWORD PTR[32+rsp]
- xor eax,esi
- add edi,ecx
- xor edx,DWORD PTR[52+rsp]
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[60+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[52+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[56+rsp]
rol r12d,30
- add edi,eax
- rol edx,1
- mov DWORD PTR[rsp],edx
- mov ebp,DWORD PTR[4+rsp]
- mov eax,r12d
- mov ecx,edi
- xor ebp,DWORD PTR[12+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[((-899497514))+rsi*1+rdx]
- xor ebp,DWORD PTR[36+rsp]
- xor eax,r13d
- add esi,ecx
- xor ebp,DWORD PTR[56+rsp]
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[60+rsp]
rol r11d,30
- add esi,eax
- rol ebp,1
- mov DWORD PTR[4+rsp],ebp
- mov edx,DWORD PTR[8+rsp]
- mov eax,r11d
- mov ecx,esi
- xor edx,DWORD PTR[16+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[((-899497514))+r13*1+rbp]
- xor edx,DWORD PTR[40+rsp]
- xor eax,r12d
- add r13d,ecx
- xor edx,DWORD PTR[60+rsp]
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea esi,DWORD PTR[1859775393+rdx*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ rol edx,5
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea edx,DWORD PTR[1859775393+r12*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[4+rsp]
rol edi,30
- add r13d,eax
- rol edx,1
- mov DWORD PTR[8+rsp],edx
- mov ebp,DWORD PTR[12+rsp]
- mov eax,edi
- mov ecx,r13d
- xor ebp,DWORD PTR[20+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[((-899497514))+r12*1+rdx]
- xor ebp,DWORD PTR[44+rsp]
- xor eax,r11d
- add r12d,ecx
- xor ebp,DWORD PTR[rsp]
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea r12d,DWORD PTR[1859775393+r11*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[8+rsp]
rol esi,30
- add r12d,eax
- rol ebp,1
- mov DWORD PTR[12+rsp],ebp
- mov edx,DWORD PTR[16+rsp]
- mov eax,esi
- mov ecx,r12d
- xor edx,DWORD PTR[24+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[((-899497514))+r11*1+rbp]
- xor edx,DWORD PTR[48+rsp]
- xor eax,edi
- add r11d,ecx
- xor edx,DWORD PTR[4+rsp]
- rol r13d,30
- add r11d,eax
- rol edx,1
- mov DWORD PTR[16+rsp],edx
- mov ebp,DWORD PTR[20+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor ebp,DWORD PTR[28+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[((-899497514))+rdi*1+rdx]
- xor ebp,DWORD PTR[52+rsp]
- xor eax,esi
- add edi,ecx
- xor ebp,DWORD PTR[8+rsp]
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea r11d,DWORD PTR[1859775393+rbp*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edx
+ rol ebp,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[12+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea ebp,DWORD PTR[1859775393+rdi*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,esi
+ add ebp,edi
+ xor eax,DWORD PTR[16+rsp]
rol r12d,30
- add edi,eax
- rol ebp,1
- mov DWORD PTR[20+rsp],ebp
- mov edx,DWORD PTR[24+rsp]
- mov eax,r12d
- mov ecx,edi
- xor edx,DWORD PTR[32+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[((-899497514))+rsi*1+rbp]
- xor edx,DWORD PTR[56+rsp]
- xor eax,r13d
- add esi,ecx
- xor edx,DWORD PTR[12+rsp]
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edi,DWORD PTR[1859775393+rsi*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,r12d
+ mov esi,ebp
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[20+rsp]
rol r11d,30
- add esi,eax
- rol edx,1
- mov DWORD PTR[24+rsp],edx
- mov ebp,DWORD PTR[28+rsp]
- mov eax,r11d
- mov ecx,esi
- xor ebp,DWORD PTR[36+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[((-899497514))+r13*1+rdx]
- xor ebp,DWORD PTR[60+rsp]
- xor eax,r12d
- add r13d,ecx
- xor ebp,DWORD PTR[16+rsp]
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[44+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[4+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[24+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[36+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[48+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[8+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[28+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
rol edi,30
- add r13d,eax
- rol ebp,1
- mov DWORD PTR[28+rsp],ebp
- mov edx,DWORD PTR[32+rsp]
- mov eax,edi
- mov ecx,r13d
- xor edx,DWORD PTR[40+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[((-899497514))+r12*1+rbp]
- xor edx,DWORD PTR[rsp]
- xor eax,r11d
- add r12d,ecx
- xor edx,DWORD PTR[20+rsp]
+ mov DWORD PTR[40+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[52+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[12+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[32+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
rol esi,30
- add r12d,eax
- rol edx,1
- mov DWORD PTR[32+rsp],edx
- mov ebp,DWORD PTR[36+rsp]
- mov eax,esi
+ mov DWORD PTR[44+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[56+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[16+rsp]
+ or ecx,esi
+ rol ebp,5
+ xor eax,DWORD PTR[36+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[48+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r12d
mov ecx,r12d
- xor ebp,DWORD PTR[44+rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[((-899497514))+r11*1+rdx]
- xor ebp,DWORD PTR[4+rsp]
- xor eax,edi
- add r11d,ecx
- xor ebp,DWORD PTR[24+rsp]
- rol r13d,30
- add r11d,eax
- rol ebp,1
- mov DWORD PTR[36+rsp],ebp
- mov edx,DWORD PTR[40+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor edx,DWORD PTR[48+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[((-899497514))+rdi*1+rbp]
- xor edx,DWORD PTR[8+rsp]
- xor eax,esi
- add edi,ecx
- xor edx,DWORD PTR[28+rsp]
+ xor eax,DWORD PTR[60+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[20+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[40+rsp]
+ and ecx,esi
+ add ebp,edi
+ rol eax,1
+ or ebx,ecx
rol r12d,30
- add edi,eax
- rol edx,1
- mov DWORD PTR[40+rsp],edx
- mov ebp,DWORD PTR[44+rsp]
- mov eax,r12d
- mov ecx,edi
- xor ebp,DWORD PTR[52+rsp]
- xor eax,r11d
- rol ecx,5
- lea esi,DWORD PTR[((-899497514))+rsi*1+rdx]
- xor ebp,DWORD PTR[12+rsp]
- xor eax,r13d
- add esi,ecx
- xor ebp,DWORD PTR[32+rsp]
+ mov DWORD PTR[52+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[rsp]
+ mov esi,ebp
+ and ebx,r12d
+ xor eax,DWORD PTR[24+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[44+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
rol r11d,30
- add esi,eax
- rol ebp,1
- mov DWORD PTR[44+rsp],ebp
- mov edx,DWORD PTR[48+rsp]
- mov eax,r11d
- mov ecx,esi
- xor edx,DWORD PTR[56+rsp]
- xor eax,edi
- rol ecx,5
- lea r13d,DWORD PTR[((-899497514))+r13*1+rbp]
- xor edx,DWORD PTR[16+rsp]
- xor eax,r12d
- add r13d,ecx
- xor edx,DWORD PTR[36+rsp]
+ mov DWORD PTR[56+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[4+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[28+rsp]
+ or ecx,r11d
+ rol edx,5
+ xor eax,DWORD PTR[48+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[60+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[8+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[32+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[52+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
rol edi,30
- add r13d,eax
- rol edx,1
- mov DWORD PTR[48+rsp],edx
- mov ebp,DWORD PTR[52+rsp]
- mov eax,edi
- mov ecx,r13d
- xor ebp,DWORD PTR[60+rsp]
- xor eax,esi
- rol ecx,5
- lea r12d,DWORD PTR[((-899497514))+r12*1+rdx]
- xor ebp,DWORD PTR[20+rsp]
- xor eax,r11d
- add r12d,ecx
- xor ebp,DWORD PTR[40+rsp]
+ mov DWORD PTR[rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[12+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[36+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[56+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
rol esi,30
- add r12d,eax
- rol ebp,1
- mov edx,DWORD PTR[56+rsp]
- mov eax,esi
- mov ecx,r12d
- xor edx,DWORD PTR[rsp]
- xor eax,r13d
- rol ecx,5
- lea r11d,DWORD PTR[((-899497514))+r11*1+rbp]
- xor edx,DWORD PTR[24+rsp]
- xor eax,edi
- add r11d,ecx
- xor edx,DWORD PTR[44+rsp]
- rol r13d,30
- add r11d,eax
- rol edx,1
- mov ebp,DWORD PTR[60+rsp]
- mov eax,r13d
- mov ecx,r11d
- xor ebp,DWORD PTR[4+rsp]
- xor eax,r12d
- rol ecx,5
- lea edi,DWORD PTR[((-899497514))+rdi*1+rdx]
- xor ebp,DWORD PTR[28+rsp]
- xor eax,esi
- add edi,ecx
- xor ebp,DWORD PTR[48+rsp]
- rol r12d,30
- add edi,eax
- rol ebp,1
- mov eax,r12d
- mov ecx,edi
- xor eax,r11d
- lea esi,DWORD PTR[((-899497514))+rsi*1+rbp]
- rol ecx,5
- xor eax,r13d
- add esi,ecx
- rol r11d,30
- add esi,eax
- add esi,DWORD PTR[r8]
- add edi,DWORD PTR[4+r8]
- add r11d,DWORD PTR[8+r8]
- add r12d,DWORD PTR[12+r8]
- add r13d,DWORD PTR[16+r8]
- mov DWORD PTR[r8],esi
- mov DWORD PTR[4+r8],edi
- mov DWORD PTR[8+r8],r11d
- mov DWORD PTR[12+r8],r12d
- mov DWORD PTR[16+r8],r13d
-
- sub r10,1
- lea r9,QWORD PTR[64+r9]
- jnz $L$loop
-
- mov rsi,QWORD PTR[64+rsp]
- mov r13,QWORD PTR[rsi]
- mov r12,QWORD PTR[8+rsi]
- mov rbp,QWORD PTR[16+rsi]
- mov rbx,QWORD PTR[24+rsi]
- lea rsp,QWORD PTR[32+rsi]
-$L$epilogue::
- mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
- mov rsi,QWORD PTR[16+rsp]
- DB 0F3h,0C3h ;repret
-$L$SEH_end_sha1_block_data_order::
-sha1_block_data_order ENDP
-
-ALIGN 16
-sha1_block_data_order_ssse3 PROC PRIVATE
- mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
- mov QWORD PTR[16+rsp],rsi
- mov rax,rsp
-$L$SEH_begin_sha1_block_data_order_ssse3::
- mov rdi,rcx
- mov rsi,rdx
- mov rdx,r8
-
-
-_ssse3_shortcut::
- push rbx
- push rbp
- push r12
- lea rsp,QWORD PTR[((-144))+rsp]
- movaps XMMWORD PTR[(64+0)+rsp],xmm6
- movaps XMMWORD PTR[(64+16)+rsp],xmm7
- movaps XMMWORD PTR[(64+32)+rsp],xmm8
- movaps XMMWORD PTR[(64+48)+rsp],xmm9
- movaps XMMWORD PTR[(64+64)+rsp],xmm10
-$L$prologue_ssse3::
- mov r8,rdi
- mov r9,rsi
- mov r10,rdx
-
- shl r10,6
- add r10,r9
- lea r11,QWORD PTR[K_XX_XX]
-
- mov eax,DWORD PTR[r8]
- mov ebx,DWORD PTR[4+r8]
- mov ecx,DWORD PTR[8+r8]
- mov edx,DWORD PTR[12+r8]
- mov esi,ebx
- mov ebp,DWORD PTR[16+r8]
-
- movdqa xmm6,XMMWORD PTR[64+r11]
- movdqa xmm9,XMMWORD PTR[r11]
- movdqu xmm0,XMMWORD PTR[r9]
- movdqu xmm1,XMMWORD PTR[16+r9]
- movdqu xmm2,XMMWORD PTR[32+r9]
- movdqu xmm3,XMMWORD PTR[48+r9]
-DB 102,15,56,0,198
- add r9,64
-DB 102,15,56,0,206
-DB 102,15,56,0,214
-DB 102,15,56,0,222
- paddd xmm0,xmm9
- paddd xmm1,xmm9
- paddd xmm2,xmm9
- movdqa XMMWORD PTR[rsp],xmm0
- psubd xmm0,xmm9
- movdqa XMMWORD PTR[16+rsp],xmm1
- psubd xmm1,xmm9
- movdqa XMMWORD PTR[32+rsp],xmm2
- psubd xmm2,xmm9
- jmp $L$oop_ssse3
-ALIGN 16
-$L$oop_ssse3::
- movdqa xmm4,xmm1
- add ebp,DWORD PTR[rsp]
- xor ecx,edx
- movdqa xmm8,xmm3
-DB 102,15,58,15,224,8
- mov edi,eax
- rol eax,5
- paddd xmm9,xmm3
- and esi,ecx
- xor ecx,edx
- psrldq xmm8,4
- xor esi,edx
- add ebp,eax
- pxor xmm4,xmm0
- ror ebx,2
- add ebp,esi
- pxor xmm8,xmm2
- add edx,DWORD PTR[4+rsp]
- xor ebx,ecx
- mov esi,ebp
+ mov DWORD PTR[4+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[16+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[40+rsp]
+ or ecx,esi
rol ebp,5
- pxor xmm4,xmm8
- and edi,ebx
- xor ebx,ecx
- movdqa XMMWORD PTR[48+rsp],xmm9
- xor edi,ecx
- add edx,ebp
- movdqa xmm10,xmm4
- movdqa xmm8,xmm4
- ror eax,7
- add edx,edi
- add ecx,DWORD PTR[8+rsp]
- xor eax,ebx
- pslldq xmm10,12
- paddd xmm4,xmm4
- mov edi,edx
- rol edx,5
- and esi,eax
- xor eax,ebx
- psrld xmm8,31
- xor esi,ebx
- add ecx,edx
- movdqa xmm9,xmm10
- ror ebp,7
- add ecx,esi
- psrld xmm10,30
- por xmm4,xmm8
- add ebx,DWORD PTR[12+rsp]
- xor ebp,eax
- mov esi,ecx
- rol ecx,5
- pslld xmm9,2
- pxor xmm4,xmm10
- and edi,ebp
- xor ebp,eax
- movdqa xmm10,XMMWORD PTR[r11]
- xor edi,eax
- add ebx,ecx
- pxor xmm4,xmm9
- ror edx,7
- add ebx,edi
- movdqa xmm5,xmm2
- add eax,DWORD PTR[16+rsp]
- xor edx,ebp
- movdqa xmm9,xmm4
-DB 102,15,58,15,233,8
- mov edi,ebx
- rol ebx,5
- paddd xmm10,xmm4
- and esi,edx
- xor edx,ebp
- psrldq xmm9,4
- xor esi,ebp
- add eax,ebx
- pxor xmm5,xmm1
- ror ecx,7
- add eax,esi
- pxor xmm9,xmm3
- add ebp,DWORD PTR[20+rsp]
- xor ecx,edx
- mov esi,eax
- rol eax,5
- pxor xmm5,xmm9
- and edi,ecx
- xor ecx,edx
- movdqa XMMWORD PTR[rsp],xmm10
- xor edi,edx
- add ebp,eax
- movdqa xmm8,xmm5
- movdqa xmm9,xmm5
- ror ebx,7
+ xor eax,DWORD PTR[60+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[8+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,r12d
+ mov ecx,r12d
+ xor eax,DWORD PTR[20+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[44+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[rsp]
+ and ecx,esi
add ebp,edi
- add edx,DWORD PTR[24+rsp]
- xor ebx,ecx
- pslldq xmm8,12
- paddd xmm5,xmm5
- mov edi,ebp
- rol ebp,5
- and esi,ebx
- xor ebx,ecx
- psrld xmm9,31
- xor esi,ecx
- add edx,ebp
- movdqa xmm10,xmm8
- ror eax,7
- add edx,esi
- psrld xmm8,30
- por xmm5,xmm9
- add ecx,DWORD PTR[28+rsp]
- xor eax,ebx
- mov esi,edx
- rol edx,5
- pslld xmm10,2
- pxor xmm5,xmm8
- and edi,eax
- xor eax,ebx
- movdqa xmm8,XMMWORD PTR[16+r11]
- xor edi,ebx
- add ecx,edx
- pxor xmm5,xmm10
- ror ebp,7
- add ecx,edi
- movdqa xmm6,xmm3
- add ebx,DWORD PTR[32+rsp]
- xor ebp,eax
- movdqa xmm10,xmm5
-DB 102,15,58,15,242,8
- mov edi,ecx
- rol ecx,5
- paddd xmm8,xmm5
- and esi,ebp
- xor ebp,eax
- psrldq xmm10,4
- xor esi,eax
- add ebx,ecx
- pxor xmm6,xmm2
- ror edx,7
- add ebx,esi
- pxor xmm10,xmm4
- add eax,DWORD PTR[36+rsp]
- xor edx,ebp
- mov esi,ebx
- rol ebx,5
- pxor xmm6,xmm10
- and edi,edx
- xor edx,ebp
- movdqa XMMWORD PTR[16+rsp],xmm8
- xor edi,ebp
- add eax,ebx
- movdqa xmm9,xmm6
- movdqa xmm10,xmm6
- ror ecx,7
- add eax,edi
- add ebp,DWORD PTR[40+rsp]
- xor ecx,edx
- pslldq xmm9,12
- paddd xmm6,xmm6
- mov edi,eax
- rol eax,5
- and esi,ecx
- xor ecx,edx
- psrld xmm10,31
- xor esi,edx
- add ebp,eax
- movdqa xmm8,xmm9
- ror ebx,7
- add ebp,esi
- psrld xmm9,30
- por xmm6,xmm10
- add edx,DWORD PTR[44+rsp]
- xor ebx,ecx
+ rol eax,1
+ or ebx,ecx
+ rol r12d,30
+ mov DWORD PTR[12+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[24+rsp]
mov esi,ebp
- rol ebp,5
- pslld xmm8,2
- pxor xmm6,xmm9
- and edi,ebx
- xor ebx,ecx
- movdqa xmm9,XMMWORD PTR[16+r11]
- xor edi,ecx
- add edx,ebp
- pxor xmm6,xmm8
- ror eax,7
- add edx,edi
- movdqa xmm7,xmm4
- add ecx,DWORD PTR[48+rsp]
- xor eax,ebx
- movdqa xmm8,xmm6
-DB 102,15,58,15,251,8
- mov edi,edx
- rol edx,5
- paddd xmm9,xmm6
- and esi,eax
- xor eax,ebx
- psrldq xmm8,4
- xor esi,ebx
- add ecx,edx
- pxor xmm7,xmm3
- ror ebp,7
- add ecx,esi
- pxor xmm8,xmm5
- add ebx,DWORD PTR[52+rsp]
- xor ebp,eax
- mov esi,ecx
- rol ecx,5
- pxor xmm7,xmm8
- and edi,ebp
- xor ebp,eax
- movdqa XMMWORD PTR[32+rsp],xmm9
- xor edi,eax
- add ebx,ecx
- movdqa xmm10,xmm7
- movdqa xmm8,xmm7
- ror edx,7
- add ebx,edi
- add eax,DWORD PTR[56+rsp]
- xor edx,ebp
- pslldq xmm10,12
- paddd xmm7,xmm7
- mov edi,ebx
- rol ebx,5
- and esi,edx
- xor edx,ebp
- psrld xmm8,31
- xor esi,ebp
- add eax,ebx
- movdqa xmm9,xmm10
- ror ecx,7
- add eax,esi
- psrld xmm10,30
- por xmm7,xmm8
- add ebp,DWORD PTR[60+rsp]
- xor ecx,edx
- mov esi,eax
- rol eax,5
- pslld xmm9,2
- pxor xmm7,xmm10
- and edi,ecx
- xor ecx,edx
- movdqa xmm10,XMMWORD PTR[16+r11]
- xor edi,edx
- add ebp,eax
- pxor xmm7,xmm9
- ror ebx,7
- add ebp,edi
- movdqa xmm9,xmm7
- add edx,DWORD PTR[rsp]
- pxor xmm0,xmm4
-DB 102,68,15,58,15,206,8
- xor ebx,ecx
- mov edi,ebp
- rol ebp,5
- pxor xmm0,xmm1
- and esi,ebx
- xor ebx,ecx
- movdqa xmm8,xmm10
- paddd xmm10,xmm7
- xor esi,ecx
- add edx,ebp
- pxor xmm0,xmm9
- ror eax,7
- add edx,esi
- add ecx,DWORD PTR[4+rsp]
- xor eax,ebx
- movdqa xmm9,xmm0
- movdqa XMMWORD PTR[48+rsp],xmm10
- mov esi,edx
+ and ebx,r12d
+ xor eax,DWORD PTR[48+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[4+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
+ rol r11d,30
+ mov DWORD PTR[16+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[28+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[52+rsp]
+ or ecx,r11d
rol edx,5
- and edi,eax
- xor eax,ebx
- pslld xmm0,2
- xor edi,ebx
- add ecx,edx
- psrld xmm9,30
- ror ebp,7
- add ecx,edi
- add ebx,DWORD PTR[8+rsp]
- xor ebp,eax
- mov edi,ecx
- rol ecx,5
- por xmm0,xmm9
- and esi,ebp
- xor ebp,eax
- movdqa xmm10,xmm0
- xor esi,eax
- add ebx,ecx
- ror edx,7
- add ebx,esi
- add eax,DWORD PTR[12+rsp]
- xor edx,ebp
- mov esi,ebx
- rol ebx,5
- and edi,edx
- xor edx,ebp
- xor edi,ebp
- add eax,ebx
- ror ecx,7
- add eax,edi
- add ebp,DWORD PTR[16+rsp]
- pxor xmm1,xmm5
-DB 102,68,15,58,15,215,8
- xor esi,edx
- mov edi,eax
- rol eax,5
- pxor xmm1,xmm2
- xor esi,ecx
- add ebp,eax
- movdqa xmm9,xmm8
- paddd xmm8,xmm0
- ror ebx,7
- add ebp,esi
- pxor xmm1,xmm10
- add edx,DWORD PTR[20+rsp]
- xor edi,ecx
- mov esi,ebp
+ xor eax,DWORD PTR[8+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[20+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[32+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[56+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[12+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[24+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[08f1bbcdch+r11*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,esi
+ mov ecx,esi
+ xor eax,DWORD PTR[36+rsp]
+ mov r11d,edx
+ and ebx,edi
+ xor eax,DWORD PTR[60+rsp]
+ or ecx,edi
+ rol r11d,5
+ xor eax,DWORD PTR[16+rsp]
+ and ecx,ebp
+ add r12d,r11d
+ rol eax,1
+ or ebx,ecx
+ rol esi,30
+ mov DWORD PTR[28+rsp],eax
+ add r12d,ebx
+ lea r11d,DWORD PTR[08f1bbcdch+rbp*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,edx
+ mov ecx,edx
+ xor eax,DWORD PTR[40+rsp]
+ mov ebp,r12d
+ and ebx,esi
+ xor eax,DWORD PTR[rsp]
+ or ecx,esi
rol ebp,5
- movdqa xmm10,xmm1
- movdqa XMMWORD PTR[rsp],xmm8
- xor edi,ebx
- add edx,ebp
- ror eax,7
- add edx,edi
- pslld xmm1,2
- add ecx,DWORD PTR[24+rsp]
- xor esi,ebx
- psrld xmm10,30
- mov edi,edx
- rol edx,5
- xor esi,eax
- add ecx,edx
- ror ebp,7
- add ecx,esi
- por xmm1,xmm10
- add ebx,DWORD PTR[28+rsp]
- xor edi,eax
- movdqa xmm8,xmm1
- mov esi,ecx
- rol ecx,5
- xor edi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,edi
- add eax,DWORD PTR[32+rsp]
- pxor xmm2,xmm6
-DB 102,68,15,58,15,192,8
- xor esi,ebp
- mov edi,ebx
- rol ebx,5
- pxor xmm2,xmm3
- xor esi,edx
- add eax,ebx
- movdqa xmm10,XMMWORD PTR[32+r11]
- paddd xmm9,xmm1
- ror ecx,7
- add eax,esi
- pxor xmm2,xmm8
- add ebp,DWORD PTR[36+rsp]
- xor edi,edx
- mov esi,eax
- rol eax,5
- movdqa xmm8,xmm2
- movdqa XMMWORD PTR[16+rsp],xmm9
- xor edi,ecx
- add ebp,eax
- ror ebx,7
+ xor eax,DWORD PTR[20+rsp]
+ and ecx,edi
+ add r11d,ebp
+ rol eax,1
+ or ebx,ecx
+ rol edx,30
+ mov DWORD PTR[32+rsp],eax
+ add r11d,ebx
+ lea ebp,DWORD PTR[08f1bbcdch+rdi*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,r12d
+ mov ecx,r12d
+ xor eax,DWORD PTR[44+rsp]
+ mov edi,r11d
+ and ebx,edx
+ xor eax,DWORD PTR[4+rsp]
+ or ecx,edx
+ rol edi,5
+ xor eax,DWORD PTR[24+rsp]
+ and ecx,esi
add ebp,edi
- pslld xmm2,2
- add edx,DWORD PTR[40+rsp]
- xor esi,ecx
- psrld xmm8,30
- mov edi,ebp
- rol ebp,5
- xor esi,ebx
- add edx,ebp
- ror eax,7
- add edx,esi
- por xmm2,xmm8
- add ecx,DWORD PTR[44+rsp]
- xor edi,ebx
- movdqa xmm9,xmm2
- mov esi,edx
- rol edx,5
- xor edi,eax
- add ecx,edx
- ror ebp,7
- add ecx,edi
- add ebx,DWORD PTR[48+rsp]
- pxor xmm3,xmm7
-DB 102,68,15,58,15,201,8
- xor esi,eax
- mov edi,ecx
- rol ecx,5
- pxor xmm3,xmm4
- xor esi,ebp
- add ebx,ecx
- movdqa xmm8,xmm10
- paddd xmm10,xmm2
- ror edx,7
- add ebx,esi
- pxor xmm3,xmm9
- add eax,DWORD PTR[52+rsp]
- xor edi,ebp
- mov esi,ebx
- rol ebx,5
- movdqa xmm9,xmm3
- movdqa XMMWORD PTR[32+rsp],xmm10
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- pslld xmm3,2
- add ebp,DWORD PTR[56+rsp]
- xor esi,edx
- psrld xmm9,30
- mov edi,eax
- rol eax,5
- xor esi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,esi
- por xmm3,xmm9
- add edx,DWORD PTR[60+rsp]
- xor edi,ecx
- movdqa xmm10,xmm3
+ rol eax,1
+ or ebx,ecx
+ rol r12d,30
+ mov DWORD PTR[36+rsp],eax
+ add ebp,ebx
+ lea edi,DWORD PTR[08f1bbcdch+rsi*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,r11d
+ mov ecx,r11d
+ xor eax,DWORD PTR[48+rsp]
mov esi,ebp
- rol ebp,5
- xor edi,ebx
- add edx,ebp
- ror eax,7
- add edx,edi
- add ecx,DWORD PTR[rsp]
- pxor xmm4,xmm0
-DB 102,68,15,58,15,210,8
- xor esi,ebx
- mov edi,edx
+ and ebx,r12d
+ xor eax,DWORD PTR[8+rsp]
+ or ecx,r12d
+ rol esi,5
+ xor eax,DWORD PTR[28+rsp]
+ and ecx,edx
+ add edi,esi
+ rol eax,1
+ or ebx,ecx
+ rol r11d,30
+ mov DWORD PTR[40+rsp],eax
+ add edi,ebx
+ lea esi,DWORD PTR[08f1bbcdch+rdx*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,ebp
+ mov ecx,ebp
+ xor eax,DWORD PTR[52+rsp]
+ mov edx,edi
+ and ebx,r11d
+ xor eax,DWORD PTR[12+rsp]
+ or ecx,r11d
rol edx,5
- pxor xmm4,xmm5
- xor esi,eax
- add ecx,edx
- movdqa xmm9,xmm8
- paddd xmm8,xmm3
- ror ebp,7
- add ecx,esi
- pxor xmm4,xmm10
- add ebx,DWORD PTR[4+rsp]
- xor edi,eax
- mov esi,ecx
- rol ecx,5
- movdqa xmm10,xmm4
- movdqa XMMWORD PTR[48+rsp],xmm8
- xor edi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,edi
- pslld xmm4,2
- add eax,DWORD PTR[8+rsp]
- xor esi,ebp
- psrld xmm10,30
- mov edi,ebx
- rol ebx,5
- xor esi,edx
- add eax,ebx
- ror ecx,7
- add eax,esi
- por xmm4,xmm10
- add ebp,DWORD PTR[12+rsp]
- xor edi,edx
- movdqa xmm8,xmm4
- mov esi,eax
- rol eax,5
- xor edi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,edi
- add edx,DWORD PTR[16+rsp]
- pxor xmm5,xmm1
-DB 102,68,15,58,15,195,8
- xor esi,ecx
- mov edi,ebp
+ xor eax,DWORD PTR[32+rsp]
+ and ecx,r12d
+ add esi,edx
+ rol eax,1
+ or ebx,ecx
+ rol ebp,30
+ mov DWORD PTR[44+rsp],eax
+ add esi,ebx
+ lea edx,DWORD PTR[08f1bbcdch+r12*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,edi
+ mov ecx,edi
+ xor eax,DWORD PTR[56+rsp]
+ mov r12d,esi
+ and ebx,ebp
+ xor eax,DWORD PTR[16+rsp]
+ or ecx,ebp
+ rol r12d,5
+ xor eax,DWORD PTR[36+rsp]
+ and ecx,r11d
+ add edx,r12d
+ rol eax,1
+ or ebx,ecx
+ rol edi,30
+ mov DWORD PTR[48+rsp],eax
+ add edx,ebx
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[40+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[52+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edx
rol ebp,5
- pxor xmm5,xmm6
- xor esi,ebx
- add edx,ebp
- movdqa xmm10,xmm9
- paddd xmm9,xmm4
- ror eax,7
- add edx,esi
- pxor xmm5,xmm8
- add ecx,DWORD PTR[20+rsp]
- xor edi,ebx
- mov esi,edx
- rol edx,5
- movdqa xmm8,xmm5
- movdqa XMMWORD PTR[rsp],xmm9
- xor edi,eax
- add ecx,edx
- ror ebp,7
- add ecx,edi
- pslld xmm5,2
- add ebx,DWORD PTR[24+rsp]
- xor esi,eax
- psrld xmm8,30
- mov edi,ecx
- rol ecx,5
- xor esi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,esi
- por xmm5,xmm8
- add eax,DWORD PTR[28+rsp]
- xor edi,ebp
- movdqa xmm9,xmm5
- mov esi,ebx
- rol ebx,5
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- mov edi,ecx
- pxor xmm6,xmm2
-DB 102,68,15,58,15,204,8
- xor ecx,edx
- add ebp,DWORD PTR[32+rsp]
- and edi,edx
- pxor xmm6,xmm7
- and esi,ecx
- ror ebx,7
- movdqa xmm8,xmm10
- paddd xmm10,xmm5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[44+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[56+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,esi
add ebp,edi
- mov edi,eax
- pxor xmm6,xmm9
- rol eax,5
- add ebp,esi
- xor ecx,edx
- add ebp,eax
- movdqa xmm9,xmm6
- movdqa XMMWORD PTR[16+rsp],xmm10
- mov esi,ebx
- xor ebx,ecx
- add edx,DWORD PTR[36+rsp]
- and esi,ecx
- pslld xmm6,2
- and edi,ebx
- ror eax,7
- psrld xmm9,30
- add edx,esi
+ xor eax,DWORD PTR[48+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[60+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[rsp]
+ mov ebx,r12d
mov esi,ebp
- rol ebp,5
- add edx,edi
- xor ebx,ecx
- add edx,ebp
- por xmm6,xmm9
- mov edi,eax
- xor eax,ebx
- movdqa xmm10,xmm6
- add ecx,DWORD PTR[40+rsp]
- and edi,ebx
- and esi,eax
- ror ebp,7
- add ecx,edi
- mov edi,edx
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[52+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[4+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,ebp
rol edx,5
- add ecx,esi
- xor eax,ebx
- add ecx,edx
- mov esi,ebp
- xor ebp,eax
- add ebx,DWORD PTR[44+rsp]
- and esi,eax
- and edi,ebp
- ror edx,7
- add ebx,esi
- mov esi,ecx
- rol ecx,5
- add ebx,edi
- xor ebp,eax
- add ebx,ecx
- mov edi,edx
- pxor xmm7,xmm3
-DB 102,68,15,58,15,213,8
- xor edx,ebp
- add eax,DWORD PTR[48+rsp]
- and edi,ebp
- pxor xmm7,xmm0
- and esi,edx
- ror ecx,7
- movdqa xmm9,XMMWORD PTR[48+r11]
- paddd xmm8,xmm6
- add eax,edi
- mov edi,ebx
- pxor xmm7,xmm10
- rol ebx,5
- add eax,esi
- xor edx,ebp
- add eax,ebx
- movdqa xmm10,xmm7
- movdqa XMMWORD PTR[32+rsp],xmm8
- mov esi,ecx
- xor ecx,edx
- add ebp,DWORD PTR[52+rsp]
- and esi,edx
- pslld xmm7,2
- and edi,ecx
- ror ebx,7
- psrld xmm10,30
- add ebp,esi
- mov esi,eax
- rol eax,5
- add ebp,edi
- xor ecx,edx
- add ebp,eax
- por xmm7,xmm10
- mov edi,ebx
- xor ebx,ecx
- movdqa xmm8,xmm7
- add edx,DWORD PTR[56+rsp]
- and edi,ecx
- and esi,ebx
- ror eax,7
- add edx,edi
- mov edi,ebp
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[56+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[4+rsp],eax
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[8+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[60+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[8+rsp],eax
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[12+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[12+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[16+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,edx
rol ebp,5
- add edx,esi
- xor ebx,ecx
- add edx,ebp
- mov esi,eax
- xor eax,ebx
- add ecx,DWORD PTR[60+rsp]
- and esi,ebx
- and edi,eax
- ror ebp,7
- add ecx,esi
- mov esi,edx
- rol edx,5
- add ecx,edi
- xor eax,ebx
- add ecx,edx
- mov edi,ebp
- pxor xmm0,xmm4
-DB 102,68,15,58,15,198,8
- xor ebp,eax
- add ebx,DWORD PTR[rsp]
- and edi,eax
- pxor xmm0,xmm1
- and esi,ebp
- ror edx,7
- movdqa xmm10,xmm9
- paddd xmm9,xmm7
- add ebx,edi
- mov edi,ecx
- pxor xmm0,xmm8
- rol ecx,5
- add ebx,esi
- xor ebp,eax
- add ebx,ecx
- movdqa xmm8,xmm0
- movdqa XMMWORD PTR[48+rsp],xmm9
- mov esi,edx
- xor edx,ebp
- add eax,DWORD PTR[4+rsp]
- and esi,ebp
- pslld xmm0,2
- and edi,edx
- ror ecx,7
- psrld xmm8,30
- add eax,esi
- mov esi,ebx
- rol ebx,5
- add eax,edi
- xor edx,ebp
- add eax,ebx
- por xmm0,xmm8
- mov edi,ecx
- xor ecx,edx
- movdqa xmm9,xmm0
- add ebp,DWORD PTR[8+rsp]
- and edi,edx
- and esi,ecx
- ror ebx,7
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[4+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[16+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[20+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,esi
add ebp,edi
- mov edi,eax
- rol eax,5
- add ebp,esi
- xor ecx,edx
- add ebp,eax
- mov esi,ebx
- xor ebx,ecx
- add edx,DWORD PTR[12+rsp]
- and esi,ecx
- and edi,ebx
- ror eax,7
- add edx,esi
- mov esi,ebp
- rol ebp,5
- add edx,edi
- xor ebx,ecx
- add edx,ebp
- mov edi,eax
- pxor xmm1,xmm5
-DB 102,68,15,58,15,207,8
- xor eax,ebx
- add ecx,DWORD PTR[16+rsp]
- and edi,ebx
- pxor xmm1,xmm2
- and esi,eax
- ror ebp,7
- movdqa xmm8,xmm10
- paddd xmm10,xmm0
- add ecx,edi
- mov edi,edx
- pxor xmm1,xmm9
- rol edx,5
- add ecx,esi
- xor eax,ebx
- add ecx,edx
- movdqa xmm9,xmm1
- movdqa XMMWORD PTR[rsp],xmm10
+ xor eax,DWORD PTR[8+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[20+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[24+rsp]
+ mov ebx,r12d
mov esi,ebp
- xor ebp,eax
- add ebx,DWORD PTR[20+rsp]
- and esi,eax
- pslld xmm1,2
- and edi,ebp
- ror edx,7
- psrld xmm9,30
- add ebx,esi
- mov esi,ecx
- rol ecx,5
- add ebx,edi
- xor ebp,eax
- add ebx,ecx
- por xmm1,xmm9
- mov edi,edx
- xor edx,ebp
- movdqa xmm10,xmm1
- add eax,DWORD PTR[24+rsp]
- and edi,ebp
- and esi,edx
- ror ecx,7
- add eax,edi
- mov edi,ebx
- rol ebx,5
- add eax,esi
- xor edx,ebp
- add eax,ebx
- mov esi,ecx
- xor ecx,edx
- add ebp,DWORD PTR[28+rsp]
- and esi,edx
- and edi,ecx
- ror ebx,7
- add ebp,esi
- mov esi,eax
- rol eax,5
- add ebp,edi
- xor ecx,edx
- add ebp,eax
- mov edi,ebx
- pxor xmm2,xmm6
-DB 102,68,15,58,15,208,8
- xor ebx,ecx
- add edx,DWORD PTR[32+rsp]
- and edi,ecx
- pxor xmm2,xmm3
- and esi,ebx
- ror eax,7
- movdqa xmm9,xmm8
- paddd xmm8,xmm1
- add edx,edi
- mov edi,ebp
- pxor xmm2,xmm10
- rol ebp,5
- add edx,esi
- xor ebx,ecx
- add edx,ebp
- movdqa xmm10,xmm2
- movdqa XMMWORD PTR[16+rsp],xmm8
- mov esi,eax
- xor eax,ebx
- add ecx,DWORD PTR[36+rsp]
- and esi,ebx
- pslld xmm2,2
- and edi,eax
- ror ebp,7
- psrld xmm10,30
- add ecx,esi
- mov esi,edx
+ xor eax,DWORD PTR[32+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[12+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[24+rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[28+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[36+rsp]
+ xor ebx,ebp
rol edx,5
- add ecx,edi
- xor eax,ebx
- add ecx,edx
- por xmm2,xmm10
- mov edi,ebp
- xor ebp,eax
- movdqa xmm8,xmm2
- add ebx,DWORD PTR[40+rsp]
- and edi,eax
- and esi,ebp
- ror edx,7
- add ebx,edi
- mov edi,ecx
- rol ecx,5
- add ebx,esi
- xor ebp,eax
- add ebx,ecx
- mov esi,edx
- xor edx,ebp
- add eax,DWORD PTR[44+rsp]
- and esi,ebp
- and edi,edx
- ror ecx,7
- add eax,esi
- mov esi,ebx
- rol ebx,5
- add eax,edi
- xor edx,ebp
- add eax,ebx
- add ebp,DWORD PTR[48+rsp]
- pxor xmm3,xmm7
-DB 102,68,15,58,15,193,8
- xor esi,edx
- mov edi,eax
- rol eax,5
- pxor xmm3,xmm4
- xor esi,ecx
- add ebp,eax
- movdqa xmm10,xmm9
- paddd xmm9,xmm2
- ror ebx,7
- add ebp,esi
- pxor xmm3,xmm8
- add edx,DWORD PTR[52+rsp]
- xor edi,ecx
- mov esi,ebp
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[16+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ mov DWORD PTR[28+rsp],eax
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[32+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[40+rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[20+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ mov DWORD PTR[32+rsp],eax
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[36+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[44+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[24+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ mov DWORD PTR[36+rsp],eax
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
+ mov eax,DWORD PTR[40+rsp]
+ mov ebx,esi
+ mov ebp,r12d
+ xor eax,DWORD PTR[48+rsp]
+ xor ebx,edx
rol ebp,5
- movdqa xmm8,xmm3
- movdqa XMMWORD PTR[32+rsp],xmm9
- xor edi,ebx
- add edx,ebp
- ror eax,7
- add edx,edi
- pslld xmm3,2
- add ecx,DWORD PTR[56+rsp]
- xor esi,ebx
- psrld xmm8,30
- mov edi,edx
- rol edx,5
- xor esi,eax
- add ecx,edx
- ror ebp,7
- add ecx,esi
- por xmm3,xmm8
- add ebx,DWORD PTR[60+rsp]
- xor edi,eax
- mov esi,ecx
- rol ecx,5
- xor edi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,edi
- add eax,DWORD PTR[rsp]
- paddd xmm10,xmm3
- xor esi,ebp
- mov edi,ebx
- rol ebx,5
- xor esi,edx
- movdqa XMMWORD PTR[48+rsp],xmm10
- add eax,ebx
- ror ecx,7
- add eax,esi
- add ebp,DWORD PTR[4+rsp]
- xor edi,edx
- mov esi,eax
- rol eax,5
- xor edi,ecx
- add ebp,eax
- ror ebx,7
+ xor eax,DWORD PTR[8+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ xor eax,DWORD PTR[28+rsp]
+ rol edx,30
+ add r11d,ebx
+ rol eax,1
+ mov DWORD PTR[40+rsp],eax
+ lea ebp,DWORD PTR[3395469782+rdi*1+rax]
+ mov eax,DWORD PTR[44+rsp]
+ mov ebx,edx
+ mov edi,r11d
+ xor eax,DWORD PTR[52+rsp]
+ xor ebx,r12d
+ rol edi,5
+ xor eax,DWORD PTR[12+rsp]
+ xor ebx,esi
add ebp,edi
- add edx,DWORD PTR[8+rsp]
- xor esi,ecx
- mov edi,ebp
- rol ebp,5
- xor esi,ebx
- add edx,ebp
- ror eax,7
- add edx,esi
- add ecx,DWORD PTR[12+rsp]
- xor edi,ebx
- mov esi,edx
- rol edx,5
- xor edi,eax
- add ecx,edx
- ror ebp,7
- add ecx,edi
- cmp r9,r10
- je $L$done_ssse3
- movdqa xmm6,XMMWORD PTR[64+r11]
- movdqa xmm9,XMMWORD PTR[r11]
- movdqu xmm0,XMMWORD PTR[r9]
- movdqu xmm1,XMMWORD PTR[16+r9]
- movdqu xmm2,XMMWORD PTR[32+r9]
- movdqu xmm3,XMMWORD PTR[48+r9]
-DB 102,15,56,0,198
- add r9,64
- add ebx,DWORD PTR[16+rsp]
- xor esi,eax
-DB 102,15,56,0,206
- mov edi,ecx
- rol ecx,5
- paddd xmm0,xmm9
- xor esi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,esi
- movdqa XMMWORD PTR[rsp],xmm0
- add eax,DWORD PTR[20+rsp]
- xor edi,ebp
- psubd xmm0,xmm9
- mov esi,ebx
- rol ebx,5
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- add ebp,DWORD PTR[24+rsp]
- xor esi,edx
- mov edi,eax
- rol eax,5
- xor esi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,esi
- add edx,DWORD PTR[28+rsp]
- xor edi,ecx
+ xor eax,DWORD PTR[32+rsp]
+ rol r12d,30
+ add ebp,ebx
+ rol eax,1
+ mov DWORD PTR[44+rsp],eax
+ lea edi,DWORD PTR[3395469782+rsi*1+rax]
+ mov eax,DWORD PTR[48+rsp]
+ mov ebx,r12d
mov esi,ebp
- rol ebp,5
- xor edi,ebx
- add edx,ebp
- ror eax,7
- add edx,edi
- add ecx,DWORD PTR[32+rsp]
- xor esi,ebx
-DB 102,15,56,0,214
- mov edi,edx
- rol edx,5
- paddd xmm1,xmm9
- xor esi,eax
- add ecx,edx
- ror ebp,7
- add ecx,esi
- movdqa XMMWORD PTR[16+rsp],xmm1
- add ebx,DWORD PTR[36+rsp]
- xor edi,eax
- psubd xmm1,xmm9
- mov esi,ecx
- rol ecx,5
- xor edi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,edi
- add eax,DWORD PTR[40+rsp]
- xor esi,ebp
- mov edi,ebx
- rol ebx,5
- xor esi,edx
- add eax,ebx
- ror ecx,7
- add eax,esi
- add ebp,DWORD PTR[44+rsp]
- xor edi,edx
- mov esi,eax
- rol eax,5
- xor edi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,edi
- add edx,DWORD PTR[48+rsp]
- xor esi,ecx
-DB 102,15,56,0,222
- mov edi,ebp
- rol ebp,5
- paddd xmm2,xmm9
- xor esi,ebx
- add edx,ebp
- ror eax,7
- add edx,esi
- movdqa XMMWORD PTR[32+rsp],xmm2
- add ecx,DWORD PTR[52+rsp]
- xor edi,ebx
- psubd xmm2,xmm9
- mov esi,edx
+ xor eax,DWORD PTR[56+rsp]
+ xor ebx,r11d
+ rol esi,5
+ xor eax,DWORD PTR[16+rsp]
+ xor ebx,edx
+ add edi,esi
+ xor eax,DWORD PTR[36+rsp]
+ rol r11d,30
+ add edi,ebx
+ rol eax,1
+ mov DWORD PTR[48+rsp],eax
+ lea esi,DWORD PTR[3395469782+rdx*1+rax]
+ mov eax,DWORD PTR[52+rsp]
+ mov ebx,r11d
+ mov edx,edi
+ xor eax,DWORD PTR[60+rsp]
+ xor ebx,ebp
rol edx,5
- xor edi,eax
- add ecx,edx
- ror ebp,7
- add ecx,edi
- add ebx,DWORD PTR[56+rsp]
- xor esi,eax
- mov edi,ecx
- rol ecx,5
- xor esi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,esi
- add eax,DWORD PTR[60+rsp]
- xor edi,ebp
- mov esi,ebx
- rol ebx,5
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- add eax,DWORD PTR[r8]
- add esi,DWORD PTR[4+r8]
- add ecx,DWORD PTR[8+r8]
- add edx,DWORD PTR[12+r8]
- mov DWORD PTR[r8],eax
- add ebp,DWORD PTR[16+r8]
- mov DWORD PTR[4+r8],esi
+ xor eax,DWORD PTR[20+rsp]
+ xor ebx,r12d
+ add esi,edx
+ xor eax,DWORD PTR[40+rsp]
+ rol ebp,30
+ add esi,ebx
+ rol eax,1
+ lea edx,DWORD PTR[3395469782+r12*1+rax]
+ mov eax,DWORD PTR[56+rsp]
+ mov ebx,ebp
+ mov r12d,esi
+ xor eax,DWORD PTR[rsp]
+ xor ebx,edi
+ rol r12d,5
+ xor eax,DWORD PTR[24+rsp]
+ xor ebx,r11d
+ add edx,r12d
+ xor eax,DWORD PTR[44+rsp]
+ rol edi,30
+ add edx,ebx
+ rol eax,1
+ lea r12d,DWORD PTR[3395469782+r11*1+rax]
+ mov eax,DWORD PTR[60+rsp]
+ mov ebx,edi
+ mov r11d,edx
+ xor eax,DWORD PTR[4+rsp]
+ xor ebx,esi
+ rol r11d,5
+ xor eax,DWORD PTR[28+rsp]
+ xor ebx,ebp
+ add r12d,r11d
+ xor eax,DWORD PTR[48+rsp]
+ rol esi,30
+ add r12d,ebx
+ rol eax,1
+ lea r11d,DWORD PTR[3395469782+rbp*1+rax]
mov ebx,esi
- mov DWORD PTR[8+r8],ecx
- mov DWORD PTR[12+r8],edx
- mov DWORD PTR[16+r8],ebp
- jmp $L$oop_ssse3
-
-ALIGN 16
-$L$done_ssse3::
- add ebx,DWORD PTR[16+rsp]
- xor esi,eax
- mov edi,ecx
- rol ecx,5
- xor esi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,esi
- add eax,DWORD PTR[20+rsp]
- xor edi,ebp
- mov esi,ebx
- rol ebx,5
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- add ebp,DWORD PTR[24+rsp]
- xor esi,edx
- mov edi,eax
- rol eax,5
- xor esi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,esi
- add edx,DWORD PTR[28+rsp]
- xor edi,ecx
- mov esi,ebp
+ mov ebp,r12d
+ xor ebx,edx
rol ebp,5
- xor edi,ebx
- add edx,ebp
- ror eax,7
- add edx,edi
- add ecx,DWORD PTR[32+rsp]
- xor esi,ebx
- mov edi,edx
- rol edx,5
- xor esi,eax
- add ecx,edx
- ror ebp,7
- add ecx,esi
- add ebx,DWORD PTR[36+rsp]
- xor edi,eax
- mov esi,ecx
- rol ecx,5
- xor edi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,edi
- add eax,DWORD PTR[40+rsp]
- xor esi,ebp
- mov edi,ebx
- rol ebx,5
- xor esi,edx
- add eax,ebx
- ror ecx,7
- add eax,esi
- add ebp,DWORD PTR[44+rsp]
- xor edi,edx
- mov esi,eax
- rol eax,5
- xor edi,ecx
- add ebp,eax
- ror ebx,7
- add ebp,edi
- add edx,DWORD PTR[48+rsp]
- xor esi,ecx
- mov edi,ebp
- rol ebp,5
- xor esi,ebx
- add edx,ebp
- ror eax,7
- add edx,esi
- add ecx,DWORD PTR[52+rsp]
- xor edi,ebx
- mov esi,edx
- rol edx,5
- xor edi,eax
- add ecx,edx
- ror ebp,7
- add ecx,edi
- add ebx,DWORD PTR[56+rsp]
- xor esi,eax
- mov edi,ecx
- rol ecx,5
- xor esi,ebp
- add ebx,ecx
- ror edx,7
- add ebx,esi
- add eax,DWORD PTR[60+rsp]
- xor edi,ebp
- mov esi,ebx
- rol ebx,5
- xor edi,edx
- add eax,ebx
- ror ecx,7
- add eax,edi
- add eax,DWORD PTR[r8]
- add esi,DWORD PTR[4+r8]
- add ecx,DWORD PTR[8+r8]
- mov DWORD PTR[r8],eax
- add edx,DWORD PTR[12+r8]
- mov DWORD PTR[4+r8],esi
- add ebp,DWORD PTR[16+r8]
- mov DWORD PTR[8+r8],ecx
- mov DWORD PTR[12+r8],edx
- mov DWORD PTR[16+r8],ebp
- movaps xmm6,XMMWORD PTR[((64+0))+rsp]
- movaps xmm7,XMMWORD PTR[((64+16))+rsp]
- movaps xmm8,XMMWORD PTR[((64+32))+rsp]
- movaps xmm9,XMMWORD PTR[((64+48))+rsp]
- movaps xmm10,XMMWORD PTR[((64+64))+rsp]
- lea rsi,QWORD PTR[144+rsp]
+ xor ebx,edi
+ add r11d,ebp
+ rol edx,30
+ add r11d,ebx
+ add r11d,DWORD PTR[r8]
+ add r12d,DWORD PTR[4+r8]
+ add edx,DWORD PTR[8+r8]
+ add esi,DWORD PTR[12+r8]
+ add edi,DWORD PTR[16+r8]
+ mov DWORD PTR[r8],r11d
+ mov DWORD PTR[4+r8],r12d
+ mov DWORD PTR[8+r8],edx
+ mov DWORD PTR[12+r8],esi
+ mov DWORD PTR[16+r8],edi
+
+ xchg edx,r11d
+ xchg esi,r12d
+ xchg edi,r11d
+ xchg ebp,r12d
+
+ lea r9,QWORD PTR[64+r9]
+ sub r10,1
+ jnz $L$loop
+ mov rsi,QWORD PTR[64+rsp]
mov r12,QWORD PTR[rsi]
mov rbp,QWORD PTR[8+rsi]
mov rbx,QWORD PTR[16+rsi]
lea rsp,QWORD PTR[24+rsi]
-$L$epilogue_ssse3::
+$L$epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
-$L$SEH_end_sha1_block_data_order_ssse3::
-sha1_block_data_order_ssse3 ENDP
-ALIGN 64
-K_XX_XX::
- DD 05a827999h,05a827999h,05a827999h,05a827999h
-
- DD 06ed9eba1h,06ed9eba1h,06ed9eba1h,06ed9eba1h
-
- DD 08f1bbcdch,08f1bbcdch,08f1bbcdch,08f1bbcdch
-
- DD 0ca62c1d6h,0ca62c1d6h,0ca62c1d6h,0ca62c1d6h
-
- DD 000010203h,004050607h,008090a0bh,00c0d0e0fh
-
+$L$SEH_end_sha1_block_data_order::
+sha1_block_data_order ENDP
DB 83,72,65,49,32,98,108,111,99,107,32,116,114,97,110,115
DB 102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44
DB 32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60
DB 97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114
DB 103,62,0
-ALIGN 64
+ALIGN 16
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
@@ -2549,67 +1317,16 @@ se_handler PROC PRIVATE
lea r10,QWORD PTR[$L$prologue]
cmp rbx,r10
- jb $L$common_seh_tail
+ jb $L$in_prologue
mov rax,QWORD PTR[152+r8]
lea r10,QWORD PTR[$L$epilogue]
cmp rbx,r10
- jae $L$common_seh_tail
+ jae $L$in_prologue
mov rax,QWORD PTR[64+rax]
- lea rax,QWORD PTR[32+rax]
-
- mov rbx,QWORD PTR[((-8))+rax]
- mov rbp,QWORD PTR[((-16))+rax]
- mov r12,QWORD PTR[((-24))+rax]
- mov r13,QWORD PTR[((-32))+rax]
- mov QWORD PTR[144+r8],rbx
- mov QWORD PTR[160+r8],rbp
- mov QWORD PTR[216+r8],r12
- mov QWORD PTR[224+r8],r13
-
- jmp $L$common_seh_tail
-se_handler ENDP
-
-
-ALIGN 16
-ssse3_handler PROC PRIVATE
- push rsi
- push rdi
- push rbx
- push rbp
- push r12
- push r13
- push r14
- push r15
- pushfq
- sub rsp,64
-
- mov rax,QWORD PTR[120+r8]
- mov rbx,QWORD PTR[248+r8]
-
- mov rsi,QWORD PTR[8+r9]
- mov r11,QWORD PTR[56+r9]
-
- mov r10d,DWORD PTR[r11]
- lea r10,QWORD PTR[r10*1+rsi]
- cmp rbx,r10
- jb $L$common_seh_tail
-
- mov rax,QWORD PTR[152+r8]
-
- mov r10d,DWORD PTR[4+r11]
- lea r10,QWORD PTR[r10*1+rsi]
- cmp rbx,r10
- jae $L$common_seh_tail
-
- lea rsi,QWORD PTR[64+rax]
- lea rdi,QWORD PTR[512+r8]
- mov ecx,10
- DD 0a548f3fch
-
- lea rax,QWORD PTR[168+rax]
+ lea rax,QWORD PTR[24+rax]
mov rbx,QWORD PTR[((-8))+rax]
mov rbp,QWORD PTR[((-16))+rax]
@@ -2618,7 +1335,7 @@ ssse3_handler PROC PRIVATE
mov QWORD PTR[160+r8],rbp
mov QWORD PTR[216+r8],r12
-$L$common_seh_tail::
+$L$in_prologue::
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax
@@ -2657,7 +1374,7 @@ $L$common_seh_tail::
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
-ssse3_handler ENDP
+se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
@@ -2665,20 +1382,13 @@ ALIGN 4
DD imagerel $L$SEH_begin_sha1_block_data_order
DD imagerel $L$SEH_end_sha1_block_data_order
DD imagerel $L$SEH_info_sha1_block_data_order
- DD imagerel $L$SEH_begin_sha1_block_data_order_ssse3
- DD imagerel $L$SEH_end_sha1_block_data_order_ssse3
- DD imagerel $L$SEH_info_sha1_block_data_order_ssse3
+
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_sha1_block_data_order::
DB 9,0,0,0
DD imagerel se_handler
-$L$SEH_info_sha1_block_data_order_ssse3::
-DB 9,0,0,0
- DD imagerel ssse3_handler
- DD imagerel $L$prologue_ssse3,imagerel $L$epilogue_ssse3
-
.xdata ENDS
END
diff --git a/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm b/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
index f685c2fdfc..5ea4a6327a 100644
--- a/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/sha/sha512-x86_64.asm
@@ -26,1738 +26,1930 @@ $L$SEH_begin_sha256_block_data_order::
sub rsp,16*4+4*8
lea rdx,QWORD PTR[rdx*4+rsi]
and rsp,-64
- mov QWORD PTR[((64+0))+rsp],rdi
- mov QWORD PTR[((64+8))+rsp],rsi
- mov QWORD PTR[((64+16))+rsp],rdx
- mov QWORD PTR[((64+24))+rsp],r11
+ mov QWORD PTR[((16*4+0*8))+rsp],rdi
+ mov QWORD PTR[((16*4+1*8))+rsp],rsi
+ mov QWORD PTR[((16*4+2*8))+rsp],rdx
+ mov QWORD PTR[((16*4+3*8))+rsp],r11
$L$prologue::
lea rbp,QWORD PTR[K256]
- mov eax,DWORD PTR[rdi]
- mov ebx,DWORD PTR[4+rdi]
- mov ecx,DWORD PTR[8+rdi]
- mov edx,DWORD PTR[12+rdi]
- mov r8d,DWORD PTR[16+rdi]
- mov r9d,DWORD PTR[20+rdi]
- mov r10d,DWORD PTR[24+rdi]
- mov r11d,DWORD PTR[28+rdi]
+ mov eax,DWORD PTR[((4*0))+rdi]
+ mov ebx,DWORD PTR[((4*1))+rdi]
+ mov ecx,DWORD PTR[((4*2))+rdi]
+ mov edx,DWORD PTR[((4*3))+rdi]
+ mov r8d,DWORD PTR[((4*4))+rdi]
+ mov r9d,DWORD PTR[((4*5))+rdi]
+ mov r10d,DWORD PTR[((4*6))+rdi]
+ mov r11d,DWORD PTR[((4*7))+rdi]
jmp $L$loop
ALIGN 16
$L$loop::
xor rdi,rdi
- mov r12d,DWORD PTR[rsi]
- mov r13d,r8d
- mov r14d,eax
+ mov r12d,DWORD PTR[((4*0))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r8d
+ mov r14d,r8d
mov r15d,r9d
- mov DWORD PTR[rsp],r12d
- ror r14d,9
- xor r13d,r8d
+ ror r13d,6
+ ror r14d,11
xor r15d,r10d
- ror r13d,5
- add r12d,r11d
- xor r14d,eax
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r8d
- mov r11d,ebx
+ mov DWORD PTR[rsp],r12d
- ror r14d,11
- xor r13d,r8d
+ xor r13d,r14d
xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
- xor r11d,ecx
- xor r14d,eax
add r12d,r15d
- mov r15d,ebx
+ mov r13d,eax
+ mov r14d,eax
- ror r13d,6
- and r11d,eax
- and r15d,ecx
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r11d,r15d
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+ xor r11d,r13d
+ and r15d,ecx
add edx,r12d
+
+ and r14d,ebx
add r11d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r11d,r14d
- mov r12d,DWORD PTR[4+rsi]
- mov r13d,edx
- mov r14d,r11d
+ add r11d,r14d
+ mov r12d,DWORD PTR[((4*1))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,edx
+ mov r14d,edx
mov r15d,r8d
- mov DWORD PTR[4+rsp],r12d
- ror r14d,9
- xor r13d,edx
+ ror r13d,6
+ ror r14d,11
xor r15d,r9d
- ror r13d,5
- add r12d,r10d
- xor r14d,r11d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,edx
- mov r10d,eax
+ mov DWORD PTR[4+rsp],r12d
- ror r14d,11
- xor r13d,edx
+ xor r13d,r14d
xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
- xor r10d,ebx
- xor r14d,r11d
add r12d,r15d
- mov r15d,eax
+ mov r13d,r11d
+ mov r14d,r11d
- ror r13d,6
- and r10d,r11d
- and r15d,ebx
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r10d,r15d
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+ xor r10d,r13d
+ and r15d,ebx
add ecx,r12d
+
+ and r14d,eax
add r10d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r10d,r14d
- mov r12d,DWORD PTR[8+rsi]
- mov r13d,ecx
- mov r14d,r10d
+ add r10d,r14d
+ mov r12d,DWORD PTR[((4*2))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,ecx
+ mov r14d,ecx
mov r15d,edx
- mov DWORD PTR[8+rsp],r12d
- ror r14d,9
- xor r13d,ecx
+ ror r13d,6
+ ror r14d,11
xor r15d,r8d
- ror r13d,5
- add r12d,r9d
- xor r14d,r10d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ecx
- mov r9d,r11d
+ mov DWORD PTR[8+rsp],r12d
- ror r14d,11
- xor r13d,ecx
+ xor r13d,r14d
xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
- xor r9d,eax
- xor r14d,r10d
add r12d,r15d
- mov r15d,r11d
+ mov r13d,r10d
+ mov r14d,r10d
- ror r13d,6
- and r9d,r10d
- and r15d,eax
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r9d,r15d
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+ xor r9d,r13d
+ and r15d,eax
add ebx,r12d
+
+ and r14d,r11d
add r9d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r9d,r14d
- mov r12d,DWORD PTR[12+rsi]
- mov r13d,ebx
- mov r14d,r9d
+ add r9d,r14d
+ mov r12d,DWORD PTR[((4*3))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,ebx
+ mov r14d,ebx
mov r15d,ecx
- mov DWORD PTR[12+rsp],r12d
- ror r14d,9
- xor r13d,ebx
+ ror r13d,6
+ ror r14d,11
xor r15d,edx
- ror r13d,5
- add r12d,r8d
- xor r14d,r9d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ebx
- mov r8d,r10d
+ mov DWORD PTR[12+rsp],r12d
- ror r14d,11
- xor r13d,ebx
+ xor r13d,r14d
xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
- xor r8d,r11d
- xor r14d,r9d
add r12d,r15d
- mov r15d,r10d
+ mov r13d,r9d
+ mov r14d,r9d
- ror r13d,6
- and r8d,r9d
- and r15d,r11d
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r8d,r15d
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+ xor r8d,r13d
+ and r15d,r11d
add eax,r12d
+
+ and r14d,r10d
add r8d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r8d,r14d
- mov r12d,DWORD PTR[16+rsi]
- mov r13d,eax
- mov r14d,r8d
+ add r8d,r14d
+ mov r12d,DWORD PTR[((4*4))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,eax
+ mov r14d,eax
mov r15d,ebx
- mov DWORD PTR[16+rsp],r12d
- ror r14d,9
- xor r13d,eax
+ ror r13d,6
+ ror r14d,11
xor r15d,ecx
- ror r13d,5
- add r12d,edx
- xor r14d,r8d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,eax
- mov edx,r9d
+ mov DWORD PTR[16+rsp],r12d
- ror r14d,11
- xor r13d,eax
+ xor r13d,r14d
xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
- xor edx,r10d
- xor r14d,r8d
add r12d,r15d
- mov r15d,r9d
+ mov r13d,r8d
+ mov r14d,r8d
- ror r13d,6
- and edx,r8d
- and r15d,r10d
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add edx,r15d
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+ xor edx,r13d
+ and r15d,r10d
add r11d,r12d
+
+ and r14d,r9d
add edx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add edx,r14d
- mov r12d,DWORD PTR[20+rsi]
- mov r13d,r11d
- mov r14d,edx
+ add edx,r14d
+ mov r12d,DWORD PTR[((4*5))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r11d
+ mov r14d,r11d
mov r15d,eax
- mov DWORD PTR[20+rsp],r12d
- ror r14d,9
- xor r13d,r11d
+ ror r13d,6
+ ror r14d,11
xor r15d,ebx
- ror r13d,5
- add r12d,ecx
- xor r14d,edx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r11d
- mov ecx,r8d
+ mov DWORD PTR[20+rsp],r12d
- ror r14d,11
- xor r13d,r11d
+ xor r13d,r14d
xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
- xor ecx,r9d
- xor r14d,edx
add r12d,r15d
- mov r15d,r8d
+ mov r13d,edx
+ mov r14d,edx
- ror r13d,6
- and ecx,edx
- and r15d,r9d
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ecx,r15d
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+ xor ecx,r13d
+ and r15d,r9d
add r10d,r12d
+
+ and r14d,r8d
add ecx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ecx,r14d
- mov r12d,DWORD PTR[24+rsi]
- mov r13d,r10d
- mov r14d,ecx
+ add ecx,r14d
+ mov r12d,DWORD PTR[((4*6))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r10d
+ mov r14d,r10d
mov r15d,r11d
- mov DWORD PTR[24+rsp],r12d
- ror r14d,9
- xor r13d,r10d
+ ror r13d,6
+ ror r14d,11
xor r15d,eax
- ror r13d,5
- add r12d,ebx
- xor r14d,ecx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r10d
- mov ebx,edx
+ mov DWORD PTR[24+rsp],r12d
- ror r14d,11
- xor r13d,r10d
+ xor r13d,r14d
xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
- xor ebx,r8d
- xor r14d,ecx
add r12d,r15d
- mov r15d,edx
+ mov r13d,ecx
+ mov r14d,ecx
- ror r13d,6
- and ebx,ecx
- and r15d,r8d
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ebx,r15d
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+ xor ebx,r13d
+ and r15d,r8d
add r9d,r12d
+
+ and r14d,edx
add ebx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ebx,r14d
- mov r12d,DWORD PTR[28+rsi]
- mov r13d,r9d
- mov r14d,ebx
+ add ebx,r14d
+ mov r12d,DWORD PTR[((4*7))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r9d
+ mov r14d,r9d
mov r15d,r10d
- mov DWORD PTR[28+rsp],r12d
- ror r14d,9
- xor r13d,r9d
+ ror r13d,6
+ ror r14d,11
xor r15d,r11d
- ror r13d,5
- add r12d,eax
- xor r14d,ebx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r9d
- mov eax,ecx
+ mov DWORD PTR[28+rsp],r12d
- ror r14d,11
- xor r13d,r9d
+ xor r13d,r14d
xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
- xor eax,edx
- xor r14d,ebx
add r12d,r15d
- mov r15d,ecx
+ mov r13d,ebx
+ mov r14d,ebx
- ror r13d,6
- and eax,ebx
- and r15d,edx
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add eax,r15d
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+ xor eax,r13d
+ and r15d,edx
add r8d,r12d
+
+ and r14d,ecx
add eax,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add eax,r14d
- mov r12d,DWORD PTR[32+rsi]
- mov r13d,r8d
- mov r14d,eax
+ add eax,r14d
+ mov r12d,DWORD PTR[((4*8))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r8d
+ mov r14d,r8d
mov r15d,r9d
- mov DWORD PTR[32+rsp],r12d
- ror r14d,9
- xor r13d,r8d
+ ror r13d,6
+ ror r14d,11
xor r15d,r10d
- ror r13d,5
- add r12d,r11d
- xor r14d,eax
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r8d
- mov r11d,ebx
+ mov DWORD PTR[32+rsp],r12d
- ror r14d,11
- xor r13d,r8d
+ xor r13d,r14d
xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
- xor r11d,ecx
- xor r14d,eax
add r12d,r15d
- mov r15d,ebx
+ mov r13d,eax
+ mov r14d,eax
- ror r13d,6
- and r11d,eax
- and r15d,ecx
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r11d,r15d
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+ xor r11d,r13d
+ and r15d,ecx
add edx,r12d
+
+ and r14d,ebx
add r11d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r11d,r14d
- mov r12d,DWORD PTR[36+rsi]
- mov r13d,edx
- mov r14d,r11d
+ add r11d,r14d
+ mov r12d,DWORD PTR[((4*9))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,edx
+ mov r14d,edx
mov r15d,r8d
- mov DWORD PTR[36+rsp],r12d
- ror r14d,9
- xor r13d,edx
+ ror r13d,6
+ ror r14d,11
xor r15d,r9d
- ror r13d,5
- add r12d,r10d
- xor r14d,r11d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,edx
- mov r10d,eax
+ mov DWORD PTR[36+rsp],r12d
- ror r14d,11
- xor r13d,edx
+ xor r13d,r14d
xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
- xor r10d,ebx
- xor r14d,r11d
add r12d,r15d
- mov r15d,eax
+ mov r13d,r11d
+ mov r14d,r11d
- ror r13d,6
- and r10d,r11d
- and r15d,ebx
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r10d,r15d
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+ xor r10d,r13d
+ and r15d,ebx
add ecx,r12d
+
+ and r14d,eax
add r10d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r10d,r14d
- mov r12d,DWORD PTR[40+rsi]
- mov r13d,ecx
- mov r14d,r10d
+ add r10d,r14d
+ mov r12d,DWORD PTR[((4*10))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,ecx
+ mov r14d,ecx
mov r15d,edx
- mov DWORD PTR[40+rsp],r12d
- ror r14d,9
- xor r13d,ecx
+ ror r13d,6
+ ror r14d,11
xor r15d,r8d
- ror r13d,5
- add r12d,r9d
- xor r14d,r10d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ecx
- mov r9d,r11d
+ mov DWORD PTR[40+rsp],r12d
- ror r14d,11
- xor r13d,ecx
+ xor r13d,r14d
xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
- xor r9d,eax
- xor r14d,r10d
add r12d,r15d
- mov r15d,r11d
+ mov r13d,r10d
+ mov r14d,r10d
- ror r13d,6
- and r9d,r10d
- and r15d,eax
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r9d,r15d
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+ xor r9d,r13d
+ and r15d,eax
add ebx,r12d
+
+ and r14d,r11d
add r9d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r9d,r14d
- mov r12d,DWORD PTR[44+rsi]
- mov r13d,ebx
- mov r14d,r9d
+ add r9d,r14d
+ mov r12d,DWORD PTR[((4*11))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,ebx
+ mov r14d,ebx
mov r15d,ecx
- mov DWORD PTR[44+rsp],r12d
- ror r14d,9
- xor r13d,ebx
+ ror r13d,6
+ ror r14d,11
xor r15d,edx
- ror r13d,5
- add r12d,r8d
- xor r14d,r9d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ebx
- mov r8d,r10d
+ mov DWORD PTR[44+rsp],r12d
- ror r14d,11
- xor r13d,ebx
+ xor r13d,r14d
xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
- xor r8d,r11d
- xor r14d,r9d
add r12d,r15d
- mov r15d,r10d
+ mov r13d,r9d
+ mov r14d,r9d
- ror r13d,6
- and r8d,r9d
- and r15d,r11d
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r8d,r15d
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+ xor r8d,r13d
+ and r15d,r11d
add eax,r12d
+
+ and r14d,r10d
add r8d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r8d,r14d
- mov r12d,DWORD PTR[48+rsi]
- mov r13d,eax
- mov r14d,r8d
+ add r8d,r14d
+ mov r12d,DWORD PTR[((4*12))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,eax
+ mov r14d,eax
mov r15d,ebx
- mov DWORD PTR[48+rsp],r12d
- ror r14d,9
- xor r13d,eax
+ ror r13d,6
+ ror r14d,11
xor r15d,ecx
- ror r13d,5
- add r12d,edx
- xor r14d,r8d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,eax
- mov edx,r9d
+ mov DWORD PTR[48+rsp],r12d
- ror r14d,11
- xor r13d,eax
+ xor r13d,r14d
xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
- xor edx,r10d
- xor r14d,r8d
add r12d,r15d
- mov r15d,r9d
+ mov r13d,r8d
+ mov r14d,r8d
- ror r13d,6
- and edx,r8d
- and r15d,r10d
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add edx,r15d
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+ xor edx,r13d
+ and r15d,r10d
add r11d,r12d
+
+ and r14d,r9d
add edx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add edx,r14d
- mov r12d,DWORD PTR[52+rsi]
- mov r13d,r11d
- mov r14d,edx
+ add edx,r14d
+ mov r12d,DWORD PTR[((4*13))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r11d
+ mov r14d,r11d
mov r15d,eax
- mov DWORD PTR[52+rsp],r12d
- ror r14d,9
- xor r13d,r11d
+ ror r13d,6
+ ror r14d,11
xor r15d,ebx
- ror r13d,5
- add r12d,ecx
- xor r14d,edx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r11d
- mov ecx,r8d
+ mov DWORD PTR[52+rsp],r12d
- ror r14d,11
- xor r13d,r11d
+ xor r13d,r14d
xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
- xor ecx,r9d
- xor r14d,edx
add r12d,r15d
- mov r15d,r8d
+ mov r13d,edx
+ mov r14d,edx
- ror r13d,6
- and ecx,edx
- and r15d,r9d
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ecx,r15d
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+ xor ecx,r13d
+ and r15d,r9d
add r10d,r12d
+
+ and r14d,r8d
add ecx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ecx,r14d
- mov r12d,DWORD PTR[56+rsi]
- mov r13d,r10d
- mov r14d,ecx
+ add ecx,r14d
+ mov r12d,DWORD PTR[((4*14))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r10d
+ mov r14d,r10d
mov r15d,r11d
- mov DWORD PTR[56+rsp],r12d
- ror r14d,9
- xor r13d,r10d
+ ror r13d,6
+ ror r14d,11
xor r15d,eax
- ror r13d,5
- add r12d,ebx
- xor r14d,ecx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r10d
- mov ebx,edx
+ mov DWORD PTR[56+rsp],r12d
- ror r14d,11
- xor r13d,r10d
+ xor r13d,r14d
xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
- xor ebx,r8d
- xor r14d,ecx
add r12d,r15d
- mov r15d,edx
+ mov r13d,ecx
+ mov r14d,ecx
- ror r13d,6
- and ebx,ecx
- and r15d,r8d
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ebx,r15d
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+ xor ebx,r13d
+ and r15d,r8d
add r9d,r12d
+
+ and r14d,edx
add ebx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ebx,r14d
- mov r12d,DWORD PTR[60+rsi]
- mov r13d,r9d
- mov r14d,ebx
+ add ebx,r14d
+ mov r12d,DWORD PTR[((4*15))+rsi]
bswap r12d
- ror r13d,14
+ mov r13d,r9d
+ mov r14d,r9d
mov r15d,r10d
- mov DWORD PTR[60+rsp],r12d
- ror r14d,9
- xor r13d,r9d
+ ror r13d,6
+ ror r14d,11
xor r15d,r11d
- ror r13d,5
- add r12d,eax
- xor r14d,ebx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r9d
- mov eax,ecx
+ mov DWORD PTR[60+rsp],r12d
- ror r14d,11
- xor r13d,r9d
+ xor r13d,r14d
xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
- xor eax,edx
- xor r14d,ebx
add r12d,r15d
- mov r15d,ecx
+ mov r13d,ebx
+ mov r14d,ebx
- ror r13d,6
- and eax,ebx
- and r15d,edx
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add eax,r15d
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+ xor eax,r13d
+ and r15d,edx
add r8d,r12d
+
+ and r14d,ecx
add eax,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add eax,r14d
+ add eax,r14d
jmp $L$rounds_16_xx
ALIGN 16
$L$rounds_16_xx::
mov r13d,DWORD PTR[4+rsp]
- mov r14d,DWORD PTR[56+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[56+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[36+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[36+rsp]
add r12d,DWORD PTR[rsp]
mov r13d,r8d
- add r12d,r14d
- mov r14d,eax
- ror r13d,14
+ mov r14d,r8d
mov r15d,r9d
- mov DWORD PTR[rsp],r12d
- ror r14d,9
- xor r13d,r8d
+ ror r13d,6
+ ror r14d,11
xor r15d,r10d
- ror r13d,5
- add r12d,r11d
- xor r14d,eax
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r8d
- mov r11d,ebx
+ mov DWORD PTR[rsp],r12d
- ror r14d,11
- xor r13d,r8d
+ xor r13d,r14d
xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
- xor r11d,ecx
- xor r14d,eax
add r12d,r15d
- mov r15d,ebx
+ mov r13d,eax
+ mov r14d,eax
- ror r13d,6
- and r11d,eax
- and r15d,ecx
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r11d,r15d
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+ xor r11d,r13d
+ and r15d,ecx
add edx,r12d
+
+ and r14d,ebx
add r11d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r11d,r14d
+ add r11d,r14d
mov r13d,DWORD PTR[8+rsp]
- mov r14d,DWORD PTR[60+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[60+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[40+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[40+rsp]
add r12d,DWORD PTR[4+rsp]
mov r13d,edx
- add r12d,r14d
- mov r14d,r11d
- ror r13d,14
+ mov r14d,edx
mov r15d,r8d
- mov DWORD PTR[4+rsp],r12d
- ror r14d,9
- xor r13d,edx
+ ror r13d,6
+ ror r14d,11
xor r15d,r9d
- ror r13d,5
- add r12d,r10d
- xor r14d,r11d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,edx
- mov r10d,eax
+ mov DWORD PTR[4+rsp],r12d
- ror r14d,11
- xor r13d,edx
+ xor r13d,r14d
xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
- xor r10d,ebx
- xor r14d,r11d
add r12d,r15d
- mov r15d,eax
+ mov r13d,r11d
+ mov r14d,r11d
- ror r13d,6
- and r10d,r11d
- and r15d,ebx
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r10d,r15d
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+ xor r10d,r13d
+ and r15d,ebx
add ecx,r12d
+
+ and r14d,eax
add r10d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r10d,r14d
+ add r10d,r14d
mov r13d,DWORD PTR[12+rsp]
- mov r14d,DWORD PTR[rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[44+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[44+rsp]
add r12d,DWORD PTR[8+rsp]
mov r13d,ecx
- add r12d,r14d
- mov r14d,r10d
- ror r13d,14
+ mov r14d,ecx
mov r15d,edx
- mov DWORD PTR[8+rsp],r12d
- ror r14d,9
- xor r13d,ecx
+ ror r13d,6
+ ror r14d,11
xor r15d,r8d
- ror r13d,5
- add r12d,r9d
- xor r14d,r10d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ecx
- mov r9d,r11d
+ mov DWORD PTR[8+rsp],r12d
- ror r14d,11
- xor r13d,ecx
+ xor r13d,r14d
xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
- xor r9d,eax
- xor r14d,r10d
add r12d,r15d
- mov r15d,r11d
+ mov r13d,r10d
+ mov r14d,r10d
- ror r13d,6
- and r9d,r10d
- and r15d,eax
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r9d,r15d
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+ xor r9d,r13d
+ and r15d,eax
add ebx,r12d
+
+ and r14d,r11d
add r9d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r9d,r14d
+ add r9d,r14d
mov r13d,DWORD PTR[16+rsp]
- mov r14d,DWORD PTR[4+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[4+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[48+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[48+rsp]
add r12d,DWORD PTR[12+rsp]
mov r13d,ebx
- add r12d,r14d
- mov r14d,r9d
- ror r13d,14
+ mov r14d,ebx
mov r15d,ecx
- mov DWORD PTR[12+rsp],r12d
- ror r14d,9
- xor r13d,ebx
+ ror r13d,6
+ ror r14d,11
xor r15d,edx
- ror r13d,5
- add r12d,r8d
- xor r14d,r9d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ebx
- mov r8d,r10d
+ mov DWORD PTR[12+rsp],r12d
- ror r14d,11
- xor r13d,ebx
+ xor r13d,r14d
xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
- xor r8d,r11d
- xor r14d,r9d
add r12d,r15d
- mov r15d,r10d
+ mov r13d,r9d
+ mov r14d,r9d
- ror r13d,6
- and r8d,r9d
- and r15d,r11d
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r8d,r15d
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+ xor r8d,r13d
+ and r15d,r11d
add eax,r12d
+
+ and r14d,r10d
add r8d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r8d,r14d
+ add r8d,r14d
mov r13d,DWORD PTR[20+rsp]
- mov r14d,DWORD PTR[8+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[8+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[52+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[52+rsp]
add r12d,DWORD PTR[16+rsp]
mov r13d,eax
- add r12d,r14d
- mov r14d,r8d
- ror r13d,14
+ mov r14d,eax
mov r15d,ebx
- mov DWORD PTR[16+rsp],r12d
- ror r14d,9
- xor r13d,eax
+ ror r13d,6
+ ror r14d,11
xor r15d,ecx
- ror r13d,5
- add r12d,edx
- xor r14d,r8d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,eax
- mov edx,r9d
+ mov DWORD PTR[16+rsp],r12d
- ror r14d,11
- xor r13d,eax
+ xor r13d,r14d
xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
- xor edx,r10d
- xor r14d,r8d
add r12d,r15d
- mov r15d,r9d
+ mov r13d,r8d
+ mov r14d,r8d
- ror r13d,6
- and edx,r8d
- and r15d,r10d
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add edx,r15d
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+ xor edx,r13d
+ and r15d,r10d
add r11d,r12d
+
+ and r14d,r9d
add edx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add edx,r14d
+ add edx,r14d
mov r13d,DWORD PTR[24+rsp]
- mov r14d,DWORD PTR[12+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[12+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[56+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[56+rsp]
add r12d,DWORD PTR[20+rsp]
mov r13d,r11d
- add r12d,r14d
- mov r14d,edx
- ror r13d,14
+ mov r14d,r11d
mov r15d,eax
- mov DWORD PTR[20+rsp],r12d
- ror r14d,9
- xor r13d,r11d
+ ror r13d,6
+ ror r14d,11
xor r15d,ebx
- ror r13d,5
- add r12d,ecx
- xor r14d,edx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r11d
- mov ecx,r8d
+ mov DWORD PTR[20+rsp],r12d
- ror r14d,11
- xor r13d,r11d
+ xor r13d,r14d
xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
- xor ecx,r9d
- xor r14d,edx
add r12d,r15d
- mov r15d,r8d
+ mov r13d,edx
+ mov r14d,edx
- ror r13d,6
- and ecx,edx
- and r15d,r9d
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ecx,r15d
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+ xor ecx,r13d
+ and r15d,r9d
add r10d,r12d
+
+ and r14d,r8d
add ecx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ecx,r14d
+ add ecx,r14d
mov r13d,DWORD PTR[28+rsp]
- mov r14d,DWORD PTR[16+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[16+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[60+rsp]
+ xor r13d,r15d
+ ror r15d,11
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[60+rsp]
add r12d,DWORD PTR[24+rsp]
mov r13d,r10d
- add r12d,r14d
- mov r14d,ecx
- ror r13d,14
+ mov r14d,r10d
mov r15d,r11d
- mov DWORD PTR[24+rsp],r12d
- ror r14d,9
- xor r13d,r10d
+ ror r13d,6
+ ror r14d,11
xor r15d,eax
- ror r13d,5
- add r12d,ebx
- xor r14d,ecx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r10d
- mov ebx,edx
+ mov DWORD PTR[24+rsp],r12d
- ror r14d,11
- xor r13d,r10d
+ xor r13d,r14d
xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
- xor ebx,r8d
- xor r14d,ecx
add r12d,r15d
- mov r15d,edx
+ mov r13d,ecx
+ mov r14d,ecx
- ror r13d,6
- and ebx,ecx
- and r15d,r8d
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ebx,r15d
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+ xor ebx,r13d
+ and r15d,r8d
add r9d,r12d
+
+ and r14d,edx
add ebx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ebx,r14d
+ add ebx,r14d
mov r13d,DWORD PTR[32+rsp]
- mov r14d,DWORD PTR[20+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[20+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[rsp]
add r12d,DWORD PTR[28+rsp]
mov r13d,r9d
- add r12d,r14d
- mov r14d,ebx
- ror r13d,14
+ mov r14d,r9d
mov r15d,r10d
- mov DWORD PTR[28+rsp],r12d
- ror r14d,9
- xor r13d,r9d
+ ror r13d,6
+ ror r14d,11
xor r15d,r11d
- ror r13d,5
- add r12d,eax
- xor r14d,ebx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r9d
- mov eax,ecx
+ mov DWORD PTR[28+rsp],r12d
- ror r14d,11
- xor r13d,r9d
+ xor r13d,r14d
xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
- xor eax,edx
- xor r14d,ebx
add r12d,r15d
- mov r15d,ecx
+ mov r13d,ebx
+ mov r14d,ebx
- ror r13d,6
- and eax,ebx
- and r15d,edx
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add eax,r15d
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+ xor eax,r13d
+ and r15d,edx
add r8d,r12d
+
+ and r14d,ecx
add eax,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add eax,r14d
+ add eax,r14d
mov r13d,DWORD PTR[36+rsp]
- mov r14d,DWORD PTR[24+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[24+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[4+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[4+rsp]
add r12d,DWORD PTR[32+rsp]
mov r13d,r8d
- add r12d,r14d
- mov r14d,eax
- ror r13d,14
+ mov r14d,r8d
mov r15d,r9d
- mov DWORD PTR[32+rsp],r12d
- ror r14d,9
- xor r13d,r8d
+ ror r13d,6
+ ror r14d,11
xor r15d,r10d
- ror r13d,5
- add r12d,r11d
- xor r14d,eax
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r8d
- mov r11d,ebx
+ mov DWORD PTR[32+rsp],r12d
- ror r14d,11
- xor r13d,r8d
+ xor r13d,r14d
xor r15d,r10d
+ add r12d,r11d
+
+ mov r11d,eax
+ add r12d,r13d
- xor r11d,ecx
- xor r14d,eax
add r12d,r15d
- mov r15d,ebx
+ mov r13d,eax
+ mov r14d,eax
- ror r13d,6
- and r11d,eax
- and r15d,ecx
+ ror r11d,2
+ ror r13d,13
+ mov r15d,eax
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r11d,r15d
+ xor r11d,r13d
+ ror r13d,9
+ or r14d,ecx
+ xor r11d,r13d
+ and r15d,ecx
add edx,r12d
+
+ and r14d,ebx
add r11d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r11d,r14d
+ add r11d,r14d
mov r13d,DWORD PTR[40+rsp]
- mov r14d,DWORD PTR[28+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[28+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[8+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[8+rsp]
add r12d,DWORD PTR[36+rsp]
mov r13d,edx
- add r12d,r14d
- mov r14d,r11d
- ror r13d,14
+ mov r14d,edx
mov r15d,r8d
- mov DWORD PTR[36+rsp],r12d
- ror r14d,9
- xor r13d,edx
+ ror r13d,6
+ ror r14d,11
xor r15d,r9d
- ror r13d,5
- add r12d,r10d
- xor r14d,r11d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,edx
- mov r10d,eax
+ mov DWORD PTR[36+rsp],r12d
- ror r14d,11
- xor r13d,edx
+ xor r13d,r14d
xor r15d,r9d
+ add r12d,r10d
+
+ mov r10d,r11d
+ add r12d,r13d
- xor r10d,ebx
- xor r14d,r11d
add r12d,r15d
- mov r15d,eax
+ mov r13d,r11d
+ mov r14d,r11d
- ror r13d,6
- and r10d,r11d
- and r15d,ebx
+ ror r10d,2
+ ror r13d,13
+ mov r15d,r11d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r10d,r15d
+ xor r10d,r13d
+ ror r13d,9
+ or r14d,ebx
+ xor r10d,r13d
+ and r15d,ebx
add ecx,r12d
+
+ and r14d,eax
add r10d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r10d,r14d
+ add r10d,r14d
mov r13d,DWORD PTR[44+rsp]
- mov r14d,DWORD PTR[32+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[32+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[12+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[12+rsp]
add r12d,DWORD PTR[40+rsp]
mov r13d,ecx
- add r12d,r14d
- mov r14d,r10d
- ror r13d,14
+ mov r14d,ecx
mov r15d,edx
- mov DWORD PTR[40+rsp],r12d
- ror r14d,9
- xor r13d,ecx
+ ror r13d,6
+ ror r14d,11
xor r15d,r8d
- ror r13d,5
- add r12d,r9d
- xor r14d,r10d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ecx
- mov r9d,r11d
+ mov DWORD PTR[40+rsp],r12d
- ror r14d,11
- xor r13d,ecx
+ xor r13d,r14d
xor r15d,r8d
+ add r12d,r9d
+
+ mov r9d,r10d
+ add r12d,r13d
- xor r9d,eax
- xor r14d,r10d
add r12d,r15d
- mov r15d,r11d
+ mov r13d,r10d
+ mov r14d,r10d
- ror r13d,6
- and r9d,r10d
- and r15d,eax
+ ror r9d,2
+ ror r13d,13
+ mov r15d,r10d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r9d,r15d
+ xor r9d,r13d
+ ror r13d,9
+ or r14d,eax
+ xor r9d,r13d
+ and r15d,eax
add ebx,r12d
+
+ and r14d,r11d
add r9d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r9d,r14d
+ add r9d,r14d
mov r13d,DWORD PTR[48+rsp]
- mov r14d,DWORD PTR[36+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[36+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[16+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[16+rsp]
add r12d,DWORD PTR[44+rsp]
mov r13d,ebx
- add r12d,r14d
- mov r14d,r9d
- ror r13d,14
+ mov r14d,ebx
mov r15d,ecx
- mov DWORD PTR[44+rsp],r12d
- ror r14d,9
- xor r13d,ebx
+ ror r13d,6
+ ror r14d,11
xor r15d,edx
- ror r13d,5
- add r12d,r8d
- xor r14d,r9d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,ebx
- mov r8d,r10d
+ mov DWORD PTR[44+rsp],r12d
- ror r14d,11
- xor r13d,ebx
+ xor r13d,r14d
xor r15d,edx
+ add r12d,r8d
+
+ mov r8d,r9d
+ add r12d,r13d
- xor r8d,r11d
- xor r14d,r9d
add r12d,r15d
- mov r15d,r10d
+ mov r13d,r9d
+ mov r14d,r9d
- ror r13d,6
- and r8d,r9d
- and r15d,r11d
+ ror r8d,2
+ ror r13d,13
+ mov r15d,r9d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add r8d,r15d
+ xor r8d,r13d
+ ror r13d,9
+ or r14d,r11d
+ xor r8d,r13d
+ and r15d,r11d
add eax,r12d
+
+ and r14d,r10d
add r8d,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add r8d,r14d
+ add r8d,r14d
mov r13d,DWORD PTR[52+rsp]
- mov r14d,DWORD PTR[40+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[40+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[20+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[20+rsp]
add r12d,DWORD PTR[48+rsp]
mov r13d,eax
- add r12d,r14d
- mov r14d,r8d
- ror r13d,14
+ mov r14d,eax
mov r15d,ebx
- mov DWORD PTR[48+rsp],r12d
- ror r14d,9
- xor r13d,eax
+ ror r13d,6
+ ror r14d,11
xor r15d,ecx
- ror r13d,5
- add r12d,edx
- xor r14d,r8d
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,eax
- mov edx,r9d
+ mov DWORD PTR[48+rsp],r12d
- ror r14d,11
- xor r13d,eax
+ xor r13d,r14d
xor r15d,ecx
+ add r12d,edx
+
+ mov edx,r8d
+ add r12d,r13d
- xor edx,r10d
- xor r14d,r8d
add r12d,r15d
- mov r15d,r9d
+ mov r13d,r8d
+ mov r14d,r8d
- ror r13d,6
- and edx,r8d
- and r15d,r10d
+ ror edx,2
+ ror r13d,13
+ mov r15d,r8d
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add edx,r15d
+ xor edx,r13d
+ ror r13d,9
+ or r14d,r10d
+ xor edx,r13d
+ and r15d,r10d
add r11d,r12d
+
+ and r14d,r9d
add edx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add edx,r14d
+ add edx,r14d
mov r13d,DWORD PTR[56+rsp]
- mov r14d,DWORD PTR[44+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[44+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[24+rsp]
+ xor r13d,r15d
+ ror r15d,11
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[24+rsp]
add r12d,DWORD PTR[52+rsp]
mov r13d,r11d
- add r12d,r14d
- mov r14d,edx
- ror r13d,14
+ mov r14d,r11d
mov r15d,eax
- mov DWORD PTR[52+rsp],r12d
- ror r14d,9
- xor r13d,r11d
+ ror r13d,6
+ ror r14d,11
xor r15d,ebx
- ror r13d,5
- add r12d,ecx
- xor r14d,edx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r11d
- mov ecx,r8d
+ mov DWORD PTR[52+rsp],r12d
- ror r14d,11
- xor r13d,r11d
+ xor r13d,r14d
xor r15d,ebx
+ add r12d,ecx
+
+ mov ecx,edx
+ add r12d,r13d
- xor ecx,r9d
- xor r14d,edx
add r12d,r15d
- mov r15d,r8d
+ mov r13d,edx
+ mov r14d,edx
- ror r13d,6
- and ecx,edx
- and r15d,r9d
+ ror ecx,2
+ ror r13d,13
+ mov r15d,edx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ecx,r15d
+ xor ecx,r13d
+ ror r13d,9
+ or r14d,r9d
+ xor ecx,r13d
+ and r15d,r9d
add r10d,r12d
+
+ and r14d,r8d
add ecx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ecx,r14d
+ add ecx,r14d
mov r13d,DWORD PTR[60+rsp]
- mov r14d,DWORD PTR[48+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[48+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[28+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
+
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[28+rsp]
add r12d,DWORD PTR[56+rsp]
mov r13d,r10d
- add r12d,r14d
- mov r14d,ecx
- ror r13d,14
+ mov r14d,r10d
mov r15d,r11d
- mov DWORD PTR[56+rsp],r12d
- ror r14d,9
- xor r13d,r10d
+ ror r13d,6
+ ror r14d,11
xor r15d,eax
- ror r13d,5
- add r12d,ebx
- xor r14d,ecx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r10d
- mov ebx,edx
+ mov DWORD PTR[56+rsp],r12d
- ror r14d,11
- xor r13d,r10d
+ xor r13d,r14d
xor r15d,eax
+ add r12d,ebx
+
+ mov ebx,ecx
+ add r12d,r13d
- xor ebx,r8d
- xor r14d,ecx
add r12d,r15d
- mov r15d,edx
+ mov r13d,ecx
+ mov r14d,ecx
- ror r13d,6
- and ebx,ecx
- and r15d,r8d
+ ror ebx,2
+ ror r13d,13
+ mov r15d,ecx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add ebx,r15d
+ xor ebx,r13d
+ ror r13d,9
+ or r14d,r8d
+ xor ebx,r13d
+ and r15d,r8d
add r9d,r12d
+
+ and r14d,edx
add ebx,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add ebx,r14d
+ add ebx,r14d
mov r13d,DWORD PTR[rsp]
- mov r14d,DWORD PTR[52+rsp]
- mov r12d,r13d
- mov r15d,r14d
+ mov r12d,DWORD PTR[52+rsp]
+
+ mov r15d,r13d
- ror r12d,11
- xor r12d,r13d
shr r13d,3
+ ror r15d,7
- ror r12d,7
- xor r13d,r12d
- mov r12d,DWORD PTR[32+rsp]
+ xor r13d,r15d
+ ror r15d,11
+
+ xor r13d,r15d
+ mov r14d,r12d
- ror r15d,2
- xor r15d,r14d
- shr r14d,10
+ shr r12d,10
+ ror r14d,17
+
+ xor r12d,r14d
+ ror r14d,2
+
+ xor r12d,r14d
- ror r15d,17
add r12d,r13d
- xor r14d,r15d
+
+ add r12d,DWORD PTR[32+rsp]
add r12d,DWORD PTR[60+rsp]
mov r13d,r9d
- add r12d,r14d
- mov r14d,ebx
- ror r13d,14
+ mov r14d,r9d
mov r15d,r10d
- mov DWORD PTR[60+rsp],r12d
- ror r14d,9
- xor r13d,r9d
+ ror r13d,6
+ ror r14d,11
xor r15d,r11d
- ror r13d,5
- add r12d,eax
- xor r14d,ebx
-
- add r12d,DWORD PTR[rdi*4+rbp]
+ xor r13d,r14d
+ ror r14d,14
and r15d,r9d
- mov eax,ecx
+ mov DWORD PTR[60+rsp],r12d
- ror r14d,11
- xor r13d,r9d
+ xor r13d,r14d
xor r15d,r11d
+ add r12d,eax
+
+ mov eax,ebx
+ add r12d,r13d
- xor eax,edx
- xor r14d,ebx
add r12d,r15d
- mov r15d,ecx
+ mov r13d,ebx
+ mov r14d,ebx
- ror r13d,6
- and eax,ebx
- and r15d,edx
+ ror eax,2
+ ror r13d,13
+ mov r15d,ebx
+ add r12d,DWORD PTR[rdi*4+rbp]
- ror r14d,2
- add r12d,r13d
- add eax,r15d
+ xor eax,r13d
+ ror r13d,9
+ or r14d,edx
+ xor eax,r13d
+ and r15d,edx
add r8d,r12d
+
+ and r14d,ecx
add eax,r12d
+
+ or r14d,r15d
lea rdi,QWORD PTR[1+rdi]
- add eax,r14d
+ add eax,r14d
cmp rdi,64
jb $L$rounds_16_xx
- mov rdi,QWORD PTR[((64+0))+rsp]
- lea rsi,QWORD PTR[64+rsi]
-
- add eax,DWORD PTR[rdi]
- add ebx,DWORD PTR[4+rdi]
- add ecx,DWORD PTR[8+rdi]
- add edx,DWORD PTR[12+rdi]
- add r8d,DWORD PTR[16+rdi]
- add r9d,DWORD PTR[20+rdi]
- add r10d,DWORD PTR[24+rdi]
- add r11d,DWORD PTR[28+rdi]
-
- cmp rsi,QWORD PTR[((64+16))+rsp]
-
- mov DWORD PTR[rdi],eax
- mov DWORD PTR[4+rdi],ebx
- mov DWORD PTR[8+rdi],ecx
- mov DWORD PTR[12+rdi],edx
- mov DWORD PTR[16+rdi],r8d
- mov DWORD PTR[20+rdi],r9d
- mov DWORD PTR[24+rdi],r10d
- mov DWORD PTR[28+rdi],r11d
+ mov rdi,QWORD PTR[((16*4+0*8))+rsp]
+ lea rsi,QWORD PTR[((16*4))+rsi]
+
+ add eax,DWORD PTR[((4*0))+rdi]
+ add ebx,DWORD PTR[((4*1))+rdi]
+ add ecx,DWORD PTR[((4*2))+rdi]
+ add edx,DWORD PTR[((4*3))+rdi]
+ add r8d,DWORD PTR[((4*4))+rdi]
+ add r9d,DWORD PTR[((4*5))+rdi]
+ add r10d,DWORD PTR[((4*6))+rdi]
+ add r11d,DWORD PTR[((4*7))+rdi]
+
+ cmp rsi,QWORD PTR[((16*4+2*8))+rsp]
+
+ mov DWORD PTR[((4*0))+rdi],eax
+ mov DWORD PTR[((4*1))+rdi],ebx
+ mov DWORD PTR[((4*2))+rdi],ecx
+ mov DWORD PTR[((4*3))+rdi],edx
+ mov DWORD PTR[((4*4))+rdi],r8d
+ mov DWORD PTR[((4*5))+rdi],r9d
+ mov DWORD PTR[((4*6))+rdi],r10d
+ mov DWORD PTR[((4*7))+rdi],r11d
jb $L$loop
- mov rsi,QWORD PTR[((64+24))+rsp]
+ mov rsi,QWORD PTR[((16*4+3*8))+rsp]
mov r15,QWORD PTR[rsi]
mov r14,QWORD PTR[8+rsi]
mov r13,QWORD PTR[16+rsi]
@@ -1818,7 +2010,7 @@ se_handler PROC PRIVATE
cmp rbx,r10
jae $L$in_prologue
- mov rax,QWORD PTR[((64+24))+rax]
+ mov rax,QWORD PTR[((16*4+3*8))+rax]
lea rax,QWORD PTR[48+rax]
mov rbx,QWORD PTR[((-8))+rax]
diff --git a/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm b/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
index 42b524dc8f..25337b2440 100644
--- a/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
+++ b/deps/openssl/asm/x64-win32-masm/whrlpool/wp-x86_64.asm
@@ -37,39 +37,39 @@ $L$prologue::
xor rcx,rcx
xor rdx,rdx
- mov r8,QWORD PTR[rdi]
- mov r9,QWORD PTR[8+rdi]
- mov r10,QWORD PTR[16+rdi]
- mov r11,QWORD PTR[24+rdi]
- mov r12,QWORD PTR[32+rdi]
- mov r13,QWORD PTR[40+rdi]
- mov r14,QWORD PTR[48+rdi]
- mov r15,QWORD PTR[56+rdi]
+ mov r8,QWORD PTR[((0*8))+rdi]
+ mov r9,QWORD PTR[((1*8))+rdi]
+ mov r10,QWORD PTR[((2*8))+rdi]
+ mov r11,QWORD PTR[((3*8))+rdi]
+ mov r12,QWORD PTR[((4*8))+rdi]
+ mov r13,QWORD PTR[((5*8))+rdi]
+ mov r14,QWORD PTR[((6*8))+rdi]
+ mov r15,QWORD PTR[((7*8))+rdi]
$L$outerloop::
- mov QWORD PTR[rsp],r8
- mov QWORD PTR[8+rsp],r9
- mov QWORD PTR[16+rsp],r10
- mov QWORD PTR[24+rsp],r11
- mov QWORD PTR[32+rsp],r12
- mov QWORD PTR[40+rsp],r13
- mov QWORD PTR[48+rsp],r14
- mov QWORD PTR[56+rsp],r15
- xor r8,QWORD PTR[rsi]
- xor r9,QWORD PTR[8+rsi]
- xor r10,QWORD PTR[16+rsi]
- xor r11,QWORD PTR[24+rsi]
- xor r12,QWORD PTR[32+rsi]
- xor r13,QWORD PTR[40+rsi]
- xor r14,QWORD PTR[48+rsi]
- xor r15,QWORD PTR[56+rsi]
- mov QWORD PTR[((64+0))+rsp],r8
- mov QWORD PTR[((64+8))+rsp],r9
- mov QWORD PTR[((64+16))+rsp],r10
- mov QWORD PTR[((64+24))+rsp],r11
- mov QWORD PTR[((64+32))+rsp],r12
- mov QWORD PTR[((64+40))+rsp],r13
- mov QWORD PTR[((64+48))+rsp],r14
- mov QWORD PTR[((64+56))+rsp],r15
+ mov QWORD PTR[((0*8))+rsp],r8
+ mov QWORD PTR[((1*8))+rsp],r9
+ mov QWORD PTR[((2*8))+rsp],r10
+ mov QWORD PTR[((3*8))+rsp],r11
+ mov QWORD PTR[((4*8))+rsp],r12
+ mov QWORD PTR[((5*8))+rsp],r13
+ mov QWORD PTR[((6*8))+rsp],r14
+ mov QWORD PTR[((7*8))+rsp],r15
+ xor r8,QWORD PTR[((0*8))+rsi]
+ xor r9,QWORD PTR[((1*8))+rsi]
+ xor r10,QWORD PTR[((2*8))+rsi]
+ xor r11,QWORD PTR[((3*8))+rsi]
+ xor r12,QWORD PTR[((4*8))+rsi]
+ xor r13,QWORD PTR[((5*8))+rsi]
+ xor r14,QWORD PTR[((6*8))+rsi]
+ xor r15,QWORD PTR[((7*8))+rsi]
+ mov QWORD PTR[((64+0*8))+rsp],r8
+ mov QWORD PTR[((64+1*8))+rsp],r9
+ mov QWORD PTR[((64+2*8))+rsp],r10
+ mov QWORD PTR[((64+3*8))+rsp],r11
+ mov QWORD PTR[((64+4*8))+rsp],r12
+ mov QWORD PTR[((64+5*8))+rsp],r13
+ mov QWORD PTR[((64+6*8))+rsp],r14
+ mov QWORD PTR[((64+7*8))+rsp],r15
xor rsi,rsi
mov QWORD PTR[24+rbx],rsi
ALIGN 16
@@ -86,7 +86,7 @@ $L$round::
mov r9,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((0+8))+rsp]
+ mov eax,DWORD PTR[((0*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
mov r10,QWORD PTR[6+rsi*8+rbp]
@@ -100,7 +100,7 @@ $L$round::
mov r13,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((0+8+4))+rsp]
+ mov ebx,DWORD PTR[((0*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
mov r14,QWORD PTR[2+rsi*8+rbp]
@@ -114,7 +114,7 @@ $L$round::
xor r10,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((8+8))+rsp]
+ mov eax,DWORD PTR[((1*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[6+rsi*8+rbp]
@@ -128,7 +128,7 @@ $L$round::
xor r14,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((8+8+4))+rsp]
+ mov ebx,DWORD PTR[((1*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[2+rsi*8+rbp]
@@ -142,7 +142,7 @@ $L$round::
xor r11,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((16+8))+rsp]
+ mov eax,DWORD PTR[((2*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[6+rsi*8+rbp]
@@ -156,7 +156,7 @@ $L$round::
xor r15,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((16+8+4))+rsp]
+ mov ebx,DWORD PTR[((2*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[2+rsi*8+rbp]
@@ -170,7 +170,7 @@ $L$round::
xor r12,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((24+8))+rsp]
+ mov eax,DWORD PTR[((3*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[6+rsi*8+rbp]
@@ -184,7 +184,7 @@ $L$round::
xor r8,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((24+8+4))+rsp]
+ mov ebx,DWORD PTR[((3*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[2+rsi*8+rbp]
@@ -198,7 +198,7 @@ $L$round::
xor r13,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((32+8))+rsp]
+ mov eax,DWORD PTR[((4*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[6+rsi*8+rbp]
@@ -212,7 +212,7 @@ $L$round::
xor r9,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((32+8+4))+rsp]
+ mov ebx,DWORD PTR[((4*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[2+rsi*8+rbp]
@@ -226,7 +226,7 @@ $L$round::
xor r14,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((40+8))+rsp]
+ mov eax,DWORD PTR[((5*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[6+rsi*8+rbp]
@@ -240,7 +240,7 @@ $L$round::
xor r10,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((40+8+4))+rsp]
+ mov ebx,DWORD PTR[((5*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[2+rsi*8+rbp]
@@ -254,7 +254,7 @@ $L$round::
xor r15,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((48+8))+rsp]
+ mov eax,DWORD PTR[((6*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[6+rsi*8+rbp]
@@ -268,7 +268,7 @@ $L$round::
xor r11,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((48+8+4))+rsp]
+ mov ebx,DWORD PTR[((6*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[2+rsi*8+rbp]
@@ -282,7 +282,7 @@ $L$round::
xor r8,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((56+8))+rsp]
+ mov eax,DWORD PTR[((7*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[6+rsi*8+rbp]
@@ -296,19 +296,19 @@ $L$round::
xor r12,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((56+8+4))+rsp]
+ mov ebx,DWORD PTR[((7*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[2+rsi*8+rbp]
xor r14,QWORD PTR[1+rdi*8+rbp]
- mov QWORD PTR[rsp],r8
- mov QWORD PTR[8+rsp],r9
- mov QWORD PTR[16+rsp],r10
- mov QWORD PTR[24+rsp],r11
- mov QWORD PTR[32+rsp],r12
- mov QWORD PTR[40+rsp],r13
- mov QWORD PTR[48+rsp],r14
- mov QWORD PTR[56+rsp],r15
+ mov QWORD PTR[((0*8))+rsp],r8
+ mov QWORD PTR[((1*8))+rsp],r9
+ mov QWORD PTR[((2*8))+rsp],r10
+ mov QWORD PTR[((3*8))+rsp],r11
+ mov QWORD PTR[((4*8))+rsp],r12
+ mov QWORD PTR[((5*8))+rsp],r13
+ mov QWORD PTR[((6*8))+rsp],r14
+ mov QWORD PTR[((7*8))+rsp],r15
mov cl,al
mov dl,ah
lea rsi,QWORD PTR[rcx*1+rcx]
@@ -318,7 +318,7 @@ $L$round::
xor r9,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+0+8))+rsp]
+ mov eax,DWORD PTR[((64+0*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[6+rsi*8+rbp]
@@ -332,7 +332,7 @@ $L$round::
xor r13,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+0+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+0*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[2+rsi*8+rbp]
@@ -346,7 +346,7 @@ $L$round::
xor r10,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+8+8))+rsp]
+ mov eax,DWORD PTR[((64+1*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[6+rsi*8+rbp]
@@ -360,7 +360,7 @@ $L$round::
xor r14,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+8+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+1*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[2+rsi*8+rbp]
@@ -374,7 +374,7 @@ $L$round::
xor r11,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+16+8))+rsp]
+ mov eax,DWORD PTR[((64+2*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[6+rsi*8+rbp]
@@ -388,7 +388,7 @@ $L$round::
xor r15,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+16+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+2*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[2+rsi*8+rbp]
@@ -402,7 +402,7 @@ $L$round::
xor r12,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+24+8))+rsp]
+ mov eax,DWORD PTR[((64+3*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r13,QWORD PTR[6+rsi*8+rbp]
@@ -416,7 +416,7 @@ $L$round::
xor r8,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+24+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+3*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r9,QWORD PTR[2+rsi*8+rbp]
@@ -430,7 +430,7 @@ $L$round::
xor r13,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+32+8))+rsp]
+ mov eax,DWORD PTR[((64+4*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r14,QWORD PTR[6+rsi*8+rbp]
@@ -444,7 +444,7 @@ $L$round::
xor r9,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+32+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+4*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r10,QWORD PTR[2+rsi*8+rbp]
@@ -458,7 +458,7 @@ $L$round::
xor r14,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+40+8))+rsp]
+ mov eax,DWORD PTR[((64+5*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r15,QWORD PTR[6+rsi*8+rbp]
@@ -472,7 +472,7 @@ $L$round::
xor r10,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+40+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+5*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r11,QWORD PTR[2+rsi*8+rbp]
@@ -486,7 +486,7 @@ $L$round::
xor r15,QWORD PTR[7+rdi*8+rbp]
mov cl,al
mov dl,ah
- mov eax,DWORD PTR[((64+48+8))+rsp]
+ mov eax,DWORD PTR[((64+6*8+8))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r8,QWORD PTR[6+rsi*8+rbp]
@@ -500,7 +500,7 @@ $L$round::
xor r11,QWORD PTR[3+rdi*8+rbp]
mov cl,bl
mov dl,bh
- mov ebx,DWORD PTR[((64+48+8+4))+rsp]
+ mov ebx,DWORD PTR[((64+6*8+8+4))+rsp]
lea rsi,QWORD PTR[rcx*1+rcx]
lea rdi,QWORD PTR[rdx*1+rdx]
xor r12,QWORD PTR[2+rsi*8+rbp]
@@ -540,44 +540,44 @@ $L$round::
je $L$roundsdone
mov QWORD PTR[24+rbx],rsi
- mov QWORD PTR[((64+0))+rsp],r8
- mov QWORD PTR[((64+8))+rsp],r9
- mov QWORD PTR[((64+16))+rsp],r10
- mov QWORD PTR[((64+24))+rsp],r11
- mov QWORD PTR[((64+32))+rsp],r12
- mov QWORD PTR[((64+40))+rsp],r13
- mov QWORD PTR[((64+48))+rsp],r14
- mov QWORD PTR[((64+56))+rsp],r15
+ mov QWORD PTR[((64+0*8))+rsp],r8
+ mov QWORD PTR[((64+1*8))+rsp],r9
+ mov QWORD PTR[((64+2*8))+rsp],r10
+ mov QWORD PTR[((64+3*8))+rsp],r11
+ mov QWORD PTR[((64+4*8))+rsp],r12
+ mov QWORD PTR[((64+5*8))+rsp],r13
+ mov QWORD PTR[((64+6*8))+rsp],r14
+ mov QWORD PTR[((64+7*8))+rsp],r15
jmp $L$round
ALIGN 16
$L$roundsdone::
mov rdi,QWORD PTR[rbx]
mov rsi,QWORD PTR[8+rbx]
mov rax,QWORD PTR[16+rbx]
- xor r8,QWORD PTR[rsi]
- xor r9,QWORD PTR[8+rsi]
- xor r10,QWORD PTR[16+rsi]
- xor r11,QWORD PTR[24+rsi]
- xor r12,QWORD PTR[32+rsi]
- xor r13,QWORD PTR[40+rsi]
- xor r14,QWORD PTR[48+rsi]
- xor r15,QWORD PTR[56+rsi]
- xor r8,QWORD PTR[rdi]
- xor r9,QWORD PTR[8+rdi]
- xor r10,QWORD PTR[16+rdi]
- xor r11,QWORD PTR[24+rdi]
- xor r12,QWORD PTR[32+rdi]
- xor r13,QWORD PTR[40+rdi]
- xor r14,QWORD PTR[48+rdi]
- xor r15,QWORD PTR[56+rdi]
- mov QWORD PTR[rdi],r8
- mov QWORD PTR[8+rdi],r9
- mov QWORD PTR[16+rdi],r10
- mov QWORD PTR[24+rdi],r11
- mov QWORD PTR[32+rdi],r12
- mov QWORD PTR[40+rdi],r13
- mov QWORD PTR[48+rdi],r14
- mov QWORD PTR[56+rdi],r15
+ xor r8,QWORD PTR[((0*8))+rsi]
+ xor r9,QWORD PTR[((1*8))+rsi]
+ xor r10,QWORD PTR[((2*8))+rsi]
+ xor r11,QWORD PTR[((3*8))+rsi]
+ xor r12,QWORD PTR[((4*8))+rsi]
+ xor r13,QWORD PTR[((5*8))+rsi]
+ xor r14,QWORD PTR[((6*8))+rsi]
+ xor r15,QWORD PTR[((7*8))+rsi]
+ xor r8,QWORD PTR[((0*8))+rdi]
+ xor r9,QWORD PTR[((1*8))+rdi]
+ xor r10,QWORD PTR[((2*8))+rdi]
+ xor r11,QWORD PTR[((3*8))+rdi]
+ xor r12,QWORD PTR[((4*8))+rdi]
+ xor r13,QWORD PTR[((5*8))+rdi]
+ xor r14,QWORD PTR[((6*8))+rdi]
+ xor r15,QWORD PTR[((7*8))+rdi]
+ mov QWORD PTR[((0*8))+rdi],r8
+ mov QWORD PTR[((1*8))+rdi],r9
+ mov QWORD PTR[((2*8))+rdi],r10
+ mov QWORD PTR[((3*8))+rdi],r11
+ mov QWORD PTR[((4*8))+rdi],r12
+ mov QWORD PTR[((5*8))+rdi],r13
+ mov QWORD PTR[((6*8))+rdi],r14
+ mov QWORD PTR[((7*8))+rdi],r15
lea rsi,QWORD PTR[64+rsi]
sub rax,1
jz $L$alldone
diff --git a/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm b/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
index 497160cbcf..cdf7f90ca0 100644
--- a/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
+++ b/deps/openssl/asm/x64-win32-masm/x86_64cpuid.asm
@@ -1,15 +1,9 @@
OPTION DOTNAME
EXTERN OPENSSL_cpuid_setup:NEAR
-
.CRT$XCU SEGMENT READONLY ALIGN(8)
DQ OPENSSL_cpuid_setup
-
.CRT$XCU ENDS
-_DATA SEGMENT
-COMM OPENSSL_ia32cap_P:DWORD:2
-
-_DATA ENDS
.text$ SEGMENT ALIGN(64) 'CODE'
PUBLIC OPENSSL_atomic_add
@@ -74,15 +68,7 @@ OPENSSL_ia32_cpuid PROC PUBLIC
mov eax,080000000h
cpuid
- cmp eax,080000001h
- jb $L$intel
- mov r10d,eax
- mov eax,080000001h
- cpuid
- or r9d,ecx
- and r9d,000000801h
-
- cmp r10d,080000008h
+ cmp eax,080000008h
jb $L$intel
mov eax,080000008h
@@ -93,12 +79,12 @@ OPENSSL_ia32_cpuid PROC PUBLIC
mov eax,1
cpuid
bt edx,28
- jnc $L$generic
+ jnc $L$done
shr ebx,16
cmp bl,r10b
- ja $L$generic
+ ja $L$done
and edx,0efffffffh
- jmp $L$generic
+ jmp $L$done
$L$intel::
cmp r11d,4
@@ -115,48 +101,30 @@ $L$intel::
$L$nocacheinfo::
mov eax,1
cpuid
- and edx,0bfefffffh
cmp r9d,0
jne $L$notintel
- or edx,040000000h
+ or edx,000100000h
and ah,15
cmp ah,15
- jne $L$notintel
- or edx,000100000h
+ je $L$notintel
+ or edx,040000000h
$L$notintel::
bt edx,28
- jnc $L$generic
+ jnc $L$done
and edx,0efffffffh
cmp r10d,0
- je $L$generic
+ je $L$done
or edx,010000000h
shr ebx,16
cmp bl,1
- ja $L$generic
+ ja $L$done
and edx,0efffffffh
-$L$generic::
- and r9d,000000800h
- and ecx,0fffff7ffh
- or r9d,ecx
-
- mov r10d,edx
- bt r9d,27
- jnc $L$clear_avx
- xor ecx,ecx
-DB 00fh,001h,0d0h
-
- and eax,6
- cmp eax,6
- je $L$done
-$L$clear_avx::
- mov eax,0efffe7ffh
- and r9d,eax
$L$done::
- shl r9,32
- mov eax,r10d
+ shl rcx,32
+ mov eax,edx
mov rbx,r8
- or rax,r9
+ or rax,rcx
DB 0F3h,0C3h ;repret
OPENSSL_ia32_cpuid ENDP
@@ -213,20 +181,6 @@ OPENSSL_wipe_cpu PROC PUBLIC
lea rax,QWORD PTR[8+rsp]
DB 0F3h,0C3h ;repret
OPENSSL_wipe_cpu ENDP
-PUBLIC OPENSSL_ia32_rdrand
-
-ALIGN 16
-OPENSSL_ia32_rdrand PROC PUBLIC
- mov ecx,8
-$L$oop_rdrand::
-DB 72,15,199,240
- jc $L$break_rdrand
- loop $L$oop_rdrand
-$L$break_rdrand::
- cmp rax,0
- cmove rax,rcx
- DB 0F3h,0C3h ;repret
-OPENSSL_ia32_rdrand ENDP
.text$ ENDS
END
diff --git a/deps/openssl/asm/x86-elf-gas/aes/aes-586.s b/deps/openssl/asm/x86-elf-gas/aes/aes-586.s
index f586d3df60..34c90a068b 100644
--- a/deps/openssl/asm/x86-elf-gas/aes/aes-586.s
+++ b/deps/openssl/asm/x86-elf-gas/aes/aes-586.s
@@ -2986,19 +2986,19 @@ _x86_AES_set_encrypt_key:
popl %ebp
ret
.size _x86_AES_set_encrypt_key,.-_x86_AES_set_encrypt_key
-.globl private_AES_set_encrypt_key
-.type private_AES_set_encrypt_key,@function
+.globl AES_set_encrypt_key
+.type AES_set_encrypt_key,@function
.align 16
-private_AES_set_encrypt_key:
-.L_private_AES_set_encrypt_key_begin:
+AES_set_encrypt_key:
+.L_AES_set_encrypt_key_begin:
call _x86_AES_set_encrypt_key
ret
-.size private_AES_set_encrypt_key,.-.L_private_AES_set_encrypt_key_begin
-.globl private_AES_set_decrypt_key
-.type private_AES_set_decrypt_key,@function
+.size AES_set_encrypt_key,.-.L_AES_set_encrypt_key_begin
+.globl AES_set_decrypt_key
+.type AES_set_decrypt_key,@function
.align 16
-private_AES_set_decrypt_key:
-.L_private_AES_set_decrypt_key_begin:
+AES_set_decrypt_key:
+.L_AES_set_decrypt_key_begin:
call _x86_AES_set_encrypt_key
cmpl $0,%eax
je .L054proceed
@@ -3227,8 +3227,8 @@ private_AES_set_decrypt_key:
popl %ebx
popl %ebp
ret
-.size private_AES_set_decrypt_key,.-.L_private_AES_set_decrypt_key_begin
+.size AES_set_decrypt_key,.-.L_AES_set_decrypt_key_begin
.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.comm OPENSSL_ia32cap_P,8,4
+.comm OPENSSL_ia32cap_P,4,4
diff --git a/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s b/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
index 5c87910e34..a896314cfe 100644
--- a/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
+++ b/deps/openssl/asm/x86-elf-gas/camellia/cmll-x86.s
@@ -1537,11 +1537,11 @@ Camellia_Ekeygen:
popl %ebp
ret
.size Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
-.globl private_Camellia_set_key
-.type private_Camellia_set_key,@function
+.globl Camellia_set_key
+.type Camellia_set_key,@function
.align 16
-private_Camellia_set_key:
-.L_private_Camellia_set_key_begin:
+Camellia_set_key:
+.L_Camellia_set_key_begin:
pushl %ebx
movl 8(%esp),%ecx
movl 12(%esp),%ebx
@@ -1571,7 +1571,7 @@ private_Camellia_set_key:
.L014done:
popl %ebx
ret
-.size private_Camellia_set_key,.-.L_private_Camellia_set_key_begin
+.size Camellia_set_key,.-.L_Camellia_set_key_begin
.align 64
.LCamellia_SIGMA:
.long 2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
diff --git a/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s b/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
index 513ce6a58b..9ba94e4b1a 100644
--- a/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
+++ b/deps/openssl/asm/x86-elf-gas/rc4/rc4-586.s
@@ -29,146 +29,11 @@ RC4:
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz .L002loop1
- testl $-8,%edx
- movl %ebp,32(%esp)
- jz .L003go4loop4
- leal OPENSSL_ia32cap_P,%ebp
- btl $26,(%ebp)
- jnc .L003go4loop4
- movl 32(%esp),%ebp
- andl $-8,%edx
- leal -8(%esi,%edx,1),%edx
- movl %edx,-4(%edi)
- addb %cl,%bl
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- movq (%esi),%mm0
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm2
- jmp .L004loop_mmx_enter
-.align 16
-.L005loop_mmx:
- addb %cl,%bl
- psllq $56,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movq (%esi),%mm0
- movq %mm2,-8(%ebp,%esi,1)
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm2
-.L004loop_mmx_enter:
- addb %cl,%bl
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm0,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $8,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $16,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $24,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $32,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $40,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $48,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- movl %ebx,%edx
- xorl %ebx,%ebx
- movb %dl,%bl
- cmpl -4(%edi),%esi
- leal 8(%esi),%esi
- jb .L005loop_mmx
- psllq $56,%mm1
- pxor %mm1,%mm2
- movq %mm2,-8(%ebp,%esi,1)
- emms
- cmpl 24(%esp),%esi
- je .L006done
- jmp .L002loop1
-.align 16
-.L003go4loop4:
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
-.L007loop4:
+ movl %ebp,32(%esp)
+.align 16
+.L003loop4:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
@@ -214,9 +79,9 @@ RC4:
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
- jb .L007loop4
+ jb .L003loop4
cmpl 24(%esp),%esi
- je .L006done
+ je .L004done
movl 32(%esp),%ebp
.align 16
.L002loop1:
@@ -234,11 +99,11 @@ RC4:
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb .L002loop1
- jmp .L006done
+ jmp .L004done
.align 16
.L001RC4_CHAR:
movzbl (%edi,%eax,1),%ecx
-.L008cloop1:
+.L005cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
@@ -251,10 +116,10 @@ RC4:
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
- jb .L008cloop1
-.L006done:
+ jb .L005cloop1
+.L004done:
decb %al
- movl %ebx,-4(%edi)
+ movb %bl,-4(%edi)
movb %al,-8(%edi)
.L000abort:
popl %edi
@@ -263,11 +128,11 @@ RC4:
popl %ebp
ret
.size RC4,.-.L_RC4_begin
-.globl private_RC4_set_key
-.type private_RC4_set_key,@function
+.globl RC4_set_key
+.type RC4_set_key,@function
.align 16
-private_RC4_set_key:
-.L_private_RC4_set_key_begin:
+RC4_set_key:
+.L_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
@@ -282,53 +147,53 @@ private_RC4_set_key:
xorl %eax,%eax
movl %ebp,-4(%edi)
btl $20,(%edx)
- jc .L009c1stloop
+ jc .L006c1stloop
.align 16
-.L010w1stloop:
+.L007w1stloop:
movl %eax,(%edi,%eax,4)
addb $1,%al
- jnc .L010w1stloop
+ jnc .L007w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 16
-.L011w2ndloop:
+.L008w2ndloop:
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
- jnz .L012wnowrap
+ jnz .L009wnowrap
movl -4(%edi),%ebp
-.L012wnowrap:
+.L009wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
- jnc .L011w2ndloop
- jmp .L013exit
+ jnc .L008w2ndloop
+ jmp .L010exit
.align 16
-.L009c1stloop:
+.L006c1stloop:
movb %al,(%edi,%eax,1)
addb $1,%al
- jnc .L009c1stloop
+ jnc .L006c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 16
-.L014c2ndloop:
+.L011c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
- jnz .L015cnowrap
+ jnz .L012cnowrap
movl -4(%edi),%ebp
-.L015cnowrap:
+.L012cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
- jnc .L014c2ndloop
+ jnc .L011c2ndloop
movl $-1,256(%edi)
-.L013exit:
+.L010exit:
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
@@ -337,36 +202,29 @@ private_RC4_set_key:
popl %ebx
popl %ebp
ret
-.size private_RC4_set_key,.-.L_private_RC4_set_key_begin
+.size RC4_set_key,.-.L_RC4_set_key_begin
.globl RC4_options
.type RC4_options,@function
.align 16
RC4_options:
.L_RC4_options_begin:
- call .L016pic_point
-.L016pic_point:
+ call .L013pic_point
+.L013pic_point:
popl %eax
- leal .L017opts-.L016pic_point(%eax),%eax
+ leal .L014opts-.L013pic_point(%eax),%eax
leal OPENSSL_ia32cap_P,%edx
- movl (%edx),%edx
- btl $20,%edx
- jc .L0181xchar
- btl $26,%edx
- jnc .L019ret
- addl $25,%eax
- ret
-.L0181xchar:
+ btl $20,(%edx)
+ jnc .L015skip
addl $12,%eax
-.L019ret:
+.L015skip:
ret
.align 64
-.L017opts:
+.L014opts:
.byte 114,99,52,40,52,120,44,105,110,116,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
-.byte 114,99,52,40,56,120,44,109,109,120,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
.size RC4_options,.-.L_RC4_options_begin
-.comm OPENSSL_ia32cap_P,8,4
+.comm OPENSSL_ia32cap_P,4,4
diff --git a/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s b/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
index e77f65412f..cccb1aba85 100644
--- a/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
+++ b/deps/openssl/asm/x86-elf-gas/sha/sha1-586.s
@@ -12,12 +12,11 @@ sha1_block_data_order:
movl 20(%esp),%ebp
movl 24(%esp),%esi
movl 28(%esp),%eax
- subl $76,%esp
+ subl $64,%esp
shll $6,%eax
addl %esi,%eax
- movl %eax,104(%esp)
+ movl %eax,92(%esp)
movl 16(%ebp),%edi
- jmp .L000loop
.align 16
.L000loop:
movl (%esi),%eax
@@ -68,7 +67,7 @@ sha1_block_data_order:
movl %ebx,52(%esp)
movl %ecx,56(%esp)
movl %edx,60(%esp)
- movl %esi,100(%esp)
+ movl %esi,88(%esp)
movl (%ebp),%eax
movl 4(%ebp),%ebx
movl 8(%ebp),%ecx
@@ -79,10 +78,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl (%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl (%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
@@ -91,10 +90,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 4(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 4(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
@@ -103,10 +102,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 8(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 8(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
@@ -115,10 +114,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 12(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 12(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
addl %ecx,%ebp
@@ -127,10 +126,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %esi,%ebx
addl %eax,%ebp
- movl 16(%esp),%eax
andl %edx,%ebx
- rorl $2,%edx
+ movl 16(%esp),%eax
xorl %esi,%ebx
+ rorl $2,%edx
leal 1518500249(%ebp,%eax,1),%ebp
addl %ebx,%ebp
@@ -139,10 +138,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %edi,%eax
addl %esi,%ebp
- movl 20(%esp),%esi
andl %ecx,%eax
- rorl $2,%ecx
+ movl 20(%esp),%esi
xorl %edi,%eax
+ rorl $2,%ecx
leal 1518500249(%ebp,%esi,1),%ebp
addl %eax,%ebp
@@ -151,10 +150,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl 24(%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl 24(%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
@@ -163,10 +162,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 28(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 28(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
@@ -175,10 +174,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 32(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 32(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
@@ -187,10 +186,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 36(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 36(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
addl %ecx,%ebp
@@ -199,10 +198,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %esi,%ebx
addl %eax,%ebp
- movl 40(%esp),%eax
andl %edx,%ebx
- rorl $2,%edx
+ movl 40(%esp),%eax
xorl %esi,%ebx
+ rorl $2,%edx
leal 1518500249(%ebp,%eax,1),%ebp
addl %ebx,%ebp
@@ -211,10 +210,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %edi,%eax
addl %esi,%ebp
- movl 44(%esp),%esi
andl %ecx,%eax
- rorl $2,%ecx
+ movl 44(%esp),%esi
xorl %edi,%eax
+ rorl $2,%ecx
leal 1518500249(%ebp,%esi,1),%ebp
addl %eax,%ebp
@@ -223,10 +222,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl 48(%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl 48(%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
@@ -235,10 +234,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 52(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 52(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
@@ -247,10 +246,10 @@ sha1_block_data_order:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 56(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 56(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
@@ -259,1099 +258,1162 @@ sha1_block_data_order:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 60(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 60(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
- movl (%esp),%ebx
addl %ebp,%ecx
+ movl (%esp),%ebx
movl %edi,%ebp
xorl 8(%esp),%ebx
xorl %esi,%ebp
xorl 32(%esp),%ebx
andl %edx,%ebp
+ rorl $2,%edx
xorl 52(%esp),%ebx
roll $1,%ebx
xorl %esi,%ebp
- addl %ebp,%eax
- movl %ecx,%ebp
- rorl $2,%edx
movl %ebx,(%esp)
- roll $5,%ebp
leal 1518500249(%ebx,%eax,1),%ebx
- movl 4(%esp),%eax
+ movl %ecx,%eax
+ roll $5,%eax
addl %ebp,%ebx
+ addl %eax,%ebx
+ movl 4(%esp),%eax
movl %edx,%ebp
xorl 12(%esp),%eax
xorl %edi,%ebp
xorl 36(%esp),%eax
andl %ecx,%ebp
+ rorl $2,%ecx
xorl 56(%esp),%eax
roll $1,%eax
xorl %edi,%ebp
- addl %ebp,%esi
- movl %ebx,%ebp
- rorl $2,%ecx
movl %eax,4(%esp)
- roll $5,%ebp
leal 1518500249(%eax,%esi,1),%eax
- movl 8(%esp),%esi
+ movl %ebx,%esi
+ roll $5,%esi
addl %ebp,%eax
+ addl %esi,%eax
+ movl 8(%esp),%esi
movl %ecx,%ebp
xorl 16(%esp),%esi
xorl %edx,%ebp
xorl 40(%esp),%esi
andl %ebx,%ebp
+ rorl $2,%ebx
xorl 60(%esp),%esi
roll $1,%esi
xorl %edx,%ebp
- addl %ebp,%edi
- movl %eax,%ebp
- rorl $2,%ebx
movl %esi,8(%esp)
- roll $5,%ebp
leal 1518500249(%esi,%edi,1),%esi
- movl 12(%esp),%edi
+ movl %eax,%edi
+ roll $5,%edi
addl %ebp,%esi
+ addl %edi,%esi
+ movl 12(%esp),%edi
movl %ebx,%ebp
xorl 20(%esp),%edi
xorl %ecx,%ebp
xorl 44(%esp),%edi
andl %eax,%ebp
+ rorl $2,%eax
xorl (%esp),%edi
roll $1,%edi
xorl %ecx,%ebp
- addl %ebp,%edx
- movl %esi,%ebp
- rorl $2,%eax
movl %edi,12(%esp)
- roll $5,%ebp
leal 1518500249(%edi,%edx,1),%edi
- movl 16(%esp),%edx
+ movl %esi,%edx
+ roll $5,%edx
addl %ebp,%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
xorl 24(%esp),%edx
xorl %eax,%ebp
xorl 48(%esp),%edx
xorl %ebx,%ebp
xorl 4(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,16(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 20(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
xorl 28(%esp),%ecx
xorl %esi,%ebp
xorl 52(%esp),%ecx
xorl %eax,%ebp
xorl 8(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,20(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 24(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
xorl 32(%esp),%ebx
xorl %edi,%ebp
xorl 56(%esp),%ebx
xorl %esi,%ebp
xorl 12(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,24(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 28(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
xorl 36(%esp),%eax
xorl %edx,%ebp
xorl 60(%esp),%eax
xorl %edi,%ebp
xorl 16(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,28(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 32(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
xorl 40(%esp),%esi
xorl %ecx,%ebp
xorl (%esp),%esi
xorl %edx,%ebp
xorl 20(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,32(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 36(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
xorl 44(%esp),%edi
xorl %ebx,%ebp
xorl 4(%esp),%edi
xorl %ecx,%ebp
xorl 24(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,36(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl 40(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
xorl 48(%esp),%edx
xorl %eax,%ebp
xorl 8(%esp),%edx
xorl %ebx,%ebp
xorl 28(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,40(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 44(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
xorl 52(%esp),%ecx
xorl %esi,%ebp
xorl 12(%esp),%ecx
xorl %eax,%ebp
xorl 32(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,44(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 48(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
xorl 56(%esp),%ebx
xorl %edi,%ebp
xorl 16(%esp),%ebx
xorl %esi,%ebp
xorl 36(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,48(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 52(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
xorl 60(%esp),%eax
xorl %edx,%ebp
xorl 20(%esp),%eax
xorl %edi,%ebp
xorl 40(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,52(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 56(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
xorl (%esp),%esi
xorl %ecx,%ebp
xorl 24(%esp),%esi
xorl %edx,%ebp
xorl 44(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,56(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 60(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
xorl 4(%esp),%edi
xorl %ebx,%ebp
xorl 28(%esp),%edi
xorl %ecx,%ebp
xorl 48(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,60(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl (%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl (%esp),%edx
+ rorl $2,%esi
xorl 8(%esp),%edx
xorl %eax,%ebp
xorl 32(%esp),%edx
xorl %ebx,%ebp
xorl 52(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 4(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 4(%esp),%ecx
+ rorl $2,%edi
xorl 12(%esp),%ecx
xorl %esi,%ebp
xorl 36(%esp),%ecx
xorl %eax,%ebp
xorl 56(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,4(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 8(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl 8(%esp),%ebx
+ rorl $2,%edx
xorl 16(%esp),%ebx
xorl %edi,%ebp
xorl 40(%esp),%ebx
xorl %esi,%ebp
xorl 60(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,8(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 12(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 12(%esp),%eax
+ rorl $2,%ecx
xorl 20(%esp),%eax
xorl %edx,%ebp
xorl 44(%esp),%eax
xorl %edi,%ebp
xorl (%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,12(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 16(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 16(%esp),%esi
+ rorl $2,%ebx
xorl 24(%esp),%esi
xorl %ecx,%ebp
xorl 48(%esp),%esi
xorl %edx,%ebp
xorl 4(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,16(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 20(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 20(%esp),%edi
+ rorl $2,%eax
xorl 28(%esp),%edi
xorl %ebx,%ebp
xorl 52(%esp),%edi
xorl %ecx,%ebp
xorl 8(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,20(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl 24(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 24(%esp),%edx
+ rorl $2,%esi
xorl 32(%esp),%edx
xorl %eax,%ebp
xorl 56(%esp),%edx
xorl %ebx,%ebp
xorl 12(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,24(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 28(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 28(%esp),%ecx
+ rorl $2,%edi
xorl 36(%esp),%ecx
xorl %esi,%ebp
xorl 60(%esp),%ecx
xorl %eax,%ebp
xorl 16(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,28(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 32(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
- movl %edi,%ebp
- xorl 40(%esp),%ebx
- xorl %esi,%ebp
- xorl (%esp),%ebx
- andl %edx,%ebp
- xorl 20(%esp),%ebx
+ movl 32(%esp),%ebx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,32(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,32(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 36(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
- movl %edx,%ebp
- xorl 44(%esp),%eax
- xorl %edi,%ebp
- xorl 4(%esp),%eax
- andl %ecx,%ebp
- xorl 24(%esp),%eax
+ movl 36(%esp),%eax
+ movl 44(%esp),%ebp
+ xorl %ebp,%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 24(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,36(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,36(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 40(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
- movl %ecx,%ebp
- xorl 48(%esp),%esi
- xorl %edx,%ebp
- xorl 8(%esp),%esi
- andl %ebx,%ebp
- xorl 28(%esp),%esi
+ movl 40(%esp),%esi
+ movl 48(%esp),%ebp
+ xorl %ebp,%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 28(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,40(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,40(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 44(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
- movl %ebx,%ebp
- xorl 52(%esp),%edi
- xorl %ecx,%ebp
- xorl 12(%esp),%edi
- andl %eax,%ebp
- xorl 32(%esp),%edi
+ movl 44(%esp),%edi
+ movl 52(%esp),%ebp
+ xorl %ebp,%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 32(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,44(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,44(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 48(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
- movl %eax,%ebp
- xorl 56(%esp),%edx
- xorl %ebx,%ebp
- xorl 16(%esp),%edx
- andl %esi,%ebp
- xorl 36(%esp),%edx
+ movl 48(%esp),%edx
+ movl 56(%esp),%ebp
+ xorl %ebp,%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 36(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,48(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,48(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 52(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
- movl %esi,%ebp
- xorl 60(%esp),%ecx
- xorl %eax,%ebp
- xorl 20(%esp),%ecx
- andl %edi,%ebp
- xorl 40(%esp),%ecx
+ movl 52(%esp),%ecx
+ movl 60(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,52(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,52(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 56(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
- movl %edi,%ebp
- xorl (%esp),%ebx
- xorl %esi,%ebp
- xorl 24(%esp),%ebx
- andl %edx,%ebp
- xorl 44(%esp),%ebx
+ movl 56(%esp),%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,56(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,56(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 60(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
- movl %edx,%ebp
- xorl 4(%esp),%eax
- xorl %edi,%ebp
- xorl 28(%esp),%eax
- andl %ecx,%ebp
- xorl 48(%esp),%eax
+ movl 60(%esp),%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 48(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,60(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,60(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl (%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
- movl %ecx,%ebp
- xorl 8(%esp),%esi
- xorl %edx,%ebp
- xorl 32(%esp),%esi
- andl %ebx,%ebp
- xorl 52(%esp),%esi
+ movl (%esp),%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 52(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 4(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
- movl %ebx,%ebp
- xorl 12(%esp),%edi
- xorl %ecx,%ebp
- xorl 36(%esp),%edi
- andl %eax,%ebp
- xorl 56(%esp),%edi
+ movl 4(%esp),%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 56(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,4(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,4(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 8(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
- movl %eax,%ebp
- xorl 16(%esp),%edx
- xorl %ebx,%ebp
- xorl 40(%esp),%edx
- andl %esi,%ebp
- xorl 60(%esp),%edx
+ movl 8(%esp),%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl 60(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,8(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,8(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 12(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
- movl %esi,%ebp
- xorl 20(%esp),%ecx
- xorl %eax,%ebp
- xorl 44(%esp),%ecx
- andl %edi,%ebp
- xorl (%esp),%ecx
+ movl 12(%esp),%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl (%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,12(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,12(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 16(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
- movl %edi,%ebp
- xorl 24(%esp),%ebx
- xorl %esi,%ebp
- xorl 48(%esp),%ebx
- andl %edx,%ebp
- xorl 4(%esp),%ebx
+ movl 16(%esp),%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,16(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,16(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 20(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
- movl %edx,%ebp
- xorl 28(%esp),%eax
- xorl %edi,%ebp
- xorl 52(%esp),%eax
- andl %ecx,%ebp
- xorl 8(%esp),%eax
+ movl 20(%esp),%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 8(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,20(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,20(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 24(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
- movl %ecx,%ebp
- xorl 32(%esp),%esi
- xorl %edx,%ebp
- xorl 56(%esp),%esi
- andl %ebx,%ebp
- xorl 12(%esp),%esi
+ movl 24(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 56(%esp),%ebp
+ xorl %ebp,%esi
+ movl 12(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,24(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,24(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 28(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
- movl %ebx,%ebp
- xorl 36(%esp),%edi
- xorl %ecx,%ebp
- xorl 60(%esp),%edi
- andl %eax,%ebp
- xorl 16(%esp),%edi
+ movl 28(%esp),%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 60(%esp),%ebp
+ xorl %ebp,%edi
+ movl 16(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,28(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,28(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 32(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
- movl %eax,%ebp
- xorl 40(%esp),%edx
- xorl %ebx,%ebp
- xorl (%esp),%edx
- andl %esi,%ebp
- xorl 20(%esp),%edx
+ movl 32(%esp),%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl (%esp),%ebp
+ xorl %ebp,%edx
+ movl 20(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,32(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,32(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 36(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
- movl %esi,%ebp
- xorl 44(%esp),%ecx
- xorl %eax,%ebp
- xorl 4(%esp),%ecx
- andl %edi,%ebp
- xorl 24(%esp),%ecx
+ movl 36(%esp),%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,36(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,36(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 40(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
- movl %edi,%ebp
- xorl 48(%esp),%ebx
- xorl %esi,%ebp
- xorl 8(%esp),%ebx
- andl %edx,%ebp
- xorl 28(%esp),%ebx
+ movl 40(%esp),%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 8(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 28(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,40(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,40(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 44(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
- movl %edx,%ebp
- xorl 52(%esp),%eax
- xorl %edi,%ebp
- xorl 12(%esp),%eax
- andl %ecx,%ebp
- xorl 32(%esp),%eax
+ movl 44(%esp),%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 12(%esp),%ebp
+ xorl %ebp,%eax
+ movl 32(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,44(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,44(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 48(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ebx
xorl 56(%esp),%esi
xorl %ecx,%ebp
xorl 16(%esp),%esi
xorl %edx,%ebp
xorl 36(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,48(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 52(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 52(%esp),%edi
+ rorl $2,%eax
xorl 60(%esp),%edi
xorl %ebx,%ebp
xorl 20(%esp),%edi
xorl %ecx,%ebp
xorl 40(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,52(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 56(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 56(%esp),%edx
+ rorl $2,%esi
xorl (%esp),%edx
xorl %eax,%ebp
xorl 24(%esp),%edx
xorl %ebx,%ebp
xorl 44(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,56(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 60(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 60(%esp),%ecx
+ rorl $2,%edi
xorl 4(%esp),%ecx
xorl %esi,%ebp
xorl 28(%esp),%ecx
xorl %eax,%ebp
xorl 48(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,60(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl (%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl (%esp),%ebx
+ rorl $2,%edx
xorl 8(%esp),%ebx
xorl %edi,%ebp
xorl 32(%esp),%ebx
xorl %esi,%ebp
xorl 52(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 4(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 4(%esp),%eax
+ rorl $2,%ecx
xorl 12(%esp),%eax
xorl %edx,%ebp
xorl 36(%esp),%eax
xorl %edi,%ebp
xorl 56(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,4(%esp)
- leal 3395469782(%eax,%esi,1),%eax
- movl 8(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 8(%esp),%esi
+ rorl $2,%ebx
xorl 16(%esp),%esi
xorl %ecx,%ebp
xorl 40(%esp),%esi
xorl %edx,%ebp
xorl 60(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,8(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 12(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 12(%esp),%edi
+ rorl $2,%eax
xorl 20(%esp),%edi
xorl %ebx,%ebp
xorl 44(%esp),%edi
xorl %ecx,%ebp
xorl (%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,12(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 16(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
xorl 24(%esp),%edx
xorl %eax,%ebp
xorl 48(%esp),%edx
xorl %ebx,%ebp
xorl 4(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,16(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 20(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
xorl 28(%esp),%ecx
xorl %esi,%ebp
xorl 52(%esp),%ecx
xorl %eax,%ebp
xorl 8(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,20(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl 24(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
xorl 32(%esp),%ebx
xorl %edi,%ebp
xorl 56(%esp),%ebx
xorl %esi,%ebp
xorl 12(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,24(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 28(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
xorl 36(%esp),%eax
xorl %edx,%ebp
xorl 60(%esp),%eax
xorl %edi,%ebp
xorl 16(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,28(%esp)
- leal 3395469782(%eax,%esi,1),%eax
- movl 32(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
xorl 40(%esp),%esi
xorl %ecx,%ebp
xorl (%esp),%esi
xorl %edx,%ebp
xorl 20(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,32(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 36(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
xorl 44(%esp),%edi
xorl %ebx,%ebp
xorl 4(%esp),%edi
xorl %ecx,%ebp
xorl 24(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,36(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 40(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
xorl 48(%esp),%edx
xorl %eax,%ebp
xorl 8(%esp),%edx
xorl %ebx,%ebp
xorl 28(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,40(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 44(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
xorl 52(%esp),%ecx
xorl %esi,%ebp
xorl 12(%esp),%ecx
xorl %eax,%ebp
xorl 32(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,44(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl 48(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
xorl 56(%esp),%ebx
xorl %edi,%ebp
xorl 16(%esp),%ebx
xorl %esi,%ebp
xorl 36(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,48(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 52(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
xorl 60(%esp),%eax
xorl %edx,%ebp
xorl 20(%esp),%eax
xorl %edi,%ebp
xorl 40(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
- leal 3395469782(%eax,%esi,1),%eax
- movl 56(%esp),%esi
- addl %ebp,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
xorl (%esp),%esi
xorl %ecx,%ebp
xorl 24(%esp),%esi
xorl %edx,%ebp
xorl 44(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
- leal 3395469782(%esi,%edi,1),%esi
- movl 60(%esp),%edi
- addl %ebp,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
xorl 4(%esp),%edi
xorl %ebx,%ebp
xorl 28(%esp),%edi
xorl %ecx,%ebp
xorl 48(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
- leal 3395469782(%edi,%edx,1),%edi
- addl %ebp,%edi
- movl 96(%esp),%ebp
- movl 100(%esp),%edx
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ movl 84(%esp),%ebp
+ movl 88(%esp),%edx
addl (%ebp),%edi
addl 4(%ebp),%esi
addl 8(%ebp),%eax
@@ -1360,14 +1422,14 @@ sha1_block_data_order:
movl %edi,(%ebp)
addl $64,%edx
movl %esi,4(%ebp)
- cmpl 104(%esp),%edx
+ cmpl 92(%esp),%edx
movl %eax,8(%ebp)
movl %ecx,%edi
movl %ebx,12(%ebp)
movl %edx,%esi
movl %ecx,16(%ebp)
jb .L000loop
- addl $76,%esp
+ addl $64,%esp
popl %edi
popl %esi
popl %ebx
diff --git a/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s b/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
index 77a89514f1..973e50d198 100644
--- a/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
+++ b/deps/openssl/asm/x86-elf-gas/sha/sha256-586.s
@@ -96,30 +96,31 @@ sha256_block_data_order:
.L00300_15:
movl 92(%esp),%ebx
movl %edx,%ecx
- rorl $14,%ecx
- movl 20(%esp),%esi
- xorl %edx,%ecx
- rorl $5,%ecx
- xorl %edx,%ecx
rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
- xorl %edi,%esi
movl %edx,16(%esp)
+ xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
- rorl $9,%ecx
+ rorl $2,%ecx
addl 28(%esp),%ebx
- xorl %eax,%ecx
- rorl $11,%ecx
+ rorl $13,%edi
movl 4(%esp),%esi
- xorl %eax,%ecx
- rorl $2,%ecx
+ xorl %edi,%ecx
+ rorl $9,%edi
addl %ebx,%edx
+ xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
@@ -141,46 +142,48 @@ sha256_block_data_order:
.L00416_63:
movl %ebx,%esi
movl 100(%esp),%ecx
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %esi,%ebx
rorl $11,%esi
movl %ecx,%edi
- xorl %ebx,%esi
- rorl $7,%esi
- shrl $3,%ebx
- rorl $2,%edi
xorl %esi,%ebx
- xorl %ecx,%edi
- rorl $17,%edi
shrl $10,%ecx
- addl 156(%esp),%ebx
+ movl 156(%esp),%esi
+ rorl $17,%edi
+ xorl %edi,%ecx
+ rorl $2,%edi
+ addl %esi,%ebx
xorl %ecx,%edi
- addl 120(%esp),%ebx
- movl %edx,%ecx
addl %edi,%ebx
- rorl $14,%ecx
+ movl %edx,%ecx
+ addl 120(%esp),%ebx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
movl 20(%esp),%esi
- xorl %edx,%ecx
- rorl $5,%ecx
+ xorl %edi,%ecx
+ rorl $14,%edi
movl %ebx,92(%esp)
- xorl %edx,%ecx
- rorl $6,%ecx
+ xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
- xorl %edi,%esi
movl %edx,16(%esp)
+ xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
- rorl $9,%ecx
+ rorl $2,%ecx
addl 28(%esp),%ebx
- xorl %eax,%ecx
- rorl $11,%ecx
+ rorl $13,%edi
movl 4(%esp),%esi
- xorl %eax,%ecx
- rorl $2,%ecx
+ xorl %edi,%ecx
+ rorl $9,%edi
addl %ebx,%edx
+ xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
diff --git a/deps/openssl/asm/x86-elf-gas/x86cpuid.s b/deps/openssl/asm/x86-elf-gas/x86cpuid.s
index f9cd038059..56a92bfcbe 100644
--- a/deps/openssl/asm/x86-elf-gas/x86cpuid.s
+++ b/deps/openssl/asm/x86-elf-gas/x86cpuid.s
@@ -19,9 +19,9 @@ OPENSSL_ia32_cpuid:
pushfl
popl %eax
xorl %eax,%ecx
- xorl %eax,%eax
btl $21,%ecx
- jnc .L000nocpuid
+ jnc .L000done
+ xorl %eax,%eax
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
@@ -47,14 +47,7 @@ OPENSSL_ia32_cpuid:
jnz .L001intel
movl $2147483648,%eax
.byte 0x0f,0xa2
- cmpl $2147483649,%eax
- jb .L001intel
- movl %eax,%esi
- movl $2147483649,%eax
- .byte 0x0f,0xa2
- orl %ecx,%ebp
- andl $2049,%ebp
- cmpl $2147483656,%esi
+ cmpl $2147483656,%eax
jb .L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
@@ -63,68 +56,46 @@ OPENSSL_ia32_cpuid:
movl $1,%eax
.byte 0x0f,0xa2
btl $28,%edx
- jnc .L002generic
+ jnc .L000done
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
- ja .L002generic
+ ja .L000done
andl $4026531839,%edx
- jmp .L002generic
+ jmp .L000done
.L001intel:
cmpl $4,%edi
movl $-1,%edi
- jb .L003nocacheinfo
+ jb .L002nocacheinfo
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
-.L003nocacheinfo:
+.L002nocacheinfo:
movl $1,%eax
.byte 0x0f,0xa2
- andl $3220176895,%edx
cmpl $0,%ebp
- jne .L004notintel
- orl $1073741824,%edx
+ jne .L003notP4
andb $15,%ah
cmpb $15,%ah
- jne .L004notintel
+ jne .L003notP4
orl $1048576,%edx
-.L004notintel:
+.L003notP4:
btl $28,%edx
- jnc .L002generic
+ jnc .L000done
andl $4026531839,%edx
cmpl $0,%edi
- je .L002generic
+ je .L000done
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
- ja .L002generic
+ ja .L000done
andl $4026531839,%edx
-.L002generic:
- andl $2048,%ebp
- andl $4294965247,%ecx
- movl %edx,%esi
- orl %ecx,%ebp
- btl $27,%ecx
- jnc .L005clear_avx
- xorl %ecx,%ecx
-.byte 15,1,208
- andl $6,%eax
- cmpl $6,%eax
- je .L006done
- cmpl $2,%eax
- je .L005clear_avx
-.L007clear_xmm:
- andl $4261412861,%ebp
- andl $4278190079,%esi
-.L005clear_avx:
- andl $4026525695,%ebp
-.L006done:
- movl %esi,%eax
- movl %ebp,%edx
-.L000nocpuid:
+.L000done:
+ movl %edx,%eax
+ movl %ecx,%edx
popl %edi
popl %esi
popl %ebx
@@ -140,9 +111,9 @@ OPENSSL_rdtsc:
xorl %edx,%edx
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
- jnc .L008notsc
+ jnc .L004notsc
.byte 0x0f,0x31
-.L008notsc:
+.L004notsc:
ret
.size OPENSSL_rdtsc,.-.L_OPENSSL_rdtsc_begin
.globl OPENSSL_instrument_halt
@@ -152,14 +123,14 @@ OPENSSL_instrument_halt:
.L_OPENSSL_instrument_halt_begin:
leal OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
- jnc .L009nohalt
+ jnc .L005nohalt
.long 2421723150
andl $3,%eax
- jnz .L009nohalt
+ jnz .L005nohalt
pushfl
popl %eax
btl $9,%eax
- jnc .L009nohalt
+ jnc .L005nohalt
.byte 0x0f,0x31
pushl %edx
pushl %eax
@@ -169,7 +140,7 @@ OPENSSL_instrument_halt:
sbbl 4(%esp),%edx
addl $8,%esp
ret
-.L009nohalt:
+.L005nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
@@ -182,21 +153,21 @@ OPENSSL_far_spin:
pushfl
popl %eax
btl $9,%eax
- jnc .L010nospin
+ jnc .L006nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
- jmp .L011spin
+ jmp .L007spin
.align 16
-.L011spin:
+.L007spin:
incl %eax
cmpl (%ecx),%edx
- je .L011spin
+ je .L007spin
.long 529567888
ret
-.L010nospin:
+.L006nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
@@ -211,9 +182,9 @@ OPENSSL_wipe_cpu:
leal OPENSSL_ia32cap_P,%ecx
movl (%ecx),%ecx
btl $1,(%ecx)
- jnc .L012no_x87
+ jnc .L008no_x87
.long 4007259865,4007259865,4007259865,4007259865,2430851995
-.L012no_x87:
+.L008no_x87:
leal 4(%esp),%eax
ret
.size OPENSSL_wipe_cpu,.-.L_OPENSSL_wipe_cpu_begin
@@ -227,11 +198,11 @@ OPENSSL_atomic_add:
pushl %ebx
nop
movl (%edx),%eax
-.L013spin:
+.L009spin:
leal (%eax,%ecx,1),%ebx
nop
.long 447811568
- jne .L013spin
+ jne .L009spin
movl %ebx,%eax
popl %ebx
ret
@@ -272,49 +243,37 @@ OPENSSL_cleanse:
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
- jae .L014lot
+ jae .L010lot
cmpl $0,%ecx
- je .L015ret
-.L016little:
+ je .L011ret
+.L012little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
- jnz .L016little
-.L015ret:
+ jnz .L012little
+.L011ret:
ret
.align 16
-.L014lot:
+.L010lot:
testl $3,%edx
- jz .L017aligned
+ jz .L013aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
- jmp .L014lot
-.L017aligned:
+ jmp .L010lot
+.L013aligned:
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
- jnz .L017aligned
+ jnz .L013aligned
cmpl $0,%ecx
- jne .L016little
+ jne .L012little
ret
.size OPENSSL_cleanse,.-.L_OPENSSL_cleanse_begin
-.globl OPENSSL_ia32_rdrand
-.type OPENSSL_ia32_rdrand,@function
-.align 16
-OPENSSL_ia32_rdrand:
-.L_OPENSSL_ia32_rdrand_begin:
- movl $8,%ecx
-.L018loop:
-.byte 15,199,240
- jc .L019break
- loop .L018loop
-.L019break:
- cmpl $0,%eax
- cmovel %ecx,%eax
- ret
-.size OPENSSL_ia32_rdrand,.-.L_OPENSSL_ia32_rdrand_begin
-.comm OPENSSL_ia32cap_P,8,4
+.comm OPENSSL_ia32cap_P,4,4
.section .init
call OPENSSL_cpuid_setup
+ jmp .Linitalign
+.align 16
+.Linitalign:
diff --git a/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s b/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
index a58ea6f76d..ff56a4bef7 100644
--- a/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
+++ b/deps/openssl/asm/x86-macosx-gas/aes/aes-586.s
@@ -975,7 +975,7 @@ L_AES_encrypt_begin:
call L004pic_point
L004pic_point:
popl %ebp
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L004pic_point(%ebp),%eax
+ leal _OPENSSL_ia32cap_P,%eax
leal LAES_Te-L004pic_point(%ebp),%ebp
leal 764(%esp),%ebx
subl %ebp,%ebx
@@ -2153,7 +2153,7 @@ L_AES_decrypt_begin:
call L010pic_point
L010pic_point:
popl %ebp
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L010pic_point(%ebp),%eax
+ leal _OPENSSL_ia32cap_P,%eax
leal LAES_Td-L010pic_point(%ebp),%ebp
leal 764(%esp),%ebx
subl %ebp,%ebx
@@ -2207,7 +2207,7 @@ L_AES_cbc_encrypt_begin:
call L013pic_point
L013pic_point:
popl %ebp
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L013pic_point(%ebp),%eax
+ leal _OPENSSL_ia32cap_P,%eax
cmpl $0,40(%esp)
leal LAES_Te-L013pic_point(%ebp),%ebp
jne L014picked_te
@@ -2950,16 +2950,16 @@ L045exit:
popl %ebx
popl %ebp
ret
-.globl _private_AES_set_encrypt_key
+.globl _AES_set_encrypt_key
.align 4
-_private_AES_set_encrypt_key:
-L_private_AES_set_encrypt_key_begin:
+_AES_set_encrypt_key:
+L_AES_set_encrypt_key_begin:
call __x86_AES_set_encrypt_key
ret
-.globl _private_AES_set_decrypt_key
+.globl _AES_set_decrypt_key
.align 4
-_private_AES_set_decrypt_key:
-L_private_AES_set_decrypt_key_begin:
+_AES_set_decrypt_key:
+L_AES_set_decrypt_key_begin:
call __x86_AES_set_encrypt_key
cmpl $0,%eax
je L054proceed
@@ -3191,8 +3191,4 @@ L056permute:
.byte 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.section __IMPORT,__pointers,non_lazy_symbol_pointers
-L_OPENSSL_ia32cap_P$non_lazy_ptr:
-.indirect_symbol _OPENSSL_ia32cap_P
-.long 0
-.comm _OPENSSL_ia32cap_P,8,2
+.comm _OPENSSL_ia32cap_P,4
diff --git a/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s b/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
index 2367cee780..4d61caa680 100644
--- a/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
+++ b/deps/openssl/asm/x86-macosx-gas/camellia/cmll-x86.s
@@ -1519,10 +1519,10 @@ L013done:
popl %ebx
popl %ebp
ret
-.globl _private_Camellia_set_key
+.globl _Camellia_set_key
.align 4
-_private_Camellia_set_key:
-L_private_Camellia_set_key_begin:
+_Camellia_set_key:
+L_Camellia_set_key_begin:
pushl %ebx
movl 8(%esp),%ecx
movl 12(%esp),%ebx
diff --git a/deps/openssl/asm/x86-macosx-gas/des/crypt586.s b/deps/openssl/asm/x86-macosx-gas/des/crypt586.s
index 7d0074ec2a..edb1bb3915 100644
--- a/deps/openssl/asm/x86-macosx-gas/des/crypt586.s
+++ b/deps/openssl/asm/x86-macosx-gas/des/crypt586.s
@@ -13,14 +13,11 @@ L_fcrypt_body_begin:
xorl %edi,%edi
xorl %esi,%esi
- call L000PIC_me_up
-L000PIC_me_up:
- popl %edx
- movl L_DES_SPtrans$non_lazy_ptr-L000PIC_me_up(%edx),%edx
+ leal _DES_SPtrans,%edx
pushl %edx
movl 28(%esp),%ebp
pushl $25
-L001start:
+L000start:
# Round 0
@@ -843,7 +840,7 @@ L001start:
movl %esi,%edi
movl %eax,%esi
movl %ebx,(%esp)
- jnz L001start
+ jnz L000start
# FP
@@ -892,7 +889,3 @@ L001start:
popl %ebx
popl %ebp
ret
-.section __IMPORT,__pointers,non_lazy_symbol_pointers
-L_DES_SPtrans$non_lazy_ptr:
-.indirect_symbol _DES_SPtrans
-.long 0
diff --git a/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s b/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
index 882a02d74c..a821dc9503 100644
--- a/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
+++ b/deps/openssl/asm/x86-macosx-gas/rc4/rc4-586.s
@@ -28,149 +28,11 @@ L_RC4_begin:
movl (%edi,%eax,4),%ecx
andl $-4,%edx
jz L002loop1
- testl $-8,%edx
- movl %ebp,32(%esp)
- jz L003go4loop4
- call L004PIC_me_up
-L004PIC_me_up:
- popl %ebp
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L004PIC_me_up(%ebp),%ebp
- btl $26,(%ebp)
- jnc L003go4loop4
- movl 32(%esp),%ebp
- andl $-8,%edx
- leal -8(%esi,%edx,1),%edx
- movl %edx,-4(%edi)
- addb %cl,%bl
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- movq (%esi),%mm0
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm2
- jmp L005loop_mmx_enter
-.align 4,0x90
-L006loop_mmx:
- addb %cl,%bl
- psllq $56,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movq (%esi),%mm0
- movq %mm2,-8(%ebp,%esi,1)
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm2
-L005loop_mmx_enter:
- addb %cl,%bl
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm0,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $8,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $16,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $24,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $32,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $40,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- addb %cl,%bl
- psllq $48,%mm1
- movl (%edi,%ebx,4),%edx
- movl %ecx,(%edi,%ebx,4)
- movl %edx,(%edi,%eax,4)
- incl %eax
- addl %ecx,%edx
- movzbl %al,%eax
- movzbl %dl,%edx
- pxor %mm1,%mm2
- movl (%edi,%eax,4),%ecx
- movd (%edi,%edx,4),%mm1
- movl %ebx,%edx
- xorl %ebx,%ebx
- movb %dl,%bl
- cmpl -4(%edi),%esi
- leal 8(%esi),%esi
- jb L006loop_mmx
- psllq $56,%mm1
- pxor %mm1,%mm2
- movq %mm2,-8(%ebp,%esi,1)
- emms
- cmpl 24(%esp),%esi
- je L007done
- jmp L002loop1
-.align 4,0x90
-L003go4loop4:
leal -4(%esi,%edx,1),%edx
movl %edx,28(%esp)
-L008loop4:
+ movl %ebp,32(%esp)
+.align 4,0x90
+L003loop4:
addb %cl,%bl
movl (%edi,%ebx,4),%edx
movl %ecx,(%edi,%ebx,4)
@@ -216,9 +78,9 @@ L008loop4:
movl %ebp,(%ecx,%esi,1)
leal 4(%esi),%esi
movl (%edi,%eax,4),%ecx
- jb L008loop4
+ jb L003loop4
cmpl 24(%esp),%esi
- je L007done
+ je L004done
movl 32(%esp),%ebp
.align 4,0x90
L002loop1:
@@ -236,11 +98,11 @@ L002loop1:
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
jb L002loop1
- jmp L007done
+ jmp L004done
.align 4,0x90
L001RC4_CHAR:
movzbl (%edi,%eax,1),%ecx
-L009cloop1:
+L005cloop1:
addb %cl,%bl
movzbl (%edi,%ebx,1),%edx
movb %cl,(%edi,%ebx,1)
@@ -253,10 +115,10 @@ L009cloop1:
movzbl (%edi,%eax,1),%ecx
cmpl 24(%esp),%esi
movb %dl,-1(%ebp,%esi,1)
- jb L009cloop1
-L007done:
+ jb L005cloop1
+L004done:
decb %al
- movl %ebx,-4(%edi)
+ movb %bl,-4(%edi)
movb %al,-8(%edi)
L000abort:
popl %edi
@@ -264,10 +126,10 @@ L000abort:
popl %ebx
popl %ebp
ret
-.globl _private_RC4_set_key
+.globl _RC4_set_key
.align 4
-_private_RC4_set_key:
-L_private_RC4_set_key_begin:
+_RC4_set_key:
+L_RC4_set_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
@@ -275,63 +137,60 @@ L_private_RC4_set_key_begin:
movl 20(%esp),%edi
movl 24(%esp),%ebp
movl 28(%esp),%esi
- call L010PIC_me_up
-L010PIC_me_up:
- popl %edx
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L010PIC_me_up(%edx),%edx
+ leal _OPENSSL_ia32cap_P,%edx
leal 8(%edi),%edi
leal (%esi,%ebp,1),%esi
negl %ebp
xorl %eax,%eax
movl %ebp,-4(%edi)
btl $20,(%edx)
- jc L011c1stloop
+ jc L006c1stloop
.align 4,0x90
-L012w1stloop:
+L007w1stloop:
movl %eax,(%edi,%eax,4)
addb $1,%al
- jnc L012w1stloop
+ jnc L007w1stloop
xorl %ecx,%ecx
xorl %edx,%edx
.align 4,0x90
-L013w2ndloop:
+L008w2ndloop:
movl (%edi,%ecx,4),%eax
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movl (%edi,%edx,4),%ebx
- jnz L014wnowrap
+ jnz L009wnowrap
movl -4(%edi),%ebp
-L014wnowrap:
+L009wnowrap:
movl %eax,(%edi,%edx,4)
movl %ebx,(%edi,%ecx,4)
addb $1,%cl
- jnc L013w2ndloop
- jmp L015exit
+ jnc L008w2ndloop
+ jmp L010exit
.align 4,0x90
-L011c1stloop:
+L006c1stloop:
movb %al,(%edi,%eax,1)
addb $1,%al
- jnc L011c1stloop
+ jnc L006c1stloop
xorl %ecx,%ecx
xorl %edx,%edx
xorl %ebx,%ebx
.align 4,0x90
-L016c2ndloop:
+L011c2ndloop:
movb (%edi,%ecx,1),%al
addb (%esi,%ebp,1),%dl
addb %al,%dl
addl $1,%ebp
movb (%edi,%edx,1),%bl
- jnz L017cnowrap
+ jnz L012cnowrap
movl -4(%edi),%ebp
-L017cnowrap:
+L012cnowrap:
movb %al,(%edi,%edx,1)
movb %bl,(%edi,%ecx,1)
addb $1,%cl
- jnc L016c2ndloop
+ jnc L011c2ndloop
movl $-1,256(%edi)
-L015exit:
+L010exit:
xorl %eax,%eax
movl %eax,-8(%edi)
movl %eax,-4(%edi)
@@ -344,36 +203,22 @@ L015exit:
.align 4
_RC4_options:
L_RC4_options_begin:
- call L018pic_point
-L018pic_point:
+ call L013pic_point
+L013pic_point:
popl %eax
- leal L019opts-L018pic_point(%eax),%eax
- call L020PIC_me_up
-L020PIC_me_up:
- popl %edx
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L020PIC_me_up(%edx),%edx
- movl (%edx),%edx
- btl $20,%edx
- jc L0211xchar
- btl $26,%edx
- jnc L022ret
- addl $25,%eax
- ret
-L0211xchar:
+ leal L014opts-L013pic_point(%eax),%eax
+ leal _OPENSSL_ia32cap_P,%edx
+ btl $20,(%edx)
+ jnc L015skip
addl $12,%eax
-L022ret:
+L015skip:
ret
.align 6,0x90
-L019opts:
+L014opts:
.byte 114,99,52,40,52,120,44,105,110,116,41,0
.byte 114,99,52,40,49,120,44,99,104,97,114,41,0
-.byte 114,99,52,40,56,120,44,109,109,120,41,0
.byte 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
.byte 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
.byte 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 6,0x90
-.section __IMPORT,__pointers,non_lazy_symbol_pointers
-L_OPENSSL_ia32cap_P$non_lazy_ptr:
-.indirect_symbol _OPENSSL_ia32cap_P
-.long 0
-.comm _OPENSSL_ia32cap_P,8,2
+.comm _OPENSSL_ia32cap_P,4
diff --git a/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s b/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
index 28d95721b8..4f356fe70f 100644
--- a/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
+++ b/deps/openssl/asm/x86-macosx-gas/sha/sha1-586.s
@@ -11,12 +11,11 @@ L_sha1_block_data_order_begin:
movl 20(%esp),%ebp
movl 24(%esp),%esi
movl 28(%esp),%eax
- subl $76,%esp
+ subl $64,%esp
shll $6,%eax
addl %esi,%eax
- movl %eax,104(%esp)
+ movl %eax,92(%esp)
movl 16(%ebp),%edi
- jmp L000loop
.align 4,0x90
L000loop:
movl (%esi),%eax
@@ -67,7 +66,7 @@ L000loop:
movl %ebx,52(%esp)
movl %ecx,56(%esp)
movl %edx,60(%esp)
- movl %esi,100(%esp)
+ movl %esi,88(%esp)
movl (%ebp),%eax
movl 4(%ebp),%ebx
movl 8(%ebp),%ecx
@@ -79,10 +78,10 @@ L000loop:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl (%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl (%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
# 00_15 1
@@ -92,10 +91,10 @@ L000loop:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 4(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 4(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
# 00_15 2
@@ -105,10 +104,10 @@ L000loop:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 8(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 8(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
# 00_15 3
@@ -118,10 +117,10 @@ L000loop:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 12(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 12(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
addl %ecx,%ebp
# 00_15 4
@@ -131,10 +130,10 @@ L000loop:
roll $5,%ebp
xorl %esi,%ebx
addl %eax,%ebp
- movl 16(%esp),%eax
andl %edx,%ebx
- rorl $2,%edx
+ movl 16(%esp),%eax
xorl %esi,%ebx
+ rorl $2,%edx
leal 1518500249(%ebp,%eax,1),%ebp
addl %ebx,%ebp
# 00_15 5
@@ -144,10 +143,10 @@ L000loop:
roll $5,%ebp
xorl %edi,%eax
addl %esi,%ebp
- movl 20(%esp),%esi
andl %ecx,%eax
- rorl $2,%ecx
+ movl 20(%esp),%esi
xorl %edi,%eax
+ rorl $2,%ecx
leal 1518500249(%ebp,%esi,1),%ebp
addl %eax,%ebp
# 00_15 6
@@ -157,10 +156,10 @@ L000loop:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl 24(%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl 24(%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
# 00_15 7
@@ -170,10 +169,10 @@ L000loop:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 28(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 28(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
# 00_15 8
@@ -183,10 +182,10 @@ L000loop:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 32(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 32(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
# 00_15 9
@@ -196,10 +195,10 @@ L000loop:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 36(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 36(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
addl %ecx,%ebp
# 00_15 10
@@ -209,10 +208,10 @@ L000loop:
roll $5,%ebp
xorl %esi,%ebx
addl %eax,%ebp
- movl 40(%esp),%eax
andl %edx,%ebx
- rorl $2,%edx
+ movl 40(%esp),%eax
xorl %esi,%ebx
+ rorl $2,%edx
leal 1518500249(%ebp,%eax,1),%ebp
addl %ebx,%ebp
# 00_15 11
@@ -222,10 +221,10 @@ L000loop:
roll $5,%ebp
xorl %edi,%eax
addl %esi,%ebp
- movl 44(%esp),%esi
andl %ecx,%eax
- rorl $2,%ecx
+ movl 44(%esp),%esi
xorl %edi,%eax
+ rorl $2,%ecx
leal 1518500249(%ebp,%esi,1),%ebp
addl %eax,%ebp
# 00_15 12
@@ -235,10 +234,10 @@ L000loop:
roll $5,%ebp
xorl %edx,%esi
addl %edi,%ebp
- movl 48(%esp),%edi
andl %ebx,%esi
- rorl $2,%ebx
+ movl 48(%esp),%edi
xorl %edx,%esi
+ rorl $2,%ebx
leal 1518500249(%ebp,%edi,1),%ebp
addl %esi,%ebp
# 00_15 13
@@ -248,10 +247,10 @@ L000loop:
roll $5,%ebp
xorl %ecx,%edi
addl %edx,%ebp
- movl 52(%esp),%edx
andl %eax,%edi
- rorl $2,%eax
+ movl 52(%esp),%edx
xorl %ecx,%edi
+ rorl $2,%eax
leal 1518500249(%ebp,%edx,1),%ebp
addl %edi,%ebp
# 00_15 14
@@ -261,10 +260,10 @@ L000loop:
roll $5,%ebp
xorl %ebx,%edx
addl %ecx,%ebp
- movl 56(%esp),%ecx
andl %esi,%edx
- rorl $2,%esi
+ movl 56(%esp),%ecx
xorl %ebx,%edx
+ rorl $2,%esi
leal 1518500249(%ebp,%ecx,1),%ebp
addl %edx,%ebp
# 00_15 15
@@ -274,1163 +273,1226 @@ L000loop:
roll $5,%ebp
xorl %eax,%ecx
addl %ebx,%ebp
- movl 60(%esp),%ebx
andl %edi,%ecx
- rorl $2,%edi
+ movl 60(%esp),%ebx
xorl %eax,%ecx
+ rorl $2,%edi
leal 1518500249(%ebp,%ebx,1),%ebp
- movl (%esp),%ebx
addl %ebp,%ecx
# 16_19 16
+ movl (%esp),%ebx
movl %edi,%ebp
xorl 8(%esp),%ebx
xorl %esi,%ebp
xorl 32(%esp),%ebx
andl %edx,%ebp
+ rorl $2,%edx
xorl 52(%esp),%ebx
roll $1,%ebx
xorl %esi,%ebp
- addl %ebp,%eax
- movl %ecx,%ebp
- rorl $2,%edx
movl %ebx,(%esp)
- roll $5,%ebp
leal 1518500249(%ebx,%eax,1),%ebx
- movl 4(%esp),%eax
+ movl %ecx,%eax
+ roll $5,%eax
addl %ebp,%ebx
+ addl %eax,%ebx
# 16_19 17
+ movl 4(%esp),%eax
movl %edx,%ebp
xorl 12(%esp),%eax
xorl %edi,%ebp
xorl 36(%esp),%eax
andl %ecx,%ebp
+ rorl $2,%ecx
xorl 56(%esp),%eax
roll $1,%eax
xorl %edi,%ebp
- addl %ebp,%esi
- movl %ebx,%ebp
- rorl $2,%ecx
movl %eax,4(%esp)
- roll $5,%ebp
leal 1518500249(%eax,%esi,1),%eax
- movl 8(%esp),%esi
+ movl %ebx,%esi
+ roll $5,%esi
addl %ebp,%eax
+ addl %esi,%eax
# 16_19 18
+ movl 8(%esp),%esi
movl %ecx,%ebp
xorl 16(%esp),%esi
xorl %edx,%ebp
xorl 40(%esp),%esi
andl %ebx,%ebp
+ rorl $2,%ebx
xorl 60(%esp),%esi
roll $1,%esi
xorl %edx,%ebp
- addl %ebp,%edi
- movl %eax,%ebp
- rorl $2,%ebx
movl %esi,8(%esp)
- roll $5,%ebp
leal 1518500249(%esi,%edi,1),%esi
- movl 12(%esp),%edi
+ movl %eax,%edi
+ roll $5,%edi
addl %ebp,%esi
+ addl %edi,%esi
# 16_19 19
+ movl 12(%esp),%edi
movl %ebx,%ebp
xorl 20(%esp),%edi
xorl %ecx,%ebp
xorl 44(%esp),%edi
andl %eax,%ebp
+ rorl $2,%eax
xorl (%esp),%edi
roll $1,%edi
xorl %ecx,%ebp
- addl %ebp,%edx
- movl %esi,%ebp
- rorl $2,%eax
movl %edi,12(%esp)
- roll $5,%ebp
leal 1518500249(%edi,%edx,1),%edi
- movl 16(%esp),%edx
+ movl %esi,%edx
+ roll $5,%edx
addl %ebp,%edi
+ addl %edx,%edi
# 20_39 20
movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
xorl 24(%esp),%edx
xorl %eax,%ebp
xorl 48(%esp),%edx
xorl %ebx,%ebp
xorl 4(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,16(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 20(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 21
movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
xorl 28(%esp),%ecx
xorl %esi,%ebp
xorl 52(%esp),%ecx
xorl %eax,%ebp
xorl 8(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,20(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 24(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 22
movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
xorl 32(%esp),%ebx
xorl %edi,%ebp
xorl 56(%esp),%ebx
xorl %esi,%ebp
xorl 12(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,24(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 28(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 23
movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
xorl 36(%esp),%eax
xorl %edx,%ebp
xorl 60(%esp),%eax
xorl %edi,%ebp
xorl 16(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,28(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 32(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 24
movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
xorl 40(%esp),%esi
xorl %ecx,%ebp
xorl (%esp),%esi
xorl %edx,%ebp
xorl 20(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,32(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 36(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 25
movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
xorl 44(%esp),%edi
xorl %ebx,%ebp
xorl 4(%esp),%edi
xorl %ecx,%ebp
xorl 24(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,36(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl 40(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 26
movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
xorl 48(%esp),%edx
xorl %eax,%ebp
xorl 8(%esp),%edx
xorl %ebx,%ebp
xorl 28(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,40(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 44(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 27
movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
xorl 52(%esp),%ecx
xorl %esi,%ebp
xorl 12(%esp),%ecx
xorl %eax,%ebp
xorl 32(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,44(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 48(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 28
movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
xorl 56(%esp),%ebx
xorl %edi,%ebp
xorl 16(%esp),%ebx
xorl %esi,%ebp
xorl 36(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,48(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 52(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 29
movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
xorl 60(%esp),%eax
xorl %edx,%ebp
xorl 20(%esp),%eax
xorl %edi,%ebp
xorl 40(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,52(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 56(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 30
movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
xorl (%esp),%esi
xorl %ecx,%ebp
xorl 24(%esp),%esi
xorl %edx,%ebp
xorl 44(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,56(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 60(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 31
movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
xorl 4(%esp),%edi
xorl %ebx,%ebp
xorl 28(%esp),%edi
xorl %ecx,%ebp
xorl 48(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,60(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl (%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 32
movl %esi,%ebp
+ movl (%esp),%edx
+ rorl $2,%esi
xorl 8(%esp),%edx
xorl %eax,%ebp
xorl 32(%esp),%edx
xorl %ebx,%ebp
xorl 52(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 4(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 33
movl %edi,%ebp
+ movl 4(%esp),%ecx
+ rorl $2,%edi
xorl 12(%esp),%ecx
xorl %esi,%ebp
xorl 36(%esp),%ecx
xorl %eax,%ebp
xorl 56(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,4(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 8(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 34
movl %edx,%ebp
+ movl 8(%esp),%ebx
+ rorl $2,%edx
xorl 16(%esp),%ebx
xorl %edi,%ebp
xorl 40(%esp),%ebx
xorl %esi,%ebp
xorl 60(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,8(%esp)
- leal 1859775393(%ebx,%eax,1),%ebx
- movl 12(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 1859775393(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 35
movl %ecx,%ebp
+ movl 12(%esp),%eax
+ rorl $2,%ecx
xorl 20(%esp),%eax
xorl %edx,%ebp
xorl 44(%esp),%eax
xorl %edi,%ebp
xorl (%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,12(%esp)
- leal 1859775393(%eax,%esi,1),%eax
- movl 16(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 1859775393(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 36
movl %ebx,%ebp
+ movl 16(%esp),%esi
+ rorl $2,%ebx
xorl 24(%esp),%esi
xorl %ecx,%ebp
xorl 48(%esp),%esi
xorl %edx,%ebp
xorl 4(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,16(%esp)
- leal 1859775393(%esi,%edi,1),%esi
- movl 20(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 1859775393(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 37
movl %eax,%ebp
+ movl 20(%esp),%edi
+ rorl $2,%eax
xorl 28(%esp),%edi
xorl %ebx,%ebp
xorl 52(%esp),%edi
xorl %ecx,%ebp
xorl 8(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,20(%esp)
- leal 1859775393(%edi,%edx,1),%edi
- movl 24(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 1859775393(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 38
movl %esi,%ebp
+ movl 24(%esp),%edx
+ rorl $2,%esi
xorl 32(%esp),%edx
xorl %eax,%ebp
xorl 56(%esp),%edx
xorl %ebx,%ebp
xorl 12(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,24(%esp)
- leal 1859775393(%edx,%ecx,1),%edx
- movl 28(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 1859775393(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 39
movl %edi,%ebp
+ movl 28(%esp),%ecx
+ rorl $2,%edi
xorl 36(%esp),%ecx
xorl %esi,%ebp
xorl 60(%esp),%ecx
xorl %eax,%ebp
xorl 16(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,28(%esp)
- leal 1859775393(%ecx,%ebx,1),%ecx
- movl 32(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 1859775393(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 40_59 40
- movl %edi,%ebp
- xorl 40(%esp),%ebx
- xorl %esi,%ebp
- xorl (%esp),%ebx
- andl %edx,%ebp
- xorl 20(%esp),%ebx
+ movl 32(%esp),%ebx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,32(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,32(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 36(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
# 40_59 41
- movl %edx,%ebp
- xorl 44(%esp),%eax
- xorl %edi,%ebp
- xorl 4(%esp),%eax
- andl %ecx,%ebp
- xorl 24(%esp),%eax
+ movl 36(%esp),%eax
+ movl 44(%esp),%ebp
+ xorl %ebp,%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 24(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,36(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,36(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 40(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
# 40_59 42
- movl %ecx,%ebp
- xorl 48(%esp),%esi
- xorl %edx,%ebp
- xorl 8(%esp),%esi
- andl %ebx,%ebp
- xorl 28(%esp),%esi
+ movl 40(%esp),%esi
+ movl 48(%esp),%ebp
+ xorl %ebp,%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 28(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,40(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,40(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 44(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
# 40_59 43
- movl %ebx,%ebp
- xorl 52(%esp),%edi
- xorl %ecx,%ebp
- xorl 12(%esp),%edi
- andl %eax,%ebp
- xorl 32(%esp),%edi
+ movl 44(%esp),%edi
+ movl 52(%esp),%ebp
+ xorl %ebp,%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 32(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,44(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,44(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 48(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
# 40_59 44
- movl %eax,%ebp
- xorl 56(%esp),%edx
- xorl %ebx,%ebp
- xorl 16(%esp),%edx
- andl %esi,%ebp
- xorl 36(%esp),%edx
+ movl 48(%esp),%edx
+ movl 56(%esp),%ebp
+ xorl %ebp,%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 36(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,48(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,48(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 52(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
# 40_59 45
- movl %esi,%ebp
- xorl 60(%esp),%ecx
- xorl %eax,%ebp
- xorl 20(%esp),%ecx
- andl %edi,%ebp
- xorl 40(%esp),%ecx
+ movl 52(%esp),%ecx
+ movl 60(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 40(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,52(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,52(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 56(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
# 40_59 46
- movl %edi,%ebp
- xorl (%esp),%ebx
- xorl %esi,%ebp
- xorl 24(%esp),%ebx
- andl %edx,%ebp
- xorl 44(%esp),%ebx
+ movl 56(%esp),%ebx
+ movl (%esp),%ebp
+ xorl %ebp,%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,56(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,56(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 60(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
# 40_59 47
- movl %edx,%ebp
- xorl 4(%esp),%eax
- xorl %edi,%ebp
- xorl 28(%esp),%eax
- andl %ecx,%ebp
- xorl 48(%esp),%eax
+ movl 60(%esp),%eax
+ movl 4(%esp),%ebp
+ xorl %ebp,%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 48(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,60(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,60(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl (%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
# 40_59 48
- movl %ecx,%ebp
- xorl 8(%esp),%esi
- xorl %edx,%ebp
- xorl 32(%esp),%esi
- andl %ebx,%ebp
- xorl 52(%esp),%esi
+ movl (%esp),%esi
+ movl 8(%esp),%ebp
+ xorl %ebp,%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 52(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 4(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
# 40_59 49
- movl %ebx,%ebp
- xorl 12(%esp),%edi
- xorl %ecx,%ebp
- xorl 36(%esp),%edi
- andl %eax,%ebp
- xorl 56(%esp),%edi
+ movl 4(%esp),%edi
+ movl 12(%esp),%ebp
+ xorl %ebp,%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 56(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,4(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,4(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 8(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
# 40_59 50
- movl %eax,%ebp
- xorl 16(%esp),%edx
- xorl %ebx,%ebp
- xorl 40(%esp),%edx
- andl %esi,%ebp
- xorl 60(%esp),%edx
+ movl 8(%esp),%edx
+ movl 16(%esp),%ebp
+ xorl %ebp,%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl 60(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,8(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,8(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 12(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
# 40_59 51
- movl %esi,%ebp
- xorl 20(%esp),%ecx
- xorl %eax,%ebp
- xorl 44(%esp),%ecx
- andl %edi,%ebp
- xorl (%esp),%ecx
+ movl 12(%esp),%ecx
+ movl 20(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl (%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,12(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,12(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 16(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
# 40_59 52
- movl %edi,%ebp
- xorl 24(%esp),%ebx
- xorl %esi,%ebp
- xorl 48(%esp),%ebx
- andl %edx,%ebp
- xorl 4(%esp),%ebx
+ movl 16(%esp),%ebx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,16(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,16(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 20(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
# 40_59 53
- movl %edx,%ebp
- xorl 28(%esp),%eax
- xorl %edi,%ebp
- xorl 52(%esp),%eax
- andl %ecx,%ebp
- xorl 8(%esp),%eax
+ movl 20(%esp),%eax
+ movl 28(%esp),%ebp
+ xorl %ebp,%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 8(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,20(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,20(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 24(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
# 40_59 54
- movl %ecx,%ebp
- xorl 32(%esp),%esi
- xorl %edx,%ebp
- xorl 56(%esp),%esi
- andl %ebx,%ebp
- xorl 12(%esp),%esi
+ movl 24(%esp),%esi
+ movl 32(%esp),%ebp
+ xorl %ebp,%esi
+ movl 56(%esp),%ebp
+ xorl %ebp,%esi
+ movl 12(%esp),%ebp
+ xorl %ebp,%esi
+ movl %ebx,%ebp
roll $1,%esi
- addl %edi,%ebp
+ orl %ecx,%ebp
+ movl %esi,24(%esp)
+ andl %edx,%ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx,%edi
rorl $2,%ebx
+ andl %ecx,%edi
+ orl %edi,%ebp
movl %eax,%edi
roll $5,%edi
- movl %esi,24(%esp)
- leal 2400959708(%esi,%ebp,1),%esi
- movl %ecx,%ebp
- addl %edi,%esi
- andl %edx,%ebp
- movl 28(%esp),%edi
addl %ebp,%esi
+ addl %edi,%esi
# 40_59 55
- movl %ebx,%ebp
- xorl 36(%esp),%edi
- xorl %ecx,%ebp
- xorl 60(%esp),%edi
- andl %eax,%ebp
- xorl 16(%esp),%edi
+ movl 28(%esp),%edi
+ movl 36(%esp),%ebp
+ xorl %ebp,%edi
+ movl 60(%esp),%ebp
+ xorl %ebp,%edi
+ movl 16(%esp),%ebp
+ xorl %ebp,%edi
+ movl %eax,%ebp
roll $1,%edi
- addl %edx,%ebp
+ orl %ebx,%ebp
+ movl %edi,28(%esp)
+ andl %ecx,%ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax,%edx
rorl $2,%eax
+ andl %ebx,%edx
+ orl %edx,%ebp
movl %esi,%edx
roll $5,%edx
- movl %edi,28(%esp)
- leal 2400959708(%edi,%ebp,1),%edi
- movl %ebx,%ebp
- addl %edx,%edi
- andl %ecx,%ebp
- movl 32(%esp),%edx
addl %ebp,%edi
+ addl %edx,%edi
# 40_59 56
- movl %eax,%ebp
- xorl 40(%esp),%edx
- xorl %ebx,%ebp
- xorl (%esp),%edx
- andl %esi,%ebp
- xorl 20(%esp),%edx
+ movl 32(%esp),%edx
+ movl 40(%esp),%ebp
+ xorl %ebp,%edx
+ movl (%esp),%ebp
+ xorl %ebp,%edx
+ movl 20(%esp),%ebp
+ xorl %ebp,%edx
+ movl %esi,%ebp
roll $1,%edx
- addl %ecx,%ebp
+ orl %eax,%ebp
+ movl %edx,32(%esp)
+ andl %ebx,%ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi,%ecx
rorl $2,%esi
+ andl %eax,%ecx
+ orl %ecx,%ebp
movl %edi,%ecx
roll $5,%ecx
- movl %edx,32(%esp)
- leal 2400959708(%edx,%ebp,1),%edx
- movl %eax,%ebp
- addl %ecx,%edx
- andl %ebx,%ebp
- movl 36(%esp),%ecx
addl %ebp,%edx
+ addl %ecx,%edx
# 40_59 57
- movl %esi,%ebp
- xorl 44(%esp),%ecx
- xorl %eax,%ebp
- xorl 4(%esp),%ecx
- andl %edi,%ebp
- xorl 24(%esp),%ecx
+ movl 36(%esp),%ecx
+ movl 44(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 4(%esp),%ebp
+ xorl %ebp,%ecx
+ movl 24(%esp),%ebp
+ xorl %ebp,%ecx
+ movl %edi,%ebp
roll $1,%ecx
- addl %ebx,%ebp
+ orl %esi,%ebp
+ movl %ecx,36(%esp)
+ andl %eax,%ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi,%ebx
rorl $2,%edi
+ andl %esi,%ebx
+ orl %ebx,%ebp
movl %edx,%ebx
roll $5,%ebx
- movl %ecx,36(%esp)
- leal 2400959708(%ecx,%ebp,1),%ecx
- movl %esi,%ebp
- addl %ebx,%ecx
- andl %eax,%ebp
- movl 40(%esp),%ebx
addl %ebp,%ecx
+ addl %ebx,%ecx
# 40_59 58
- movl %edi,%ebp
- xorl 48(%esp),%ebx
- xorl %esi,%ebp
- xorl 8(%esp),%ebx
- andl %edx,%ebp
- xorl 28(%esp),%ebx
+ movl 40(%esp),%ebx
+ movl 48(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 8(%esp),%ebp
+ xorl %ebp,%ebx
+ movl 28(%esp),%ebp
+ xorl %ebp,%ebx
+ movl %edx,%ebp
roll $1,%ebx
- addl %eax,%ebp
+ orl %edi,%ebp
+ movl %ebx,40(%esp)
+ andl %esi,%ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx,%eax
rorl $2,%edx
+ andl %edi,%eax
+ orl %eax,%ebp
movl %ecx,%eax
roll $5,%eax
- movl %ebx,40(%esp)
- leal 2400959708(%ebx,%ebp,1),%ebx
- movl %edi,%ebp
- addl %eax,%ebx
- andl %esi,%ebp
- movl 44(%esp),%eax
addl %ebp,%ebx
+ addl %eax,%ebx
# 40_59 59
- movl %edx,%ebp
- xorl 52(%esp),%eax
- xorl %edi,%ebp
- xorl 12(%esp),%eax
- andl %ecx,%ebp
- xorl 32(%esp),%eax
+ movl 44(%esp),%eax
+ movl 52(%esp),%ebp
+ xorl %ebp,%eax
+ movl 12(%esp),%ebp
+ xorl %ebp,%eax
+ movl 32(%esp),%ebp
+ xorl %ebp,%eax
+ movl %ecx,%ebp
roll $1,%eax
- addl %esi,%ebp
+ orl %edx,%ebp
+ movl %eax,44(%esp)
+ andl %edi,%ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx,%esi
rorl $2,%ecx
+ andl %edx,%esi
+ orl %esi,%ebp
movl %ebx,%esi
roll $5,%esi
- movl %eax,44(%esp)
- leal 2400959708(%eax,%ebp,1),%eax
- movl %edx,%ebp
- addl %esi,%eax
- andl %edi,%ebp
- movl 48(%esp),%esi
addl %ebp,%eax
+ addl %esi,%eax
# 20_39 60
movl %ebx,%ebp
+ movl 48(%esp),%esi
+ rorl $2,%ebx
xorl 56(%esp),%esi
xorl %ecx,%ebp
xorl 16(%esp),%esi
xorl %edx,%ebp
xorl 36(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,48(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 52(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 61
movl %eax,%ebp
+ movl 52(%esp),%edi
+ rorl $2,%eax
xorl 60(%esp),%edi
xorl %ebx,%ebp
xorl 20(%esp),%edi
xorl %ecx,%ebp
xorl 40(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,52(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 56(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 62
movl %esi,%ebp
+ movl 56(%esp),%edx
+ rorl $2,%esi
xorl (%esp),%edx
xorl %eax,%ebp
xorl 24(%esp),%edx
xorl %ebx,%ebp
xorl 44(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,56(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 60(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 63
movl %edi,%ebp
+ movl 60(%esp),%ecx
+ rorl $2,%edi
xorl 4(%esp),%ecx
xorl %esi,%ebp
xorl 28(%esp),%ecx
xorl %eax,%ebp
xorl 48(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,60(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl (%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 64
movl %edx,%ebp
+ movl (%esp),%ebx
+ rorl $2,%edx
xorl 8(%esp),%ebx
xorl %edi,%ebp
xorl 32(%esp),%ebx
xorl %esi,%ebp
xorl 52(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 4(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 65
movl %ecx,%ebp
+ movl 4(%esp),%eax
+ rorl $2,%ecx
xorl 12(%esp),%eax
xorl %edx,%ebp
xorl 36(%esp),%eax
xorl %edi,%ebp
xorl 56(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,4(%esp)
- leal 3395469782(%eax,%esi,1),%eax
- movl 8(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 66
movl %ebx,%ebp
+ movl 8(%esp),%esi
+ rorl $2,%ebx
xorl 16(%esp),%esi
xorl %ecx,%ebp
xorl 40(%esp),%esi
xorl %edx,%ebp
xorl 60(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,8(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 12(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 67
movl %eax,%ebp
+ movl 12(%esp),%edi
+ rorl $2,%eax
xorl 20(%esp),%edi
xorl %ebx,%ebp
xorl 44(%esp),%edi
xorl %ecx,%ebp
xorl (%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,12(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 16(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 68
movl %esi,%ebp
+ movl 16(%esp),%edx
+ rorl $2,%esi
xorl 24(%esp),%edx
xorl %eax,%ebp
xorl 48(%esp),%edx
xorl %ebx,%ebp
xorl 4(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,16(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 20(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 69
movl %edi,%ebp
+ movl 20(%esp),%ecx
+ rorl $2,%edi
xorl 28(%esp),%ecx
xorl %esi,%ebp
xorl 52(%esp),%ecx
xorl %eax,%ebp
xorl 8(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,20(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl 24(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 70
movl %edx,%ebp
+ movl 24(%esp),%ebx
+ rorl $2,%edx
xorl 32(%esp),%ebx
xorl %edi,%ebp
xorl 56(%esp),%ebx
xorl %esi,%ebp
xorl 12(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,24(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 28(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 71
movl %ecx,%ebp
+ movl 28(%esp),%eax
+ rorl $2,%ecx
xorl 36(%esp),%eax
xorl %edx,%ebp
xorl 60(%esp),%eax
xorl %edi,%ebp
xorl 16(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
+ addl %esi,%ebp
movl %eax,28(%esp)
- leal 3395469782(%eax,%esi,1),%eax
- movl 32(%esp),%esi
- addl %ebp,%eax
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 72
movl %ebx,%ebp
+ movl 32(%esp),%esi
+ rorl $2,%ebx
xorl 40(%esp),%esi
xorl %ecx,%ebp
xorl (%esp),%esi
xorl %edx,%ebp
xorl 20(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
+ addl %edi,%ebp
movl %esi,32(%esp)
- leal 3395469782(%esi,%edi,1),%esi
- movl 36(%esp),%edi
- addl %ebp,%esi
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 73
movl %eax,%ebp
+ movl 36(%esp),%edi
+ rorl $2,%eax
xorl 44(%esp),%edi
xorl %ebx,%ebp
xorl 4(%esp),%edi
xorl %ecx,%ebp
xorl 24(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
+ addl %edx,%ebp
movl %edi,36(%esp)
- leal 3395469782(%edi,%edx,1),%edi
- movl 40(%esp),%edx
- addl %ebp,%edi
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
# 20_39 74
movl %esi,%ebp
+ movl 40(%esp),%edx
+ rorl $2,%esi
xorl 48(%esp),%edx
xorl %eax,%ebp
xorl 8(%esp),%edx
xorl %ebx,%ebp
xorl 28(%esp),%edx
roll $1,%edx
- addl %ebp,%ecx
- rorl $2,%esi
- movl %edi,%ebp
- roll $5,%ebp
+ addl %ecx,%ebp
movl %edx,40(%esp)
- leal 3395469782(%edx,%ecx,1),%edx
- movl 44(%esp),%ecx
- addl %ebp,%edx
+ movl %edi,%ecx
+ roll $5,%ecx
+ leal 3395469782(%edx,%ebp,1),%edx
+ addl %ecx,%edx
# 20_39 75
movl %edi,%ebp
+ movl 44(%esp),%ecx
+ rorl $2,%edi
xorl 52(%esp),%ecx
xorl %esi,%ebp
xorl 12(%esp),%ecx
xorl %eax,%ebp
xorl 32(%esp),%ecx
roll $1,%ecx
- addl %ebp,%ebx
- rorl $2,%edi
- movl %edx,%ebp
- roll $5,%ebp
+ addl %ebx,%ebp
movl %ecx,44(%esp)
- leal 3395469782(%ecx,%ebx,1),%ecx
- movl 48(%esp),%ebx
- addl %ebp,%ecx
+ movl %edx,%ebx
+ roll $5,%ebx
+ leal 3395469782(%ecx,%ebp,1),%ecx
+ addl %ebx,%ecx
# 20_39 76
movl %edx,%ebp
+ movl 48(%esp),%ebx
+ rorl $2,%edx
xorl 56(%esp),%ebx
xorl %edi,%ebp
xorl 16(%esp),%ebx
xorl %esi,%ebp
xorl 36(%esp),%ebx
roll $1,%ebx
- addl %ebp,%eax
- rorl $2,%edx
- movl %ecx,%ebp
- roll $5,%ebp
+ addl %eax,%ebp
movl %ebx,48(%esp)
- leal 3395469782(%ebx,%eax,1),%ebx
- movl 52(%esp),%eax
- addl %ebp,%ebx
+ movl %ecx,%eax
+ roll $5,%eax
+ leal 3395469782(%ebx,%ebp,1),%ebx
+ addl %eax,%ebx
# 20_39 77
movl %ecx,%ebp
+ movl 52(%esp),%eax
+ rorl $2,%ecx
xorl 60(%esp),%eax
xorl %edx,%ebp
xorl 20(%esp),%eax
xorl %edi,%ebp
xorl 40(%esp),%eax
roll $1,%eax
- addl %ebp,%esi
- rorl $2,%ecx
- movl %ebx,%ebp
- roll $5,%ebp
- leal 3395469782(%eax,%esi,1),%eax
- movl 56(%esp),%esi
- addl %ebp,%eax
+ addl %esi,%ebp
+ movl %eax,52(%esp)
+ movl %ebx,%esi
+ roll $5,%esi
+ leal 3395469782(%eax,%ebp,1),%eax
+ addl %esi,%eax
# 20_39 78
movl %ebx,%ebp
+ movl 56(%esp),%esi
+ rorl $2,%ebx
xorl (%esp),%esi
xorl %ecx,%ebp
xorl 24(%esp),%esi
xorl %edx,%ebp
xorl 44(%esp),%esi
roll $1,%esi
- addl %ebp,%edi
- rorl $2,%ebx
- movl %eax,%ebp
- roll $5,%ebp
- leal 3395469782(%esi,%edi,1),%esi
- movl 60(%esp),%edi
- addl %ebp,%esi
+ addl %edi,%ebp
+ movl %esi,56(%esp)
+ movl %eax,%edi
+ roll $5,%edi
+ leal 3395469782(%esi,%ebp,1),%esi
+ addl %edi,%esi
# 20_39 79
movl %eax,%ebp
+ movl 60(%esp),%edi
+ rorl $2,%eax
xorl 4(%esp),%edi
xorl %ebx,%ebp
xorl 28(%esp),%edi
xorl %ecx,%ebp
xorl 48(%esp),%edi
roll $1,%edi
- addl %ebp,%edx
- rorl $2,%eax
- movl %esi,%ebp
- roll $5,%ebp
- leal 3395469782(%edi,%edx,1),%edi
- addl %ebp,%edi
- movl 96(%esp),%ebp
- movl 100(%esp),%edx
+ addl %edx,%ebp
+ movl %edi,60(%esp)
+ movl %esi,%edx
+ roll $5,%edx
+ leal 3395469782(%edi,%ebp,1),%edi
+ addl %edx,%edi
+ movl 84(%esp),%ebp
+ movl 88(%esp),%edx
addl (%ebp),%edi
addl 4(%ebp),%esi
addl 8(%ebp),%eax
@@ -1439,14 +1501,14 @@ L000loop:
movl %edi,(%ebp)
addl $64,%edx
movl %esi,4(%ebp)
- cmpl 104(%esp),%edx
+ cmpl 92(%esp),%edx
movl %eax,8(%ebp)
movl %ecx,%edi
movl %ebx,12(%ebp)
movl %edx,%esi
movl %ecx,16(%ebp)
jb L000loop
- addl $76,%esp
+ addl $64,%esp
popl %edi
popl %esi
popl %ebx
diff --git a/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s b/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
index 67c7a96bc0..1190be7503 100644
--- a/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
+++ b/deps/openssl/asm/x86-macosx-gas/sha/sha256-586.s
@@ -95,30 +95,31 @@ L002loop:
L00300_15:
movl 92(%esp),%ebx
movl %edx,%ecx
- rorl $14,%ecx
- movl 20(%esp),%esi
- xorl %edx,%ecx
- rorl $5,%ecx
- xorl %edx,%ecx
rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
+ movl 20(%esp),%esi
+ xorl %edi,%ecx
+ rorl $14,%edi
+ xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
- xorl %edi,%esi
movl %edx,16(%esp)
+ xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
- rorl $9,%ecx
+ rorl $2,%ecx
addl 28(%esp),%ebx
- xorl %eax,%ecx
- rorl $11,%ecx
+ rorl $13,%edi
movl 4(%esp),%esi
- xorl %eax,%ecx
- rorl $2,%ecx
+ xorl %edi,%ecx
+ rorl $9,%edi
addl %ebx,%edx
+ xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
@@ -140,46 +141,48 @@ L00300_15:
L00416_63:
movl %ebx,%esi
movl 100(%esp),%ecx
+ shrl $3,%ebx
+ rorl $7,%esi
+ xorl %esi,%ebx
rorl $11,%esi
movl %ecx,%edi
- xorl %ebx,%esi
- rorl $7,%esi
- shrl $3,%ebx
- rorl $2,%edi
xorl %esi,%ebx
- xorl %ecx,%edi
- rorl $17,%edi
shrl $10,%ecx
- addl 156(%esp),%ebx
+ movl 156(%esp),%esi
+ rorl $17,%edi
+ xorl %edi,%ecx
+ rorl $2,%edi
+ addl %esi,%ebx
xorl %ecx,%edi
- addl 120(%esp),%ebx
- movl %edx,%ecx
addl %edi,%ebx
- rorl $14,%ecx
+ movl %edx,%ecx
+ addl 120(%esp),%ebx
+ rorl $6,%ecx
+ movl %edx,%edi
+ rorl $11,%edi
movl 20(%esp),%esi
- xorl %edx,%ecx
- rorl $5,%ecx
+ xorl %edi,%ecx
+ rorl $14,%edi
movl %ebx,92(%esp)
- xorl %edx,%ecx
- rorl $6,%ecx
+ xorl %edi,%ecx
movl 24(%esp),%edi
addl %ecx,%ebx
- xorl %edi,%esi
movl %edx,16(%esp)
+ xorl %edi,%esi
movl %eax,%ecx
andl %edx,%esi
movl 12(%esp),%edx
xorl %edi,%esi
movl %eax,%edi
addl %esi,%ebx
- rorl $9,%ecx
+ rorl $2,%ecx
addl 28(%esp),%ebx
- xorl %eax,%ecx
- rorl $11,%ecx
+ rorl $13,%edi
movl 4(%esp),%esi
- xorl %eax,%ecx
- rorl $2,%ecx
+ xorl %edi,%ecx
+ rorl $9,%edi
addl %ebx,%edx
+ xorl %edi,%ecx
movl 8(%esp),%edi
addl %ecx,%ebx
movl %eax,(%esp)
diff --git a/deps/openssl/asm/x86-macosx-gas/x86cpuid.s b/deps/openssl/asm/x86-macosx-gas/x86cpuid.s
index db36e6f503..b5e80f83a3 100644
--- a/deps/openssl/asm/x86-macosx-gas/x86cpuid.s
+++ b/deps/openssl/asm/x86-macosx-gas/x86cpuid.s
@@ -18,9 +18,9 @@ L_OPENSSL_ia32_cpuid_begin:
pushfl
popl %eax
xorl %eax,%ecx
- xorl %eax,%eax
btl $21,%ecx
- jnc L000nocpuid
+ jnc L000done
+ xorl %eax,%eax
.byte 0x0f,0xa2
movl %eax,%edi
xorl %eax,%eax
@@ -46,14 +46,7 @@ L_OPENSSL_ia32_cpuid_begin:
jnz L001intel
movl $2147483648,%eax
.byte 0x0f,0xa2
- cmpl $2147483649,%eax
- jb L001intel
- movl %eax,%esi
- movl $2147483649,%eax
- .byte 0x0f,0xa2
- orl %ecx,%ebp
- andl $2049,%ebp
- cmpl $2147483656,%esi
+ cmpl $2147483656,%eax
jb L001intel
movl $2147483656,%eax
.byte 0x0f,0xa2
@@ -62,68 +55,46 @@ L_OPENSSL_ia32_cpuid_begin:
movl $1,%eax
.byte 0x0f,0xa2
btl $28,%edx
- jnc L002generic
+ jnc L000done
shrl $16,%ebx
andl $255,%ebx
cmpl %esi,%ebx
- ja L002generic
+ ja L000done
andl $4026531839,%edx
- jmp L002generic
+ jmp L000done
L001intel:
cmpl $4,%edi
movl $-1,%edi
- jb L003nocacheinfo
+ jb L002nocacheinfo
movl $4,%eax
movl $0,%ecx
.byte 0x0f,0xa2
movl %eax,%edi
shrl $14,%edi
andl $4095,%edi
-L003nocacheinfo:
+L002nocacheinfo:
movl $1,%eax
.byte 0x0f,0xa2
- andl $3220176895,%edx
cmpl $0,%ebp
- jne L004notintel
- orl $1073741824,%edx
+ jne L003notP4
andb $15,%ah
cmpb $15,%ah
- jne L004notintel
+ jne L003notP4
orl $1048576,%edx
-L004notintel:
+L003notP4:
btl $28,%edx
- jnc L002generic
+ jnc L000done
andl $4026531839,%edx
cmpl $0,%edi
- je L002generic
+ je L000done
orl $268435456,%edx
shrl $16,%ebx
cmpb $1,%bl
- ja L002generic
+ ja L000done
andl $4026531839,%edx
-L002generic:
- andl $2048,%ebp
- andl $4294965247,%ecx
- movl %edx,%esi
- orl %ecx,%ebp
- btl $27,%ecx
- jnc L005clear_avx
- xorl %ecx,%ecx
-.byte 15,1,208
- andl $6,%eax
- cmpl $6,%eax
- je L006done
- cmpl $2,%eax
- je L005clear_avx
-L007clear_xmm:
- andl $4261412861,%ebp
- andl $4278190079,%esi
-L005clear_avx:
- andl $4026525695,%ebp
-L006done:
- movl %esi,%eax
- movl %ebp,%edx
-L000nocpuid:
+L000done:
+ movl %edx,%eax
+ movl %ecx,%edx
popl %edi
popl %esi
popl %ebx
@@ -135,32 +106,26 @@ _OPENSSL_rdtsc:
L_OPENSSL_rdtsc_begin:
xorl %eax,%eax
xorl %edx,%edx
- call L008PIC_me_up
-L008PIC_me_up:
- popl %ecx
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L008PIC_me_up(%ecx),%ecx
+ leal _OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
- jnc L009notsc
+ jnc L004notsc
.byte 0x0f,0x31
-L009notsc:
+L004notsc:
ret
.globl _OPENSSL_instrument_halt
.align 4
_OPENSSL_instrument_halt:
L_OPENSSL_instrument_halt_begin:
- call L010PIC_me_up
-L010PIC_me_up:
- popl %ecx
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L010PIC_me_up(%ecx),%ecx
+ leal _OPENSSL_ia32cap_P,%ecx
btl $4,(%ecx)
- jnc L011nohalt
+ jnc L005nohalt
.long 2421723150
andl $3,%eax
- jnz L011nohalt
+ jnz L005nohalt
pushfl
popl %eax
btl $9,%eax
- jnc L011nohalt
+ jnc L005nohalt
.byte 0x0f,0x31
pushl %edx
pushl %eax
@@ -170,7 +135,7 @@ L010PIC_me_up:
sbbl 4(%esp),%edx
addl $8,%esp
ret
-L011nohalt:
+L005nohalt:
xorl %eax,%eax
xorl %edx,%edx
ret
@@ -181,21 +146,21 @@ L_OPENSSL_far_spin_begin:
pushfl
popl %eax
btl $9,%eax
- jnc L012nospin
+ jnc L006nospin
movl 4(%esp),%eax
movl 8(%esp),%ecx
.long 2430111262
xorl %eax,%eax
movl (%ecx),%edx
- jmp L013spin
+ jmp L007spin
.align 4,0x90
-L013spin:
+L007spin:
incl %eax
cmpl (%ecx),%edx
- je L013spin
+ je L007spin
.long 529567888
ret
-L012nospin:
+L006nospin:
xorl %eax,%eax
xorl %edx,%edx
ret
@@ -205,15 +170,12 @@ _OPENSSL_wipe_cpu:
L_OPENSSL_wipe_cpu_begin:
xorl %eax,%eax
xorl %edx,%edx
- call L014PIC_me_up
-L014PIC_me_up:
- popl %ecx
- movl L_OPENSSL_ia32cap_P$non_lazy_ptr-L014PIC_me_up(%ecx),%ecx
+ leal _OPENSSL_ia32cap_P,%ecx
movl (%ecx),%ecx
btl $1,(%ecx)
- jnc L015no_x87
+ jnc L008no_x87
.long 4007259865,4007259865,4007259865,4007259865,2430851995
-L015no_x87:
+L008no_x87:
leal 4(%esp),%eax
ret
.globl _OPENSSL_atomic_add
@@ -225,11 +187,11 @@ L_OPENSSL_atomic_add_begin:
pushl %ebx
nop
movl (%edx),%eax
-L016spin:
+L009spin:
leal (%eax,%ecx,1),%ebx
nop
.long 447811568
- jne L016spin
+ jne L009spin
movl %ebx,%eax
popl %ebx
ret
@@ -266,51 +228,34 @@ L_OPENSSL_cleanse_begin:
movl 8(%esp),%ecx
xorl %eax,%eax
cmpl $7,%ecx
- jae L017lot
+ jae L010lot
cmpl $0,%ecx
- je L018ret
-L019little:
+ je L011ret
+L012little:
movb %al,(%edx)
subl $1,%ecx
leal 1(%edx),%edx
- jnz L019little
-L018ret:
+ jnz L012little
+L011ret:
ret
.align 4,0x90
-L017lot:
+L010lot:
testl $3,%edx
- jz L020aligned
+ jz L013aligned
movb %al,(%edx)
leal -1(%ecx),%ecx
leal 1(%edx),%edx
- jmp L017lot
-L020aligned:
+ jmp L010lot
+L013aligned:
movl %eax,(%edx)
leal -4(%ecx),%ecx
testl $-4,%ecx
leal 4(%edx),%edx
- jnz L020aligned
+ jnz L013aligned
cmpl $0,%ecx
- jne L019little
- ret
-.globl _OPENSSL_ia32_rdrand
-.align 4
-_OPENSSL_ia32_rdrand:
-L_OPENSSL_ia32_rdrand_begin:
- movl $8,%ecx
-L021loop:
-.byte 15,199,240
- jc L022break
- loop L021loop
-L022break:
- cmpl $0,%eax
- cmovel %ecx,%eax
+ jne L012little
ret
-.section __IMPORT,__pointers,non_lazy_symbol_pointers
-L_OPENSSL_ia32cap_P$non_lazy_ptr:
-.indirect_symbol _OPENSSL_ia32cap_P
-.long 0
-.comm _OPENSSL_ia32cap_P,8,2
+.comm _OPENSSL_ia32cap_P,4
.mod_init_func
.align 2
.long _OPENSSL_cpuid_setup
diff --git a/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm b/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
index e4ac96e646..22dd21fbcd 100644
--- a/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/aes/aes-586.asm
@@ -2975,14 +2975,14 @@ $L045exit:
ret
__x86_AES_set_encrypt_key ENDP
ALIGN 16
-_private_AES_set_encrypt_key PROC PUBLIC
-$L_private_AES_set_encrypt_key_begin::
+_AES_set_encrypt_key PROC PUBLIC
+$L_AES_set_encrypt_key_begin::
call __x86_AES_set_encrypt_key
ret
-_private_AES_set_encrypt_key ENDP
+_AES_set_encrypt_key ENDP
ALIGN 16
-_private_AES_set_decrypt_key PROC PUBLIC
-$L_private_AES_set_decrypt_key_begin::
+_AES_set_decrypt_key PROC PUBLIC
+$L_AES_set_decrypt_key_begin::
call __x86_AES_set_encrypt_key
cmp eax,0
je $L054proceed
@@ -3211,12 +3211,12 @@ $L056permute:
pop ebx
pop ebp
ret
-_private_AES_set_decrypt_key ENDP
+_AES_set_decrypt_key ENDP
DB 65,69,83,32,102,111,114,32,120,56,54,44,32,67,82,89
DB 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
DB 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.text$ ENDS
.bss SEGMENT 'BSS'
-COMM _OPENSSL_ia32cap_P:QWORD
+COMM _OPENSSL_ia32cap_P:DWORD
.bss ENDS
END
diff --git a/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm b/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
index 2883179674..a802e7292f 100644
--- a/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
+++ b/deps/openssl/asm/x86-win32-masm/bf/bf-686.asm
@@ -2,7 +2,7 @@ TITLE bf-686.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm b/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
index 031be4e7ea..eaad4a073a 100644
--- a/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
+++ b/deps/openssl/asm/x86-win32-masm/bn/x86-mont.asm
@@ -2,7 +2,7 @@ TITLE ../openssl/crypto/bn/asm/x86-mont.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/bn/x86.asm b/deps/openssl/asm/x86-win32-masm/bn/x86.asm
index 2e7a0d4aaf..d7051fa4e5 100644
--- a/deps/openssl/asm/x86-win32-masm/bn/x86.asm
+++ b/deps/openssl/asm/x86-win32-masm/bn/x86.asm
@@ -2,7 +2,7 @@ TITLE ../openssl/crypto/bn/asm/x86.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm b/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
index e32d28135b..acdf6a2f8b 100644
--- a/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
+++ b/deps/openssl/asm/x86-win32-masm/camellia/cmll-x86.asm
@@ -2,7 +2,7 @@ TITLE cmll-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
@@ -1532,8 +1532,8 @@ $L013done:
ret
_Camellia_Ekeygen ENDP
ALIGN 16
-_private_Camellia_set_key PROC PUBLIC
-$L_private_Camellia_set_key_begin::
+_Camellia_set_key PROC PUBLIC
+$L_Camellia_set_key_begin::
push ebx
mov ecx,DWORD PTR 8[esp]
mov ebx,DWORD PTR 12[esp]
@@ -1563,7 +1563,7 @@ ALIGN 4
$L014done:
pop ebx
ret
-_private_Camellia_set_key ENDP
+_Camellia_set_key ENDP
ALIGN 64
$LCamellia_SIGMA::
DD 2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
diff --git a/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm b/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
index 6f85c34d28..1f2f0708a5 100644
--- a/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/cast/cast-586.asm
@@ -2,7 +2,7 @@ TITLE cast-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/des/crypt586.asm b/deps/openssl/asm/x86-win32-masm/des/crypt586.asm
index 4c82c7a265..24e474dfc5 100644
--- a/deps/openssl/asm/x86-win32-masm/des/crypt586.asm
+++ b/deps/openssl/asm/x86-win32-masm/des/crypt586.asm
@@ -2,7 +2,7 @@ TITLE crypt586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/des/des-586.asm b/deps/openssl/asm/x86-win32-masm/des/des-586.asm
index 24f19a6603..3c630daff9 100644
--- a/deps/openssl/asm/x86-win32-masm/des/des-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/des/des-586.asm
@@ -2,7 +2,7 @@ TITLE des-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm b/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
index 8e263de0fd..c8edae762d 100644
--- a/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/md5/md5-586.asm
@@ -2,7 +2,7 @@ TITLE ../openssl/crypto/md5/asm/md5-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm b/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
index d179090911..3eb66f7350 100644
--- a/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/rc4/rc4-586.asm
@@ -2,14 +2,7 @@ TITLE rc4-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
-.XMM
-IF @Version LT 800
-XMMWORD STRUCT 16
-DQ 2 dup (?)
-XMMWORD ENDS
-ENDIF
-
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
@@ -17,7 +10,6 @@ IF @Version LT 800
ELSE
.text$ SEGMENT ALIGN(64) 'CODE'
ENDIF
-;EXTERN _OPENSSL_ia32cap_P:NEAR
ALIGN 16
_RC4 PROC PUBLIC
$L_RC4_begin::
@@ -45,146 +37,11 @@ $L_RC4_begin::
mov ecx,DWORD PTR [eax*4+edi]
and edx,-4
jz $L002loop1
- test edx,-8
- mov DWORD PTR 32[esp],ebp
- jz $L003go4loop4
- lea ebp,DWORD PTR _OPENSSL_ia32cap_P
- bt DWORD PTR [ebp],26
- jnc $L003go4loop4
- mov ebp,DWORD PTR 32[esp]
- and edx,-8
- lea edx,DWORD PTR [edx*1+esi-8]
- mov DWORD PTR [edi-4],edx
- add bl,cl
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- movq mm0,QWORD PTR [esi]
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm2,DWORD PTR [edx*4+edi]
- jmp $L004loop_mmx_enter
-ALIGN 16
-$L005loop_mmx:
- add bl,cl
- psllq mm1,56
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- movq mm0,QWORD PTR [esi]
- movq QWORD PTR [esi*1+ebp-8],mm2
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm2,DWORD PTR [edx*4+edi]
-$L004loop_mmx_enter:
- add bl,cl
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm0
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,8
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,16
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,24
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,32
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,40
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- add bl,cl
- psllq mm1,48
- mov edx,DWORD PTR [ebx*4+edi]
- mov DWORD PTR [ebx*4+edi],ecx
- mov DWORD PTR [eax*4+edi],edx
- inc eax
- add edx,ecx
- movzx eax,al
- movzx edx,dl
- pxor mm2,mm1
- mov ecx,DWORD PTR [eax*4+edi]
- movd mm1,DWORD PTR [edx*4+edi]
- mov edx,ebx
- xor ebx,ebx
- mov bl,dl
- cmp esi,DWORD PTR [edi-4]
- lea esi,DWORD PTR 8[esi]
- jb $L005loop_mmx
- psllq mm1,56
- pxor mm2,mm1
- movq QWORD PTR [esi*1+ebp-8],mm2
- emms
- cmp esi,DWORD PTR 24[esp]
- je $L006done
- jmp $L002loop1
-ALIGN 16
-$L003go4loop4:
lea edx,DWORD PTR [edx*1+esi-4]
mov DWORD PTR 28[esp],edx
-$L007loop4:
+ mov DWORD PTR 32[esp],ebp
+ALIGN 16
+$L003loop4:
add bl,cl
mov edx,DWORD PTR [ebx*4+edi]
mov DWORD PTR [ebx*4+edi],ecx
@@ -230,9 +87,9 @@ $L007loop4:
mov DWORD PTR [esi*1+ecx],ebp
lea esi,DWORD PTR 4[esi]
mov ecx,DWORD PTR [eax*4+edi]
- jb $L007loop4
+ jb $L003loop4
cmp esi,DWORD PTR 24[esp]
- je $L006done
+ je $L004done
mov ebp,DWORD PTR 32[esp]
ALIGN 16
$L002loop1:
@@ -250,11 +107,11 @@ $L002loop1:
cmp esi,DWORD PTR 24[esp]
mov BYTE PTR [esi*1+ebp-1],dl
jb $L002loop1
- jmp $L006done
+ jmp $L004done
ALIGN 16
$L001RC4_CHAR:
movzx ecx,BYTE PTR [eax*1+edi]
-$L008cloop1:
+$L005cloop1:
add bl,cl
movzx edx,BYTE PTR [ebx*1+edi]
mov BYTE PTR [ebx*1+edi],cl
@@ -267,10 +124,10 @@ $L008cloop1:
movzx ecx,BYTE PTR [eax*1+edi]
cmp esi,DWORD PTR 24[esp]
mov BYTE PTR [esi*1+ebp-1],dl
- jb $L008cloop1
-$L006done:
+ jb $L005cloop1
+$L004done:
dec al
- mov DWORD PTR [edi-4],ebx
+ mov BYTE PTR [edi-4],bl
mov BYTE PTR [edi-8],al
$L000abort:
pop edi
@@ -279,9 +136,10 @@ $L000abort:
pop ebp
ret
_RC4 ENDP
+;EXTERN _OPENSSL_ia32cap_P:NEAR
ALIGN 16
-_private_RC4_set_key PROC PUBLIC
-$L_private_RC4_set_key_begin::
+_RC4_set_key PROC PUBLIC
+$L_RC4_set_key_begin::
push ebp
push ebx
push esi
@@ -296,53 +154,53 @@ $L_private_RC4_set_key_begin::
xor eax,eax
mov DWORD PTR [edi-4],ebp
bt DWORD PTR [edx],20
- jc $L009c1stloop
+ jc $L006c1stloop
ALIGN 16
-$L010w1stloop:
+$L007w1stloop:
mov DWORD PTR [eax*4+edi],eax
add al,1
- jnc $L010w1stloop
+ jnc $L007w1stloop
xor ecx,ecx
xor edx,edx
ALIGN 16
-$L011w2ndloop:
+$L008w2ndloop:
mov eax,DWORD PTR [ecx*4+edi]
add dl,BYTE PTR [ebp*1+esi]
add dl,al
add ebp,1
mov ebx,DWORD PTR [edx*4+edi]
- jnz $L012wnowrap
+ jnz $L009wnowrap
mov ebp,DWORD PTR [edi-4]
-$L012wnowrap:
+$L009wnowrap:
mov DWORD PTR [edx*4+edi],eax
mov DWORD PTR [ecx*4+edi],ebx
add cl,1
- jnc $L011w2ndloop
- jmp $L013exit
+ jnc $L008w2ndloop
+ jmp $L010exit
ALIGN 16
-$L009c1stloop:
+$L006c1stloop:
mov BYTE PTR [eax*1+edi],al
add al,1
- jnc $L009c1stloop
+ jnc $L006c1stloop
xor ecx,ecx
xor edx,edx
xor ebx,ebx
ALIGN 16
-$L014c2ndloop:
+$L011c2ndloop:
mov al,BYTE PTR [ecx*1+edi]
add dl,BYTE PTR [ebp*1+esi]
add dl,al
add ebp,1
mov bl,BYTE PTR [edx*1+edi]
- jnz $L015cnowrap
+ jnz $L012cnowrap
mov ebp,DWORD PTR [edi-4]
-$L015cnowrap:
+$L012cnowrap:
mov BYTE PTR [edx*1+edi],al
mov BYTE PTR [ecx*1+edi],bl
add cl,1
- jnc $L014c2ndloop
+ jnc $L011c2ndloop
mov DWORD PTR 256[edi],-1
-$L013exit:
+$L010exit:
xor eax,eax
mov DWORD PTR [edi-8],eax
mov DWORD PTR [edi-4],eax
@@ -351,31 +209,24 @@ $L013exit:
pop ebx
pop ebp
ret
-_private_RC4_set_key ENDP
+_RC4_set_key ENDP
ALIGN 16
_RC4_options PROC PUBLIC
$L_RC4_options_begin::
- call $L016pic_point
-$L016pic_point:
+ call $L013pic_point
+$L013pic_point:
pop eax
- lea eax,DWORD PTR ($L017opts-$L016pic_point)[eax]
+ lea eax,DWORD PTR ($L014opts-$L013pic_point)[eax]
lea edx,DWORD PTR _OPENSSL_ia32cap_P
- mov edx,DWORD PTR [edx]
- bt edx,20
- jc $L0181xchar
- bt edx,26
- jnc $L019ret
- add eax,25
- ret
-$L0181xchar:
+ bt DWORD PTR [edx],20
+ jnc $L015skip
add eax,12
-$L019ret:
+$L015skip:
ret
ALIGN 64
-$L017opts:
+$L014opts:
DB 114,99,52,40,52,120,44,105,110,116,41,0
DB 114,99,52,40,49,120,44,99,104,97,114,41,0
-DB 114,99,52,40,56,120,44,109,109,120,41,0
DB 82,67,52,32,102,111,114,32,120,56,54,44,32,67,82,89
DB 80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114
DB 111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
@@ -383,6 +234,6 @@ ALIGN 64
_RC4_options ENDP
.text$ ENDS
.bss SEGMENT 'BSS'
-COMM _OPENSSL_ia32cap_P:QWORD
+COMM _OPENSSL_ia32cap_P:DWORD
.bss ENDS
END
diff --git a/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm b/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
index 7ce74110e2..e699d9173f 100644
--- a/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/rc5/rc5-586.asm
@@ -2,7 +2,7 @@ TITLE rc5-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm b/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
index 7f6458cefd..8fa61f8f98 100644
--- a/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/ripemd/rmd-586.asm
@@ -2,7 +2,7 @@ TITLE ../openssl/crypto/ripemd/asm/rmd-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
index 878b1d3b99..ce9f8d5b45 100644
--- a/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha1-586.asm
@@ -2,7 +2,7 @@ TITLE sha1-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
@@ -20,12 +20,11 @@ $L_sha1_block_data_order_begin::
mov ebp,DWORD PTR 20[esp]
mov esi,DWORD PTR 24[esp]
mov eax,DWORD PTR 28[esp]
- sub esp,76
+ sub esp,64
shl eax,6
add eax,esi
- mov DWORD PTR 104[esp],eax
+ mov DWORD PTR 92[esp],eax
mov edi,DWORD PTR 16[ebp]
- jmp $L000loop
ALIGN 16
$L000loop:
mov eax,DWORD PTR [esi]
@@ -76,7 +75,7 @@ $L000loop:
mov DWORD PTR 52[esp],ebx
mov DWORD PTR 56[esp],ecx
mov DWORD PTR 60[esp],edx
- mov DWORD PTR 100[esp],esi
+ mov DWORD PTR 88[esp],esi
mov eax,DWORD PTR [ebp]
mov ebx,DWORD PTR 4[ebp]
mov ecx,DWORD PTR 8[ebp]
@@ -87,10 +86,10 @@ $L000loop:
rol ebp,5
xor esi,edx
add ebp,edi
- mov edi,DWORD PTR [esp]
and esi,ebx
- ror ebx,2
+ mov edi,DWORD PTR [esp]
xor esi,edx
+ ror ebx,2
lea ebp,DWORD PTR 1518500249[edi*1+ebp]
add ebp,esi
; 00_15 1
@@ -99,10 +98,10 @@ $L000loop:
rol ebp,5
xor edi,ecx
add ebp,edx
- mov edx,DWORD PTR 4[esp]
and edi,eax
- ror eax,2
+ mov edx,DWORD PTR 4[esp]
xor edi,ecx
+ ror eax,2
lea ebp,DWORD PTR 1518500249[edx*1+ebp]
add ebp,edi
; 00_15 2
@@ -111,10 +110,10 @@ $L000loop:
rol ebp,5
xor edx,ebx
add ebp,ecx
- mov ecx,DWORD PTR 8[esp]
and edx,esi
- ror esi,2
+ mov ecx,DWORD PTR 8[esp]
xor edx,ebx
+ ror esi,2
lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
add ebp,edx
; 00_15 3
@@ -123,10 +122,10 @@ $L000loop:
rol ebp,5
xor ecx,eax
add ebp,ebx
- mov ebx,DWORD PTR 12[esp]
and ecx,edi
- ror edi,2
+ mov ebx,DWORD PTR 12[esp]
xor ecx,eax
+ ror edi,2
lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
add ebp,ecx
; 00_15 4
@@ -135,10 +134,10 @@ $L000loop:
rol ebp,5
xor ebx,esi
add ebp,eax
- mov eax,DWORD PTR 16[esp]
and ebx,edx
- ror edx,2
+ mov eax,DWORD PTR 16[esp]
xor ebx,esi
+ ror edx,2
lea ebp,DWORD PTR 1518500249[eax*1+ebp]
add ebp,ebx
; 00_15 5
@@ -147,10 +146,10 @@ $L000loop:
rol ebp,5
xor eax,edi
add ebp,esi
- mov esi,DWORD PTR 20[esp]
and eax,ecx
- ror ecx,2
+ mov esi,DWORD PTR 20[esp]
xor eax,edi
+ ror ecx,2
lea ebp,DWORD PTR 1518500249[esi*1+ebp]
add ebp,eax
; 00_15 6
@@ -159,10 +158,10 @@ $L000loop:
rol ebp,5
xor esi,edx
add ebp,edi
- mov edi,DWORD PTR 24[esp]
and esi,ebx
- ror ebx,2
+ mov edi,DWORD PTR 24[esp]
xor esi,edx
+ ror ebx,2
lea ebp,DWORD PTR 1518500249[edi*1+ebp]
add ebp,esi
; 00_15 7
@@ -171,10 +170,10 @@ $L000loop:
rol ebp,5
xor edi,ecx
add ebp,edx
- mov edx,DWORD PTR 28[esp]
and edi,eax
- ror eax,2
+ mov edx,DWORD PTR 28[esp]
xor edi,ecx
+ ror eax,2
lea ebp,DWORD PTR 1518500249[edx*1+ebp]
add ebp,edi
; 00_15 8
@@ -183,10 +182,10 @@ $L000loop:
rol ebp,5
xor edx,ebx
add ebp,ecx
- mov ecx,DWORD PTR 32[esp]
and edx,esi
- ror esi,2
+ mov ecx,DWORD PTR 32[esp]
xor edx,ebx
+ ror esi,2
lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
add ebp,edx
; 00_15 9
@@ -195,10 +194,10 @@ $L000loop:
rol ebp,5
xor ecx,eax
add ebp,ebx
- mov ebx,DWORD PTR 36[esp]
and ecx,edi
- ror edi,2
+ mov ebx,DWORD PTR 36[esp]
xor ecx,eax
+ ror edi,2
lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
add ebp,ecx
; 00_15 10
@@ -207,10 +206,10 @@ $L000loop:
rol ebp,5
xor ebx,esi
add ebp,eax
- mov eax,DWORD PTR 40[esp]
and ebx,edx
- ror edx,2
+ mov eax,DWORD PTR 40[esp]
xor ebx,esi
+ ror edx,2
lea ebp,DWORD PTR 1518500249[eax*1+ebp]
add ebp,ebx
; 00_15 11
@@ -219,10 +218,10 @@ $L000loop:
rol ebp,5
xor eax,edi
add ebp,esi
- mov esi,DWORD PTR 44[esp]
and eax,ecx
- ror ecx,2
+ mov esi,DWORD PTR 44[esp]
xor eax,edi
+ ror ecx,2
lea ebp,DWORD PTR 1518500249[esi*1+ebp]
add ebp,eax
; 00_15 12
@@ -231,10 +230,10 @@ $L000loop:
rol ebp,5
xor esi,edx
add ebp,edi
- mov edi,DWORD PTR 48[esp]
and esi,ebx
- ror ebx,2
+ mov edi,DWORD PTR 48[esp]
xor esi,edx
+ ror ebx,2
lea ebp,DWORD PTR 1518500249[edi*1+ebp]
add ebp,esi
; 00_15 13
@@ -243,10 +242,10 @@ $L000loop:
rol ebp,5
xor edi,ecx
add ebp,edx
- mov edx,DWORD PTR 52[esp]
and edi,eax
- ror eax,2
+ mov edx,DWORD PTR 52[esp]
xor edi,ecx
+ ror eax,2
lea ebp,DWORD PTR 1518500249[edx*1+ebp]
add ebp,edi
; 00_15 14
@@ -255,10 +254,10 @@ $L000loop:
rol ebp,5
xor edx,ebx
add ebp,ecx
- mov ecx,DWORD PTR 56[esp]
and edx,esi
- ror esi,2
+ mov ecx,DWORD PTR 56[esp]
xor edx,ebx
+ ror esi,2
lea ebp,DWORD PTR 1518500249[ecx*1+ebp]
add ebp,edx
; 00_15 15
@@ -267,1099 +266,1162 @@ $L000loop:
rol ebp,5
xor ecx,eax
add ebp,ebx
- mov ebx,DWORD PTR 60[esp]
and ecx,edi
- ror edi,2
+ mov ebx,DWORD PTR 60[esp]
xor ecx,eax
+ ror edi,2
lea ebp,DWORD PTR 1518500249[ebx*1+ebp]
- mov ebx,DWORD PTR [esp]
add ecx,ebp
; 16_19 16
+ mov ebx,DWORD PTR [esp]
mov ebp,edi
xor ebx,DWORD PTR 8[esp]
xor ebp,esi
xor ebx,DWORD PTR 32[esp]
and ebp,edx
+ ror edx,2
xor ebx,DWORD PTR 52[esp]
rol ebx,1
xor ebp,esi
- add eax,ebp
- mov ebp,ecx
- ror edx,2
mov DWORD PTR [esp],ebx
- rol ebp,5
lea ebx,DWORD PTR 1518500249[eax*1+ebx]
- mov eax,DWORD PTR 4[esp]
+ mov eax,ecx
+ rol eax,5
add ebx,ebp
+ add ebx,eax
; 16_19 17
+ mov eax,DWORD PTR 4[esp]
mov ebp,edx
xor eax,DWORD PTR 12[esp]
xor ebp,edi
xor eax,DWORD PTR 36[esp]
and ebp,ecx
+ ror ecx,2
xor eax,DWORD PTR 56[esp]
rol eax,1
xor ebp,edi
- add esi,ebp
- mov ebp,ebx
- ror ecx,2
mov DWORD PTR 4[esp],eax
- rol ebp,5
lea eax,DWORD PTR 1518500249[esi*1+eax]
- mov esi,DWORD PTR 8[esp]
+ mov esi,ebx
+ rol esi,5
add eax,ebp
+ add eax,esi
; 16_19 18
+ mov esi,DWORD PTR 8[esp]
mov ebp,ecx
xor esi,DWORD PTR 16[esp]
xor ebp,edx
xor esi,DWORD PTR 40[esp]
and ebp,ebx
+ ror ebx,2
xor esi,DWORD PTR 60[esp]
rol esi,1
xor ebp,edx
- add edi,ebp
- mov ebp,eax
- ror ebx,2
mov DWORD PTR 8[esp],esi
- rol ebp,5
lea esi,DWORD PTR 1518500249[edi*1+esi]
- mov edi,DWORD PTR 12[esp]
+ mov edi,eax
+ rol edi,5
add esi,ebp
+ add esi,edi
; 16_19 19
+ mov edi,DWORD PTR 12[esp]
mov ebp,ebx
xor edi,DWORD PTR 20[esp]
xor ebp,ecx
xor edi,DWORD PTR 44[esp]
and ebp,eax
+ ror eax,2
xor edi,DWORD PTR [esp]
rol edi,1
xor ebp,ecx
- add edx,ebp
- mov ebp,esi
- ror eax,2
mov DWORD PTR 12[esp],edi
- rol ebp,5
lea edi,DWORD PTR 1518500249[edx*1+edi]
- mov edx,DWORD PTR 16[esp]
+ mov edx,esi
+ rol edx,5
add edi,ebp
+ add edi,edx
; 20_39 20
mov ebp,esi
+ mov edx,DWORD PTR 16[esp]
+ ror esi,2
xor edx,DWORD PTR 24[esp]
xor ebp,eax
xor edx,DWORD PTR 48[esp]
xor ebp,ebx
xor edx,DWORD PTR 4[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 16[esp],edx
- lea edx,DWORD PTR 1859775393[ecx*1+edx]
- mov ecx,DWORD PTR 20[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
; 20_39 21
mov ebp,edi
+ mov ecx,DWORD PTR 20[esp]
+ ror edi,2
xor ecx,DWORD PTR 28[esp]
xor ebp,esi
xor ecx,DWORD PTR 52[esp]
xor ebp,eax
xor ecx,DWORD PTR 8[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 20[esp],ecx
- lea ecx,DWORD PTR 1859775393[ebx*1+ecx]
- mov ebx,DWORD PTR 24[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
; 20_39 22
mov ebp,edx
+ mov ebx,DWORD PTR 24[esp]
+ ror edx,2
xor ebx,DWORD PTR 32[esp]
xor ebp,edi
xor ebx,DWORD PTR 56[esp]
xor ebp,esi
xor ebx,DWORD PTR 12[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR 24[esp],ebx
- lea ebx,DWORD PTR 1859775393[eax*1+ebx]
- mov eax,DWORD PTR 28[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
; 20_39 23
mov ebp,ecx
+ mov eax,DWORD PTR 28[esp]
+ ror ecx,2
xor eax,DWORD PTR 36[esp]
xor ebp,edx
xor eax,DWORD PTR 60[esp]
xor ebp,edi
xor eax,DWORD PTR 16[esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
+ add ebp,esi
mov DWORD PTR 28[esp],eax
- lea eax,DWORD PTR 1859775393[esi*1+eax]
- mov esi,DWORD PTR 32[esp]
- add eax,ebp
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
; 20_39 24
mov ebp,ebx
+ mov esi,DWORD PTR 32[esp]
+ ror ebx,2
xor esi,DWORD PTR 40[esp]
xor ebp,ecx
xor esi,DWORD PTR [esp]
xor ebp,edx
xor esi,DWORD PTR 20[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 32[esp],esi
- lea esi,DWORD PTR 1859775393[edi*1+esi]
- mov edi,DWORD PTR 36[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
; 20_39 25
mov ebp,eax
+ mov edi,DWORD PTR 36[esp]
+ ror eax,2
xor edi,DWORD PTR 44[esp]
xor ebp,ebx
xor edi,DWORD PTR 4[esp]
xor ebp,ecx
xor edi,DWORD PTR 24[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 36[esp],edi
- lea edi,DWORD PTR 1859775393[edx*1+edi]
- mov edx,DWORD PTR 40[esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
; 20_39 26
mov ebp,esi
+ mov edx,DWORD PTR 40[esp]
+ ror esi,2
xor edx,DWORD PTR 48[esp]
xor ebp,eax
xor edx,DWORD PTR 8[esp]
xor ebp,ebx
xor edx,DWORD PTR 28[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 40[esp],edx
- lea edx,DWORD PTR 1859775393[ecx*1+edx]
- mov ecx,DWORD PTR 44[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
; 20_39 27
mov ebp,edi
+ mov ecx,DWORD PTR 44[esp]
+ ror edi,2
xor ecx,DWORD PTR 52[esp]
xor ebp,esi
xor ecx,DWORD PTR 12[esp]
xor ebp,eax
xor ecx,DWORD PTR 32[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 44[esp],ecx
- lea ecx,DWORD PTR 1859775393[ebx*1+ecx]
- mov ebx,DWORD PTR 48[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
; 20_39 28
mov ebp,edx
+ mov ebx,DWORD PTR 48[esp]
+ ror edx,2
xor ebx,DWORD PTR 56[esp]
xor ebp,edi
xor ebx,DWORD PTR 16[esp]
xor ebp,esi
xor ebx,DWORD PTR 36[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR 48[esp],ebx
- lea ebx,DWORD PTR 1859775393[eax*1+ebx]
- mov eax,DWORD PTR 52[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
; 20_39 29
mov ebp,ecx
+ mov eax,DWORD PTR 52[esp]
+ ror ecx,2
xor eax,DWORD PTR 60[esp]
xor ebp,edx
xor eax,DWORD PTR 20[esp]
xor ebp,edi
xor eax,DWORD PTR 40[esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
+ add ebp,esi
mov DWORD PTR 52[esp],eax
- lea eax,DWORD PTR 1859775393[esi*1+eax]
- mov esi,DWORD PTR 56[esp]
- add eax,ebp
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
; 20_39 30
mov ebp,ebx
+ mov esi,DWORD PTR 56[esp]
+ ror ebx,2
xor esi,DWORD PTR [esp]
xor ebp,ecx
xor esi,DWORD PTR 24[esp]
xor ebp,edx
xor esi,DWORD PTR 44[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 56[esp],esi
- lea esi,DWORD PTR 1859775393[edi*1+esi]
- mov edi,DWORD PTR 60[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
; 20_39 31
mov ebp,eax
+ mov edi,DWORD PTR 60[esp]
+ ror eax,2
xor edi,DWORD PTR 4[esp]
xor ebp,ebx
xor edi,DWORD PTR 28[esp]
xor ebp,ecx
xor edi,DWORD PTR 48[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 60[esp],edi
- lea edi,DWORD PTR 1859775393[edx*1+edi]
- mov edx,DWORD PTR [esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
; 20_39 32
mov ebp,esi
+ mov edx,DWORD PTR [esp]
+ ror esi,2
xor edx,DWORD PTR 8[esp]
xor ebp,eax
xor edx,DWORD PTR 32[esp]
xor ebp,ebx
xor edx,DWORD PTR 52[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR [esp],edx
- lea edx,DWORD PTR 1859775393[ecx*1+edx]
- mov ecx,DWORD PTR 4[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
; 20_39 33
mov ebp,edi
+ mov ecx,DWORD PTR 4[esp]
+ ror edi,2
xor ecx,DWORD PTR 12[esp]
xor ebp,esi
xor ecx,DWORD PTR 36[esp]
xor ebp,eax
xor ecx,DWORD PTR 56[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 4[esp],ecx
- lea ecx,DWORD PTR 1859775393[ebx*1+ecx]
- mov ebx,DWORD PTR 8[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
; 20_39 34
mov ebp,edx
+ mov ebx,DWORD PTR 8[esp]
+ ror edx,2
xor ebx,DWORD PTR 16[esp]
xor ebp,edi
xor ebx,DWORD PTR 40[esp]
xor ebp,esi
xor ebx,DWORD PTR 60[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR 8[esp],ebx
- lea ebx,DWORD PTR 1859775393[eax*1+ebx]
- mov eax,DWORD PTR 12[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 1859775393[ebp*1+ebx]
+ add ebx,eax
; 20_39 35
mov ebp,ecx
+ mov eax,DWORD PTR 12[esp]
+ ror ecx,2
xor eax,DWORD PTR 20[esp]
xor ebp,edx
xor eax,DWORD PTR 44[esp]
xor ebp,edi
xor eax,DWORD PTR [esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
+ add ebp,esi
mov DWORD PTR 12[esp],eax
- lea eax,DWORD PTR 1859775393[esi*1+eax]
- mov esi,DWORD PTR 16[esp]
- add eax,ebp
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 1859775393[ebp*1+eax]
+ add eax,esi
; 20_39 36
mov ebp,ebx
+ mov esi,DWORD PTR 16[esp]
+ ror ebx,2
xor esi,DWORD PTR 24[esp]
xor ebp,ecx
xor esi,DWORD PTR 48[esp]
xor ebp,edx
xor esi,DWORD PTR 4[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 16[esp],esi
- lea esi,DWORD PTR 1859775393[edi*1+esi]
- mov edi,DWORD PTR 20[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 1859775393[ebp*1+esi]
+ add esi,edi
; 20_39 37
mov ebp,eax
+ mov edi,DWORD PTR 20[esp]
+ ror eax,2
xor edi,DWORD PTR 28[esp]
xor ebp,ebx
xor edi,DWORD PTR 52[esp]
xor ebp,ecx
xor edi,DWORD PTR 8[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 20[esp],edi
- lea edi,DWORD PTR 1859775393[edx*1+edi]
- mov edx,DWORD PTR 24[esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 1859775393[ebp*1+edi]
+ add edi,edx
; 20_39 38
mov ebp,esi
+ mov edx,DWORD PTR 24[esp]
+ ror esi,2
xor edx,DWORD PTR 32[esp]
xor ebp,eax
xor edx,DWORD PTR 56[esp]
xor ebp,ebx
xor edx,DWORD PTR 12[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 24[esp],edx
- lea edx,DWORD PTR 1859775393[ecx*1+edx]
- mov ecx,DWORD PTR 28[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 1859775393[ebp*1+edx]
+ add edx,ecx
; 20_39 39
mov ebp,edi
+ mov ecx,DWORD PTR 28[esp]
+ ror edi,2
xor ecx,DWORD PTR 36[esp]
xor ebp,esi
xor ecx,DWORD PTR 60[esp]
xor ebp,eax
xor ecx,DWORD PTR 16[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 28[esp],ecx
- lea ecx,DWORD PTR 1859775393[ebx*1+ecx]
- mov ebx,DWORD PTR 32[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 1859775393[ebp*1+ecx]
+ add ecx,ebx
; 40_59 40
- mov ebp,edi
- xor ebx,DWORD PTR 40[esp]
- xor ebp,esi
- xor ebx,DWORD PTR [esp]
- and ebp,edx
- xor ebx,DWORD PTR 20[esp]
+ mov ebx,DWORD PTR 32[esp]
+ mov ebp,DWORD PTR 40[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor ebx,ebp
+ mov ebp,edx
rol ebx,1
- add ebp,eax
+ or ebp,edi
+ mov DWORD PTR 32[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
ror edx,2
+ and eax,edi
+ or ebp,eax
mov eax,ecx
rol eax,5
- mov DWORD PTR 32[esp],ebx
- lea ebx,DWORD PTR 2400959708[ebp*1+ebx]
- mov ebp,edi
- add ebx,eax
- and ebp,esi
- mov eax,DWORD PTR 36[esp]
add ebx,ebp
+ add ebx,eax
; 40_59 41
- mov ebp,edx
- xor eax,DWORD PTR 44[esp]
- xor ebp,edi
- xor eax,DWORD PTR 4[esp]
- and ebp,ecx
- xor eax,DWORD PTR 24[esp]
+ mov eax,DWORD PTR 36[esp]
+ mov ebp,DWORD PTR 44[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor eax,ebp
+ mov ebp,ecx
rol eax,1
- add ebp,esi
+ or ebp,edx
+ mov DWORD PTR 36[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
ror ecx,2
+ and esi,edx
+ or ebp,esi
mov esi,ebx
rol esi,5
- mov DWORD PTR 36[esp],eax
- lea eax,DWORD PTR 2400959708[ebp*1+eax]
- mov ebp,edx
- add eax,esi
- and ebp,edi
- mov esi,DWORD PTR 40[esp]
add eax,ebp
+ add eax,esi
; 40_59 42
- mov ebp,ecx
- xor esi,DWORD PTR 48[esp]
- xor ebp,edx
- xor esi,DWORD PTR 8[esp]
- and ebp,ebx
- xor esi,DWORD PTR 28[esp]
+ mov esi,DWORD PTR 40[esp]
+ mov ebp,DWORD PTR 48[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor esi,ebp
+ mov ebp,ebx
rol esi,1
- add ebp,edi
+ or ebp,ecx
+ mov DWORD PTR 40[esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
ror ebx,2
+ and edi,ecx
+ or ebp,edi
mov edi,eax
rol edi,5
- mov DWORD PTR 40[esp],esi
- lea esi,DWORD PTR 2400959708[ebp*1+esi]
- mov ebp,ecx
- add esi,edi
- and ebp,edx
- mov edi,DWORD PTR 44[esp]
add esi,ebp
+ add esi,edi
; 40_59 43
- mov ebp,ebx
- xor edi,DWORD PTR 52[esp]
- xor ebp,ecx
- xor edi,DWORD PTR 12[esp]
- and ebp,eax
- xor edi,DWORD PTR 32[esp]
+ mov edi,DWORD PTR 44[esp]
+ mov ebp,DWORD PTR 52[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor edi,ebp
+ mov ebp,eax
rol edi,1
- add ebp,edx
+ or ebp,ebx
+ mov DWORD PTR 44[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
ror eax,2
+ and edx,ebx
+ or ebp,edx
mov edx,esi
rol edx,5
- mov DWORD PTR 44[esp],edi
- lea edi,DWORD PTR 2400959708[ebp*1+edi]
- mov ebp,ebx
- add edi,edx
- and ebp,ecx
- mov edx,DWORD PTR 48[esp]
add edi,ebp
+ add edi,edx
; 40_59 44
- mov ebp,eax
- xor edx,DWORD PTR 56[esp]
- xor ebp,ebx
- xor edx,DWORD PTR 16[esp]
- and ebp,esi
- xor edx,DWORD PTR 36[esp]
+ mov edx,DWORD PTR 48[esp]
+ mov ebp,DWORD PTR 56[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 16[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 36[esp]
+ xor edx,ebp
+ mov ebp,esi
rol edx,1
- add ebp,ecx
+ or ebp,eax
+ mov DWORD PTR 48[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
ror esi,2
+ and ecx,eax
+ or ebp,ecx
mov ecx,edi
rol ecx,5
- mov DWORD PTR 48[esp],edx
- lea edx,DWORD PTR 2400959708[ebp*1+edx]
- mov ebp,eax
- add edx,ecx
- and ebp,ebx
- mov ecx,DWORD PTR 52[esp]
add edx,ebp
+ add edx,ecx
; 40_59 45
- mov ebp,esi
- xor ecx,DWORD PTR 60[esp]
- xor ebp,eax
- xor ecx,DWORD PTR 20[esp]
- and ebp,edi
- xor ecx,DWORD PTR 40[esp]
+ mov ecx,DWORD PTR 52[esp]
+ mov ebp,DWORD PTR 60[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 40[esp]
+ xor ecx,ebp
+ mov ebp,edi
rol ecx,1
- add ebp,ebx
+ or ebp,esi
+ mov DWORD PTR 52[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
ror edi,2
+ and ebx,esi
+ or ebp,ebx
mov ebx,edx
rol ebx,5
- mov DWORD PTR 52[esp],ecx
- lea ecx,DWORD PTR 2400959708[ebp*1+ecx]
- mov ebp,esi
- add ecx,ebx
- and ebp,eax
- mov ebx,DWORD PTR 56[esp]
add ecx,ebp
+ add ecx,ebx
; 40_59 46
- mov ebp,edi
- xor ebx,DWORD PTR [esp]
- xor ebp,esi
- xor ebx,DWORD PTR 24[esp]
- and ebp,edx
- xor ebx,DWORD PTR 44[esp]
+ mov ebx,DWORD PTR 56[esp]
+ mov ebp,DWORD PTR [esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 44[esp]
+ xor ebx,ebp
+ mov ebp,edx
rol ebx,1
- add ebp,eax
+ or ebp,edi
+ mov DWORD PTR 56[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
ror edx,2
+ and eax,edi
+ or ebp,eax
mov eax,ecx
rol eax,5
- mov DWORD PTR 56[esp],ebx
- lea ebx,DWORD PTR 2400959708[ebp*1+ebx]
- mov ebp,edi
- add ebx,eax
- and ebp,esi
- mov eax,DWORD PTR 60[esp]
add ebx,ebp
+ add ebx,eax
; 40_59 47
- mov ebp,edx
- xor eax,DWORD PTR 4[esp]
- xor ebp,edi
- xor eax,DWORD PTR 28[esp]
- and ebp,ecx
- xor eax,DWORD PTR 48[esp]
+ mov eax,DWORD PTR 60[esp]
+ mov ebp,DWORD PTR 4[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 48[esp]
+ xor eax,ebp
+ mov ebp,ecx
rol eax,1
- add ebp,esi
+ or ebp,edx
+ mov DWORD PTR 60[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
ror ecx,2
+ and esi,edx
+ or ebp,esi
mov esi,ebx
rol esi,5
- mov DWORD PTR 60[esp],eax
- lea eax,DWORD PTR 2400959708[ebp*1+eax]
- mov ebp,edx
- add eax,esi
- and ebp,edi
- mov esi,DWORD PTR [esp]
add eax,ebp
+ add eax,esi
; 40_59 48
- mov ebp,ecx
- xor esi,DWORD PTR 8[esp]
- xor ebp,edx
- xor esi,DWORD PTR 32[esp]
- and ebp,ebx
- xor esi,DWORD PTR 52[esp]
+ mov esi,DWORD PTR [esp]
+ mov ebp,DWORD PTR 8[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 52[esp]
+ xor esi,ebp
+ mov ebp,ebx
rol esi,1
- add ebp,edi
+ or ebp,ecx
+ mov DWORD PTR [esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
ror ebx,2
+ and edi,ecx
+ or ebp,edi
mov edi,eax
rol edi,5
- mov DWORD PTR [esp],esi
- lea esi,DWORD PTR 2400959708[ebp*1+esi]
- mov ebp,ecx
- add esi,edi
- and ebp,edx
- mov edi,DWORD PTR 4[esp]
add esi,ebp
+ add esi,edi
; 40_59 49
- mov ebp,ebx
- xor edi,DWORD PTR 12[esp]
- xor ebp,ecx
- xor edi,DWORD PTR 36[esp]
- and ebp,eax
- xor edi,DWORD PTR 56[esp]
+ mov edi,DWORD PTR 4[esp]
+ mov ebp,DWORD PTR 12[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 36[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 56[esp]
+ xor edi,ebp
+ mov ebp,eax
rol edi,1
- add ebp,edx
+ or ebp,ebx
+ mov DWORD PTR 4[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
ror eax,2
+ and edx,ebx
+ or ebp,edx
mov edx,esi
rol edx,5
- mov DWORD PTR 4[esp],edi
- lea edi,DWORD PTR 2400959708[ebp*1+edi]
- mov ebp,ebx
- add edi,edx
- and ebp,ecx
- mov edx,DWORD PTR 8[esp]
add edi,ebp
+ add edi,edx
; 40_59 50
- mov ebp,eax
- xor edx,DWORD PTR 16[esp]
- xor ebp,ebx
- xor edx,DWORD PTR 40[esp]
- and ebp,esi
- xor edx,DWORD PTR 60[esp]
+ mov edx,DWORD PTR 8[esp]
+ mov ebp,DWORD PTR 16[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 40[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 60[esp]
+ xor edx,ebp
+ mov ebp,esi
rol edx,1
- add ebp,ecx
+ or ebp,eax
+ mov DWORD PTR 8[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
ror esi,2
+ and ecx,eax
+ or ebp,ecx
mov ecx,edi
rol ecx,5
- mov DWORD PTR 8[esp],edx
- lea edx,DWORD PTR 2400959708[ebp*1+edx]
- mov ebp,eax
- add edx,ecx
- and ebp,ebx
- mov ecx,DWORD PTR 12[esp]
add edx,ebp
+ add edx,ecx
; 40_59 51
- mov ebp,esi
- xor ecx,DWORD PTR 20[esp]
- xor ebp,eax
- xor ecx,DWORD PTR 44[esp]
- and ebp,edi
- xor ecx,DWORD PTR [esp]
+ mov ecx,DWORD PTR 12[esp]
+ mov ebp,DWORD PTR 20[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 44[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor ecx,ebp
+ mov ebp,edi
rol ecx,1
- add ebp,ebx
+ or ebp,esi
+ mov DWORD PTR 12[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
ror edi,2
+ and ebx,esi
+ or ebp,ebx
mov ebx,edx
rol ebx,5
- mov DWORD PTR 12[esp],ecx
- lea ecx,DWORD PTR 2400959708[ebp*1+ecx]
- mov ebp,esi
- add ecx,ebx
- and ebp,eax
- mov ebx,DWORD PTR 16[esp]
add ecx,ebp
+ add ecx,ebx
; 40_59 52
- mov ebp,edi
- xor ebx,DWORD PTR 24[esp]
- xor ebp,esi
- xor ebx,DWORD PTR 48[esp]
- and ebp,edx
- xor ebx,DWORD PTR 4[esp]
+ mov ebx,DWORD PTR 16[esp]
+ mov ebp,DWORD PTR 24[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 48[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor ebx,ebp
+ mov ebp,edx
rol ebx,1
- add ebp,eax
+ or ebp,edi
+ mov DWORD PTR 16[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
ror edx,2
+ and eax,edi
+ or ebp,eax
mov eax,ecx
rol eax,5
- mov DWORD PTR 16[esp],ebx
- lea ebx,DWORD PTR 2400959708[ebp*1+ebx]
- mov ebp,edi
- add ebx,eax
- and ebp,esi
- mov eax,DWORD PTR 20[esp]
add ebx,ebp
+ add ebx,eax
; 40_59 53
- mov ebp,edx
- xor eax,DWORD PTR 28[esp]
- xor ebp,edi
- xor eax,DWORD PTR 52[esp]
- and ebp,ecx
- xor eax,DWORD PTR 8[esp]
+ mov eax,DWORD PTR 20[esp]
+ mov ebp,DWORD PTR 28[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 52[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor eax,ebp
+ mov ebp,ecx
rol eax,1
- add ebp,esi
+ or ebp,edx
+ mov DWORD PTR 20[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
ror ecx,2
+ and esi,edx
+ or ebp,esi
mov esi,ebx
rol esi,5
- mov DWORD PTR 20[esp],eax
- lea eax,DWORD PTR 2400959708[ebp*1+eax]
- mov ebp,edx
- add eax,esi
- and ebp,edi
- mov esi,DWORD PTR 24[esp]
add eax,ebp
+ add eax,esi
; 40_59 54
- mov ebp,ecx
- xor esi,DWORD PTR 32[esp]
- xor ebp,edx
- xor esi,DWORD PTR 56[esp]
- and ebp,ebx
- xor esi,DWORD PTR 12[esp]
+ mov esi,DWORD PTR 24[esp]
+ mov ebp,DWORD PTR 32[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 56[esp]
+ xor esi,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor esi,ebp
+ mov ebp,ebx
rol esi,1
- add ebp,edi
+ or ebp,ecx
+ mov DWORD PTR 24[esp],esi
+ and ebp,edx
+ lea esi,DWORD PTR 2400959708[edi*1+esi]
+ mov edi,ebx
ror ebx,2
+ and edi,ecx
+ or ebp,edi
mov edi,eax
rol edi,5
- mov DWORD PTR 24[esp],esi
- lea esi,DWORD PTR 2400959708[ebp*1+esi]
- mov ebp,ecx
- add esi,edi
- and ebp,edx
- mov edi,DWORD PTR 28[esp]
add esi,ebp
+ add esi,edi
; 40_59 55
- mov ebp,ebx
- xor edi,DWORD PTR 36[esp]
- xor ebp,ecx
- xor edi,DWORD PTR 60[esp]
- and ebp,eax
- xor edi,DWORD PTR 16[esp]
+ mov edi,DWORD PTR 28[esp]
+ mov ebp,DWORD PTR 36[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 60[esp]
+ xor edi,ebp
+ mov ebp,DWORD PTR 16[esp]
+ xor edi,ebp
+ mov ebp,eax
rol edi,1
- add ebp,edx
+ or ebp,ebx
+ mov DWORD PTR 28[esp],edi
+ and ebp,ecx
+ lea edi,DWORD PTR 2400959708[edx*1+edi]
+ mov edx,eax
ror eax,2
+ and edx,ebx
+ or ebp,edx
mov edx,esi
rol edx,5
- mov DWORD PTR 28[esp],edi
- lea edi,DWORD PTR 2400959708[ebp*1+edi]
- mov ebp,ebx
- add edi,edx
- and ebp,ecx
- mov edx,DWORD PTR 32[esp]
add edi,ebp
+ add edi,edx
; 40_59 56
- mov ebp,eax
- xor edx,DWORD PTR 40[esp]
- xor ebp,ebx
- xor edx,DWORD PTR [esp]
- and ebp,esi
- xor edx,DWORD PTR 20[esp]
+ mov edx,DWORD PTR 32[esp]
+ mov ebp,DWORD PTR 40[esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR [esp]
+ xor edx,ebp
+ mov ebp,DWORD PTR 20[esp]
+ xor edx,ebp
+ mov ebp,esi
rol edx,1
- add ebp,ecx
+ or ebp,eax
+ mov DWORD PTR 32[esp],edx
+ and ebp,ebx
+ lea edx,DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx,esi
ror esi,2
+ and ecx,eax
+ or ebp,ecx
mov ecx,edi
rol ecx,5
- mov DWORD PTR 32[esp],edx
- lea edx,DWORD PTR 2400959708[ebp*1+edx]
- mov ebp,eax
- add edx,ecx
- and ebp,ebx
- mov ecx,DWORD PTR 36[esp]
add edx,ebp
+ add edx,ecx
; 40_59 57
- mov ebp,esi
- xor ecx,DWORD PTR 44[esp]
- xor ebp,eax
- xor ecx,DWORD PTR 4[esp]
- and ebp,edi
- xor ecx,DWORD PTR 24[esp]
+ mov ecx,DWORD PTR 36[esp]
+ mov ebp,DWORD PTR 44[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 4[esp]
+ xor ecx,ebp
+ mov ebp,DWORD PTR 24[esp]
+ xor ecx,ebp
+ mov ebp,edi
rol ecx,1
- add ebp,ebx
+ or ebp,esi
+ mov DWORD PTR 36[esp],ecx
+ and ebp,eax
+ lea ecx,DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx,edi
ror edi,2
+ and ebx,esi
+ or ebp,ebx
mov ebx,edx
rol ebx,5
- mov DWORD PTR 36[esp],ecx
- lea ecx,DWORD PTR 2400959708[ebp*1+ecx]
- mov ebp,esi
- add ecx,ebx
- and ebp,eax
- mov ebx,DWORD PTR 40[esp]
add ecx,ebp
+ add ecx,ebx
; 40_59 58
- mov ebp,edi
- xor ebx,DWORD PTR 48[esp]
- xor ebp,esi
- xor ebx,DWORD PTR 8[esp]
- and ebp,edx
- xor ebx,DWORD PTR 28[esp]
+ mov ebx,DWORD PTR 40[esp]
+ mov ebp,DWORD PTR 48[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 8[esp]
+ xor ebx,ebp
+ mov ebp,DWORD PTR 28[esp]
+ xor ebx,ebp
+ mov ebp,edx
rol ebx,1
- add ebp,eax
+ or ebp,edi
+ mov DWORD PTR 40[esp],ebx
+ and ebp,esi
+ lea ebx,DWORD PTR 2400959708[eax*1+ebx]
+ mov eax,edx
ror edx,2
+ and eax,edi
+ or ebp,eax
mov eax,ecx
rol eax,5
- mov DWORD PTR 40[esp],ebx
- lea ebx,DWORD PTR 2400959708[ebp*1+ebx]
- mov ebp,edi
- add ebx,eax
- and ebp,esi
- mov eax,DWORD PTR 44[esp]
add ebx,ebp
+ add ebx,eax
; 40_59 59
- mov ebp,edx
- xor eax,DWORD PTR 52[esp]
- xor ebp,edi
- xor eax,DWORD PTR 12[esp]
- and ebp,ecx
- xor eax,DWORD PTR 32[esp]
+ mov eax,DWORD PTR 44[esp]
+ mov ebp,DWORD PTR 52[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 12[esp]
+ xor eax,ebp
+ mov ebp,DWORD PTR 32[esp]
+ xor eax,ebp
+ mov ebp,ecx
rol eax,1
- add ebp,esi
+ or ebp,edx
+ mov DWORD PTR 44[esp],eax
+ and ebp,edi
+ lea eax,DWORD PTR 2400959708[esi*1+eax]
+ mov esi,ecx
ror ecx,2
+ and esi,edx
+ or ebp,esi
mov esi,ebx
rol esi,5
- mov DWORD PTR 44[esp],eax
- lea eax,DWORD PTR 2400959708[ebp*1+eax]
- mov ebp,edx
- add eax,esi
- and ebp,edi
- mov esi,DWORD PTR 48[esp]
add eax,ebp
+ add eax,esi
; 20_39 60
mov ebp,ebx
+ mov esi,DWORD PTR 48[esp]
+ ror ebx,2
xor esi,DWORD PTR 56[esp]
xor ebp,ecx
xor esi,DWORD PTR 16[esp]
xor ebp,edx
xor esi,DWORD PTR 36[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 48[esp],esi
- lea esi,DWORD PTR 3395469782[edi*1+esi]
- mov edi,DWORD PTR 52[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
; 20_39 61
mov ebp,eax
+ mov edi,DWORD PTR 52[esp]
+ ror eax,2
xor edi,DWORD PTR 60[esp]
xor ebp,ebx
xor edi,DWORD PTR 20[esp]
xor ebp,ecx
xor edi,DWORD PTR 40[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 52[esp],edi
- lea edi,DWORD PTR 3395469782[edx*1+edi]
- mov edx,DWORD PTR 56[esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
; 20_39 62
mov ebp,esi
+ mov edx,DWORD PTR 56[esp]
+ ror esi,2
xor edx,DWORD PTR [esp]
xor ebp,eax
xor edx,DWORD PTR 24[esp]
xor ebp,ebx
xor edx,DWORD PTR 44[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 56[esp],edx
- lea edx,DWORD PTR 3395469782[ecx*1+edx]
- mov ecx,DWORD PTR 60[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
; 20_39 63
mov ebp,edi
+ mov ecx,DWORD PTR 60[esp]
+ ror edi,2
xor ecx,DWORD PTR 4[esp]
xor ebp,esi
xor ecx,DWORD PTR 28[esp]
xor ebp,eax
xor ecx,DWORD PTR 48[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 60[esp],ecx
- lea ecx,DWORD PTR 3395469782[ebx*1+ecx]
- mov ebx,DWORD PTR [esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
; 20_39 64
mov ebp,edx
+ mov ebx,DWORD PTR [esp]
+ ror edx,2
xor ebx,DWORD PTR 8[esp]
xor ebp,edi
xor ebx,DWORD PTR 32[esp]
xor ebp,esi
xor ebx,DWORD PTR 52[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR [esp],ebx
- lea ebx,DWORD PTR 3395469782[eax*1+ebx]
- mov eax,DWORD PTR 4[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
; 20_39 65
mov ebp,ecx
+ mov eax,DWORD PTR 4[esp]
+ ror ecx,2
xor eax,DWORD PTR 12[esp]
xor ebp,edx
xor eax,DWORD PTR 36[esp]
xor ebp,edi
xor eax,DWORD PTR 56[esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
+ add ebp,esi
mov DWORD PTR 4[esp],eax
- lea eax,DWORD PTR 3395469782[esi*1+eax]
- mov esi,DWORD PTR 8[esp]
- add eax,ebp
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
; 20_39 66
mov ebp,ebx
+ mov esi,DWORD PTR 8[esp]
+ ror ebx,2
xor esi,DWORD PTR 16[esp]
xor ebp,ecx
xor esi,DWORD PTR 40[esp]
xor ebp,edx
xor esi,DWORD PTR 60[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 8[esp],esi
- lea esi,DWORD PTR 3395469782[edi*1+esi]
- mov edi,DWORD PTR 12[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
; 20_39 67
mov ebp,eax
+ mov edi,DWORD PTR 12[esp]
+ ror eax,2
xor edi,DWORD PTR 20[esp]
xor ebp,ebx
xor edi,DWORD PTR 44[esp]
xor ebp,ecx
xor edi,DWORD PTR [esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 12[esp],edi
- lea edi,DWORD PTR 3395469782[edx*1+edi]
- mov edx,DWORD PTR 16[esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
; 20_39 68
mov ebp,esi
+ mov edx,DWORD PTR 16[esp]
+ ror esi,2
xor edx,DWORD PTR 24[esp]
xor ebp,eax
xor edx,DWORD PTR 48[esp]
xor ebp,ebx
xor edx,DWORD PTR 4[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 16[esp],edx
- lea edx,DWORD PTR 3395469782[ecx*1+edx]
- mov ecx,DWORD PTR 20[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
; 20_39 69
mov ebp,edi
+ mov ecx,DWORD PTR 20[esp]
+ ror edi,2
xor ecx,DWORD PTR 28[esp]
xor ebp,esi
xor ecx,DWORD PTR 52[esp]
xor ebp,eax
xor ecx,DWORD PTR 8[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 20[esp],ecx
- lea ecx,DWORD PTR 3395469782[ebx*1+ecx]
- mov ebx,DWORD PTR 24[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
; 20_39 70
mov ebp,edx
+ mov ebx,DWORD PTR 24[esp]
+ ror edx,2
xor ebx,DWORD PTR 32[esp]
xor ebp,edi
xor ebx,DWORD PTR 56[esp]
xor ebp,esi
xor ebx,DWORD PTR 12[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR 24[esp],ebx
- lea ebx,DWORD PTR 3395469782[eax*1+ebx]
- mov eax,DWORD PTR 28[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
; 20_39 71
mov ebp,ecx
+ mov eax,DWORD PTR 28[esp]
+ ror ecx,2
xor eax,DWORD PTR 36[esp]
xor ebp,edx
xor eax,DWORD PTR 60[esp]
xor ebp,edi
xor eax,DWORD PTR 16[esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
+ add ebp,esi
mov DWORD PTR 28[esp],eax
- lea eax,DWORD PTR 3395469782[esi*1+eax]
- mov esi,DWORD PTR 32[esp]
- add eax,ebp
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
; 20_39 72
mov ebp,ebx
+ mov esi,DWORD PTR 32[esp]
+ ror ebx,2
xor esi,DWORD PTR 40[esp]
xor ebp,ecx
xor esi,DWORD PTR [esp]
xor ebp,edx
xor esi,DWORD PTR 20[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
+ add ebp,edi
mov DWORD PTR 32[esp],esi
- lea esi,DWORD PTR 3395469782[edi*1+esi]
- mov edi,DWORD PTR 36[esp]
- add esi,ebp
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
; 20_39 73
mov ebp,eax
+ mov edi,DWORD PTR 36[esp]
+ ror eax,2
xor edi,DWORD PTR 44[esp]
xor ebp,ebx
xor edi,DWORD PTR 4[esp]
xor ebp,ecx
xor edi,DWORD PTR 24[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
+ add ebp,edx
mov DWORD PTR 36[esp],edi
- lea edi,DWORD PTR 3395469782[edx*1+edi]
- mov edx,DWORD PTR 40[esp]
- add edi,ebp
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
; 20_39 74
mov ebp,esi
+ mov edx,DWORD PTR 40[esp]
+ ror esi,2
xor edx,DWORD PTR 48[esp]
xor ebp,eax
xor edx,DWORD PTR 8[esp]
xor ebp,ebx
xor edx,DWORD PTR 28[esp]
rol edx,1
- add ecx,ebp
- ror esi,2
- mov ebp,edi
- rol ebp,5
+ add ebp,ecx
mov DWORD PTR 40[esp],edx
- lea edx,DWORD PTR 3395469782[ecx*1+edx]
- mov ecx,DWORD PTR 44[esp]
- add edx,ebp
+ mov ecx,edi
+ rol ecx,5
+ lea edx,DWORD PTR 3395469782[ebp*1+edx]
+ add edx,ecx
; 20_39 75
mov ebp,edi
+ mov ecx,DWORD PTR 44[esp]
+ ror edi,2
xor ecx,DWORD PTR 52[esp]
xor ebp,esi
xor ecx,DWORD PTR 12[esp]
xor ebp,eax
xor ecx,DWORD PTR 32[esp]
rol ecx,1
- add ebx,ebp
- ror edi,2
- mov ebp,edx
- rol ebp,5
+ add ebp,ebx
mov DWORD PTR 44[esp],ecx
- lea ecx,DWORD PTR 3395469782[ebx*1+ecx]
- mov ebx,DWORD PTR 48[esp]
- add ecx,ebp
+ mov ebx,edx
+ rol ebx,5
+ lea ecx,DWORD PTR 3395469782[ebp*1+ecx]
+ add ecx,ebx
; 20_39 76
mov ebp,edx
+ mov ebx,DWORD PTR 48[esp]
+ ror edx,2
xor ebx,DWORD PTR 56[esp]
xor ebp,edi
xor ebx,DWORD PTR 16[esp]
xor ebp,esi
xor ebx,DWORD PTR 36[esp]
rol ebx,1
- add eax,ebp
- ror edx,2
- mov ebp,ecx
- rol ebp,5
+ add ebp,eax
mov DWORD PTR 48[esp],ebx
- lea ebx,DWORD PTR 3395469782[eax*1+ebx]
- mov eax,DWORD PTR 52[esp]
- add ebx,ebp
+ mov eax,ecx
+ rol eax,5
+ lea ebx,DWORD PTR 3395469782[ebp*1+ebx]
+ add ebx,eax
; 20_39 77
mov ebp,ecx
+ mov eax,DWORD PTR 52[esp]
+ ror ecx,2
xor eax,DWORD PTR 60[esp]
xor ebp,edx
xor eax,DWORD PTR 20[esp]
xor ebp,edi
xor eax,DWORD PTR 40[esp]
rol eax,1
- add esi,ebp
- ror ecx,2
- mov ebp,ebx
- rol ebp,5
- lea eax,DWORD PTR 3395469782[esi*1+eax]
- mov esi,DWORD PTR 56[esp]
- add eax,ebp
+ add ebp,esi
+ mov DWORD PTR 52[esp],eax
+ mov esi,ebx
+ rol esi,5
+ lea eax,DWORD PTR 3395469782[ebp*1+eax]
+ add eax,esi
; 20_39 78
mov ebp,ebx
+ mov esi,DWORD PTR 56[esp]
+ ror ebx,2
xor esi,DWORD PTR [esp]
xor ebp,ecx
xor esi,DWORD PTR 24[esp]
xor ebp,edx
xor esi,DWORD PTR 44[esp]
rol esi,1
- add edi,ebp
- ror ebx,2
- mov ebp,eax
- rol ebp,5
- lea esi,DWORD PTR 3395469782[edi*1+esi]
- mov edi,DWORD PTR 60[esp]
- add esi,ebp
+ add ebp,edi
+ mov DWORD PTR 56[esp],esi
+ mov edi,eax
+ rol edi,5
+ lea esi,DWORD PTR 3395469782[ebp*1+esi]
+ add esi,edi
; 20_39 79
mov ebp,eax
+ mov edi,DWORD PTR 60[esp]
+ ror eax,2
xor edi,DWORD PTR 4[esp]
xor ebp,ebx
xor edi,DWORD PTR 28[esp]
xor ebp,ecx
xor edi,DWORD PTR 48[esp]
rol edi,1
- add edx,ebp
- ror eax,2
- mov ebp,esi
- rol ebp,5
- lea edi,DWORD PTR 3395469782[edx*1+edi]
- add edi,ebp
- mov ebp,DWORD PTR 96[esp]
- mov edx,DWORD PTR 100[esp]
+ add ebp,edx
+ mov DWORD PTR 60[esp],edi
+ mov edx,esi
+ rol edx,5
+ lea edi,DWORD PTR 3395469782[ebp*1+edi]
+ add edi,edx
+ mov ebp,DWORD PTR 84[esp]
+ mov edx,DWORD PTR 88[esp]
add edi,DWORD PTR [ebp]
add esi,DWORD PTR 4[ebp]
add eax,DWORD PTR 8[ebp]
@@ -1368,14 +1430,14 @@ $L000loop:
mov DWORD PTR [ebp],edi
add edx,64
mov DWORD PTR 4[ebp],esi
- cmp edx,DWORD PTR 104[esp]
+ cmp edx,DWORD PTR 92[esp]
mov DWORD PTR 8[ebp],eax
mov edi,ecx
mov DWORD PTR 12[ebp],ebx
mov esi,edx
mov DWORD PTR 16[ebp],ecx
jb $L000loop
- add esp,76
+ add esp,64
pop edi
pop esi
pop ebx
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
index 577c38ffab..75b1dc8ac5 100644
--- a/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha256-586.asm
@@ -2,7 +2,7 @@ TITLE sha512-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
@@ -104,30 +104,31 @@ ALIGN 16
$L00300_15:
mov ebx,DWORD PTR 92[esp]
mov ecx,edx
- ror ecx,14
- mov esi,DWORD PTR 20[esp]
- xor ecx,edx
- ror ecx,5
- xor ecx,edx
ror ecx,6
+ mov edi,edx
+ ror edi,11
+ mov esi,DWORD PTR 20[esp]
+ xor ecx,edi
+ ror edi,14
+ xor ecx,edi
mov edi,DWORD PTR 24[esp]
add ebx,ecx
- xor esi,edi
mov DWORD PTR 16[esp],edx
+ xor esi,edi
mov ecx,eax
and esi,edx
mov edx,DWORD PTR 12[esp]
xor esi,edi
mov edi,eax
add ebx,esi
- ror ecx,9
+ ror ecx,2
add ebx,DWORD PTR 28[esp]
- xor ecx,eax
- ror ecx,11
+ ror edi,13
mov esi,DWORD PTR 4[esp]
- xor ecx,eax
- ror ecx,2
+ xor ecx,edi
+ ror edi,9
add edx,ebx
+ xor ecx,edi
mov edi,DWORD PTR 8[esp]
add ebx,ecx
mov DWORD PTR [esp],eax
@@ -149,46 +150,48 @@ ALIGN 16
$L00416_63:
mov esi,ebx
mov ecx,DWORD PTR 100[esp]
+ shr ebx,3
+ ror esi,7
+ xor ebx,esi
ror esi,11
mov edi,ecx
- xor esi,ebx
- ror esi,7
- shr ebx,3
- ror edi,2
xor ebx,esi
- xor edi,ecx
- ror edi,17
shr ecx,10
- add ebx,DWORD PTR 156[esp]
+ mov esi,DWORD PTR 156[esp]
+ ror edi,17
+ xor ecx,edi
+ ror edi,2
+ add ebx,esi
xor edi,ecx
- add ebx,DWORD PTR 120[esp]
- mov ecx,edx
add ebx,edi
- ror ecx,14
+ mov ecx,edx
+ add ebx,DWORD PTR 120[esp]
+ ror ecx,6
+ mov edi,edx
+ ror edi,11
mov esi,DWORD PTR 20[esp]
- xor ecx,edx
- ror ecx,5
+ xor ecx,edi
+ ror edi,14
mov DWORD PTR 92[esp],ebx
- xor ecx,edx
- ror ecx,6
+ xor ecx,edi
mov edi,DWORD PTR 24[esp]
add ebx,ecx
- xor esi,edi
mov DWORD PTR 16[esp],edx
+ xor esi,edi
mov ecx,eax
and esi,edx
mov edx,DWORD PTR 12[esp]
xor esi,edi
mov edi,eax
add ebx,esi
- ror ecx,9
+ ror ecx,2
add ebx,DWORD PTR 28[esp]
- xor ecx,eax
- ror ecx,11
+ ror edi,13
mov esi,DWORD PTR 4[esp]
- xor ecx,eax
- ror ecx,2
+ xor ecx,edi
+ ror edi,9
add edx,ebx
+ xor ecx,edi
mov edi,DWORD PTR 8[esp]
add ebx,ecx
mov DWORD PTR [esp],eax
diff --git a/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm b/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
index 98c1c070d7..9f3249762b 100644
--- a/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
+++ b/deps/openssl/asm/x86-win32-masm/sha/sha512-586.asm
@@ -2,7 +2,7 @@ TITLE sha512-586.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
diff --git a/deps/openssl/asm/x86-win32-masm/x86cpuid.asm b/deps/openssl/asm/x86-win32-masm/x86cpuid.asm
index b9b1c25843..7e663d6645 100644
--- a/deps/openssl/asm/x86-win32-masm/x86cpuid.asm
+++ b/deps/openssl/asm/x86-win32-masm/x86cpuid.asm
@@ -2,7 +2,7 @@ TITLE x86cpuid.asm
IF @Version LT 800
ECHO MASM version 8.00 or later is strongly recommended.
ENDIF
-.686
+.586
.MODEL FLAT
OPTION DOTNAME
IF @Version LT 800
@@ -27,9 +27,9 @@ $L_OPENSSL_ia32_cpuid_begin::
pushfd
pop eax
xor ecx,eax
- xor eax,eax
bt ecx,21
- jnc $L000nocpuid
+ jnc $L000done
+ xor eax,eax
cpuid
mov edi,eax
xor eax,eax
@@ -55,14 +55,7 @@ $L_OPENSSL_ia32_cpuid_begin::
jnz $L001intel
mov eax,2147483648
cpuid
- cmp eax,2147483649
- jb $L001intel
- mov esi,eax
- mov eax,2147483649
- cpuid
- or ebp,ecx
- and ebp,2049
- cmp esi,2147483656
+ cmp eax,2147483656
jb $L001intel
mov eax,2147483656
cpuid
@@ -71,68 +64,46 @@ $L_OPENSSL_ia32_cpuid_begin::
mov eax,1
cpuid
bt edx,28
- jnc $L002generic
+ jnc $L000done
shr ebx,16
and ebx,255
cmp ebx,esi
- ja $L002generic
+ ja $L000done
and edx,4026531839
- jmp $L002generic
+ jmp $L000done
$L001intel:
cmp edi,4
mov edi,-1
- jb $L003nocacheinfo
+ jb $L002nocacheinfo
mov eax,4
mov ecx,0
cpuid
mov edi,eax
shr edi,14
and edi,4095
-$L003nocacheinfo:
+$L002nocacheinfo:
mov eax,1
cpuid
- and edx,3220176895
cmp ebp,0
- jne $L004notintel
- or edx,1073741824
+ jne $L003notP4
and ah,15
cmp ah,15
- jne $L004notintel
+ jne $L003notP4
or edx,1048576
-$L004notintel:
+$L003notP4:
bt edx,28
- jnc $L002generic
+ jnc $L000done
and edx,4026531839
cmp edi,0
- je $L002generic
+ je $L000done
or edx,268435456
shr ebx,16
cmp bl,1
- ja $L002generic
+ ja $L000done
and edx,4026531839
-$L002generic:
- and ebp,2048
- and ecx,4294965247
- mov esi,edx
- or ebp,ecx
- bt ecx,27
- jnc $L005clear_avx
- xor ecx,ecx
-DB 15,1,208
- and eax,6
- cmp eax,6
- je $L006done
- cmp eax,2
- je $L005clear_avx
-$L007clear_xmm:
- and ebp,4261412861
- and esi,4278190079
-$L005clear_avx:
- and ebp,4026525695
-$L006done:
- mov eax,esi
- mov edx,ebp
-$L000nocpuid:
+$L000done:
+ mov eax,edx
+ mov edx,ecx
pop edi
pop esi
pop ebx
@@ -147,9 +118,9 @@ $L_OPENSSL_rdtsc_begin::
xor edx,edx
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
bt DWORD PTR [ecx],4
- jnc $L008notsc
+ jnc $L004notsc
rdtsc
-$L008notsc:
+$L004notsc:
ret
_OPENSSL_rdtsc ENDP
ALIGN 16
@@ -157,14 +128,14 @@ _OPENSSL_instrument_halt PROC PUBLIC
$L_OPENSSL_instrument_halt_begin::
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
bt DWORD PTR [ecx],4
- jnc $L009nohalt
+ jnc $L005nohalt
DD 2421723150
and eax,3
- jnz $L009nohalt
+ jnz $L005nohalt
pushfd
pop eax
bt eax,9
- jnc $L009nohalt
+ jnc $L005nohalt
rdtsc
push edx
push eax
@@ -174,7 +145,7 @@ DD 2421723150
sbb edx,DWORD PTR 4[esp]
add esp,8
ret
-$L009nohalt:
+$L005nohalt:
xor eax,eax
xor edx,edx
ret
@@ -185,21 +156,21 @@ $L_OPENSSL_far_spin_begin::
pushfd
pop eax
bt eax,9
- jnc $L010nospin
+ jnc $L006nospin
mov eax,DWORD PTR 4[esp]
mov ecx,DWORD PTR 8[esp]
DD 2430111262
xor eax,eax
mov edx,DWORD PTR [ecx]
- jmp $L011spin
+ jmp $L007spin
ALIGN 16
-$L011spin:
+$L007spin:
inc eax
cmp edx,DWORD PTR [ecx]
- je $L011spin
+ je $L007spin
DD 529567888
ret
-$L010nospin:
+$L006nospin:
xor eax,eax
xor edx,edx
ret
@@ -212,9 +183,9 @@ $L_OPENSSL_wipe_cpu_begin::
lea ecx,DWORD PTR _OPENSSL_ia32cap_P
mov ecx,DWORD PTR [ecx]
bt DWORD PTR [ecx],1
- jnc $L012no_x87
+ jnc $L008no_x87
DD 4007259865,4007259865,4007259865,4007259865,2430851995
-$L012no_x87:
+$L008no_x87:
lea eax,DWORD PTR 4[esp]
ret
_OPENSSL_wipe_cpu ENDP
@@ -226,11 +197,11 @@ $L_OPENSSL_atomic_add_begin::
push ebx
nop
mov eax,DWORD PTR [edx]
-$L013spin:
+$L009spin:
lea ebx,DWORD PTR [ecx*1+eax]
nop
DD 447811568
- jne $L013spin
+ jne $L009spin
mov eax,ebx
pop ebx
ret
@@ -267,50 +238,37 @@ $L_OPENSSL_cleanse_begin::
mov ecx,DWORD PTR 8[esp]
xor eax,eax
cmp ecx,7
- jae $L014lot
+ jae $L010lot
cmp ecx,0
- je $L015ret
-$L016little:
+ je $L011ret
+$L012little:
mov BYTE PTR [edx],al
sub ecx,1
lea edx,DWORD PTR 1[edx]
- jnz $L016little
-$L015ret:
+ jnz $L012little
+$L011ret:
ret
ALIGN 16
-$L014lot:
+$L010lot:
test edx,3
- jz $L017aligned
+ jz $L013aligned
mov BYTE PTR [edx],al
lea ecx,DWORD PTR [ecx-1]
lea edx,DWORD PTR 1[edx]
- jmp $L014lot
-$L017aligned:
+ jmp $L010lot
+$L013aligned:
mov DWORD PTR [edx],eax
lea ecx,DWORD PTR [ecx-4]
test ecx,-4
lea edx,DWORD PTR 4[edx]
- jnz $L017aligned
+ jnz $L013aligned
cmp ecx,0
- jne $L016little
+ jne $L012little
ret
_OPENSSL_cleanse ENDP
-ALIGN 16
-_OPENSSL_ia32_rdrand PROC PUBLIC
-$L_OPENSSL_ia32_rdrand_begin::
- mov ecx,8
-$L018loop:
-DB 15,199,240
- jc $L019break
- loop $L018loop
-$L019break:
- cmp eax,0
- cmove eax,ecx
- ret
-_OPENSSL_ia32_rdrand ENDP
.text$ ENDS
.bss SEGMENT 'BSS'
-COMM _OPENSSL_ia32cap_P:QWORD
+COMM _OPENSSL_ia32cap_P:DWORD
.bss ENDS
.CRT$XCU SEGMENT DWORD PUBLIC 'DATA'
EXTERN _OPENSSL_cpuid_setup:NEAR