author    Sam Roberts <vieuxtech@gmail.com>  2018-11-22 11:47:07 -0800
committer Sam Roberts <vieuxtech@gmail.com>  2019-01-22 13:33:54 -0800
commit    807ed7883a12423270450776f015a7c2348c0913 (patch)
tree      00ec21dd290b29c782680ffc2f97e6d59fd2ab2f /deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes
parent    57119fbdb200702d6e2cf23428de4c458ae86bbc (diff)
deps: update archs files for OpenSSL-1.1.1a
`cd deps/openssl/config; make` updates all archs dependant files.

PR-URL: https://github.com/nodejs/node/pull/25381
Reviewed-By: Daniel Bevenius <daniel.bevenius@gmail.com>
Reviewed-By: Shigeki Ohtsu <ohtsu@ohtsu.org>
Diffstat (limited to 'deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes')
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aes-x86_64.s          145
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-mb-x86_64.s      72
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s    46
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s 114
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-x86_64.s        239
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s        163
6 files changed, 608 insertions(+), 171 deletions(-)
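The hunks below share one pattern: each AES routine in these regenerated perlasm files gains DWARF call-frame information (.cfi_*) directives so debuggers and unwinders can walk the stack through the hand-written prologues and epilogues, and a few routines (notably in aesni-x86_64.s) also switch which scratch register holds the saved stack pointer so the frame can be described to the unwinder. As a rough illustration of that annotation pattern only (a standalone sketch written for this note, not a line taken from the commit), a function that saves two callee-saved registers would be annotated like this:

.text
.globl  example_fn
.type   example_fn,@function
example_fn:
.cfi_startproc                      # open the FDE; at entry the CFA is %rsp+8
        pushq   %rbx                # save callee-saved register
.cfi_adjust_cfa_offset 8            # %rsp moved down 8, so CFA is now %rsp+16
.cfi_offset %rbx,-16                # caller's %rbx lives at CFA-16
        pushq   %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-24
        # ... function body ...
        popq    %rbp
.cfi_adjust_cfa_offset -8
.cfi_restore %rbp                   # %rbp again holds the caller's value
        popq    %rbx
.cfi_adjust_cfa_offset -8
.cfi_restore %rbx
        .byte   0xf3,0xc3           # rep ret, as emitted in the generated files
.cfi_endproc
.size   example_fn,.-example_fn

The generated code in this diff often avoids the pop-based epilogue and instead reloads the registers from negative offsets off a saved frame pointer before restoring %rsp; the .cfi_restore/.cfi_def_cfa_register directives added below describe exactly that sequence.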
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aes-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aes-x86_64.s
index 488ae6d781..4bc117304f 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aes-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aes-x86_64.s
@@ -332,15 +332,23 @@ _x86_64_AES_encrypt_compact:
.hidden asm_AES_encrypt
asm_AES_encrypt:
AES_encrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
- movq %rsp,%r10
leaq -63(%rdx),%rcx
andq $-64,%rsp
subq %rsp,%rcx
@@ -350,7 +358,8 @@ AES_encrypt:
subq $32,%rsp
movq %rsi,16(%rsp)
- movq %r10,24(%rsp)
+ movq %rax,24(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x18,0x06,0x23,0x08
.Lenc_prologue:
movq %rdx,%r15
@@ -377,20 +386,29 @@ AES_encrypt:
movq 16(%rsp),%r9
movq 24(%rsp),%rsi
+.cfi_def_cfa %rsi,8
movl %eax,0(%r9)
movl %ebx,4(%r9)
movl %ecx,8(%r9)
movl %edx,12(%r9)
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lenc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size AES_encrypt,.-AES_encrypt
.type _x86_64_AES_decrypt,@function
.align 16
@@ -779,15 +797,23 @@ _x86_64_AES_decrypt_compact:
.hidden asm_AES_decrypt
asm_AES_decrypt:
AES_decrypt:
+.cfi_startproc
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
- movq %rsp,%r10
leaq -63(%rdx),%rcx
andq $-64,%rsp
subq %rsp,%rcx
@@ -797,7 +823,8 @@ AES_decrypt:
subq $32,%rsp
movq %rsi,16(%rsp)
- movq %r10,24(%rsp)
+ movq %rax,24(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x18,0x06,0x23,0x08
.Ldec_prologue:
movq %rdx,%r15
@@ -826,41 +853,68 @@ AES_decrypt:
movq 16(%rsp),%r9
movq 24(%rsp),%rsi
+.cfi_def_cfa %rsi,8
movl %eax,0(%r9)
movl %ebx,4(%r9)
movl %ecx,8(%r9)
movl %edx,12(%r9)
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Ldec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size AES_decrypt,.-AES_decrypt
.globl AES_set_encrypt_key
.type AES_set_encrypt_key,@function
.align 16
AES_set_encrypt_key:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
subq $8,%rsp
+.cfi_adjust_cfa_offset 8
.Lenc_key_prologue:
call _x86_64_AES_set_encrypt_key
movq 40(%rsp),%rbp
+.cfi_restore %rbp
movq 48(%rsp),%rbx
+.cfi_restore %rbx
addq $56,%rsp
+.cfi_adjust_cfa_offset -56
.Lenc_key_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size AES_set_encrypt_key,.-AES_set_encrypt_key
.type _x86_64_AES_set_encrypt_key,@function
@@ -1106,13 +1160,27 @@ _x86_64_AES_set_encrypt_key:
.type AES_set_decrypt_key,@function
.align 16
AES_set_decrypt_key:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
pushq %rdx
+.cfi_adjust_cfa_offset 8
.Ldec_key_prologue:
call _x86_64_AES_set_encrypt_key
@@ -1280,14 +1348,22 @@ AES_set_decrypt_key:
xorq %rax,%rax
.Labort:
movq 8(%rsp),%r15
+.cfi_restore %r15
movq 16(%rsp),%r14
+.cfi_restore %r14
movq 24(%rsp),%r13
+.cfi_restore %r13
movq 32(%rsp),%r12
+.cfi_restore %r12
movq 40(%rsp),%rbp
+.cfi_restore %rbp
movq 48(%rsp),%rbx
+.cfi_restore %rbx
addq $56,%rsp
+.cfi_adjust_cfa_offset -56
.Ldec_key_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size AES_set_decrypt_key,.-AES_set_decrypt_key
.globl AES_cbc_encrypt
.type AES_cbc_encrypt,@function
@@ -1297,25 +1373,39 @@ AES_set_decrypt_key:
.hidden asm_AES_cbc_encrypt
asm_AES_cbc_encrypt:
AES_cbc_encrypt:
+.cfi_startproc
cmpq $0,%rdx
je .Lcbc_epilogue
pushfq
+.cfi_adjust_cfa_offset 8
+.cfi_offset 49,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-32
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-40
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-48
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-56
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-64
.Lcbc_prologue:
cld
movl %r9d,%r9d
leaq .LAES_Te(%rip),%r14
+ leaq .LAES_Td(%rip),%r10
cmpq $0,%r9
- jne .Lcbc_picked_te
- leaq .LAES_Td(%rip),%r14
-.Lcbc_picked_te:
+ cmoveq %r10,%r14
movl OPENSSL_ia32cap_P(%rip),%r10d
cmpq $512,%rdx
@@ -1351,8 +1441,10 @@ AES_cbc_encrypt:
.Lcbc_te_ok:
xchgq %rsp,%r15
+.cfi_def_cfa_register %r15
movq %r15,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40
.Lcbc_fast_body:
movq %rdi,24(%rsp)
movq %rsi,32(%rsp)
@@ -1734,17 +1826,28 @@ AES_cbc_encrypt:
.align 16
.Lcbc_exit:
movq 16(%rsp),%rsi
+.cfi_def_cfa %rsi,64
movq (%rsi),%r15
+.cfi_restore %r15
movq 8(%rsi),%r14
+.cfi_restore %r14
movq 16(%rsi),%r13
+.cfi_restore %r13
movq 24(%rsi),%r12
+.cfi_restore %r12
movq 32(%rsi),%rbp
+.cfi_restore %rbp
movq 40(%rsi),%rbx
+.cfi_restore %rbx
leaq 48(%rsi),%rsp
+.cfi_def_cfa %rsp,16
.Lcbc_popfq:
popfq
+.cfi_adjust_cfa_offset -8
+.cfi_restore 49
.Lcbc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size AES_cbc_encrypt,.-AES_cbc_encrypt
.align 64
.LAES_Te:
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-mb-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-mb-x86_64.s
index 3dcd55d3f5..f2b5662b9c 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-mb-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-mb-x86_64.s
@@ -6,6 +6,7 @@
.type aesni_multi_cbc_encrypt,@function
.align 32
aesni_multi_cbc_encrypt:
+.cfi_startproc
cmpl $2,%edx
jb .Lenc_non_avx
movl OPENSSL_ia32cap_P+4(%rip),%ecx
@@ -15,12 +16,19 @@ aesni_multi_cbc_encrypt:
.align 16
.Lenc_non_avx:
movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
@@ -30,6 +38,7 @@ aesni_multi_cbc_encrypt:
subq $48,%rsp
andq $-64,%rsp
movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
.Lenc4x_body:
movdqu (%rsi),%xmm12
@@ -239,6 +248,7 @@ aesni_multi_cbc_encrypt:
jnz .Loop_enc4x
movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
movl 24(%rsp),%edx
@@ -256,20 +266,29 @@ aesni_multi_cbc_encrypt:
.Lenc4x_done:
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lenc4x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_multi_cbc_encrypt,.-aesni_multi_cbc_encrypt
.globl aesni_multi_cbc_decrypt
.type aesni_multi_cbc_decrypt,@function
.align 32
aesni_multi_cbc_decrypt:
+.cfi_startproc
cmpl $2,%edx
jb .Ldec_non_avx
movl OPENSSL_ia32cap_P+4(%rip),%ecx
@@ -279,12 +298,19 @@ aesni_multi_cbc_decrypt:
.align 16
.Ldec_non_avx:
movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
@@ -294,6 +320,7 @@ aesni_multi_cbc_decrypt:
subq $48,%rsp
andq $-64,%rsp
movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
.Ldec4x_body:
movdqu (%rsi),%xmm12
@@ -503,6 +530,7 @@ aesni_multi_cbc_decrypt:
jnz .Loop_dec4x
movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
movl 24(%rsp),%edx
leaq 160(%rdi),%rdi
@@ -511,26 +539,42 @@ aesni_multi_cbc_decrypt:
.Ldec4x_done:
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Ldec4x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_multi_cbc_decrypt,.-aesni_multi_cbc_decrypt
.type aesni_multi_cbc_encrypt_avx,@function
.align 32
aesni_multi_cbc_encrypt_avx:
+.cfi_startproc
_avx_cbc_enc_shortcut:
movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
@@ -542,6 +586,7 @@ _avx_cbc_enc_shortcut:
subq $192,%rsp
andq $-128,%rsp
movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
.Lenc8x_body:
vzeroupper
@@ -939,6 +984,7 @@ _avx_cbc_enc_shortcut:
jnz .Loop_enc8x
movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
@@ -947,27 +993,43 @@ _avx_cbc_enc_shortcut:
.Lenc8x_done:
vzeroupper
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lenc8x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_multi_cbc_encrypt_avx,.-aesni_multi_cbc_encrypt_avx
.type aesni_multi_cbc_decrypt_avx,@function
.align 32
aesni_multi_cbc_decrypt_avx:
+.cfi_startproc
_avx_cbc_dec_shortcut:
movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
@@ -981,6 +1043,7 @@ _avx_cbc_dec_shortcut:
andq $-256,%rsp
subq $192,%rsp
movq %rax,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
.Ldec8x_body:
vzeroupper
@@ -1416,6 +1479,7 @@ _avx_cbc_dec_shortcut:
jnz .Loop_dec8x
movq 16(%rsp),%rax
+.cfi_def_cfa %rax,8
@@ -1424,12 +1488,20 @@ _avx_cbc_dec_shortcut:
.Ldec8x_done:
vzeroupper
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Ldec8x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_multi_cbc_decrypt_avx,.-aesni_multi_cbc_decrypt_avx
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s
index ca193ddb9e..4d2dfe4489 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha1-x86_64.s
@@ -21,16 +21,30 @@ aesni_cbc_sha1_enc:
.type aesni_cbc_sha1_enc_ssse3,@function
.align 32
aesni_cbc_sha1_enc_ssse3:
+.cfi_startproc
movq 8(%rsp),%r10
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -104(%rsp),%rsp
+.cfi_adjust_cfa_offset 104
movq %rdi,%r12
@@ -1362,29 +1376,52 @@ aesni_cbc_sha1_enc_ssse3:
movl %ebp,16(%r9)
movups %xmm2,(%r8)
leaq 104(%rsp),%rsi
+.cfi_def_cfa %rsi,56
movq 0(%rsi),%r15
+.cfi_restore %r15
movq 8(%rsi),%r14
+.cfi_restore %r14
movq 16(%rsi),%r13
+.cfi_restore %r13
movq 24(%rsi),%r12
+.cfi_restore %r12
movq 32(%rsi),%rbp
+.cfi_restore %rbp
movq 40(%rsi),%rbx
+.cfi_restore %rbx
leaq 48(%rsi),%rsp
+.cfi_def_cfa %rsp,8
.Lepilogue_ssse3:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3
.type aesni_cbc_sha1_enc_avx,@function
.align 32
aesni_cbc_sha1_enc_avx:
+.cfi_startproc
movq 8(%rsp),%r10
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -104(%rsp),%rsp
+.cfi_adjust_cfa_offset 104
vzeroall
@@ -2660,15 +2697,24 @@ aesni_cbc_sha1_enc_avx:
vmovups %xmm12,(%r8)
vzeroall
leaq 104(%rsp),%rsi
+.cfi_def_cfa %rsi,56
movq 0(%rsi),%r15
+.cfi_restore %r15
movq 8(%rsi),%r14
+.cfi_restore %r14
movq 16(%rsi),%r13
+.cfi_restore %r13
movq 24(%rsi),%r12
+.cfi_restore %r12
movq 32(%rsi),%rbp
+.cfi_restore %rbp
movq 40(%rsi),%rbx
+.cfi_restore %rbx
leaq 48(%rsi),%rsp
+.cfi_def_cfa %rsp,8
.Lepilogue_avx:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha1_enc_avx,.-aesni_cbc_sha1_enc_avx
.align 64
K_XX_XX:
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s
index 427a1c7d12..5a47b3ee51 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-sha256-x86_64.s
@@ -77,15 +77,23 @@ K256:
.type aesni_cbc_sha256_enc_xop,@function
.align 64
aesni_cbc_sha256_enc_xop:
+.cfi_startproc
.Lxop_shortcut:
movq 8(%rsp),%r10
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
- movq %rsp,%r11
+.cfi_offset %r15,-56
subq $128,%rsp
andq $-64,%rsp
@@ -101,7 +109,8 @@ aesni_cbc_sha256_enc_xop:
movq %r8,64+32(%rsp)
movq %r9,64+40(%rsp)
movq %r10,64+48(%rsp)
- movq %r11,64+56(%rsp)
+ movq %rax,120(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
.Lprologue_xop:
vzeroall
@@ -1207,31 +1216,48 @@ aesni_cbc_sha256_enc_xop:
jb .Lloop_xop
movq 64+32(%rsp),%r8
- movq 64+56(%rsp),%rsi
+ movq 120(%rsp),%rsi
+.cfi_def_cfa %rsi,8
vmovdqu %xmm8,(%r8)
vzeroall
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lepilogue_xop:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
.type aesni_cbc_sha256_enc_avx,@function
.align 64
aesni_cbc_sha256_enc_avx:
+.cfi_startproc
.Lavx_shortcut:
movq 8(%rsp),%r10
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
- movq %rsp,%r11
+.cfi_offset %r15,-56
subq $128,%rsp
andq $-64,%rsp
@@ -1247,7 +1273,8 @@ aesni_cbc_sha256_enc_avx:
movq %r8,64+32(%rsp)
movq %r9,64+40(%rsp)
movq %r10,64+48(%rsp)
- movq %r11,64+56(%rsp)
+ movq %rax,120(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
.Lprologue_avx:
vzeroall
@@ -2384,31 +2411,48 @@ aesni_cbc_sha256_enc_avx:
jb .Lloop_avx
movq 64+32(%rsp),%r8
- movq 64+56(%rsp),%rsi
+ movq 120(%rsp),%rsi
+.cfi_def_cfa %rsi,8
vmovdqu %xmm8,(%r8)
vzeroall
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lepilogue_avx:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
.type aesni_cbc_sha256_enc_avx2,@function
.align 64
aesni_cbc_sha256_enc_avx2:
+.cfi_startproc
.Lavx2_shortcut:
movq 8(%rsp),%r10
+ movq %rsp,%rax
+.cfi_def_cfa_register %rax
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
- movq %rsp,%r11
+.cfi_offset %r15,-56
subq $576,%rsp
andq $-1024,%rsp
addq $448,%rsp
@@ -2425,7 +2469,8 @@ aesni_cbc_sha256_enc_avx2:
movq %r8,64+32(%rsp)
movq %r9,64+40(%rsp)
movq %r10,64+48(%rsp)
- movq %r11,64+56(%rsp)
+ movq %rax,120(%rsp)
+.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
.Lprologue_avx2:
vzeroall
@@ -3987,18 +4032,27 @@ aesni_cbc_sha256_enc_avx2:
.Ldone_avx2:
leaq (%rbp),%rsp
movq 64+32(%rsp),%r8
- movq 64+56(%rsp),%rsi
+ movq 120(%rsp),%rsi
+.cfi_def_cfa %rsi,8
vmovdqu %xmm8,(%r8)
vzeroall
- movq (%rsi),%r15
- movq 8(%rsi),%r14
- movq 16(%rsi),%r13
- movq 24(%rsi),%r12
- movq 32(%rsi),%rbp
- movq 40(%rsi),%rbx
- leaq 48(%rsi),%rsp
+ movq -48(%rsi),%r15
+.cfi_restore %r15
+ movq -40(%rsi),%r14
+.cfi_restore %r14
+ movq -32(%rsi),%r13
+.cfi_restore %r13
+ movq -24(%rsi),%r12
+.cfi_restore %r12
+ movq -16(%rsi),%rbp
+.cfi_restore %rbp
+ movq -8(%rsi),%rbx
+.cfi_restore %rbx
+ leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lepilogue_avx2:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
.type aesni_cbc_sha256_enc_shaext,@function
.align 32
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-x86_64.s
index e18f87c4e6..5b2a68e758 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/aesni-x86_64.s
@@ -995,6 +995,7 @@ aesni_ccm64_decrypt_blocks:
.type aesni_ctr32_encrypt_blocks,@function
.align 16
aesni_ctr32_encrypt_blocks:
+.cfi_startproc
cmpq $1,%rdx
jne .Lctr32_bulk
@@ -1024,11 +1025,12 @@ aesni_ctr32_encrypt_blocks:
.align 16
.Lctr32_bulk:
- leaq (%rsp),%rax
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
pushq %rbp
+.cfi_offset %rbp,-16
subq $128,%rsp
andq $-16,%rsp
- leaq -8(%rax),%rbp
@@ -1037,7 +1039,7 @@ aesni_ctr32_encrypt_blocks:
movdqu (%rcx),%xmm0
movl 12(%r8),%r8d
pxor %xmm0,%xmm2
- movl 12(%rcx),%r11d
+ movl 12(%rcx),%ebp
movdqa %xmm2,0(%rsp)
bswapl %r8d
movdqa %xmm2,%xmm3
@@ -1053,8 +1055,8 @@ aesni_ctr32_encrypt_blocks:
leaq 2(%r8),%rdx
bswapl %eax
bswapl %edx
- xorl %r11d,%eax
- xorl %r11d,%edx
+ xorl %ebp,%eax
+ xorl %ebp,%edx
.byte 102,15,58,34,216,3
leaq 3(%r8),%rax
movdqa %xmm3,16(%rsp)
@@ -1063,25 +1065,25 @@ aesni_ctr32_encrypt_blocks:
movq %r10,%rdx
leaq 4(%r8),%r10
movdqa %xmm4,32(%rsp)
- xorl %r11d,%eax
+ xorl %ebp,%eax
bswapl %r10d
.byte 102,15,58,34,232,3
- xorl %r11d,%r10d
+ xorl %ebp,%r10d
movdqa %xmm5,48(%rsp)
leaq 5(%r8),%r9
movl %r10d,64+12(%rsp)
bswapl %r9d
leaq 6(%r8),%r10
movl 240(%rcx),%eax
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
bswapl %r10d
movl %r9d,80+12(%rsp)
- xorl %r11d,%r10d
+ xorl %ebp,%r10d
leaq 7(%r8),%r9
movl %r10d,96+12(%rsp)
bswapl %r9d
movl OPENSSL_ia32cap_P+4(%rip),%r10d
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
andl $71303168,%r10d
movl %r9d,112+12(%rsp)
@@ -1105,7 +1107,7 @@ aesni_ctr32_encrypt_blocks:
.Lctr32_6x:
shll $4,%eax
movl $48,%r10d
- bswapl %r11d
+ bswapl %ebp
leaq 32(%rcx,%rax,1),%rcx
subq %rax,%r10
jmp .Lctr32_loop6
@@ -1116,32 +1118,32 @@ aesni_ctr32_encrypt_blocks:
movups -48(%rcx,%r10,1),%xmm0
.byte 102,15,56,220,209
movl %r8d,%eax
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 102,15,56,220,217
.byte 0x0f,0x38,0xf1,0x44,0x24,12
leal 1(%r8),%eax
.byte 102,15,56,220,225
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 0x0f,0x38,0xf1,0x44,0x24,28
.byte 102,15,56,220,233
leal 2(%r8),%eax
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 102,15,56,220,241
.byte 0x0f,0x38,0xf1,0x44,0x24,44
leal 3(%r8),%eax
.byte 102,15,56,220,249
movups -32(%rcx,%r10,1),%xmm1
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 102,15,56,220,208
.byte 0x0f,0x38,0xf1,0x44,0x24,60
leal 4(%r8),%eax
.byte 102,15,56,220,216
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 0x0f,0x38,0xf1,0x44,0x24,76
.byte 102,15,56,220,224
leal 5(%r8),%eax
- xorl %r11d,%eax
+ xorl %ebp,%eax
.byte 102,15,56,220,232
.byte 0x0f,0x38,0xf1,0x44,0x24,92
movq %r10,%rax
@@ -1202,7 +1204,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
movups 32-128(%rcx),%xmm0
.byte 102,15,56,220,225
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
nop
.byte 102,15,56,220,233
movl %r9d,0+12(%rsp)
@@ -1215,7 +1217,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,208
.byte 102,15,56,220,216
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,224
.byte 102,15,56,220,232
@@ -1229,7 +1231,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,209
.byte 102,15,56,220,217
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,225
.byte 102,15,56,220,233
@@ -1243,7 +1245,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,208
.byte 102,15,56,220,216
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,224
.byte 102,15,56,220,232
@@ -1257,7 +1259,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,209
.byte 102,15,56,220,217
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,225
.byte 102,15,56,220,233
@@ -1271,7 +1273,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,208
.byte 102,15,56,220,216
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,224
.byte 102,15,56,220,232
@@ -1285,7 +1287,7 @@ aesni_ctr32_encrypt_blocks:
bswapl %r9d
.byte 102,15,56,220,209
.byte 102,15,56,220,217
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
.byte 0x66,0x90
.byte 102,15,56,220,225
.byte 102,15,56,220,233
@@ -1300,7 +1302,7 @@ aesni_ctr32_encrypt_blocks:
.byte 102,15,56,220,208
.byte 102,15,56,220,216
.byte 102,15,56,220,224
- xorl %r11d,%r9d
+ xorl %ebp,%r9d
movdqu 0(%rdi),%xmm10
.byte 102,15,56,220,232
movl %r9d,112+12(%rsp)
@@ -1535,7 +1537,7 @@ aesni_ctr32_encrypt_blocks:
.Lctr32_done:
xorps %xmm0,%xmm0
- xorl %r11d,%r11d
+ xorl %ebp,%ebp
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
@@ -1559,20 +1561,25 @@ aesni_ctr32_encrypt_blocks:
pxor %xmm14,%xmm14
movaps %xmm0,112(%rsp)
pxor %xmm15,%xmm15
- leaq (%rbp),%rsp
- popq %rbp
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
.Lctr32_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks
.globl aesni_xts_encrypt
.type aesni_xts_encrypt,@function
.align 16
aesni_xts_encrypt:
- leaq (%rsp),%rax
+.cfi_startproc
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
pushq %rbp
+.cfi_offset %rbp,-16
subq $112,%rsp
andq $-16,%rsp
- leaq -8(%rax),%rbp
movups (%r9),%xmm2
movl 240(%r8),%eax
movl 240(%rcx),%r10d
@@ -1588,7 +1595,7 @@ aesni_xts_encrypt:
jnz .Loop_enc1_8
.byte 102,15,56,221,209
movups (%rcx),%xmm0
- movq %rcx,%r11
+ movq %rcx,%rbp
movl %r10d,%eax
shll $4,%r10d
movq %rdx,%r9
@@ -1644,9 +1651,9 @@ aesni_xts_encrypt:
jc .Lxts_enc_short
movl $16+96,%eax
- leaq 32(%r11,%r10,1),%rcx
+ leaq 32(%rbp,%r10,1),%rcx
subq %r10,%rax
- movups 16(%r11),%xmm1
+ movups 16(%rbp),%xmm1
movq %rax,%r10
leaq .Lxts_magic(%rip),%r8
jmp .Lxts_enc_grandloop
@@ -1671,7 +1678,7 @@ aesni_xts_encrypt:
movdqa 96(%rsp),%xmm9
pxor %xmm14,%xmm6
.byte 102,15,56,220,233
- movups 32(%r11),%xmm0
+ movups 32(%rbp),%xmm0
leaq 96(%rdi),%rdi
pxor %xmm8,%xmm7
@@ -1680,7 +1687,7 @@ aesni_xts_encrypt:
pxor %xmm9,%xmm11
movdqa %xmm10,0(%rsp)
.byte 102,15,56,220,249
- movups 48(%r11),%xmm1
+ movups 48(%rbp),%xmm1
pxor %xmm9,%xmm12
.byte 102,15,56,220,208
@@ -1695,7 +1702,7 @@ aesni_xts_encrypt:
movdqa %xmm14,64(%rsp)
.byte 102,15,56,220,240
.byte 102,15,56,220,248
- movups 64(%r11),%xmm0
+ movups 64(%rbp),%xmm0
movdqa %xmm8,80(%rsp)
pshufd $0x5f,%xmm15,%xmm9
jmp .Lxts_enc_loop6
@@ -1727,7 +1734,7 @@ aesni_xts_encrypt:
psrad $31,%xmm14
.byte 102,15,56,220,217
pand %xmm8,%xmm14
- movups (%r11),%xmm10
+ movups (%rbp),%xmm10
.byte 102,15,56,220,225
.byte 102,15,56,220,233
.byte 102,15,56,220,241
@@ -1795,10 +1802,10 @@ aesni_xts_encrypt:
.byte 102,15,56,220,225
.byte 102,15,56,220,233
pxor %xmm0,%xmm15
- movups (%r11),%xmm0
+ movups (%rbp),%xmm0
.byte 102,15,56,220,241
.byte 102,15,56,220,249
- movups 16(%r11),%xmm1
+ movups 16(%rbp),%xmm1
pxor %xmm15,%xmm14
.byte 102,15,56,221,84,36,0
@@ -1825,7 +1832,7 @@ aesni_xts_encrypt:
movl $16+96,%eax
subl %r10d,%eax
- movq %r11,%rcx
+ movq %rbp,%rcx
shrl $4,%eax
.Lxts_enc_short:
@@ -1981,7 +1988,7 @@ aesni_xts_encrypt:
jnz .Lxts_enc_steal
subq %r9,%rsi
- movq %r11,%rcx
+ movq %rbp,%rcx
movl %r10d,%eax
movups -16(%rsi),%xmm2
@@ -2024,20 +2031,25 @@ aesni_xts_encrypt:
movaps %xmm0,96(%rsp)
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
- leaq (%rbp),%rsp
- popq %rbp
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_xts_encrypt,.-aesni_xts_encrypt
.globl aesni_xts_decrypt
.type aesni_xts_decrypt,@function
.align 16
aesni_xts_decrypt:
- leaq (%rsp),%rax
+.cfi_startproc
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
pushq %rbp
+.cfi_offset %rbp,-16
subq $112,%rsp
andq $-16,%rsp
- leaq -8(%rax),%rbp
movups (%r9),%xmm2
movl 240(%r8),%eax
movl 240(%rcx),%r10d
@@ -2059,7 +2071,7 @@ aesni_xts_decrypt:
subq %rax,%rdx
movups (%rcx),%xmm0
- movq %rcx,%r11
+ movq %rcx,%rbp
movl %r10d,%eax
shll $4,%r10d
movq %rdx,%r9
@@ -2115,9 +2127,9 @@ aesni_xts_decrypt:
jc .Lxts_dec_short
movl $16+96,%eax
- leaq 32(%r11,%r10,1),%rcx
+ leaq 32(%rbp,%r10,1),%rcx
subq %r10,%rax
- movups 16(%r11),%xmm1
+ movups 16(%rbp),%xmm1
movq %rax,%r10
leaq .Lxts_magic(%rip),%r8
jmp .Lxts_dec_grandloop
@@ -2142,7 +2154,7 @@ aesni_xts_decrypt:
movdqa 96(%rsp),%xmm9
pxor %xmm14,%xmm6
.byte 102,15,56,222,233
- movups 32(%r11),%xmm0
+ movups 32(%rbp),%xmm0
leaq 96(%rdi),%rdi
pxor %xmm8,%xmm7
@@ -2151,7 +2163,7 @@ aesni_xts_decrypt:
pxor %xmm9,%xmm11
movdqa %xmm10,0(%rsp)
.byte 102,15,56,222,249
- movups 48(%r11),%xmm1
+ movups 48(%rbp),%xmm1
pxor %xmm9,%xmm12
.byte 102,15,56,222,208
@@ -2166,7 +2178,7 @@ aesni_xts_decrypt:
movdqa %xmm14,64(%rsp)
.byte 102,15,56,222,240
.byte 102,15,56,222,248
- movups 64(%r11),%xmm0
+ movups 64(%rbp),%xmm0
movdqa %xmm8,80(%rsp)
pshufd $0x5f,%xmm15,%xmm9
jmp .Lxts_dec_loop6
@@ -2198,7 +2210,7 @@ aesni_xts_decrypt:
psrad $31,%xmm14
.byte 102,15,56,222,217
pand %xmm8,%xmm14
- movups (%r11),%xmm10
+ movups (%rbp),%xmm10
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
@@ -2266,10 +2278,10 @@ aesni_xts_decrypt:
.byte 102,15,56,222,225
.byte 102,15,56,222,233
pxor %xmm0,%xmm15
- movups (%r11),%xmm0
+ movups (%rbp),%xmm0
.byte 102,15,56,222,241
.byte 102,15,56,222,249
- movups 16(%r11),%xmm1
+ movups 16(%rbp),%xmm1
pxor %xmm15,%xmm14
.byte 102,15,56,223,84,36,0
@@ -2296,7 +2308,7 @@ aesni_xts_decrypt:
movl $16+96,%eax
subl %r10d,%eax
- movq %r11,%rcx
+ movq %rbp,%rcx
shrl $4,%eax
.Lxts_dec_short:
@@ -2453,7 +2465,7 @@ aesni_xts_decrypt:
jz .Lxts_dec_ret
.Lxts_dec_done2:
movq %r9,%rdx
- movq %r11,%rcx
+ movq %rbp,%rcx
movl %r10d,%eax
movups (%rdi),%xmm2
@@ -2483,7 +2495,7 @@ aesni_xts_decrypt:
jnz .Lxts_dec_steal
subq %r9,%rsi
- movq %r11,%rcx
+ movq %rbp,%rcx
movl %r10d,%eax
movups (%rsi),%xmm2
@@ -2526,21 +2538,35 @@ aesni_xts_decrypt:
movaps %xmm0,96(%rsp)
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
- leaq (%rbp),%rsp
- popq %rbp
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_xts_decrypt,.-aesni_xts_decrypt
.globl aesni_ocb_encrypt
.type aesni_ocb_encrypt,@function
.align 32
aesni_ocb_encrypt:
+.cfi_startproc
leaq (%rsp),%rax
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
movq 8(%rax),%rbx
movq 8+8(%rax),%rbp
@@ -2716,13 +2742,23 @@ aesni_ocb_encrypt:
pxor %xmm13,%xmm13
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
- popq %r14
- popq %r13
- popq %r12
- popq %rbp
- popq %rbx
+ leaq 40(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Locb_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ocb_encrypt,.-aesni_ocb_encrypt
.type __ocb_encrypt6,@function
@@ -2935,12 +2971,23 @@ __ocb_encrypt1:
.type aesni_ocb_decrypt,@function
.align 32
aesni_ocb_decrypt:
+.cfi_startproc
leaq (%rsp),%rax
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
movq 8(%rax),%rbx
movq 8+8(%rax),%rbp
@@ -3138,13 +3185,23 @@ aesni_ocb_decrypt:
pxor %xmm13,%xmm13
pxor %xmm14,%xmm14
pxor %xmm15,%xmm15
- popq %r14
- popq %r13
- popq %r12
- popq %rbp
- popq %rbx
+ leaq 40(%rsp),%rax
+.cfi_def_cfa %rax,8
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbp
+.cfi_restore %rbp
+ movq -8(%rax),%rbx
+.cfi_restore %rbx
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Locb_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ocb_decrypt,.-aesni_ocb_decrypt
.type __ocb_decrypt6,@function
@@ -3345,6 +3402,7 @@ __ocb_decrypt1:
.type aesni_cbc_encrypt,@function
.align 16
aesni_cbc_encrypt:
+.cfi_startproc
testq %rdx,%rdx
jz .Lcbc_ret
@@ -3437,11 +3495,13 @@ aesni_cbc_encrypt:
jmp .Lcbc_ret
.align 16
.Lcbc_decrypt_bulk:
- leaq (%rsp),%rax
+ leaq (%rsp),%r11
+.cfi_def_cfa_register %r11
pushq %rbp
+.cfi_offset %rbp,-16
subq $16,%rsp
andq $-16,%rsp
- leaq -8(%rax),%rbp
+ movq %rcx,%rbp
movups (%r8),%xmm10
movl %r10d,%eax
cmpq $0x50,%rdx
@@ -3481,7 +3541,7 @@ aesni_cbc_encrypt:
pxor %xmm0,%xmm3
movups 16-112(%rcx),%xmm1
pxor %xmm0,%xmm4
- xorq %r11,%r11
+ movq $-1,%rbp
cmpq $0x70,%rdx
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
@@ -3497,10 +3557,10 @@ aesni_cbc_encrypt:
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
- setnc %r11b
- shlq $7,%r11
+ adcq $0,%rbp
+ andq $128,%rbp
.byte 102,68,15,56,222,201
- addq %rdi,%r11
+ addq %rdi,%rbp
movups 48-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
@@ -3638,18 +3698,18 @@ aesni_cbc_encrypt:
movdqu 112(%rdi),%xmm0
.byte 102,65,15,56,223,228
leaq 128(%rdi),%rdi
- movdqu 0(%r11),%xmm11
+ movdqu 0(%rbp),%xmm11
.byte 102,65,15,56,223,237
.byte 102,65,15,56,223,246
- movdqu 16(%r11),%xmm12
- movdqu 32(%r11),%xmm13
+ movdqu 16(%rbp),%xmm12
+ movdqu 32(%rbp),%xmm13
.byte 102,65,15,56,223,255
.byte 102,68,15,56,223,193
- movdqu 48(%r11),%xmm14
- movdqu 64(%r11),%xmm15
+ movdqu 48(%rbp),%xmm14
+ movdqu 64(%rbp),%xmm15
.byte 102,69,15,56,223,202
movdqa %xmm0,%xmm10
- movdqu 80(%r11),%xmm1
+ movdqu 80(%rbp),%xmm1
movups -112(%rcx),%xmm0
movups %xmm2,(%rsi)
@@ -3768,7 +3828,7 @@ aesni_cbc_encrypt:
pxor %xmm13,%xmm5
movdqu %xmm4,32(%rsi)
pxor %xmm14,%xmm6
- movq %r11,%rcx
+ movq %rbp,%rcx
movdqu %xmm5,48(%rsi)
pxor %xmm15,%xmm7
movl %r10d,%eax
@@ -3921,16 +3981,21 @@ aesni_cbc_encrypt:
.Lcbc_dec_ret:
xorps %xmm0,%xmm0
pxor %xmm1,%xmm1
- leaq (%rbp),%rsp
- popq %rbp
+ movq -8(%r11),%rbp
+.cfi_restore %rbp
+ leaq (%r11),%rsp
+.cfi_def_cfa_register %rsp
.Lcbc_ret:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_encrypt,.-aesni_cbc_encrypt
.globl aesni_set_decrypt_key
.type aesni_set_decrypt_key,@function
.align 16
aesni_set_decrypt_key:
+.cfi_startproc
.byte 0x48,0x83,0xEC,0x08
+.cfi_adjust_cfa_offset 8
call __aesni_set_encrypt_key
shll $4,%esi
testl %eax,%eax
@@ -3963,7 +4028,9 @@ aesni_set_decrypt_key:
pxor %xmm0,%xmm0
.Ldec_key_ret:
addq $8,%rsp
+.cfi_adjust_cfa_offset -8
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_set_decrypt_key:
.size aesni_set_decrypt_key,.-aesni_set_decrypt_key
.globl aesni_set_encrypt_key
@@ -3971,7 +4038,9 @@ aesni_set_decrypt_key:
.align 16
aesni_set_encrypt_key:
__aesni_set_encrypt_key:
+.cfi_startproc
.byte 0x48,0x83,0xEC,0x08
+.cfi_adjust_cfa_offset 8
movq $-1,%rax
testq %rdi,%rdi
jz .Lenc_key_ret
@@ -4264,7 +4333,9 @@ __aesni_set_encrypt_key:
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
addq $8,%rsp
+.cfi_adjust_cfa_offset -8
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_set_encrypt_key:
.align 16
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
index c76c5a8afb..f7451dfe52 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
@@ -1067,6 +1067,7 @@ _bsaes_key_convert:
.type bsaes_cbc_encrypt,@function
.align 16
bsaes_cbc_encrypt:
+.cfi_startproc
cmpl $0,%r9d
jne asm_AES_cbc_encrypt
cmpq $128,%rdx
@@ -1075,13 +1076,27 @@ bsaes_cbc_encrypt:
movq %rsp,%rax
.Lcbc_dec_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movl 240(%rcx),%eax
movq %rdi,%r12
movq %rsi,%r13
@@ -1300,33 +1315,56 @@ bsaes_cbc_encrypt:
cmpq %rax,%rbp
ja .Lcbc_dec_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lcbc_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_cbc_encrypt,.-bsaes_cbc_encrypt
.globl bsaes_ctr32_encrypt_blocks
.type bsaes_ctr32_encrypt_blocks,@function
.align 16
bsaes_ctr32_encrypt_blocks:
+.cfi_startproc
movq %rsp,%rax
.Lctr_enc_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movdqu (%r8),%xmm0
movl 240(%rcx),%eax
movq %rdi,%r12
@@ -1500,32 +1538,55 @@ bsaes_ctr32_encrypt_blocks:
cmpq %rax,%rbp
ja .Lctr_enc_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lctr_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_ctr32_encrypt_blocks,.-bsaes_ctr32_encrypt_blocks
.globl bsaes_xts_encrypt
.type bsaes_xts_encrypt,@function
.align 16
bsaes_xts_encrypt:
+.cfi_startproc
movq %rsp,%rax
.Lxts_enc_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movq %rdi,%r12
movq %rsi,%r13
movq %rdx,%r14
@@ -1951,32 +2012,54 @@ bsaes_xts_encrypt:
cmpq %rax,%rbp
ja .Lxts_enc_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_xts_encrypt,.-bsaes_xts_encrypt
.globl bsaes_xts_decrypt
.type bsaes_xts_decrypt,@function
.align 16
bsaes_xts_decrypt:
+.cfi_startproc
movq %rsp,%rax
.Lxts_dec_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
movq %rdi,%r12
movq %rsi,%r13
@@ -2429,17 +2512,25 @@ bsaes_xts_decrypt:
cmpq %rax,%rbp
ja .Lxts_dec_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_xts_decrypt,.-bsaes_xts_decrypt
.type _bsaes_const,@object
.align 64