Diffstat (limited to 'deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s')
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s  163
1 file changed, 127 insertions, 36 deletions
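The change is mechanical: each of the four exported routines (bsaes_cbc_encrypt, bsaes_ctr32_encrypt_blocks, bsaes_xts_encrypt, bsaes_xts_decrypt) gains a .cfi_startproc/.cfi_endproc pair, and every stack adjustment in the hand-written prologues and epilogues is mirrored by a DWARF call-frame information (CFI) directive, so unwinders, debuggers, and profilers can walk through these frames. The sketch below is illustrative only (cfi_demo is not part of the patch; it saves one register instead of six, so the frame offset shrinks from 120 to 80):

.text
.globl	cfi_demo
.type	cfi_demo,@function
cfi_demo:
.cfi_startproc				# open this function's unwind entry (FDE)
	pushq	%rbp
.cfi_adjust_cfa_offset	8		# the CFA is now 16 bytes above %rsp
.cfi_offset	%rbp,-16		# caller's %rbp is saved at CFA-16
	leaq	-72(%rsp),%rsp		# carve out 72 (0x48) bytes of scratch
.cfi_adjust_cfa_offset	0x48
	movq	%rsp,%rbp
.cfi_def_cfa_register	%rbp		# %rsp may move below; track the CFA via %rbp
	leaq	80(%rbp),%rax		# recompute the entry %rsp: 8 (one push) + 72
.cfi_def_cfa	%rax,8			# CFA = %rax+8 while registers reload
	movq	-8(%rax),%rbp
.cfi_restore	%rbp			# caller's %rbp is back in its register
	leaq	(%rax),%rsp
.cfi_def_cfa_register	%rsp		# hand the CFA back to %rsp before returning
	.byte	0xf3,0xc3		# repz ret, as the file itself spells ret
.cfi_endproc
.size	cfi_demo,.-cfi_demo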
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
index c76c5a8afb..f7451dfe52 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/aes/bsaes-x86_64.s
@@ -1067,6 +1067,7 @@ _bsaes_key_convert:
.type bsaes_cbc_encrypt,@function
.align 16
bsaes_cbc_encrypt:
+.cfi_startproc
cmpl $0,%r9d
jne asm_AES_cbc_encrypt
cmpq $128,%rdx
@@ -1075,13 +1076,27 @@ bsaes_cbc_encrypt:
movq %rsp,%rax
.Lcbc_dec_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movl 240(%rcx),%eax
movq %rdi,%r12
movq %rsi,%r13
@@ -1300,33 +1315,56 @@ bsaes_cbc_encrypt:
cmpq %rax,%rbp
ja .Lcbc_dec_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lcbc_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_cbc_encrypt,.-bsaes_cbc_encrypt
.globl bsaes_ctr32_encrypt_blocks
.type bsaes_ctr32_encrypt_blocks,@function
.align 16
bsaes_ctr32_encrypt_blocks:
+.cfi_startproc
movq %rsp,%rax
.Lctr_enc_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movdqu (%r8),%xmm0
movl 240(%rcx),%eax
movq %rdi,%r12
movq %rsi,%r13
movq %rdx,%r14
@@ -1500,32 +1538,55 @@ bsaes_ctr32_encrypt_blocks:
cmpq %rax,%rbp
ja .Lctr_enc_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lctr_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_ctr32_encrypt_blocks,.-bsaes_ctr32_encrypt_blocks
.globl bsaes_xts_encrypt
.type bsaes_xts_encrypt,@function
.align 16
bsaes_xts_encrypt:
+.cfi_startproc
movq %rsp,%rax
.Lxts_enc_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movq %rdi,%r12
movq %rsi,%r13
movq %rdx,%r14
movq %rcx,%r15
@@ -1951,32 +2012,54 @@ bsaes_xts_encrypt:
cmpq %rax,%rbp
ja .Lxts_enc_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_enc_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_xts_encrypt,.-bsaes_xts_encrypt
.globl bsaes_xts_decrypt
.type bsaes_xts_decrypt,@function
.align 16
bsaes_xts_decrypt:
+.cfi_startproc
movq %rsp,%rax
.Lxts_dec_prologue:
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-16
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
leaq -72(%rsp),%rsp
+.cfi_adjust_cfa_offset 0x48
movq %rsp,%rbp
movq %rdi,%r12
movq %rsi,%r13
movq %rdx,%r14
movq %rcx,%r15
@@ -2429,17 +2512,25 @@ bsaes_xts_decrypt:
cmpq %rax,%rbp
ja .Lxts_dec_bzero
- leaq (%rbp),%rsp
- movq 72(%rsp),%r15
- movq 80(%rsp),%r14
- movq 88(%rsp),%r13
- movq 96(%rsp),%r12
- movq 104(%rsp),%rbx
- movq 112(%rsp),%rax
- leaq 120(%rsp),%rsp
- movq %rax,%rbp
+ leaq 120(%rbp),%rax
+.cfi_def_cfa %rax,8
+ movq -48(%rax),%r15
+.cfi_restore %r15
+ movq -40(%rax),%r14
+.cfi_restore %r14
+ movq -32(%rax),%r13
+.cfi_restore %r13
+ movq -24(%rax),%r12
+.cfi_restore %r12
+ movq -16(%rax),%rbx
+.cfi_restore %rbx
+ movq -8(%rax),%rbp
+.cfi_restore %rbp
+ leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lxts_dec_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bsaes_xts_decrypt,.-bsaes_xts_decrypt
.type _bsaes_const,@object
.align 64
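Note the shape of the shared epilogue pattern above: because %rsp still points at freshly zeroed key-schedule scratch when each bzero loop finishes, every routine first recomputes the pre-push stack pointer into %rax (leaq 120(%rbp),%rax: six 8-byte pushes plus the 72-byte scratch area), retargets the CFA to it with .cfi_def_cfa %rax,8, reloads the callee-saved registers at fixed negative offsets from %rax, and only then moves %rsp and hands the CFA back with .cfi_def_cfa_register %rsp. That ordering keeps the unwind state correct at every instruction in between. The emitted tables can be sanity-checked against the assembly with, e.g., readelf -wF (or objdump --dwarf=frames) on the assembled object.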