Diffstat (limited to 'deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s')
-rw-r--r--    deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s    115
1 file changed, 115 insertions, 0 deletions
diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s
index 795cebe1d7..f8e4a80588 100644
--- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s
+++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/bn/rsaz-x86_64.s
@@ -6,14 +6,28 @@
.type rsaz_512_sqr,@function
.align 32
rsaz_512_sqr:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
subq $128+24,%rsp
+.cfi_adjust_cfa_offset 128+24
.Lsqr_body:
movq %rdx,%rbp
movq (%rsi),%rdx
@@ -658,28 +672,51 @@ rsaz_512_sqr:
.Lsqr_tail:
leaq 128+24+48(%rsp),%rax
+.cfi_def_cfa %rax,8
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lsqr_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_sqr,.-rsaz_512_sqr
.globl rsaz_512_mul
.type rsaz_512_mul,@function
.align 32
rsaz_512_mul:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
subq $128+24,%rsp
+.cfi_adjust_cfa_offset 128+24
.Lmul_body:
.byte 102,72,15,110,199
.byte 102,72,15,110,201
@@ -741,28 +778,51 @@ rsaz_512_mul:
call __rsaz_512_subtract
leaq 128+24+48(%rsp),%rax
+.cfi_def_cfa %rax,8
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lmul_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_mul,.-rsaz_512_mul
.globl rsaz_512_mul_gather4
.type rsaz_512_mul_gather4,@function
.align 32
rsaz_512_mul_gather4:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
subq $152,%rsp
+.cfi_adjust_cfa_offset 152
.Lmul_gather4_body:
movd %r9d,%xmm8
movdqa .Linc+16(%rip),%xmm1
@@ -1151,29 +1211,52 @@ rsaz_512_mul_gather4:
call __rsaz_512_subtract
leaq 128+24+48(%rsp),%rax
+.cfi_def_cfa %rax,8
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lmul_gather4_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_mul_gather4,.-rsaz_512_mul_gather4
.globl rsaz_512_mul_scatter4
.type rsaz_512_mul_scatter4,@function
.align 32
rsaz_512_mul_scatter4:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
movl %r9d,%r9d
subq $128+24,%rsp
+.cfi_adjust_cfa_offset 128+24
.Lmul_scatter4_body:
leaq (%r8,%r9,8),%r8
.byte 102,72,15,110,199
@@ -1248,28 +1331,51 @@ rsaz_512_mul_scatter4:
movq %r15,896(%rsi)
leaq 128+24+48(%rsp),%rax
+.cfi_def_cfa %rax,8
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lmul_scatter4_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_mul_scatter4,.-rsaz_512_mul_scatter4
.globl rsaz_512_mul_by_one
.type rsaz_512_mul_by_one,@function
.align 32
rsaz_512_mul_by_one:
+.cfi_startproc
pushq %rbx
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_adjust_cfa_offset 8
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r12,-32
pushq %r13
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r13,-40
pushq %r14
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r14,-48
pushq %r15
+.cfi_adjust_cfa_offset 8
+.cfi_offset %r15,-56
subq $128+24,%rsp
+.cfi_adjust_cfa_offset 128+24
.Lmul_by_one_body:
movl OPENSSL_ia32cap_P+8(%rip),%eax
movq %rdx,%rbp
@@ -1312,15 +1418,24 @@ rsaz_512_mul_by_one:
movq %r15,56(%rdi)
leaq 128+24+48(%rsp),%rax
+.cfi_def_cfa %rax,8
movq -48(%rax),%r15
+.cfi_restore %r15
movq -40(%rax),%r14
+.cfi_restore %r14
movq -32(%rax),%r13
+.cfi_restore %r13
movq -24(%rax),%r12
+.cfi_restore %r12
movq -16(%rax),%rbp
+.cfi_restore %rbp
movq -8(%rax),%rbx
+.cfi_restore %rbx
leaq (%rax),%rsp
+.cfi_def_cfa_register %rsp
.Lmul_by_one_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_mul_by_one,.-rsaz_512_mul_by_one
.type __rsaz_512_reduce,@function
.align 32