Diffstat (limited to 'deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s')
-rw-r--r--  deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s  72
1 file changed, 72 insertions, 0 deletions
diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s
index 3a78cd8440..414be6aff5 100644
--- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s
+++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/bn/x86_64-mont.s
@@ -6,8 +6,10 @@
.type bn_mul_mont,@function
.align 16
bn_mul_mont:
+.cfi_startproc
movl %r9d,%r9d
movq %rsp,%rax
+.cfi_def_cfa_register %rax
testl $3,%r9d
jnz .Lmul_enter
cmpl $8,%r9d
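
Note: the hunks in this diff add DWARF call-frame information (CFI) to the four Montgomery-multiplication routines (bn_mul_mont, bn_mul4x_mont, bn_sqr8x_mont, bn_mulx4x_mont) so that debuggers, profilers, and unwinders can walk the stack through this hand-written assembly. At .cfi_startproc the canonical frame address (CFA) defaults to %rsp+8, with the return address at CFA-8. Because each routine copies the entry %rsp into %rax before pivoting onto a variable-size stack frame, .cfi_def_cfa_register %rax re-bases the CFA on %rax, which stays valid while %rsp moves. A minimal sketch of the prologue pattern (annotation only, not part of the patch):

	func:
	.cfi_startproc			# CFA = %rsp+8; return address sits at CFA-8
		movq	%rsp,%rax	# keep the entry %rsp in a scratch register
	.cfi_def_cfa_register %rax	# CFA becomes %rax+8, stable while %rsp moves
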
@@ -22,11 +24,17 @@ bn_mul_mont:
.align 16
.Lmul_enter:
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
negq %r9
movq %rsp,%r11
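
Note: each .cfi_offset above records where a pushq stored a callee-saved register, as a byte offset below the CFA. With the return address at CFA-8, the six pushes land in consecutive 8-byte slots (annotation only, not part of the patch):

	# frame layout relative to the CFA after the six pushes:
	#   CFA-8    return address
	#   CFA-16   %rbx		CFA-40   %r13
	#   CFA-24   %rbp		CFA-48   %r14
	#   CFA-32   %r12		CFA-56   %r15
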
@@ -59,6 +67,7 @@ bn_mul_mont:
.Lmul_page_walk_done:
movq %rax,8(%rsp,%r9,8)
+.cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08
.Lmul_body:
movq %rdx,%r12
movq (%r8),%r8
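
Note: once the code pivots onto a stack frame whose size depends on the run-time operand count in %r9, no fixed register+offset rule can describe the CFA, so the .cfi_escape above emits a raw DW_CFA_def_cfa_expression. Decoded byte by byte (annotation only, not part of the patch):

	.cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08
	# 0x0f        DW_CFA_def_cfa_expression
	# 0x0a        the expression is 10 bytes long
	# 0x77 0x08   DW_OP_breg7 8        push %rsp + 8
	# 0x79 0x00   DW_OP_breg9 0        push %r9 + 0
	# 0x38        DW_OP_lit8           push 8
	# 0x1e        DW_OP_mul            %r9 * 8
	# 0x22        DW_OP_plus           %rsp + 8 + %r9*8
	# 0x06        DW_OP_deref          load the entry %rsp saved there
	# 0x23 0x08   DW_OP_plus_uconst 8  add 8
	# i.e. CFA = *(%rsp + 8 + %r9*8) + 8, matching "movq %rax,8(%rsp,%r9,8)"
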
@@ -226,32 +235,49 @@ bn_mul_mont:
jnz .Lcopy
movq 8(%rsp,%r9,8),%rsi
+.cfi_def_cfa %rsi,8
movq $1,%rax
movq -48(%rsi),%r15
+.cfi_restore %r15
movq -40(%rsi),%r14
+.cfi_restore %r14
movq -32(%rsi),%r13
+.cfi_restore %r13
movq -24(%rsi),%r12
+.cfi_restore %r12
movq -16(%rsi),%rbp
+.cfi_restore %rbp
movq -8(%rsi),%rbx
+.cfi_restore %rbx
leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lmul_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_mul_mont,.-bn_mul_mont
.type bn_mul4x_mont,@function
.align 16
bn_mul4x_mont:
+.cfi_startproc
movl %r9d,%r9d
movq %rsp,%rax
+.cfi_def_cfa_register %rax
.Lmul4x_enter:
andl $0x80100,%r11d
cmpl $0x80100,%r11d
je .Lmulx4x_enter
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
negq %r9
movq %rsp,%r11
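
Note: the epilogues mirror the prologue annotations. Reloading the saved entry %rsp into %rsi lets .cfi_def_cfa switch back to a simple register+offset rule; each register reload is paired with a .cfi_restore, and the CFA is re-based on %rsp once the stack pointer is final. The .byte 0xf3,0xc3 encodes a two-byte "rep ret", the usual CRYPTOGAMS workaround for an AMD branch-predictor quirk. Annotated sketch (not part of the patch):

	movq	8(%rsp,%r9,8),%rsi	# reload the saved entry %rsp
.cfi_def_cfa %rsi,8			# CFA is a plain reg+offset rule again
	movq	-48(%rsi),%r15		# reload a callee-saved register...
.cfi_restore %r15			# ...and mark it as holding its caller value
	leaq	(%rsi),%rsp		# restore the stack pointer
.cfi_def_cfa_register %rsp		# re-base the CFA on %rsp
	.byte	0xf3,0xc3		# "rep ret"
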
@@ -275,6 +301,7 @@ bn_mul4x_mont:
.Lmul4x_page_walk_done:
movq %rax,8(%rsp,%r9,8)
+.cfi_escape 0x0f,0x0a,0x77,0x08,0x79,0x00,0x38,0x1e,0x22,0x06,0x23,0x08
.Lmul4x_body:
movq %rdi,16(%rsp,%r9,8)
movq %rdx,%r12
@@ -642,16 +669,25 @@ bn_mul4x_mont:
decq %r15
jnz .Lcopy4x
movq 8(%rsp,%r9,8),%rsi
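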
+.cfi_def_cfa %rsi,8
movq $1,%rax
movq -48(%rsi),%r15
+.cfi_restore %r15
movq -40(%rsi),%r14
+.cfi_restore %r14
movq -32(%rsi),%r13
+.cfi_restore %r13
movq -24(%rsi),%r12
+.cfi_restore %r12
movq -16(%rsi),%rbp
+.cfi_restore %rbp
movq -8(%rsi),%rbx
+.cfi_restore %rbx
leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lmul4x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_mul4x_mont,.-bn_mul4x_mont
@@ -659,14 +695,22 @@ bn_mul4x_mont:
.type bn_sqr8x_mont,@function
.align 32
bn_sqr8x_mont:
+.cfi_startproc
movq %rsp,%rax
+.cfi_def_cfa_register %rax
.Lsqr8x_enter:
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
.Lsqr8x_prologue:
movl %r9d,%r10d
@@ -722,6 +766,7 @@ bn_sqr8x_mont:
movq %r8,32(%rsp)
movq %rax,40(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
.Lsqr8x_body:
.byte 102,72,15,110,209
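
Note: bn_sqr8x_mont (and bn_mulx4x_mont below) save the entry %rsp at the fixed slot 40(%rsp) rather than at an %r9-dependent one, so their CFA expression is shorter. Decoded (annotation only, not part of the patch):

	.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
	# 0x0f        DW_CFA_def_cfa_expression
	# 0x05        the expression is 5 bytes long
	# 0x77 0x28   DW_OP_breg7 0x28     push %rsp + 40
	# 0x06        DW_OP_deref          load the entry %rsp saved there
	# 0x23 0x08   DW_OP_plus_uconst 8  add 8
	# i.e. CFA = *(%rsp + 40) + 8, matching "movq %rax,40(%rsp)"
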
@@ -787,6 +832,7 @@ bn_sqr8x_mont:
pxor %xmm0,%xmm0
pshufd $0,%xmm1,%xmm1
movq 40(%rsp),%rsi
+.cfi_def_cfa %rsi,8
jmp .Lsqr8x_cond_copy
.align 32
@@ -816,26 +862,42 @@ bn_sqr8x_mont:
movq $1,%rax
movq -48(%rsi),%r15
+.cfi_restore %r15
movq -40(%rsi),%r14
+.cfi_restore %r14
movq -32(%rsi),%r13
+.cfi_restore %r13
movq -24(%rsi),%r12
+.cfi_restore %r12
movq -16(%rsi),%rbp
+.cfi_restore %rbp
movq -8(%rsi),%rbx
+.cfi_restore %rbx
leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lsqr8x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_sqr8x_mont,.-bn_sqr8x_mont
.type bn_mulx4x_mont,@function
.align 32
bn_mulx4x_mont:
+.cfi_startproc
movq %rsp,%rax
+.cfi_def_cfa_register %rax
.Lmulx4x_enter:
pushq %rbx
+.cfi_offset %rbx,-16
pushq %rbp
+.cfi_offset %rbp,-24
pushq %r12
+.cfi_offset %r12,-32
pushq %r13
+.cfi_offset %r13,-40
pushq %r14
+.cfi_offset %r14,-48
pushq %r15
+.cfi_offset %r15,-56
.Lmulx4x_prologue:
shll $3,%r9d
@@ -881,6 +943,7 @@ bn_mulx4x_mont:
movq %r8,24(%rsp)
movq %rdi,32(%rsp)
movq %rax,40(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
movq %r9,48(%rsp)
jmp .Lmulx4x_body
@@ -1125,6 +1188,7 @@ bn_mulx4x_mont:
pxor %xmm0,%xmm0
pshufd $0,%xmm1,%xmm1
movq 40(%rsp),%rsi
+.cfi_def_cfa %rsi,8
jmp .Lmulx4x_cond_copy
.align 32
@@ -1154,14 +1218,22 @@ bn_mulx4x_mont:
movq $1,%rax
movq -48(%rsi),%r15
+.cfi_restore %r15
movq -40(%rsi),%r14
+.cfi_restore %r14
movq -32(%rsi),%r13
+.cfi_restore %r13
movq -24(%rsi),%r12
+.cfi_restore %r12
movq -16(%rsi),%rbp
+.cfi_restore %rbp
movq -8(%rsi),%rbx
+.cfi_restore %rbx
leaq (%rsi),%rsp
+.cfi_def_cfa_register %rsp
.Lmulx4x_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_mulx4x_mont,.-bn_mulx4x_mont
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 16
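
Note: the trailing .byte string is the CRYPTOGAMS author banner; it decodes to:

	# "Montgomery Multiplication for x86_64, CRYPTOGAMS by <appro@openssl.org>\0"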