Diffstat (limited to 'deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto')
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aes-x86_64.s            16
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aesni-x86_64.s          26
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/bsaes-x86_64.s           6
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/vpaes-x86_64.s          26
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/rsaz-avx2.s               6
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/x86_64-mont5.s            2
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h                   2
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/ec/ecp_nistz256-x86_64.s    36
-rw-r--r--  deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/modes/ghash-x86_64.s        12
9 files changed, 129 insertions(+), 3 deletions(-)
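
Nearly all of the 129 added lines are DWARF call-frame information (CFI) directives: routines in the regenerated assembly that previously carried no unwind annotations now get a .cfi_startproc/.cfi_endproc pair around their bodies. These are leaf routines that never adjust the stack pointer, so the bare pair is enough for an unwinder to step through them. A minimal sketch of the recurring pattern follows; the label and body are illustrative, not taken from the diff:

	.type	_example_leaf,@function
	.align	16
	_example_leaf:
	.cfi_startproc			# open the frame description for this routine
		movl	(%rdi),%eax	# body: no pushes and no %rsp changes, so no further CFI rules needed
		.byte	0xf3,0xc3	# repz ret, as the perlasm generators emit it
	.cfi_endproc			# close the frame description
	.size	_example_leaf,.-_example_leaf
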
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aes-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aes-x86_64.s
index 4bc117304f..88d1114a5c 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aes-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aes-x86_64.s
@@ -155,6 +155,7 @@ _x86_64_AES_encrypt:
.type _x86_64_AES_encrypt_compact,@function
.align 16
_x86_64_AES_encrypt_compact:
+.cfi_startproc
leaq 128(%r14),%r8
movl 0-128(%r8),%edi
movl 32-128(%r8),%ebp
@@ -324,6 +325,7 @@ _x86_64_AES_encrypt_compact:
xorl 8(%r15),%ecx
xorl 12(%r15),%edx
.byte 0xf3,0xc3
+.cfi_endproc
.size _x86_64_AES_encrypt_compact,.-_x86_64_AES_encrypt_compact
.globl AES_encrypt
.type AES_encrypt,@function
@@ -568,6 +570,7 @@ _x86_64_AES_decrypt:
.type _x86_64_AES_decrypt_compact,@function
.align 16
_x86_64_AES_decrypt_compact:
+.cfi_startproc
leaq 128(%r14),%r8
movl 0-128(%r8),%edi
movl 32-128(%r8),%ebp
@@ -789,6 +792,7 @@ _x86_64_AES_decrypt_compact:
xorl 8(%r15),%ecx
xorl 12(%r15),%edx
.byte 0xf3,0xc3
+.cfi_endproc
.size _x86_64_AES_decrypt_compact,.-_x86_64_AES_decrypt_compact
.globl AES_decrypt
.type AES_decrypt,@function
@@ -920,6 +924,7 @@ AES_set_encrypt_key:
.type _x86_64_AES_set_encrypt_key,@function
.align 16
_x86_64_AES_set_encrypt_key:
+.cfi_startproc
movl %esi,%ecx
movq %rdi,%rsi
movq %rdx,%rdi
@@ -1155,6 +1160,7 @@ _x86_64_AES_set_encrypt_key:
movq $-1,%rax
.Lexit:
.byte 0xf3,0xc3
+.cfi_endproc
.size _x86_64_AES_set_encrypt_key,.-_x86_64_AES_set_encrypt_key
.globl AES_set_decrypt_key
.type AES_set_decrypt_key,@function
@@ -1377,8 +1383,9 @@ AES_cbc_encrypt:
cmpq $0,%rdx
je .Lcbc_epilogue
pushfq
+
+
.cfi_adjust_cfa_offset 8
-.cfi_offset 49,-16
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-24
@@ -1407,6 +1414,7 @@ AES_cbc_encrypt:
cmpq $0,%r9
cmoveq %r10,%r14
+.cfi_remember_state
movl OPENSSL_ia32cap_P(%rip),%r10d
cmpq $512,%rdx
jb .Lcbc_slow_prologue
@@ -1642,6 +1650,7 @@ AES_cbc_encrypt:
.align 16
.Lcbc_slow_prologue:
+.cfi_restore_state
leaq -88(%rsp),%rbp
andq $-64,%rbp
@@ -1653,8 +1662,10 @@ AES_cbc_encrypt:
subq %r10,%rbp
xchgq %rsp,%rbp
+.cfi_def_cfa_register %rbp
movq %rbp,16(%rsp)
+.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x40
.Lcbc_slow_body:
@@ -1843,8 +1854,9 @@ AES_cbc_encrypt:
.cfi_def_cfa %rsp,16
.Lcbc_popfq:
popfq
+
+
.cfi_adjust_cfa_offset -8
-.cfi_restore 49
.Lcbc_epilogue:
.byte 0xf3,0xc3
.cfi_endproc
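
AES_cbc_encrypt needs more than the bare pair. Its pushfq/popfq is still tracked as an 8-byte CFA adjustment, but the explicit .cfi_offset 49,-16 and .cfi_restore 49 rules for the saved flags register (DWARF register 49 is %rflags on x86-64) are dropped, and the slow-path prologue now re-enters via .cfi_remember_state/.cfi_restore_state so it inherits the unwind state from the branch point rather than from the fast path; the frame switch after xchgq %rsp,%rbp additionally gets a .cfi_escape spelling out the CFA as a DWARF expression. A minimal sketch of that shape, with illustrative labels and the surrounding code elided:

		pushfq
	.cfi_adjust_cfa_offset	8	# the push grew the frame by 8 bytes; no register rule for %rflags itself
		...
	.cfi_remember_state		# snapshot the CFI state where the fast and slow paths diverge
		jb	.Lslow_prologue
		...			# fast path continues with the same frame layout
	.align	16
	.Lslow_prologue:
	.cfi_restore_state		# slow path unwinds from the remembered state
		...
		popfq
	.cfi_adjust_cfa_offset	-8	# undo the flags push on the way out
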
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aesni-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aesni-x86_64.s
index 5b2a68e758..9be0053a2a 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aesni-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/aesni-x86_64.s
@@ -4,6 +4,7 @@
.type aesni_encrypt,@function
.align 16
aesni_encrypt:
+.cfi_startproc
movups (%rdi),%xmm2
movl 240(%rdx),%eax
movups (%rdx),%xmm0
@@ -22,12 +23,14 @@ aesni_encrypt:
movups %xmm2,(%rsi)
pxor %xmm2,%xmm2
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_encrypt,.-aesni_encrypt
.globl aesni_decrypt
.type aesni_decrypt,@function
.align 16
aesni_decrypt:
+.cfi_startproc
movups (%rdi),%xmm2
movl 240(%rdx),%eax
movups (%rdx),%xmm0
@@ -46,10 +49,12 @@ aesni_decrypt:
movups %xmm2,(%rsi)
pxor %xmm2,%xmm2
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_decrypt, .-aesni_decrypt
.type _aesni_encrypt2,@function
.align 16
_aesni_encrypt2:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -75,10 +80,12 @@ _aesni_encrypt2:
.byte 102,15,56,221,208
.byte 102,15,56,221,216
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_encrypt2,.-_aesni_encrypt2
.type _aesni_decrypt2,@function
.align 16
_aesni_decrypt2:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -104,10 +111,12 @@ _aesni_decrypt2:
.byte 102,15,56,223,208
.byte 102,15,56,223,216
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_decrypt2,.-_aesni_decrypt2
.type _aesni_encrypt3,@function
.align 16
_aesni_encrypt3:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -138,10 +147,12 @@ _aesni_encrypt3:
.byte 102,15,56,221,216
.byte 102,15,56,221,224
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_encrypt3,.-_aesni_encrypt3
.type _aesni_decrypt3,@function
.align 16
_aesni_decrypt3:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -172,10 +183,12 @@ _aesni_decrypt3:
.byte 102,15,56,223,216
.byte 102,15,56,223,224
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_decrypt3,.-_aesni_decrypt3
.type _aesni_encrypt4,@function
.align 16
_aesni_encrypt4:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -212,10 +225,12 @@ _aesni_encrypt4:
.byte 102,15,56,221,224
.byte 102,15,56,221,232
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_encrypt4,.-_aesni_encrypt4
.type _aesni_decrypt4,@function
.align 16
_aesni_decrypt4:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -252,10 +267,12 @@ _aesni_decrypt4:
.byte 102,15,56,223,224
.byte 102,15,56,223,232
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_decrypt4,.-_aesni_decrypt4
.type _aesni_encrypt6,@function
.align 16
_aesni_encrypt6:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -306,10 +323,12 @@ _aesni_encrypt6:
.byte 102,15,56,221,240
.byte 102,15,56,221,248
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_encrypt6,.-_aesni_encrypt6
.type _aesni_decrypt6,@function
.align 16
_aesni_decrypt6:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -360,10 +379,12 @@ _aesni_decrypt6:
.byte 102,15,56,223,240
.byte 102,15,56,223,248
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_decrypt6,.-_aesni_decrypt6
.type _aesni_encrypt8,@function
.align 16
_aesni_encrypt8:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -424,10 +445,12 @@ _aesni_encrypt8:
.byte 102,68,15,56,221,192
.byte 102,68,15,56,221,200
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_encrypt8,.-_aesni_encrypt8
.type _aesni_decrypt8,@function
.align 16
_aesni_decrypt8:
+.cfi_startproc
movups (%rcx),%xmm0
shll $4,%eax
movups 16(%rcx),%xmm1
@@ -488,11 +511,13 @@ _aesni_decrypt8:
.byte 102,68,15,56,223,192
.byte 102,68,15,56,223,200
.byte 0xf3,0xc3
+.cfi_endproc
.size _aesni_decrypt8,.-_aesni_decrypt8
.globl aesni_ecb_encrypt
.type aesni_ecb_encrypt,@function
.align 16
aesni_ecb_encrypt:
+.cfi_startproc
andq $-16,%rdx
jz .Lecb_ret
@@ -830,6 +855,7 @@ aesni_ecb_encrypt:
xorps %xmm0,%xmm0
pxor %xmm1,%xmm1
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ecb_encrypt,.-aesni_ecb_encrypt
.globl aesni_ccm64_encrypt_blocks
.type aesni_ccm64_encrypt_blocks,@function
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/bsaes-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/bsaes-x86_64.s
index f7451dfe52..c968165106 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/bsaes-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/bsaes-x86_64.s
@@ -6,6 +6,7 @@
.type _bsaes_encrypt8,@function
.align 64
_bsaes_encrypt8:
+.cfi_startproc
leaq .LBS0(%rip),%r11
movdqa (%rax),%xmm8
@@ -473,11 +474,13 @@ _bsaes_encrypt8_bitslice:
pxor %xmm7,%xmm15
pxor %xmm7,%xmm0
.byte 0xf3,0xc3
+.cfi_endproc
.size _bsaes_encrypt8,.-_bsaes_encrypt8
.type _bsaes_decrypt8,@function
.align 64
_bsaes_decrypt8:
+.cfi_startproc
leaq .LBS0(%rip),%r11
movdqa (%rax),%xmm8
@@ -979,10 +982,12 @@ _bsaes_decrypt8:
pxor %xmm7,%xmm15
pxor %xmm7,%xmm0
.byte 0xf3,0xc3
+.cfi_endproc
.size _bsaes_decrypt8,.-_bsaes_decrypt8
.type _bsaes_key_convert,@function
.align 16
_bsaes_key_convert:
+.cfi_startproc
leaq .Lmasks(%rip),%r11
movdqu (%rcx),%xmm7
leaq 16(%rcx),%rcx
@@ -1061,6 +1066,7 @@ _bsaes_key_convert:
movdqa 80(%r11),%xmm7
.byte 0xf3,0xc3
+.cfi_endproc
.size _bsaes_key_convert,.-_bsaes_key_convert
.globl bsaes_cbc_encrypt
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/vpaes-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/vpaes-x86_64.s
index d193298940..fa7f3fb5a1 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/vpaes-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/aes/vpaes-x86_64.s
@@ -18,6 +18,7 @@
.type _vpaes_encrypt_core,@function
.align 16
_vpaes_encrypt_core:
+.cfi_startproc
movq %rdx,%r9
movq $16,%r11
movl 240(%rdx),%eax
@@ -98,6 +99,7 @@ _vpaes_encrypt_core:
pxor %xmm4,%xmm0
.byte 102,15,56,0,193
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_encrypt_core,.-_vpaes_encrypt_core
@@ -108,6 +110,7 @@ _vpaes_encrypt_core:
.type _vpaes_decrypt_core,@function
.align 16
_vpaes_decrypt_core:
+.cfi_startproc
movq %rdx,%r9
movl 240(%rdx),%eax
movdqa %xmm9,%xmm1
@@ -204,6 +207,7 @@ _vpaes_decrypt_core:
pxor %xmm4,%xmm0
.byte 102,15,56,0,194
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_decrypt_core,.-_vpaes_decrypt_core
@@ -214,6 +218,7 @@ _vpaes_decrypt_core:
.type _vpaes_schedule_core,@function
.align 16
_vpaes_schedule_core:
+.cfi_startproc
@@ -380,6 +385,7 @@ _vpaes_schedule_core:
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_schedule_core,.-_vpaes_schedule_core
@@ -399,6 +405,7 @@ _vpaes_schedule_core:
.type _vpaes_schedule_192_smear,@function
.align 16
_vpaes_schedule_192_smear:
+.cfi_startproc
pshufd $0x80,%xmm6,%xmm1
pshufd $0xFE,%xmm7,%xmm0
pxor %xmm1,%xmm6
@@ -407,6 +414,7 @@ _vpaes_schedule_192_smear:
movdqa %xmm6,%xmm0
movhlps %xmm1,%xmm6
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
@@ -430,6 +438,7 @@ _vpaes_schedule_192_smear:
.type _vpaes_schedule_round,@function
.align 16
_vpaes_schedule_round:
+.cfi_startproc
pxor %xmm1,%xmm1
.byte 102,65,15,58,15,200,15
@@ -483,6 +492,7 @@ _vpaes_schedule_low_round:
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_schedule_round,.-_vpaes_schedule_round
@@ -497,6 +507,7 @@ _vpaes_schedule_low_round:
.type _vpaes_schedule_transform,@function
.align 16
_vpaes_schedule_transform:
+.cfi_startproc
movdqa %xmm9,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
@@ -507,6 +518,7 @@ _vpaes_schedule_transform:
.byte 102,15,56,0,193
pxor %xmm2,%xmm0
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_schedule_transform,.-_vpaes_schedule_transform
@@ -535,6 +547,7 @@ _vpaes_schedule_transform:
.type _vpaes_schedule_mangle,@function
.align 16
_vpaes_schedule_mangle:
+.cfi_startproc
movdqa %xmm0,%xmm4
movdqa .Lk_mc_forward(%rip),%xmm5
testq %rcx,%rcx
@@ -599,6 +612,7 @@ _vpaes_schedule_mangle:
andq $0x30,%r8
movdqu %xmm3,(%rdx)
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
@@ -608,6 +622,7 @@ _vpaes_schedule_mangle:
.type vpaes_set_encrypt_key,@function
.align 16
vpaes_set_encrypt_key:
+.cfi_startproc
movl %esi,%eax
shrl $5,%eax
addl $5,%eax
@@ -618,12 +633,14 @@ vpaes_set_encrypt_key:
call _vpaes_schedule_core
xorl %eax,%eax
.byte 0xf3,0xc3
+.cfi_endproc
.size vpaes_set_encrypt_key,.-vpaes_set_encrypt_key
.globl vpaes_set_decrypt_key
.type vpaes_set_decrypt_key,@function
.align 16
vpaes_set_decrypt_key:
+.cfi_startproc
movl %esi,%eax
shrl $5,%eax
addl $5,%eax
@@ -639,33 +656,39 @@ vpaes_set_decrypt_key:
call _vpaes_schedule_core
xorl %eax,%eax
.byte 0xf3,0xc3
+.cfi_endproc
.size vpaes_set_decrypt_key,.-vpaes_set_decrypt_key
.globl vpaes_encrypt
.type vpaes_encrypt,@function
.align 16
vpaes_encrypt:
+.cfi_startproc
movdqu (%rdi),%xmm0
call _vpaes_preheat
call _vpaes_encrypt_core
movdqu %xmm0,(%rsi)
.byte 0xf3,0xc3
+.cfi_endproc
.size vpaes_encrypt,.-vpaes_encrypt
.globl vpaes_decrypt
.type vpaes_decrypt,@function
.align 16
vpaes_decrypt:
+.cfi_startproc
movdqu (%rdi),%xmm0
call _vpaes_preheat
call _vpaes_decrypt_core
movdqu %xmm0,(%rsi)
.byte 0xf3,0xc3
+.cfi_endproc
.size vpaes_decrypt,.-vpaes_decrypt
.globl vpaes_cbc_encrypt
.type vpaes_cbc_encrypt,@function
.align 16
vpaes_cbc_encrypt:
+.cfi_startproc
xchgq %rcx,%rdx
subq $16,%rcx
jc .Lcbc_abort
@@ -701,6 +724,7 @@ vpaes_cbc_encrypt:
movdqu %xmm6,(%r8)
.Lcbc_abort:
.byte 0xf3,0xc3
+.cfi_endproc
.size vpaes_cbc_encrypt,.-vpaes_cbc_encrypt
@@ -711,6 +735,7 @@ vpaes_cbc_encrypt:
.type _vpaes_preheat,@function
.align 16
_vpaes_preheat:
+.cfi_startproc
leaq .Lk_s0F(%rip),%r10
movdqa -32(%r10),%xmm10
movdqa -16(%r10),%xmm11
@@ -720,6 +745,7 @@ _vpaes_preheat:
movdqa 80(%r10),%xmm15
movdqa 96(%r10),%xmm14
.byte 0xf3,0xc3
+.cfi_endproc
.size _vpaes_preheat,.-_vpaes_preheat
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/rsaz-avx2.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/rsaz-avx2.s
index 61b400749b..5ac86bbc79 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/rsaz-avx2.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/rsaz-avx2.s
@@ -1212,6 +1212,7 @@ rsaz_1024_mul_avx2:
.type rsaz_1024_red2norm_avx2,@function
.align 32
rsaz_1024_red2norm_avx2:
+.cfi_startproc
subq $-128,%rsi
xorq %rax,%rax
movq -128(%rsi),%r8
@@ -1403,12 +1404,14 @@ rsaz_1024_red2norm_avx2:
movq %rax,120(%rdi)
movq %r11,%rax
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_1024_red2norm_avx2,.-rsaz_1024_red2norm_avx2
.globl rsaz_1024_norm2red_avx2
.type rsaz_1024_norm2red_avx2,@function
.align 32
rsaz_1024_norm2red_avx2:
+.cfi_startproc
subq $-128,%rdi
movq (%rsi),%r8
movl $0x1fffffff,%eax
@@ -1561,11 +1564,13 @@ rsaz_1024_norm2red_avx2:
movq %r8,176(%rdi)
movq %r8,184(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_1024_norm2red_avx2,.-rsaz_1024_norm2red_avx2
.globl rsaz_1024_scatter5_avx2
.type rsaz_1024_scatter5_avx2,@function
.align 32
rsaz_1024_scatter5_avx2:
+.cfi_startproc
vzeroupper
vmovdqu .Lscatter_permd(%rip),%ymm5
shll $4,%edx
@@ -1585,6 +1590,7 @@ rsaz_1024_scatter5_avx2:
vzeroupper
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_1024_scatter5_avx2,.-rsaz_1024_scatter5_avx2
.globl rsaz_1024_gather5_avx2
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/x86_64-mont5.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/x86_64-mont5.s
index c6d752a245..653fada1b0 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/x86_64-mont5.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/bn/x86_64-mont5.s
@@ -2893,6 +2893,7 @@ bn_powerx5:
.align 32
bn_sqrx8x_internal:
__bn_sqrx8x_internal:
+.cfi_startproc
@@ -3504,6 +3505,7 @@ __bn_sqrx8x_reduction:
cmpq 8+8(%rsp),%r8
jb .Lsqrx8x_reduction_loop
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_sqrx8x_internal,.-bn_sqrx8x_internal
.align 32
__bn_postx4x_internal:
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h
index 45b1f5286a..b8b10efa8e 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h
@@ -11,7 +11,7 @@
*/
#define PLATFORM "platform: solaris64-x86_64-gcc"
-#define DATE "built on: Thu Nov 22 19:35:40 2018 UTC"
+#define DATE "built on: Tue Feb 26 19:55:52 2019 UTC"
/*
* Generate compiler_flags as an array of individual characters. This is a
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/ec/ecp_nistz256-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/ec/ecp_nistz256-x86_64.s
index eeeed6ba40..77f2d8282b 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/ec/ecp_nistz256-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/ec/ecp_nistz256-x86_64.s
@@ -3959,6 +3959,7 @@ ecp_nistz256_mul_mont:
.type __ecp_nistz256_mul_montq,@function
.align 32
__ecp_nistz256_mul_montq:
+.cfi_startproc
movq %rax,%rbp
@@ -4170,6 +4171,7 @@ __ecp_nistz256_mul_montq:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_mul_montq,.-__ecp_nistz256_mul_montq
@@ -4247,6 +4249,7 @@ ecp_nistz256_sqr_mont:
.type __ecp_nistz256_sqr_montq,@function
.align 32
__ecp_nistz256_sqr_montq:
+.cfi_startproc
movq %rax,%r13
mulq %r14
movq %rax,%r9
@@ -4404,10 +4407,12 @@ __ecp_nistz256_sqr_montq:
movq %r15,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_sqr_montq,.-__ecp_nistz256_sqr_montq
.type __ecp_nistz256_mul_montx,@function
.align 32
__ecp_nistz256_mul_montx:
+.cfi_startproc
mulxq %r9,%r8,%r9
@@ -4570,11 +4575,13 @@ __ecp_nistz256_mul_montx:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_mul_montx,.-__ecp_nistz256_mul_montx
.type __ecp_nistz256_sqr_montx,@function
.align 32
__ecp_nistz256_sqr_montx:
+.cfi_startproc
mulxq %r14,%r9,%r10
mulxq %r15,%rcx,%r11
xorl %eax,%eax
@@ -4698,6 +4705,7 @@ __ecp_nistz256_sqr_montx:
movq %r15,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_sqr_montx,.-__ecp_nistz256_sqr_montx
@@ -4837,6 +4845,7 @@ ecp_nistz256_scatter_w5:
.type ecp_nistz256_gather_w5,@function
.align 32
ecp_nistz256_gather_w5:
+.cfi_startproc
movl OPENSSL_ia32cap_P+8(%rip),%eax
testl $32,%eax
jnz .Lavx2_gather_w5
@@ -4891,6 +4900,7 @@ ecp_nistz256_gather_w5:
movdqu %xmm6,64(%rdi)
movdqu %xmm7,80(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_ecp_nistz256_gather_w5:
.size ecp_nistz256_gather_w5,.-ecp_nistz256_gather_w5
@@ -4919,6 +4929,7 @@ ecp_nistz256_scatter_w7:
.type ecp_nistz256_gather_w7,@function
.align 32
ecp_nistz256_gather_w7:
+.cfi_startproc
movl OPENSSL_ia32cap_P+8(%rip),%eax
testl $32,%eax
jnz .Lavx2_gather_w7
@@ -4962,6 +4973,7 @@ ecp_nistz256_gather_w7:
movdqu %xmm4,32(%rdi)
movdqu %xmm5,48(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_ecp_nistz256_gather_w7:
.size ecp_nistz256_gather_w7,.-ecp_nistz256_gather_w7
@@ -4969,6 +4981,7 @@ ecp_nistz256_gather_w7:
.type ecp_nistz256_avx2_gather_w5,@function
.align 32
ecp_nistz256_avx2_gather_w5:
+.cfi_startproc
.Lavx2_gather_w5:
vzeroupper
vmovdqa .LTwo(%rip),%ymm0
@@ -5023,6 +5036,7 @@ ecp_nistz256_avx2_gather_w5:
vmovdqu %ymm4,64(%rdi)
vzeroupper
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_ecp_nistz256_avx2_gather_w5:
.size ecp_nistz256_avx2_gather_w5,.-ecp_nistz256_avx2_gather_w5
@@ -5032,6 +5046,7 @@ ecp_nistz256_avx2_gather_w5:
.type ecp_nistz256_avx2_gather_w7,@function
.align 32
ecp_nistz256_avx2_gather_w7:
+.cfi_startproc
.Lavx2_gather_w7:
vzeroupper
vmovdqa .LThree(%rip),%ymm0
@@ -5101,11 +5116,13 @@ ecp_nistz256_avx2_gather_w7:
vmovdqu %ymm3,32(%rdi)
vzeroupper
.byte 0xf3,0xc3
+.cfi_endproc
.LSEH_end_ecp_nistz256_avx2_gather_w7:
.size ecp_nistz256_avx2_gather_w7,.-ecp_nistz256_avx2_gather_w7
.type __ecp_nistz256_add_toq,@function
.align 32
__ecp_nistz256_add_toq:
+.cfi_startproc
xorq %r11,%r11
addq 0(%rbx),%r12
adcq 8(%rbx),%r13
@@ -5133,11 +5150,13 @@ __ecp_nistz256_add_toq:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_add_toq,.-__ecp_nistz256_add_toq
.type __ecp_nistz256_sub_fromq,@function
.align 32
__ecp_nistz256_sub_fromq:
+.cfi_startproc
subq 0(%rbx),%r12
sbbq 8(%rbx),%r13
movq %r12,%rax
@@ -5164,11 +5183,13 @@ __ecp_nistz256_sub_fromq:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_sub_fromq,.-__ecp_nistz256_sub_fromq
.type __ecp_nistz256_subq,@function
.align 32
__ecp_nistz256_subq:
+.cfi_startproc
subq %r12,%rax
sbbq %r13,%rbp
movq %rax,%r12
@@ -5191,11 +5212,13 @@ __ecp_nistz256_subq:
cmovnzq %r10,%r9
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_subq,.-__ecp_nistz256_subq
.type __ecp_nistz256_mul_by_2q,@function
.align 32
__ecp_nistz256_mul_by_2q:
+.cfi_startproc
xorq %r11,%r11
addq %r12,%r12
adcq %r13,%r13
@@ -5223,6 +5246,7 @@ __ecp_nistz256_mul_by_2q:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_mul_by_2q,.-__ecp_nistz256_mul_by_2q
.globl ecp_nistz256_point_double
.type ecp_nistz256_point_double,@function
@@ -5655,7 +5679,9 @@ ecp_nistz256_point_add:
.byte 102,72,15,126,206
.byte 102,72,15,126,199
addq $416,%rsp
+.cfi_adjust_cfa_offset -416
jmp .Lpoint_double_shortcutq
+.cfi_adjust_cfa_offset 416
.align 32
.Ladd_proceedq:
@@ -6217,6 +6243,7 @@ ecp_nistz256_point_add_affine:
.type __ecp_nistz256_add_tox,@function
.align 32
__ecp_nistz256_add_tox:
+.cfi_startproc
xorq %r11,%r11
adcq 0(%rbx),%r12
adcq 8(%rbx),%r13
@@ -6245,11 +6272,13 @@ __ecp_nistz256_add_tox:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_add_tox,.-__ecp_nistz256_add_tox
.type __ecp_nistz256_sub_fromx,@function
.align 32
__ecp_nistz256_sub_fromx:
+.cfi_startproc
xorq %r11,%r11
sbbq 0(%rbx),%r12
sbbq 8(%rbx),%r13
@@ -6278,11 +6307,13 @@ __ecp_nistz256_sub_fromx:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_sub_fromx,.-__ecp_nistz256_sub_fromx
.type __ecp_nistz256_subx,@function
.align 32
__ecp_nistz256_subx:
+.cfi_startproc
xorq %r11,%r11
sbbq %r12,%rax
sbbq %r13,%rbp
@@ -6307,11 +6338,13 @@ __ecp_nistz256_subx:
cmovcq %r10,%r9
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_subx,.-__ecp_nistz256_subx
.type __ecp_nistz256_mul_by_2x,@function
.align 32
__ecp_nistz256_mul_by_2x:
+.cfi_startproc
xorq %r11,%r11
adcq %r12,%r12
adcq %r13,%r13
@@ -6340,6 +6373,7 @@ __ecp_nistz256_mul_by_2x:
movq %r9,24(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __ecp_nistz256_mul_by_2x,.-__ecp_nistz256_mul_by_2x
.type ecp_nistz256_point_doublex,@function
.align 32
@@ -6764,7 +6798,9 @@ ecp_nistz256_point_addx:
.byte 102,72,15,126,206
.byte 102,72,15,126,199
addq $416,%rsp
+.cfi_adjust_cfa_offset -416
jmp .Lpoint_double_shortcutx
+.cfi_adjust_cfa_offset 416
.align 32
.Ladd_proceedx:
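
In ecp_nistz256_point_add and ecp_nistz256_point_addx, the early-out that tail-jumps into the point-double code first releases the 416-byte local frame, so the CFA is adjusted for exactly that one instruction and then put back: everything after the jmp is reached from branches on which the frame is still live. The idiom as it appears above (q variant shown; the x variant differs only in the label suffix, surrounding code elided):

		addq	$416,%rsp		# drop the local frame before jumping away
	.cfi_adjust_cfa_offset	-416	# CFA rule matches the released frame for the jmp itself
		jmp	.Lpoint_double_shortcutq
	.cfi_adjust_cfa_offset	416	# paths that fall into the code below still own the frame,
					# so restore the previous CFA rule
	.align	32
	.Ladd_proceedq:
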
diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/modes/ghash-x86_64.s b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/modes/ghash-x86_64.s
index 29c297f04b..a75ae1642c 100644
--- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/modes/ghash-x86_64.s
+++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/modes/ghash-x86_64.s
@@ -705,6 +705,7 @@ gcm_ghash_4bit:
.type gcm_init_clmul,@function
.align 16
gcm_init_clmul:
+.cfi_startproc
.L_init_clmul:
movdqu (%rsi),%xmm2
pshufd $78,%xmm2,%xmm2
@@ -856,11 +857,13 @@ gcm_init_clmul:
.byte 102,15,58,15,227,8
movdqu %xmm4,80(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size gcm_init_clmul,.-gcm_init_clmul
.globl gcm_gmult_clmul
.type gcm_gmult_clmul,@function
.align 16
gcm_gmult_clmul:
+.cfi_startproc
.L_gmult_clmul:
movdqu (%rdi),%xmm0
movdqa .Lbswap_mask(%rip),%xmm5
@@ -907,11 +910,13 @@ gcm_gmult_clmul:
.byte 102,15,56,0,197
movdqu %xmm0,(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size gcm_gmult_clmul,.-gcm_gmult_clmul
.globl gcm_ghash_clmul
.type gcm_ghash_clmul,@function
.align 32
gcm_ghash_clmul:
+.cfi_startproc
.L_ghash_clmul:
movdqa .Lbswap_mask(%rip),%xmm10
@@ -1290,11 +1295,13 @@ gcm_ghash_clmul:
.byte 102,65,15,56,0,194
movdqu %xmm0,(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size gcm_ghash_clmul,.-gcm_ghash_clmul
.globl gcm_init_avx
.type gcm_init_avx,@function
.align 32
gcm_init_avx:
+.cfi_startproc
vzeroupper
vmovdqu (%rsi),%xmm2
@@ -1397,17 +1404,21 @@ gcm_init_avx:
vzeroupper
.byte 0xf3,0xc3
+.cfi_endproc
.size gcm_init_avx,.-gcm_init_avx
.globl gcm_gmult_avx
.type gcm_gmult_avx,@function
.align 32
gcm_gmult_avx:
+.cfi_startproc
jmp .L_gmult_clmul
+.cfi_endproc
.size gcm_gmult_avx,.-gcm_gmult_avx
.globl gcm_ghash_avx
.type gcm_ghash_avx,@function
.align 32
gcm_ghash_avx:
+.cfi_startproc
vzeroupper
vmovdqu (%rdi),%xmm10
@@ -1779,6 +1790,7 @@ gcm_ghash_avx:
vmovdqu %xmm10,(%rdi)
vzeroupper
.byte 0xf3,0xc3
+.cfi_endproc
.size gcm_ghash_avx,.-gcm_ghash_avx
.align 64
.Lbswap_mask: