Diffstat (limited to 'deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes')
-rw-r--r--  deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aes-ppc.s    1539
-rw-r--r--  deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aesp8-ppc.s  3689
-rw-r--r--  deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/vpaes-ppc.s  1497
3 files changed, 6725 insertions, 0 deletions
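
For context (not part of the diff itself): these generated modules expose OpenSSL's standard low-level AES entry points, such as AES_encrypt below and the aes_p8_* POWER8 variants. A minimal sketch of exercising the portable AES_encrypt primitive through OpenSSL's <openssl/aes.h> API follows; the key and plaintext values are arbitrary placeholders, and real callers would normally go through the EVP interface instead.

    #include <stdio.h>
    #include <openssl/aes.h>   /* low-level API; link with -lcrypto */

    int main(void) {
        /* placeholder 128-bit key and one 16-byte block (all zeros) */
        unsigned char key_bytes[16] = {0};
        unsigned char in[16] = {0};
        unsigned char out[16];
        AES_KEY key;

        /* expand the key schedule, then encrypt exactly one block */
        if (AES_set_encrypt_key(key_bytes, 128, &key) != 0)
            return 1;
        AES_encrypt(in, out, &key);

        for (int i = 0; i < 16; i++)
            printf("%02x", out[i]);
        printf("\n");
        return 0;
    }
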
diff --git a/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aes-ppc.s b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aes-ppc.s
new file mode 100644
index 0000000000..5f37cdc33c
--- /dev/null
+++ b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aes-ppc.s
@@ -0,0 +1,1539 @@
+.machine "any"
+.text
+
+.align 7
+.LAES_Te:
+ mflr 0
+ bcl 20,31,$+4
+ mflr 3
+ addi 3,3,120
+ mtlr 0
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.space 28
+.LAES_Td:
+ mflr 0
+ bcl 20,31,$+4
+ mflr 3
+ addi 3,3,2360
+ mtlr 0
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.space 28
+.long 0xc66363a5,0xc66363a5
+.long 0xf87c7c84,0xf87c7c84
+.long 0xee777799,0xee777799
+.long 0xf67b7b8d,0xf67b7b8d
+.long 0xfff2f20d,0xfff2f20d
+.long 0xd66b6bbd,0xd66b6bbd
+.long 0xde6f6fb1,0xde6f6fb1
+.long 0x91c5c554,0x91c5c554
+.long 0x60303050,0x60303050
+.long 0x02010103,0x02010103
+.long 0xce6767a9,0xce6767a9
+.long 0x562b2b7d,0x562b2b7d
+.long 0xe7fefe19,0xe7fefe19
+.long 0xb5d7d762,0xb5d7d762
+.long 0x4dababe6,0x4dababe6
+.long 0xec76769a,0xec76769a
+.long 0x8fcaca45,0x8fcaca45
+.long 0x1f82829d,0x1f82829d
+.long 0x89c9c940,0x89c9c940
+.long 0xfa7d7d87,0xfa7d7d87
+.long 0xeffafa15,0xeffafa15
+.long 0xb25959eb,0xb25959eb
+.long 0x8e4747c9,0x8e4747c9
+.long 0xfbf0f00b,0xfbf0f00b
+.long 0x41adadec,0x41adadec
+.long 0xb3d4d467,0xb3d4d467
+.long 0x5fa2a2fd,0x5fa2a2fd
+.long 0x45afafea,0x45afafea
+.long 0x239c9cbf,0x239c9cbf
+.long 0x53a4a4f7,0x53a4a4f7
+.long 0xe4727296,0xe4727296
+.long 0x9bc0c05b,0x9bc0c05b
+.long 0x75b7b7c2,0x75b7b7c2
+.long 0xe1fdfd1c,0xe1fdfd1c
+.long 0x3d9393ae,0x3d9393ae
+.long 0x4c26266a,0x4c26266a
+.long 0x6c36365a,0x6c36365a
+.long 0x7e3f3f41,0x7e3f3f41
+.long 0xf5f7f702,0xf5f7f702
+.long 0x83cccc4f,0x83cccc4f
+.long 0x6834345c,0x6834345c
+.long 0x51a5a5f4,0x51a5a5f4
+.long 0xd1e5e534,0xd1e5e534
+.long 0xf9f1f108,0xf9f1f108
+.long 0xe2717193,0xe2717193
+.long 0xabd8d873,0xabd8d873
+.long 0x62313153,0x62313153
+.long 0x2a15153f,0x2a15153f
+.long 0x0804040c,0x0804040c
+.long 0x95c7c752,0x95c7c752
+.long 0x46232365,0x46232365
+.long 0x9dc3c35e,0x9dc3c35e
+.long 0x30181828,0x30181828
+.long 0x379696a1,0x379696a1
+.long 0x0a05050f,0x0a05050f
+.long 0x2f9a9ab5,0x2f9a9ab5
+.long 0x0e070709,0x0e070709
+.long 0x24121236,0x24121236
+.long 0x1b80809b,0x1b80809b
+.long 0xdfe2e23d,0xdfe2e23d
+.long 0xcdebeb26,0xcdebeb26
+.long 0x4e272769,0x4e272769
+.long 0x7fb2b2cd,0x7fb2b2cd
+.long 0xea75759f,0xea75759f
+.long 0x1209091b,0x1209091b
+.long 0x1d83839e,0x1d83839e
+.long 0x582c2c74,0x582c2c74
+.long 0x341a1a2e,0x341a1a2e
+.long 0x361b1b2d,0x361b1b2d
+.long 0xdc6e6eb2,0xdc6e6eb2
+.long 0xb45a5aee,0xb45a5aee
+.long 0x5ba0a0fb,0x5ba0a0fb
+.long 0xa45252f6,0xa45252f6
+.long 0x763b3b4d,0x763b3b4d
+.long 0xb7d6d661,0xb7d6d661
+.long 0x7db3b3ce,0x7db3b3ce
+.long 0x5229297b,0x5229297b
+.long 0xdde3e33e,0xdde3e33e
+.long 0x5e2f2f71,0x5e2f2f71
+.long 0x13848497,0x13848497
+.long 0xa65353f5,0xa65353f5
+.long 0xb9d1d168,0xb9d1d168
+.long 0x00000000,0x00000000
+.long 0xc1eded2c,0xc1eded2c
+.long 0x40202060,0x40202060
+.long 0xe3fcfc1f,0xe3fcfc1f
+.long 0x79b1b1c8,0x79b1b1c8
+.long 0xb65b5bed,0xb65b5bed
+.long 0xd46a6abe,0xd46a6abe
+.long 0x8dcbcb46,0x8dcbcb46
+.long 0x67bebed9,0x67bebed9
+.long 0x7239394b,0x7239394b
+.long 0x944a4ade,0x944a4ade
+.long 0x984c4cd4,0x984c4cd4
+.long 0xb05858e8,0xb05858e8
+.long 0x85cfcf4a,0x85cfcf4a
+.long 0xbbd0d06b,0xbbd0d06b
+.long 0xc5efef2a,0xc5efef2a
+.long 0x4faaaae5,0x4faaaae5
+.long 0xedfbfb16,0xedfbfb16
+.long 0x864343c5,0x864343c5
+.long 0x9a4d4dd7,0x9a4d4dd7
+.long 0x66333355,0x66333355
+.long 0x11858594,0x11858594
+.long 0x8a4545cf,0x8a4545cf
+.long 0xe9f9f910,0xe9f9f910
+.long 0x04020206,0x04020206
+.long 0xfe7f7f81,0xfe7f7f81
+.long 0xa05050f0,0xa05050f0
+.long 0x783c3c44,0x783c3c44
+.long 0x259f9fba,0x259f9fba
+.long 0x4ba8a8e3,0x4ba8a8e3
+.long 0xa25151f3,0xa25151f3
+.long 0x5da3a3fe,0x5da3a3fe
+.long 0x804040c0,0x804040c0
+.long 0x058f8f8a,0x058f8f8a
+.long 0x3f9292ad,0x3f9292ad
+.long 0x219d9dbc,0x219d9dbc
+.long 0x70383848,0x70383848
+.long 0xf1f5f504,0xf1f5f504
+.long 0x63bcbcdf,0x63bcbcdf
+.long 0x77b6b6c1,0x77b6b6c1
+.long 0xafdada75,0xafdada75
+.long 0x42212163,0x42212163
+.long 0x20101030,0x20101030
+.long 0xe5ffff1a,0xe5ffff1a
+.long 0xfdf3f30e,0xfdf3f30e
+.long 0xbfd2d26d,0xbfd2d26d
+.long 0x81cdcd4c,0x81cdcd4c
+.long 0x180c0c14,0x180c0c14
+.long 0x26131335,0x26131335
+.long 0xc3ecec2f,0xc3ecec2f
+.long 0xbe5f5fe1,0xbe5f5fe1
+.long 0x359797a2,0x359797a2
+.long 0x884444cc,0x884444cc
+.long 0x2e171739,0x2e171739
+.long 0x93c4c457,0x93c4c457
+.long 0x55a7a7f2,0x55a7a7f2
+.long 0xfc7e7e82,0xfc7e7e82
+.long 0x7a3d3d47,0x7a3d3d47
+.long 0xc86464ac,0xc86464ac
+.long 0xba5d5de7,0xba5d5de7
+.long 0x3219192b,0x3219192b
+.long 0xe6737395,0xe6737395
+.long 0xc06060a0,0xc06060a0
+.long 0x19818198,0x19818198
+.long 0x9e4f4fd1,0x9e4f4fd1
+.long 0xa3dcdc7f,0xa3dcdc7f
+.long 0x44222266,0x44222266
+.long 0x542a2a7e,0x542a2a7e
+.long 0x3b9090ab,0x3b9090ab
+.long 0x0b888883,0x0b888883
+.long 0x8c4646ca,0x8c4646ca
+.long 0xc7eeee29,0xc7eeee29
+.long 0x6bb8b8d3,0x6bb8b8d3
+.long 0x2814143c,0x2814143c
+.long 0xa7dede79,0xa7dede79
+.long 0xbc5e5ee2,0xbc5e5ee2
+.long 0x160b0b1d,0x160b0b1d
+.long 0xaddbdb76,0xaddbdb76
+.long 0xdbe0e03b,0xdbe0e03b
+.long 0x64323256,0x64323256
+.long 0x743a3a4e,0x743a3a4e
+.long 0x140a0a1e,0x140a0a1e
+.long 0x924949db,0x924949db
+.long 0x0c06060a,0x0c06060a
+.long 0x4824246c,0x4824246c
+.long 0xb85c5ce4,0xb85c5ce4
+.long 0x9fc2c25d,0x9fc2c25d
+.long 0xbdd3d36e,0xbdd3d36e
+.long 0x43acacef,0x43acacef
+.long 0xc46262a6,0xc46262a6
+.long 0x399191a8,0x399191a8
+.long 0x319595a4,0x319595a4
+.long 0xd3e4e437,0xd3e4e437
+.long 0xf279798b,0xf279798b
+.long 0xd5e7e732,0xd5e7e732
+.long 0x8bc8c843,0x8bc8c843
+.long 0x6e373759,0x6e373759
+.long 0xda6d6db7,0xda6d6db7
+.long 0x018d8d8c,0x018d8d8c
+.long 0xb1d5d564,0xb1d5d564
+.long 0x9c4e4ed2,0x9c4e4ed2
+.long 0x49a9a9e0,0x49a9a9e0
+.long 0xd86c6cb4,0xd86c6cb4
+.long 0xac5656fa,0xac5656fa
+.long 0xf3f4f407,0xf3f4f407
+.long 0xcfeaea25,0xcfeaea25
+.long 0xca6565af,0xca6565af
+.long 0xf47a7a8e,0xf47a7a8e
+.long 0x47aeaee9,0x47aeaee9
+.long 0x10080818,0x10080818
+.long 0x6fbabad5,0x6fbabad5
+.long 0xf0787888,0xf0787888
+.long 0x4a25256f,0x4a25256f
+.long 0x5c2e2e72,0x5c2e2e72
+.long 0x381c1c24,0x381c1c24
+.long 0x57a6a6f1,0x57a6a6f1
+.long 0x73b4b4c7,0x73b4b4c7
+.long 0x97c6c651,0x97c6c651
+.long 0xcbe8e823,0xcbe8e823
+.long 0xa1dddd7c,0xa1dddd7c
+.long 0xe874749c,0xe874749c
+.long 0x3e1f1f21,0x3e1f1f21
+.long 0x964b4bdd,0x964b4bdd
+.long 0x61bdbddc,0x61bdbddc
+.long 0x0d8b8b86,0x0d8b8b86
+.long 0x0f8a8a85,0x0f8a8a85
+.long 0xe0707090,0xe0707090
+.long 0x7c3e3e42,0x7c3e3e42
+.long 0x71b5b5c4,0x71b5b5c4
+.long 0xcc6666aa,0xcc6666aa
+.long 0x904848d8,0x904848d8
+.long 0x06030305,0x06030305
+.long 0xf7f6f601,0xf7f6f601
+.long 0x1c0e0e12,0x1c0e0e12
+.long 0xc26161a3,0xc26161a3
+.long 0x6a35355f,0x6a35355f
+.long 0xae5757f9,0xae5757f9
+.long 0x69b9b9d0,0x69b9b9d0
+.long 0x17868691,0x17868691
+.long 0x99c1c158,0x99c1c158
+.long 0x3a1d1d27,0x3a1d1d27
+.long 0x279e9eb9,0x279e9eb9
+.long 0xd9e1e138,0xd9e1e138
+.long 0xebf8f813,0xebf8f813
+.long 0x2b9898b3,0x2b9898b3
+.long 0x22111133,0x22111133
+.long 0xd26969bb,0xd26969bb
+.long 0xa9d9d970,0xa9d9d970
+.long 0x078e8e89,0x078e8e89
+.long 0x339494a7,0x339494a7
+.long 0x2d9b9bb6,0x2d9b9bb6
+.long 0x3c1e1e22,0x3c1e1e22
+.long 0x15878792,0x15878792
+.long 0xc9e9e920,0xc9e9e920
+.long 0x87cece49,0x87cece49
+.long 0xaa5555ff,0xaa5555ff
+.long 0x50282878,0x50282878
+.long 0xa5dfdf7a,0xa5dfdf7a
+.long 0x038c8c8f,0x038c8c8f
+.long 0x59a1a1f8,0x59a1a1f8
+.long 0x09898980,0x09898980
+.long 0x1a0d0d17,0x1a0d0d17
+.long 0x65bfbfda,0x65bfbfda
+.long 0xd7e6e631,0xd7e6e631
+.long 0x844242c6,0x844242c6
+.long 0xd06868b8,0xd06868b8
+.long 0x824141c3,0x824141c3
+.long 0x299999b0,0x299999b0
+.long 0x5a2d2d77,0x5a2d2d77
+.long 0x1e0f0f11,0x1e0f0f11
+.long 0x7bb0b0cb,0x7bb0b0cb
+.long 0xa85454fc,0xa85454fc
+.long 0x6dbbbbd6,0x6dbbbbd6
+.long 0x2c16163a,0x2c16163a
+.byte 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5
+.byte 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76
+.byte 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0
+.byte 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0
+.byte 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc
+.byte 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15
+.byte 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a
+.byte 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75
+.byte 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0
+.byte 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84
+.byte 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b
+.byte 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf
+.byte 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85
+.byte 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8
+.byte 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5
+.byte 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2
+.byte 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17
+.byte 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73
+.byte 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88
+.byte 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb
+.byte 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c
+.byte 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79
+.byte 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9
+.byte 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08
+.byte 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6
+.byte 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a
+.byte 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e
+.byte 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e
+.byte 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94
+.byte 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf
+.byte 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68
+.byte 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
+.long 0x51f4a750,0x51f4a750
+.long 0x7e416553,0x7e416553
+.long 0x1a17a4c3,0x1a17a4c3
+.long 0x3a275e96,0x3a275e96
+.long 0x3bab6bcb,0x3bab6bcb
+.long 0x1f9d45f1,0x1f9d45f1
+.long 0xacfa58ab,0xacfa58ab
+.long 0x4be30393,0x4be30393
+.long 0x2030fa55,0x2030fa55
+.long 0xad766df6,0xad766df6
+.long 0x88cc7691,0x88cc7691
+.long 0xf5024c25,0xf5024c25
+.long 0x4fe5d7fc,0x4fe5d7fc
+.long 0xc52acbd7,0xc52acbd7
+.long 0x26354480,0x26354480
+.long 0xb562a38f,0xb562a38f
+.long 0xdeb15a49,0xdeb15a49
+.long 0x25ba1b67,0x25ba1b67
+.long 0x45ea0e98,0x45ea0e98
+.long 0x5dfec0e1,0x5dfec0e1
+.long 0xc32f7502,0xc32f7502
+.long 0x814cf012,0x814cf012
+.long 0x8d4697a3,0x8d4697a3
+.long 0x6bd3f9c6,0x6bd3f9c6
+.long 0x038f5fe7,0x038f5fe7
+.long 0x15929c95,0x15929c95
+.long 0xbf6d7aeb,0xbf6d7aeb
+.long 0x955259da,0x955259da
+.long 0xd4be832d,0xd4be832d
+.long 0x587421d3,0x587421d3
+.long 0x49e06929,0x49e06929
+.long 0x8ec9c844,0x8ec9c844
+.long 0x75c2896a,0x75c2896a
+.long 0xf48e7978,0xf48e7978
+.long 0x99583e6b,0x99583e6b
+.long 0x27b971dd,0x27b971dd
+.long 0xbee14fb6,0xbee14fb6
+.long 0xf088ad17,0xf088ad17
+.long 0xc920ac66,0xc920ac66
+.long 0x7dce3ab4,0x7dce3ab4
+.long 0x63df4a18,0x63df4a18
+.long 0xe51a3182,0xe51a3182
+.long 0x97513360,0x97513360
+.long 0x62537f45,0x62537f45
+.long 0xb16477e0,0xb16477e0
+.long 0xbb6bae84,0xbb6bae84
+.long 0xfe81a01c,0xfe81a01c
+.long 0xf9082b94,0xf9082b94
+.long 0x70486858,0x70486858
+.long 0x8f45fd19,0x8f45fd19
+.long 0x94de6c87,0x94de6c87
+.long 0x527bf8b7,0x527bf8b7
+.long 0xab73d323,0xab73d323
+.long 0x724b02e2,0x724b02e2
+.long 0xe31f8f57,0xe31f8f57
+.long 0x6655ab2a,0x6655ab2a
+.long 0xb2eb2807,0xb2eb2807
+.long 0x2fb5c203,0x2fb5c203
+.long 0x86c57b9a,0x86c57b9a
+.long 0xd33708a5,0xd33708a5
+.long 0x302887f2,0x302887f2
+.long 0x23bfa5b2,0x23bfa5b2
+.long 0x02036aba,0x02036aba
+.long 0xed16825c,0xed16825c
+.long 0x8acf1c2b,0x8acf1c2b
+.long 0xa779b492,0xa779b492
+.long 0xf307f2f0,0xf307f2f0
+.long 0x4e69e2a1,0x4e69e2a1
+.long 0x65daf4cd,0x65daf4cd
+.long 0x0605bed5,0x0605bed5
+.long 0xd134621f,0xd134621f
+.long 0xc4a6fe8a,0xc4a6fe8a
+.long 0x342e539d,0x342e539d
+.long 0xa2f355a0,0xa2f355a0
+.long 0x058ae132,0x058ae132
+.long 0xa4f6eb75,0xa4f6eb75
+.long 0x0b83ec39,0x0b83ec39
+.long 0x4060efaa,0x4060efaa
+.long 0x5e719f06,0x5e719f06
+.long 0xbd6e1051,0xbd6e1051
+.long 0x3e218af9,0x3e218af9
+.long 0x96dd063d,0x96dd063d
+.long 0xdd3e05ae,0xdd3e05ae
+.long 0x4de6bd46,0x4de6bd46
+.long 0x91548db5,0x91548db5
+.long 0x71c45d05,0x71c45d05
+.long 0x0406d46f,0x0406d46f
+.long 0x605015ff,0x605015ff
+.long 0x1998fb24,0x1998fb24
+.long 0xd6bde997,0xd6bde997
+.long 0x894043cc,0x894043cc
+.long 0x67d99e77,0x67d99e77
+.long 0xb0e842bd,0xb0e842bd
+.long 0x07898b88,0x07898b88
+.long 0xe7195b38,0xe7195b38
+.long 0x79c8eedb,0x79c8eedb
+.long 0xa17c0a47,0xa17c0a47
+.long 0x7c420fe9,0x7c420fe9
+.long 0xf8841ec9,0xf8841ec9
+.long 0x00000000,0x00000000
+.long 0x09808683,0x09808683
+.long 0x322bed48,0x322bed48
+.long 0x1e1170ac,0x1e1170ac
+.long 0x6c5a724e,0x6c5a724e
+.long 0xfd0efffb,0xfd0efffb
+.long 0x0f853856,0x0f853856
+.long 0x3daed51e,0x3daed51e
+.long 0x362d3927,0x362d3927
+.long 0x0a0fd964,0x0a0fd964
+.long 0x685ca621,0x685ca621
+.long 0x9b5b54d1,0x9b5b54d1
+.long 0x24362e3a,0x24362e3a
+.long 0x0c0a67b1,0x0c0a67b1
+.long 0x9357e70f,0x9357e70f
+.long 0xb4ee96d2,0xb4ee96d2
+.long 0x1b9b919e,0x1b9b919e
+.long 0x80c0c54f,0x80c0c54f
+.long 0x61dc20a2,0x61dc20a2
+.long 0x5a774b69,0x5a774b69
+.long 0x1c121a16,0x1c121a16
+.long 0xe293ba0a,0xe293ba0a
+.long 0xc0a02ae5,0xc0a02ae5
+.long 0x3c22e043,0x3c22e043
+.long 0x121b171d,0x121b171d
+.long 0x0e090d0b,0x0e090d0b
+.long 0xf28bc7ad,0xf28bc7ad
+.long 0x2db6a8b9,0x2db6a8b9
+.long 0x141ea9c8,0x141ea9c8
+.long 0x57f11985,0x57f11985
+.long 0xaf75074c,0xaf75074c
+.long 0xee99ddbb,0xee99ddbb
+.long 0xa37f60fd,0xa37f60fd
+.long 0xf701269f,0xf701269f
+.long 0x5c72f5bc,0x5c72f5bc
+.long 0x44663bc5,0x44663bc5
+.long 0x5bfb7e34,0x5bfb7e34
+.long 0x8b432976,0x8b432976
+.long 0xcb23c6dc,0xcb23c6dc
+.long 0xb6edfc68,0xb6edfc68
+.long 0xb8e4f163,0xb8e4f163
+.long 0xd731dcca,0xd731dcca
+.long 0x42638510,0x42638510
+.long 0x13972240,0x13972240
+.long 0x84c61120,0x84c61120
+.long 0x854a247d,0x854a247d
+.long 0xd2bb3df8,0xd2bb3df8
+.long 0xaef93211,0xaef93211
+.long 0xc729a16d,0xc729a16d
+.long 0x1d9e2f4b,0x1d9e2f4b
+.long 0xdcb230f3,0xdcb230f3
+.long 0x0d8652ec,0x0d8652ec
+.long 0x77c1e3d0,0x77c1e3d0
+.long 0x2bb3166c,0x2bb3166c
+.long 0xa970b999,0xa970b999
+.long 0x119448fa,0x119448fa
+.long 0x47e96422,0x47e96422
+.long 0xa8fc8cc4,0xa8fc8cc4
+.long 0xa0f03f1a,0xa0f03f1a
+.long 0x567d2cd8,0x567d2cd8
+.long 0x223390ef,0x223390ef
+.long 0x87494ec7,0x87494ec7
+.long 0xd938d1c1,0xd938d1c1
+.long 0x8ccaa2fe,0x8ccaa2fe
+.long 0x98d40b36,0x98d40b36
+.long 0xa6f581cf,0xa6f581cf
+.long 0xa57ade28,0xa57ade28
+.long 0xdab78e26,0xdab78e26
+.long 0x3fadbfa4,0x3fadbfa4
+.long 0x2c3a9de4,0x2c3a9de4
+.long 0x5078920d,0x5078920d
+.long 0x6a5fcc9b,0x6a5fcc9b
+.long 0x547e4662,0x547e4662
+.long 0xf68d13c2,0xf68d13c2
+.long 0x90d8b8e8,0x90d8b8e8
+.long 0x2e39f75e,0x2e39f75e
+.long 0x82c3aff5,0x82c3aff5
+.long 0x9f5d80be,0x9f5d80be
+.long 0x69d0937c,0x69d0937c
+.long 0x6fd52da9,0x6fd52da9
+.long 0xcf2512b3,0xcf2512b3
+.long 0xc8ac993b,0xc8ac993b
+.long 0x10187da7,0x10187da7
+.long 0xe89c636e,0xe89c636e
+.long 0xdb3bbb7b,0xdb3bbb7b
+.long 0xcd267809,0xcd267809
+.long 0x6e5918f4,0x6e5918f4
+.long 0xec9ab701,0xec9ab701
+.long 0x834f9aa8,0x834f9aa8
+.long 0xe6956e65,0xe6956e65
+.long 0xaaffe67e,0xaaffe67e
+.long 0x21bccf08,0x21bccf08
+.long 0xef15e8e6,0xef15e8e6
+.long 0xbae79bd9,0xbae79bd9
+.long 0x4a6f36ce,0x4a6f36ce
+.long 0xea9f09d4,0xea9f09d4
+.long 0x29b07cd6,0x29b07cd6
+.long 0x31a4b2af,0x31a4b2af
+.long 0x2a3f2331,0x2a3f2331
+.long 0xc6a59430,0xc6a59430
+.long 0x35a266c0,0x35a266c0
+.long 0x744ebc37,0x744ebc37
+.long 0xfc82caa6,0xfc82caa6
+.long 0xe090d0b0,0xe090d0b0
+.long 0x33a7d815,0x33a7d815
+.long 0xf104984a,0xf104984a
+.long 0x41ecdaf7,0x41ecdaf7
+.long 0x7fcd500e,0x7fcd500e
+.long 0x1791f62f,0x1791f62f
+.long 0x764dd68d,0x764dd68d
+.long 0x43efb04d,0x43efb04d
+.long 0xccaa4d54,0xccaa4d54
+.long 0xe49604df,0xe49604df
+.long 0x9ed1b5e3,0x9ed1b5e3
+.long 0x4c6a881b,0x4c6a881b
+.long 0xc12c1fb8,0xc12c1fb8
+.long 0x4665517f,0x4665517f
+.long 0x9d5eea04,0x9d5eea04
+.long 0x018c355d,0x018c355d
+.long 0xfa877473,0xfa877473
+.long 0xfb0b412e,0xfb0b412e
+.long 0xb3671d5a,0xb3671d5a
+.long 0x92dbd252,0x92dbd252
+.long 0xe9105633,0xe9105633
+.long 0x6dd64713,0x6dd64713
+.long 0x9ad7618c,0x9ad7618c
+.long 0x37a10c7a,0x37a10c7a
+.long 0x59f8148e,0x59f8148e
+.long 0xeb133c89,0xeb133c89
+.long 0xcea927ee,0xcea927ee
+.long 0xb761c935,0xb761c935
+.long 0xe11ce5ed,0xe11ce5ed
+.long 0x7a47b13c,0x7a47b13c
+.long 0x9cd2df59,0x9cd2df59
+.long 0x55f2733f,0x55f2733f
+.long 0x1814ce79,0x1814ce79
+.long 0x73c737bf,0x73c737bf
+.long 0x53f7cdea,0x53f7cdea
+.long 0x5ffdaa5b,0x5ffdaa5b
+.long 0xdf3d6f14,0xdf3d6f14
+.long 0x7844db86,0x7844db86
+.long 0xcaaff381,0xcaaff381
+.long 0xb968c43e,0xb968c43e
+.long 0x3824342c,0x3824342c
+.long 0xc2a3405f,0xc2a3405f
+.long 0x161dc372,0x161dc372
+.long 0xbce2250c,0xbce2250c
+.long 0x283c498b,0x283c498b
+.long 0xff0d9541,0xff0d9541
+.long 0x39a80171,0x39a80171
+.long 0x080cb3de,0x080cb3de
+.long 0xd8b4e49c,0xd8b4e49c
+.long 0x6456c190,0x6456c190
+.long 0x7bcb8461,0x7bcb8461
+.long 0xd532b670,0xd532b670
+.long 0x486c5c74,0x486c5c74
+.long 0xd0b85742,0xd0b85742
+.byte 0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38
+.byte 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb
+.byte 0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87
+.byte 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb
+.byte 0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d
+.byte 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e
+.byte 0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2
+.byte 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25
+.byte 0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16
+.byte 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92
+.byte 0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda
+.byte 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84
+.byte 0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a
+.byte 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06
+.byte 0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02
+.byte 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b
+.byte 0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea
+.byte 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73
+.byte 0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85
+.byte 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e
+.byte 0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89
+.byte 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b
+.byte 0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20
+.byte 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4
+.byte 0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31
+.byte 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f
+.byte 0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d
+.byte 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef
+.byte 0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0
+.byte 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61
+.byte 0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26
+.byte 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d
+
+
+.globl AES_encrypt
+.type AES_encrypt,@function
+.section ".opd","aw"
+.align 3
+AES_encrypt:
+.quad .AES_encrypt,.TOC.@tocbase,0
+.previous
+.align 7
+.AES_encrypt:
+ stdu 1,-256(1)
+ mflr 0
+
+ std 4,104(1)
+ std 14,112(1)
+ std 15,120(1)
+ std 16,128(1)
+ std 17,136(1)
+ std 18,144(1)
+ std 19,152(1)
+ std 20,160(1)
+ std 21,168(1)
+ std 22,176(1)
+ std 23,184(1)
+ std 24,192(1)
+ std 25,200(1)
+ std 26,208(1)
+ std 27,216(1)
+ std 28,224(1)
+ std 29,232(1)
+ std 30,240(1)
+ std 31,248(1)
+ std 0,272(1)
+
+ andi. 12,3,3
+ andi. 0,4,3
+ or. 12,12,0
+ bne .Lenc_unaligned
+
+.Lenc_unaligned_ok:
+ lwz 8,0(3)
+ lwz 9,4(3)
+ lwz 10,8(3)
+ lwz 11,12(3)
+ bl .LAES_Te
+ bl .Lppc_AES_encrypt_compact
+ ld 4,104(1)
+ stw 8,0(4)
+ stw 9,4(4)
+ stw 10,8(4)
+ stw 11,12(4)
+ b .Lenc_done
+
+.Lenc_unaligned:
+ subfic 12,3,4096
+ subfic 0,4,4096
+ andi. 12,12,4096-16
+ beq .Lenc_xpage
+ andi. 0,0,4096-16
+ bne .Lenc_unaligned_ok
+
+.Lenc_xpage:
+ lbz 16,0(3)
+ lbz 17,1(3)
+ lbz 18,2(3)
+ lbz 8,3(3)
+ lbz 20,4(3)
+ lbz 21,5(3)
+ lbz 22,6(3)
+ lbz 9,7(3)
+ lbz 24,8(3)
+ lbz 25,9(3)
+ lbz 26,10(3)
+ insrwi 8,16,8,0
+ lbz 10,11(3)
+ insrwi 9,20,8,0
+ lbz 28,12(3)
+ insrwi 8,17,8,8
+ lbz 29,13(3)
+ insrwi 9,21,8,8
+ lbz 30,14(3)
+ insrwi 8,18,8,16
+ lbz 11,15(3)
+ insrwi 9,22,8,16
+ insrwi 10,24,8,0
+ insrwi 11,28,8,0
+ insrwi 10,25,8,8
+ insrwi 11,29,8,8
+ insrwi 10,26,8,16
+ insrwi 11,30,8,16
+
+ bl .LAES_Te
+ bl .Lppc_AES_encrypt_compact
+ ld 4,104(1)
+
+ extrwi 16,8,8,0
+ extrwi 17,8,8,8
+ stb 16,0(4)
+ extrwi 18,8,8,16
+ stb 17,1(4)
+ stb 18,2(4)
+ extrwi 20,9,8,0
+ stb 8,3(4)
+ extrwi 21,9,8,8
+ stb 20,4(4)
+ extrwi 22,9,8,16
+ stb 21,5(4)
+ stb 22,6(4)
+ extrwi 24,10,8,0
+ stb 9,7(4)
+ extrwi 25,10,8,8
+ stb 24,8(4)
+ extrwi 26,10,8,16
+ stb 25,9(4)
+ stb 26,10(4)
+ extrwi 28,11,8,0
+ stb 10,11(4)
+ extrwi 29,11,8,8
+ stb 28,12(4)
+ extrwi 30,11,8,16
+ stb 29,13(4)
+ stb 30,14(4)
+ stb 11,15(4)
+
+.Lenc_done:
+ ld 0,272(1)
+ ld 14,112(1)
+ ld 15,120(1)
+ ld 16,128(1)
+ ld 17,136(1)
+ ld 18,144(1)
+ ld 19,152(1)
+ ld 20,160(1)
+ ld 21,168(1)
+ ld 22,176(1)
+ ld 23,184(1)
+ ld 24,192(1)
+ ld 25,200(1)
+ ld 26,208(1)
+ ld 27,216(1)
+ ld 28,224(1)
+ ld 29,232(1)
+ ld 30,240(1)
+ ld 31,248(1)
+ mtlr 0
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,4,1,0x80,18,3,0
+.long 0
+
+.align 5
+.Lppc_AES_encrypt:
+ lwz 16,240(5)
+ addi 6,3,3
+ lwz 12,0(5)
+ addi 7,3,2
+ lwz 0,4(5)
+ addi 4,3,1
+ lwz 14,8(5)
+ addi 16,16,-1
+ lwz 15,12(5)
+ addi 5,5,16
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ mtctr 16
+.align 4
+.Lenc_loop:
+ rlwinm 16,8,11,21,28
+ rlwinm 17,9,11,21,28
+ rlwinm 18,10,11,21,28
+ rlwinm 19,11,11,21,28
+ lwz 12,0(5)
+ rlwinm 20,9,19,21,28
+ lwz 0,4(5)
+ rlwinm 21,10,19,21,28
+ lwz 14,8(5)
+ rlwinm 22,11,19,21,28
+ lwz 15,12(5)
+ rlwinm 23,8,19,21,28
+ lwzx 16,3,16
+ rlwinm 24,10,27,21,28
+ lwzx 17,3,17
+ rlwinm 25,11,27,21,28
+ lwzx 18,3,18
+ rlwinm 26,8,27,21,28
+ lwzx 19,3,19
+ rlwinm 27,9,27,21,28
+ lwzx 20,6,20
+ rlwinm 28,11,3,21,28
+ lwzx 21,6,21
+ rlwinm 29,8,3,21,28
+ lwzx 22,6,22
+ rlwinm 30,9,3,21,28
+ lwzx 23,6,23
+ rlwinm 31,10,3,21,28
+ lwzx 24,7,24
+ xor 12,12,16
+ lwzx 25,7,25
+ xor 0,0,17
+ lwzx 26,7,26
+ xor 14,14,18
+ lwzx 27,7,27
+ xor 15,15,19
+ lwzx 28,4,28
+ xor 12,12,20
+ lwzx 29,4,29
+ xor 0,0,21
+ lwzx 30,4,30
+ xor 14,14,22
+ lwzx 31,4,31
+ xor 15,15,23
+ xor 12,12,24
+ xor 0,0,25
+ xor 14,14,26
+ xor 15,15,27
+ xor 8,12,28
+ xor 9,0,29
+ xor 10,14,30
+ xor 11,15,31
+ addi 5,5,16
+ bdnz .Lenc_loop
+
+ addi 7,3,2048
+ nop
+ lwz 12,0(5)
+ rlwinm 16,8,8,24,31
+ lwz 0,4(5)
+ rlwinm 17,9,8,24,31
+ lwz 14,8(5)
+ rlwinm 18,10,8,24,31
+ lwz 15,12(5)
+ rlwinm 19,11,8,24,31
+ lwz 24,2048(3)
+ rlwinm 20,9,16,24,31
+ lwz 25,2080(3)
+ rlwinm 21,10,16,24,31
+ lwz 26,2112(3)
+ rlwinm 22,11,16,24,31
+ lwz 27,2144(3)
+ rlwinm 23,8,16,24,31
+ lwz 28,2176(3)
+ rlwinm 24,10,24,24,31
+ lwz 29,2208(3)
+ rlwinm 25,11,24,24,31
+ lwz 30,2240(3)
+ rlwinm 26,8,24,24,31
+ lwz 31,2272(3)
+ rlwinm 27,9,24,24,31
+ lbzx 16,7,16
+ rlwinm 28,11,0,24,31
+ lbzx 17,7,17
+ rlwinm 29,8,0,24,31
+ lbzx 18,7,18
+ rlwinm 30,9,0,24,31
+ lbzx 19,7,19
+ rlwinm 31,10,0,24,31
+ lbzx 20,7,20
+ rlwinm 8,16,24,0,7
+ lbzx 21,7,21
+ rlwinm 9,17,24,0,7
+ lbzx 22,7,22
+ rlwinm 10,18,24,0,7
+ lbzx 23,7,23
+ rlwinm 11,19,24,0,7
+ lbzx 24,7,24
+ rlwimi 8,20,16,8,15
+ lbzx 25,7,25
+ rlwimi 9,21,16,8,15
+ lbzx 26,7,26
+ rlwimi 10,22,16,8,15
+ lbzx 27,7,27
+ rlwimi 11,23,16,8,15
+ lbzx 28,7,28
+ rlwimi 8,24,8,16,23
+ lbzx 29,7,29
+ rlwimi 9,25,8,16,23
+ lbzx 30,7,30
+ rlwimi 10,26,8,16,23
+ lbzx 31,7,31
+ rlwimi 11,27,8,16,23
+ or 8,8,28
+ or 9,9,29
+ or 10,10,30
+ or 11,11,31
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+.align 4
+.Lppc_AES_encrypt_compact:
+ lwz 16,240(5)
+ addi 6,3,2048
+ lwz 12,0(5)
+ lis 7,0x8080
+ lwz 0,4(5)
+ lis 4,0x1b1b
+ lwz 14,8(5)
+ ori 7,7,0x8080
+ lwz 15,12(5)
+ ori 4,4,0x1b1b
+ addi 5,5,16
+ mtctr 16
+.align 4
+.Lenc_compact_loop:
+ xor 8,8,12
+ xor 9,9,0
+ rlwinm 16,8,8,24,31
+ xor 10,10,14
+ rlwinm 17,9,8,24,31
+ xor 11,11,15
+ rlwinm 18,10,8,24,31
+ rlwinm 19,11,8,24,31
+ rlwinm 20,9,16,24,31
+ rlwinm 21,10,16,24,31
+ rlwinm 22,11,16,24,31
+ rlwinm 23,8,16,24,31
+ lbzx 16,6,16
+ rlwinm 24,10,24,24,31
+ lbzx 17,6,17
+ rlwinm 25,11,24,24,31
+ lbzx 18,6,18
+ rlwinm 26,8,24,24,31
+ lbzx 19,6,19
+ rlwinm 27,9,24,24,31
+ lbzx 20,6,20
+ rlwinm 28,11,0,24,31
+ lbzx 21,6,21
+ rlwinm 29,8,0,24,31
+ lbzx 22,6,22
+ rlwinm 30,9,0,24,31
+ lbzx 23,6,23
+ rlwinm 31,10,0,24,31
+ lbzx 24,6,24
+ rlwinm 8,16,24,0,7
+ lbzx 25,6,25
+ rlwinm 9,17,24,0,7
+ lbzx 26,6,26
+ rlwinm 10,18,24,0,7
+ lbzx 27,6,27
+ rlwinm 11,19,24,0,7
+ lbzx 28,6,28
+ rlwimi 8,20,16,8,15
+ lbzx 29,6,29
+ rlwimi 9,21,16,8,15
+ lbzx 30,6,30
+ rlwimi 10,22,16,8,15
+ lbzx 31,6,31
+ rlwimi 11,23,16,8,15
+ rlwimi 8,24,8,16,23
+ rlwimi 9,25,8,16,23
+ rlwimi 10,26,8,16,23
+ rlwimi 11,27,8,16,23
+ lwz 12,0(5)
+ or 8,8,28
+ lwz 0,4(5)
+ or 9,9,29
+ lwz 14,8(5)
+ or 10,10,30
+ lwz 15,12(5)
+ or 11,11,31
+
+ addi 5,5,16
+ bdz .Lenc_compact_done
+
+ and 16,8,7
+ and 17,9,7
+ and 18,10,7
+ and 19,11,7
+ srwi 20,16,7
+ andc 24,8,7
+ srwi 21,17,7
+ andc 25,9,7
+ srwi 22,18,7
+ andc 26,10,7
+ srwi 23,19,7
+ andc 27,11,7
+ sub 16,16,20
+ sub 17,17,21
+ sub 18,18,22
+ sub 19,19,23
+ add 24,24,24
+ add 25,25,25
+ add 26,26,26
+ add 27,27,27
+ and 16,16,4
+ and 17,17,4
+ and 18,18,4
+ and 19,19,4
+ xor 16,16,24
+ xor 17,17,25
+ rotlwi 28,8,16
+ xor 18,18,26
+ rotlwi 29,9,16
+ xor 19,19,27
+ rotlwi 30,10,16
+
+ xor 8,8,16
+ rotlwi 31,11,16
+ xor 9,9,17
+ rotrwi 8,8,24
+ xor 10,10,18
+ rotrwi 9,9,24
+ xor 11,11,19
+ rotrwi 10,10,24
+ xor 8,8,16
+ rotrwi 11,11,24
+ xor 9,9,17
+ xor 10,10,18
+ xor 11,11,19
+ rotlwi 24,28,8
+ xor 8,8,28
+ rotlwi 25,29,8
+ xor 9,9,29
+ rotlwi 26,30,8
+ xor 10,10,30
+ rotlwi 27,31,8
+ xor 11,11,31
+ xor 8,8,24
+ xor 9,9,25
+ xor 10,10,26
+ xor 11,11,27
+
+ b .Lenc_compact_loop
+.align 4
+.Lenc_compact_done:
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.size .AES_encrypt,.-.AES_encrypt
+.size AES_encrypt,.-.AES_encrypt
+
+.globl AES_decrypt
+.type AES_decrypt,@function
+.section ".opd","aw"
+.align 3
+AES_decrypt:
+.quad .AES_decrypt,.TOC.@tocbase,0
+.previous
+.align 7
+.AES_decrypt:
+ stdu 1,-256(1)
+ mflr 0
+
+ std 4,104(1)
+ std 14,112(1)
+ std 15,120(1)
+ std 16,128(1)
+ std 17,136(1)
+ std 18,144(1)
+ std 19,152(1)
+ std 20,160(1)
+ std 21,168(1)
+ std 22,176(1)
+ std 23,184(1)
+ std 24,192(1)
+ std 25,200(1)
+ std 26,208(1)
+ std 27,216(1)
+ std 28,224(1)
+ std 29,232(1)
+ std 30,240(1)
+ std 31,248(1)
+ std 0,272(1)
+
+ andi. 12,3,3
+ andi. 0,4,3
+ or. 12,12,0
+ bne .Ldec_unaligned
+
+.Ldec_unaligned_ok:
+ lwz 8,0(3)
+ lwz 9,4(3)
+ lwz 10,8(3)
+ lwz 11,12(3)
+ bl .LAES_Td
+ bl .Lppc_AES_decrypt_compact
+ ld 4,104(1)
+ stw 8,0(4)
+ stw 9,4(4)
+ stw 10,8(4)
+ stw 11,12(4)
+ b .Ldec_done
+
+.Ldec_unaligned:
+ subfic 12,3,4096
+ subfic 0,4,4096
+ andi. 12,12,4096-16
+ beq .Ldec_xpage
+ andi. 0,0,4096-16
+ bne .Ldec_unaligned_ok
+
+.Ldec_xpage:
+ lbz 16,0(3)
+ lbz 17,1(3)
+ lbz 18,2(3)
+ lbz 8,3(3)
+ lbz 20,4(3)
+ lbz 21,5(3)
+ lbz 22,6(3)
+ lbz 9,7(3)
+ lbz 24,8(3)
+ lbz 25,9(3)
+ lbz 26,10(3)
+ insrwi 8,16,8,0
+ lbz 10,11(3)
+ insrwi 9,20,8,0
+ lbz 28,12(3)
+ insrwi 8,17,8,8
+ lbz 29,13(3)
+ insrwi 9,21,8,8
+ lbz 30,14(3)
+ insrwi 8,18,8,16
+ lbz 11,15(3)
+ insrwi 9,22,8,16
+ insrwi 10,24,8,0
+ insrwi 11,28,8,0
+ insrwi 10,25,8,8
+ insrwi 11,29,8,8
+ insrwi 10,26,8,16
+ insrwi 11,30,8,16
+
+ bl .LAES_Td
+ bl .Lppc_AES_decrypt_compact
+ ld 4,104(1)
+
+ extrwi 16,8,8,0
+ extrwi 17,8,8,8
+ stb 16,0(4)
+ extrwi 18,8,8,16
+ stb 17,1(4)
+ stb 18,2(4)
+ extrwi 20,9,8,0
+ stb 8,3(4)
+ extrwi 21,9,8,8
+ stb 20,4(4)
+ extrwi 22,9,8,16
+ stb 21,5(4)
+ stb 22,6(4)
+ extrwi 24,10,8,0
+ stb 9,7(4)
+ extrwi 25,10,8,8
+ stb 24,8(4)
+ extrwi 26,10,8,16
+ stb 25,9(4)
+ stb 26,10(4)
+ extrwi 28,11,8,0
+ stb 10,11(4)
+ extrwi 29,11,8,8
+ stb 28,12(4)
+ extrwi 30,11,8,16
+ stb 29,13(4)
+ stb 30,14(4)
+ stb 11,15(4)
+
+.Ldec_done:
+ ld 0,272(1)
+ ld 14,112(1)
+ ld 15,120(1)
+ ld 16,128(1)
+ ld 17,136(1)
+ ld 18,144(1)
+ ld 19,152(1)
+ ld 20,160(1)
+ ld 21,168(1)
+ ld 22,176(1)
+ ld 23,184(1)
+ ld 24,192(1)
+ ld 25,200(1)
+ ld 26,208(1)
+ ld 27,216(1)
+ ld 28,224(1)
+ ld 29,232(1)
+ ld 30,240(1)
+ ld 31,248(1)
+ mtlr 0
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,4,1,0x80,18,3,0
+.long 0
+
+.align 5
+.Lppc_AES_decrypt:
+ lwz 16,240(5)
+ addi 6,3,3
+ lwz 12,0(5)
+ addi 7,3,2
+ lwz 0,4(5)
+ addi 4,3,1
+ lwz 14,8(5)
+ addi 16,16,-1
+ lwz 15,12(5)
+ addi 5,5,16
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ mtctr 16
+.align 4
+.Ldec_loop:
+ rlwinm 16,8,11,21,28
+ rlwinm 17,9,11,21,28
+ rlwinm 18,10,11,21,28
+ rlwinm 19,11,11,21,28
+ lwz 12,0(5)
+ rlwinm 20,11,19,21,28
+ lwz 0,4(5)
+ rlwinm 21,8,19,21,28
+ lwz 14,8(5)
+ rlwinm 22,9,19,21,28
+ lwz 15,12(5)
+ rlwinm 23,10,19,21,28
+ lwzx 16,3,16
+ rlwinm 24,10,27,21,28
+ lwzx 17,3,17
+ rlwinm 25,11,27,21,28
+ lwzx 18,3,18
+ rlwinm 26,8,27,21,28
+ lwzx 19,3,19
+ rlwinm 27,9,27,21,28
+ lwzx 20,6,20
+ rlwinm 28,9,3,21,28
+ lwzx 21,6,21
+ rlwinm 29,10,3,21,28
+ lwzx 22,6,22
+ rlwinm 30,11,3,21,28
+ lwzx 23,6,23
+ rlwinm 31,8,3,21,28
+ lwzx 24,7,24
+ xor 12,12,16
+ lwzx 25,7,25
+ xor 0,0,17
+ lwzx 26,7,26
+ xor 14,14,18
+ lwzx 27,7,27
+ xor 15,15,19
+ lwzx 28,4,28
+ xor 12,12,20
+ lwzx 29,4,29
+ xor 0,0,21
+ lwzx 30,4,30
+ xor 14,14,22
+ lwzx 31,4,31
+ xor 15,15,23
+ xor 12,12,24
+ xor 0,0,25
+ xor 14,14,26
+ xor 15,15,27
+ xor 8,12,28
+ xor 9,0,29
+ xor 10,14,30
+ xor 11,15,31
+ addi 5,5,16
+ bdnz .Ldec_loop
+
+ addi 7,3,2048
+ nop
+ lwz 12,0(5)
+ rlwinm 16,8,8,24,31
+ lwz 0,4(5)
+ rlwinm 17,9,8,24,31
+ lwz 14,8(5)
+ rlwinm 18,10,8,24,31
+ lwz 15,12(5)
+ rlwinm 19,11,8,24,31
+ lwz 24,2048(3)
+ rlwinm 20,11,16,24,31
+ lwz 25,2080(3)
+ rlwinm 21,8,16,24,31
+ lwz 26,2112(3)
+ lbzx 16,7,16
+ lwz 27,2144(3)
+ lbzx 17,7,17
+ lwz 28,2176(3)
+ rlwinm 22,9,16,24,31
+ lwz 29,2208(3)
+ rlwinm 23,10,16,24,31
+ lwz 30,2240(3)
+ rlwinm 24,10,24,24,31
+ lwz 31,2272(3)
+ rlwinm 25,11,24,24,31
+ lbzx 18,7,18
+ rlwinm 26,8,24,24,31
+ lbzx 19,7,19
+ rlwinm 27,9,24,24,31
+ lbzx 20,7,20
+ rlwinm 28,9,0,24,31
+ lbzx 21,7,21
+ rlwinm 29,10,0,24,31
+ lbzx 22,7,22
+ rlwinm 30,11,0,24,31
+ lbzx 23,7,23
+ rlwinm 31,8,0,24,31
+ lbzx 24,7,24
+ rlwinm 8,16,24,0,7
+ lbzx 25,7,25
+ rlwinm 9,17,24,0,7
+ lbzx 26,7,26
+ rlwinm 10,18,24,0,7
+ lbzx 27,7,27
+ rlwinm 11,19,24,0,7
+ lbzx 28,7,28
+ rlwimi 8,20,16,8,15
+ lbzx 29,7,29
+ rlwimi 9,21,16,8,15
+ lbzx 30,7,30
+ rlwimi 10,22,16,8,15
+ lbzx 31,7,31
+ rlwimi 11,23,16,8,15
+ rlwimi 8,24,8,16,23
+ rlwimi 9,25,8,16,23
+ rlwimi 10,26,8,16,23
+ rlwimi 11,27,8,16,23
+ or 8,8,28
+ or 9,9,29
+ or 10,10,30
+ or 11,11,31
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+.align 4
+.Lppc_AES_decrypt_compact:
+ lwz 16,240(5)
+ addi 6,3,2048
+ lwz 12,0(5)
+ lis 7,0x8080
+ lwz 0,4(5)
+ lis 4,0x1b1b
+ lwz 14,8(5)
+ ori 7,7,0x8080
+ lwz 15,12(5)
+ ori 4,4,0x1b1b
+ addi 5,5,16
+ insrdi 7,7,32,0
+ insrdi 4,4,32,0
+ mtctr 16
+.align 4
+.Ldec_compact_loop:
+ xor 8,8,12
+ xor 9,9,0
+ rlwinm 16,8,8,24,31
+ xor 10,10,14
+ rlwinm 17,9,8,24,31
+ xor 11,11,15
+ rlwinm 18,10,8,24,31
+ rlwinm 19,11,8,24,31
+ rlwinm 20,11,16,24,31
+ rlwinm 21,8,16,24,31
+ rlwinm 22,9,16,24,31
+ rlwinm 23,10,16,24,31
+ lbzx 16,6,16
+ rlwinm 24,10,24,24,31
+ lbzx 17,6,17
+ rlwinm 25,11,24,24,31
+ lbzx 18,6,18
+ rlwinm 26,8,24,24,31
+ lbzx 19,6,19
+ rlwinm 27,9,24,24,31
+ lbzx 20,6,20
+ rlwinm 28,9,0,24,31
+ lbzx 21,6,21
+ rlwinm 29,10,0,24,31
+ lbzx 22,6,22
+ rlwinm 30,11,0,24,31
+ lbzx 23,6,23
+ rlwinm 31,8,0,24,31
+ lbzx 24,6,24
+ rlwinm 8,16,24,0,7
+ lbzx 25,6,25
+ rlwinm 9,17,24,0,7
+ lbzx 26,6,26
+ rlwinm 10,18,24,0,7
+ lbzx 27,6,27
+ rlwinm 11,19,24,0,7
+ lbzx 28,6,28
+ rlwimi 8,20,16,8,15
+ lbzx 29,6,29
+ rlwimi 9,21,16,8,15
+ lbzx 30,6,30
+ rlwimi 10,22,16,8,15
+ lbzx 31,6,31
+ rlwimi 11,23,16,8,15
+ rlwimi 8,24,8,16,23
+ rlwimi 9,25,8,16,23
+ rlwimi 10,26,8,16,23
+ rlwimi 11,27,8,16,23
+ lwz 12,0(5)
+ or 8,8,28
+ lwz 0,4(5)
+ or 9,9,29
+ lwz 14,8(5)
+ or 10,10,30
+ lwz 15,12(5)
+ or 11,11,31
+
+ addi 5,5,16
+ bdz .Ldec_compact_done
+
+ insrdi 8,9,32,0
+ insrdi 10,11,32,0
+
+ and 16,8,7
+ and 18,10,7
+ srdi 20,16,7
+ srdi 22,18,7
+ andc 24,8,7
+ andc 26,10,7
+ sub 16,16,20
+ sub 18,18,22
+ add 24,24,24
+ add 26,26,26
+ and 16,16,4
+ and 18,18,4
+ xor 16,16,24
+ xor 18,18,26
+
+ and 20,16,7
+ and 22,18,7
+ srdi 24,20,7
+ srdi 26,22,7
+ andc 28,16,7
+ andc 30,18,7
+ sub 20,20,24
+ sub 22,22,26
+ add 28,28,28
+ add 30,30,30
+ and 20,20,4
+ and 22,22,4
+ xor 20,20,28
+ xor 22,22,30
+
+ and 24,20,7
+ and 26,22,7
+ srdi 28,24,7
+ srdi 30,26,7
+ sub 24,24,28
+ sub 26,26,30
+ andc 28,20,7
+ andc 30,22,7
+ add 28,28,28
+ add 30,30,30
+ and 24,24,4
+ and 26,26,4
+ xor 24,24,28
+ xor 26,26,30
+
+ xor 16,16,8
+ xor 18,18,10
+ xor 20,20,8
+ xor 22,22,10
+
+ rldicl 17,16,32,32
+ rldicl 19,18,32,32
+ rldicl 21,20,32,32
+ rldicl 23,22,32,32
+ rldicl 25,24,32,32
+ rldicl 27,26,32,32
+ rotrwi 8,8,8
+ rotrwi 9,9,8
+ xor 8,8,16
+ rotrwi 10,10,8
+ xor 9,9,17
+ rotrwi 11,11,8
+ xor 10,10,18
+ xor 11,11,19
+ xor 16,16,24
+ xor 17,17,25
+ xor 18,18,26
+ xor 19,19,27
+ xor 8,8,20
+ rotrwi 16,16,24
+ xor 9,9,21
+ rotrwi 17,17,24
+ xor 10,10,22
+ rotrwi 18,18,24
+ xor 11,11,23
+ rotrwi 19,19,24
+ xor 20,20,24
+ xor 21,21,25
+ xor 22,22,26
+ xor 23,23,27
+ xor 8,8,24
+ rotrwi 20,20,16
+ xor 9,9,25
+ rotrwi 21,21,16
+ xor 10,10,26
+ rotrwi 22,22,16
+ xor 11,11,27
+ rotrwi 23,23,16
+ xor 8,8,16
+ rotrwi 24,24,8
+ xor 9,9,17
+ rotrwi 25,25,8
+ xor 10,10,18
+ rotrwi 26,26,8
+ xor 11,11,19
+ rotrwi 27,27,8
+ xor 8,8,20
+ xor 9,9,21
+ xor 10,10,22
+ xor 11,11,23
+ xor 8,8,24
+ xor 9,9,25
+ xor 10,10,26
+ xor 11,11,27
+
+ b .Ldec_compact_loop
+.align 4
+.Ldec_compact_done:
+ xor 8,8,12
+ xor 9,9,0
+ xor 10,10,14
+ xor 11,11,15
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.size .AES_decrypt,.-.AES_decrypt
+.size AES_decrypt,.-.AES_decrypt
+
+.byte 65,69,83,32,102,111,114,32,80,80,67,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 2
+.align 7
diff --git a/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aesp8-ppc.s b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aesp8-ppc.s
new file mode 100644
index 0000000000..b850d2226d
--- /dev/null
+++ b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/aesp8-ppc.s
@@ -0,0 +1,3689 @@
+.machine "any"
+
+.text
+
+.align 7
+rcon:
+.byte 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00
+.byte 0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00
+.byte 0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c
+.byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00
+.Lconsts:
+ mflr 0
+ bcl 20,31,$+4
+ mflr 6
+ addi 6,6,-0x48
+ mtlr 0
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.byte 65,69,83,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
+.align 2
+
+.globl aes_p8_set_encrypt_key
+.type aes_p8_set_encrypt_key,@function
+.section ".opd","aw"
+.align 3
+aes_p8_set_encrypt_key:
+.quad .aes_p8_set_encrypt_key,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_set_encrypt_key:
+.Lset_encrypt_key:
+ mflr 11
+ std 11,16(1)
+
+ li 6,-1
+ cmpldi 3,0
+ beq- .Lenc_key_abort
+ cmpldi 5,0
+ beq- .Lenc_key_abort
+ li 6,-2
+ cmpwi 4,128
+ blt- .Lenc_key_abort
+ cmpwi 4,256
+ bgt- .Lenc_key_abort
+ andi. 0,4,0x3f
+ bne- .Lenc_key_abort
+
+ lis 0,0xfff0
+ mfspr 12,256
+ mtspr 256,0
+
+ bl .Lconsts
+ mtlr 11
+
+ neg 9,3
+ lvx 1,0,3
+ addi 3,3,15
+ lvsr 3,0,9
+ li 8,0x20
+ cmpwi 4,192
+ lvx 2,0,3
+
+ lvx 4,0,6
+
+ lvx 5,8,6
+ addi 6,6,0x10
+ vperm 1,1,2,3
+ li 7,8
+ vxor 0,0,0
+ mtctr 7
+
+ lvsr 8,0,5
+ vspltisb 9,-1
+ lvx 10,0,5
+ vperm 9,0,9,8
+
+ blt .Loop128
+ addi 3,3,8
+ beq .L192
+ addi 3,3,8
+ b .L256
+
+.align 4
+.Loop128:
+ vperm 3,1,1,5
+ vsldoi 6,0,1,12
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ .long 0x10632509
+ stvx 7,0,5
+ addi 5,5,16
+
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vadduwm 4,4,4
+ vxor 1,1,3
+ bdnz .Loop128
+
+ lvx 4,0,6
+
+ vperm 3,1,1,5
+ vsldoi 6,0,1,12
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ .long 0x10632509
+ stvx 7,0,5
+ addi 5,5,16
+
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vadduwm 4,4,4
+ vxor 1,1,3
+
+ vperm 3,1,1,5
+ vsldoi 6,0,1,12
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ .long 0x10632509
+ stvx 7,0,5
+ addi 5,5,16
+
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vxor 1,1,3
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ stvx 7,0,5
+
+ addi 3,5,15
+ addi 5,5,0x50
+
+ li 8,10
+ b .Ldone
+
+.align 4
+.L192:
+ lvx 6,0,3
+ li 7,4
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ stvx 7,0,5
+ addi 5,5,16
+ vperm 2,2,6,3
+ vspltisb 3,8
+ mtctr 7
+ vsububm 5,5,3
+
+.Loop192:
+ vperm 3,2,2,5
+ vsldoi 6,0,1,12
+ .long 0x10632509
+
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+
+ vsldoi 7,0,2,8
+ vspltw 6,1,3
+ vxor 6,6,2
+ vsldoi 2,0,2,12
+ vadduwm 4,4,4
+ vxor 2,2,6
+ vxor 1,1,3
+ vxor 2,2,3
+ vsldoi 7,7,1,8
+
+ vperm 3,2,2,5
+ vsldoi 6,0,1,12
+ vperm 11,7,7,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ .long 0x10632509
+ stvx 7,0,5
+ addi 5,5,16
+
+ vsldoi 7,1,2,8
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vperm 11,7,7,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ stvx 7,0,5
+ addi 5,5,16
+
+ vspltw 6,1,3
+ vxor 6,6,2
+ vsldoi 2,0,2,12
+ vadduwm 4,4,4
+ vxor 2,2,6
+ vxor 1,1,3
+ vxor 2,2,3
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ stvx 7,0,5
+ addi 3,5,15
+ addi 5,5,16
+ bdnz .Loop192
+
+ li 8,12
+ addi 5,5,0x20
+ b .Ldone
+
+.align 4
+.L256:
+ lvx 6,0,3
+ li 7,7
+ li 8,14
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ stvx 7,0,5
+ addi 5,5,16
+ vperm 2,2,6,3
+ mtctr 7
+
+.Loop256:
+ vperm 3,2,2,5
+ vsldoi 6,0,1,12
+ vperm 11,2,2,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ .long 0x10632509
+ stvx 7,0,5
+ addi 5,5,16
+
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vsldoi 6,0,6,12
+ vxor 1,1,6
+ vadduwm 4,4,4
+ vxor 1,1,3
+ vperm 11,1,1,8
+ vsel 7,10,11,9
+ vor 10,11,11
+ stvx 7,0,5
+ addi 3,5,15
+ addi 5,5,16
+ bdz .Ldone
+
+ vspltw 3,1,3
+ vsldoi 6,0,2,12
+ .long 0x106305C8
+
+ vxor 2,2,6
+ vsldoi 6,0,6,12
+ vxor 2,2,6
+ vsldoi 6,0,6,12
+ vxor 2,2,6
+
+ vxor 2,2,3
+ b .Loop256
+
+.align 4
+.Ldone:
+ lvx 2,0,3
+ vsel 2,10,2,9
+ stvx 2,0,3
+ li 6,0
+ mtspr 256,12
+ stw 8,0(5)
+
+.Lenc_key_abort:
+ mr 3,6
+ blr
+.long 0
+.byte 0,12,0x14,1,0,0,3,0
+.long 0
+.size .aes_p8_set_encrypt_key,.-.aes_p8_set_encrypt_key
+.size aes_p8_set_encrypt_key,.-.aes_p8_set_encrypt_key
+
+.globl aes_p8_set_decrypt_key
+.type aes_p8_set_decrypt_key,@function
+.section ".opd","aw"
+.align 3
+aes_p8_set_decrypt_key:
+.quad .aes_p8_set_decrypt_key,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_set_decrypt_key:
+ stdu 1,-64(1)
+ mflr 10
+ std 10,64+16(1)
+ bl .Lset_encrypt_key
+ mtlr 10
+
+ cmpwi 3,0
+ bne- .Ldec_key_abort
+
+ slwi 7,8,4
+ subi 3,5,240
+ srwi 8,8,1
+ add 5,3,7
+ mtctr 8
+
+.Ldeckey:
+ lwz 0, 0(3)
+ lwz 6, 4(3)
+ lwz 7, 8(3)
+ lwz 8, 12(3)
+ addi 3,3,16
+ lwz 9, 0(5)
+ lwz 10,4(5)
+ lwz 11,8(5)
+ lwz 12,12(5)
+ stw 0, 0(5)
+ stw 6, 4(5)
+ stw 7, 8(5)
+ stw 8, 12(5)
+ subi 5,5,16
+ stw 9, -16(3)
+ stw 10,-12(3)
+ stw 11,-8(3)
+ stw 12,-4(3)
+ bdnz .Ldeckey
+
+ xor 3,3,3
+.Ldec_key_abort:
+ addi 1,1,64
+ blr
+.long 0
+.byte 0,12,4,1,0x80,0,3,0
+.long 0
+.size .aes_p8_set_decrypt_key,.-.aes_p8_set_decrypt_key
+.size aes_p8_set_decrypt_key,.-.aes_p8_set_decrypt_key
+.globl aes_p8_encrypt
+.type aes_p8_encrypt,@function
+.section ".opd","aw"
+.align 3
+aes_p8_encrypt:
+.quad .aes_p8_encrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_encrypt:
+ lwz 6,240(5)
+ lis 0,0xfc00
+ mfspr 12,256
+ li 7,15
+ mtspr 256,0
+
+ lvx 0,0,3
+ neg 11,4
+ lvx 1,7,3
+ lvsl 2,0,3
+
+ lvsl 3,0,11
+
+ li 7,16
+ vperm 0,0,1,2
+ lvx 1,0,5
+ lvsl 5,0,5
+ srwi 6,6,1
+ lvx 2,7,5
+ addi 7,7,16
+ subi 6,6,1
+ vperm 1,1,2,5
+
+ vxor 0,0,1
+ lvx 1,7,5
+ addi 7,7,16
+ mtctr 6
+
+.Loop_enc:
+ vperm 2,2,1,5
+ .long 0x10001508
+ lvx 2,7,5
+ addi 7,7,16
+ vperm 1,1,2,5
+ .long 0x10000D08
+ lvx 1,7,5
+ addi 7,7,16
+ bdnz .Loop_enc
+
+ vperm 2,2,1,5
+ .long 0x10001508
+ lvx 2,7,5
+ vperm 1,1,2,5
+ .long 0x10000D09
+
+ vspltisb 2,-1
+ vxor 1,1,1
+ li 7,15
+ vperm 2,1,2,3
+
+ lvx 1,0,4
+ vperm 0,0,0,3
+ vsel 1,1,0,2
+ lvx 4,7,4
+ stvx 1,0,4
+ vsel 0,0,4,2
+ stvx 0,7,4
+
+ mtspr 256,12
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,3,0
+.long 0
+.size .aes_p8_encrypt,.-.aes_p8_encrypt
+.size aes_p8_encrypt,.-.aes_p8_encrypt
+.globl aes_p8_decrypt
+.type aes_p8_decrypt,@function
+.section ".opd","aw"
+.align 3
+aes_p8_decrypt:
+.quad .aes_p8_decrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_decrypt:
+ lwz 6,240(5)
+ lis 0,0xfc00
+ mfspr 12,256
+ li 7,15
+ mtspr 256,0
+
+ lvx 0,0,3
+ neg 11,4
+ lvx 1,7,3
+ lvsl 2,0,3
+
+ lvsl 3,0,11
+
+ li 7,16
+ vperm 0,0,1,2
+ lvx 1,0,5
+ lvsl 5,0,5
+ srwi 6,6,1
+ lvx 2,7,5
+ addi 7,7,16
+ subi 6,6,1
+ vperm 1,1,2,5
+
+ vxor 0,0,1
+ lvx 1,7,5
+ addi 7,7,16
+ mtctr 6
+
+.Loop_dec:
+ vperm 2,2,1,5
+ .long 0x10001548
+ lvx 2,7,5
+ addi 7,7,16
+ vperm 1,1,2,5
+ .long 0x10000D48
+ lvx 1,7,5
+ addi 7,7,16
+ bdnz .Loop_dec
+
+ vperm 2,2,1,5
+ .long 0x10001548
+ lvx 2,7,5
+ vperm 1,1,2,5
+ .long 0x10000D49
+
+ vspltisb 2,-1
+ vxor 1,1,1
+ li 7,15
+ vperm 2,1,2,3
+
+ lvx 1,0,4
+ vperm 0,0,0,3
+ vsel 1,1,0,2
+ lvx 4,7,4
+ stvx 1,0,4
+ vsel 0,0,4,2
+ stvx 0,7,4
+
+ mtspr 256,12
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,3,0
+.long 0
+.size .aes_p8_decrypt,.-.aes_p8_decrypt
+.size aes_p8_decrypt,.-.aes_p8_decrypt
+.globl aes_p8_cbc_encrypt
+.type aes_p8_cbc_encrypt,@function
+.section ".opd","aw"
+.align 3
+aes_p8_cbc_encrypt:
+.quad .aes_p8_cbc_encrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_cbc_encrypt:
+ cmpldi 5,16
+ .long 0x4dc00020
+
+ cmpwi 8,0
+ lis 0,0xffe0
+ mfspr 12,256
+ mtspr 256,0
+
+ li 10,15
+ vxor 0,0,0
+
+
+ lvx 4,0,7
+ lvsl 6,0,7
+ lvx 5,10,7
+
+ vperm 4,4,5,6
+
+ neg 11,3
+ lvsl 10,0,6
+ lwz 9,240(6)
+
+ lvsr 6,0,11
+ lvx 5,0,3
+ addi 3,3,15
+
+
+ lvsr 8,0,4
+ vspltisb 9,-1
+ lvx 7,0,4
+ vperm 9,0,9,8
+
+
+ srwi 9,9,1
+ li 10,16
+ subi 9,9,1
+ beq .Lcbc_dec
+
+.Lcbc_enc:
+ vor 2,5,5
+ lvx 5,0,3
+ addi 3,3,16
+ mtctr 9
+ subi 5,5,16
+
+ lvx 0,0,6
+ vperm 2,2,5,6
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ vxor 2,2,0
+ lvx 0,10,6
+ addi 10,10,16
+ vxor 2,2,4
+
+.Loop_cbc_enc:
+ vperm 1,1,0,10
+ .long 0x10420D08
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ .long 0x10420508
+ lvx 0,10,6
+ addi 10,10,16
+ bdnz .Loop_cbc_enc
+
+ vperm 1,1,0,10
+ .long 0x10420D08
+ lvx 1,10,6
+ li 10,16
+ vperm 0,0,1,10
+ .long 0x10820509
+ cmpldi 5,16
+
+ vperm 3,4,4,8
+ vsel 2,7,3,9
+ vor 7,3,3
+ stvx 2,0,4
+ addi 4,4,16
+ bge .Lcbc_enc
+
+ b .Lcbc_done
+
+.align 4
+.Lcbc_dec:
+ cmpldi 5,128
+ bge _aesp8_cbc_decrypt8x
+ vor 3,5,5
+ lvx 5,0,3
+ addi 3,3,16
+ mtctr 9
+ subi 5,5,16
+
+ lvx 0,0,6
+ vperm 3,3,5,6
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ vxor 2,3,0
+ lvx 0,10,6
+ addi 10,10,16
+
+.Loop_cbc_dec:
+ vperm 1,1,0,10
+ .long 0x10420D48
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ .long 0x10420548
+ lvx 0,10,6
+ addi 10,10,16
+ bdnz .Loop_cbc_dec
+
+ vperm 1,1,0,10
+ .long 0x10420D48
+ lvx 1,10,6
+ li 10,16
+ vperm 0,0,1,10
+ .long 0x10420549
+ cmpldi 5,16
+
+ vxor 2,2,4
+ vor 4,3,3
+ vperm 3,2,2,8
+ vsel 2,7,3,9
+ vor 7,3,3
+ stvx 2,0,4
+ addi 4,4,16
+ bge .Lcbc_dec
+
+.Lcbc_done:
+ addi 4,4,-1
+ lvx 2,0,4
+ vsel 2,7,2,9
+ stvx 2,0,4
+
+ neg 8,7
+ li 10,15
+ vxor 0,0,0
+ vspltisb 9,-1
+
+ lvsl 8,0,8
+ vperm 9,0,9,8
+
+ lvx 7,0,7
+ vperm 4,4,4,8
+ vsel 2,7,4,9
+ lvx 5,10,7
+ stvx 2,0,7
+ vsel 2,4,5,9
+ stvx 2,10,7
+
+ mtspr 256,12
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,6,0
+.long 0
+.align 5
+_aesp8_cbc_decrypt8x:
+ stdu 1,-448(1)
+ li 10,207
+ li 11,223
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ li 0,-1
+ stw 12,396(1)
+ li 8,0x10
+ std 26,400(1)
+ li 26,0x20
+ std 27,408(1)
+ li 27,0x30
+ std 28,416(1)
+ li 28,0x40
+ std 29,424(1)
+ li 29,0x50
+ std 30,432(1)
+ li 30,0x60
+ std 31,440(1)
+ li 31,0x70
+ mtspr 256,0
+
+ subi 9,9,3
+ subi 5,5,128
+
+ lvx 23,0,6
+ lvx 30,8,6
+ addi 6,6,0x20
+ lvx 31,0,6
+ vperm 23,23,30,10
+ addi 11,1,64+15
+ mtctr 9
+
+.Load_cbc_dec_key:
+ vperm 24,30,31,10
+ lvx 30,8,6
+ addi 6,6,0x20
+ stvx 24,0,11
+ vperm 25,31,30,10
+ lvx 31,0,6
+ stvx 25,8,11
+ addi 11,11,0x20
+ bdnz .Load_cbc_dec_key
+
+ lvx 26,8,6
+ vperm 24,30,31,10
+ lvx 27,26,6
+ stvx 24,0,11
+ vperm 25,31,26,10
+ lvx 28,27,6
+ stvx 25,8,11
+ addi 11,1,64+15
+ vperm 26,26,27,10
+ lvx 29,28,6
+ vperm 27,27,28,10
+ lvx 30,29,6
+ vperm 28,28,29,10
+ lvx 31,30,6
+ vperm 29,29,30,10
+ lvx 14,31,6
+ vperm 30,30,31,10
+ lvx 24,0,11
+ vperm 31,31,14,10
+ lvx 25,8,11
+
+
+
+ subi 3,3,15
+
+
+ .long 0x7C001E99
+
+
+ .long 0x7C281E99
+
+ .long 0x7C5A1E99
+
+ .long 0x7C7B1E99
+
+ .long 0x7D5C1E99
+
+ vxor 14,0,23
+ .long 0x7D7D1E99
+
+ vxor 15,1,23
+ .long 0x7D9E1E99
+
+ vxor 16,2,23
+ .long 0x7DBF1E99
+ addi 3,3,0x80
+
+ vxor 17,3,23
+
+ vxor 18,10,23
+
+ vxor 19,11,23
+ vxor 20,12,23
+ vxor 21,13,23
+
+ mtctr 9
+ b .Loop_cbc_dec8x
+.align 5
+.Loop_cbc_dec8x:
+ .long 0x11CEC548
+ .long 0x11EFC548
+ .long 0x1210C548
+ .long 0x1231C548
+ .long 0x1252C548
+ .long 0x1273C548
+ .long 0x1294C548
+ .long 0x12B5C548
+ lvx 24,26,11
+ addi 11,11,0x20
+
+ .long 0x11CECD48
+ .long 0x11EFCD48
+ .long 0x1210CD48
+ .long 0x1231CD48
+ .long 0x1252CD48
+ .long 0x1273CD48
+ .long 0x1294CD48
+ .long 0x12B5CD48
+ lvx 25,8,11
+ bdnz .Loop_cbc_dec8x
+
+ subic 5,5,128
+ .long 0x11CEC548
+ .long 0x11EFC548
+ .long 0x1210C548
+ .long 0x1231C548
+ .long 0x1252C548
+ .long 0x1273C548
+ .long 0x1294C548
+ .long 0x12B5C548
+
+ subfe. 0,0,0
+ .long 0x11CECD48
+ .long 0x11EFCD48
+ .long 0x1210CD48
+ .long 0x1231CD48
+ .long 0x1252CD48
+ .long 0x1273CD48
+ .long 0x1294CD48
+ .long 0x12B5CD48
+
+ and 0,0,5
+ .long 0x11CED548
+ .long 0x11EFD548
+ .long 0x1210D548
+ .long 0x1231D548
+ .long 0x1252D548
+ .long 0x1273D548
+ .long 0x1294D548
+ .long 0x12B5D548
+
+ add 3,3,0
+
+
+
+ .long 0x11CEDD48
+ .long 0x11EFDD48
+ .long 0x1210DD48
+ .long 0x1231DD48
+ .long 0x1252DD48
+ .long 0x1273DD48
+ .long 0x1294DD48
+ .long 0x12B5DD48
+
+ addi 11,1,64+15
+ .long 0x11CEE548
+ .long 0x11EFE548
+ .long 0x1210E548
+ .long 0x1231E548
+ .long 0x1252E548
+ .long 0x1273E548
+ .long 0x1294E548
+ .long 0x12B5E548
+ lvx 24,0,11
+
+ .long 0x11CEED48
+ .long 0x11EFED48
+ .long 0x1210ED48
+ .long 0x1231ED48
+ .long 0x1252ED48
+ .long 0x1273ED48
+ .long 0x1294ED48
+ .long 0x12B5ED48
+ lvx 25,8,11
+
+ .long 0x11CEF548
+ vxor 4,4,31
+ .long 0x11EFF548
+ vxor 0,0,31
+ .long 0x1210F548
+ vxor 1,1,31
+ .long 0x1231F548
+ vxor 2,2,31
+ .long 0x1252F548
+ vxor 3,3,31
+ .long 0x1273F548
+ vxor 10,10,31
+ .long 0x1294F548
+ vxor 11,11,31
+ .long 0x12B5F548
+ vxor 12,12,31
+
+ .long 0x11CE2549
+ .long 0x11EF0549
+ .long 0x7C001E99
+ .long 0x12100D49
+ .long 0x7C281E99
+ .long 0x12311549
+
+ .long 0x7C5A1E99
+ .long 0x12521D49
+
+ .long 0x7C7B1E99
+ .long 0x12735549
+
+ .long 0x7D5C1E99
+ .long 0x12945D49
+
+ .long 0x7D7D1E99
+ .long 0x12B56549
+
+ .long 0x7D9E1E99
+ vor 4,13,13
+
+ .long 0x7DBF1E99
+ addi 3,3,0x80
+
+
+
+ .long 0x7DC02799
+
+ vxor 14,0,23
+
+ .long 0x7DE82799
+
+ vxor 15,1,23
+
+ .long 0x7E1A2799
+ vxor 16,2,23
+
+ .long 0x7E3B2799
+ vxor 17,3,23
+
+ .long 0x7E5C2799
+ vxor 18,10,23
+
+ .long 0x7E7D2799
+ vxor 19,11,23
+
+ .long 0x7E9E2799
+ vxor 20,12,23
+ .long 0x7EBF2799
+ addi 4,4,0x80
+ vxor 21,13,23
+
+ mtctr 9
+ beq .Loop_cbc_dec8x
+
+ addic. 5,5,128
+ beq .Lcbc_dec8x_done
+ nop
+ nop
+
+.Loop_cbc_dec8x_tail:
+ .long 0x11EFC548
+ .long 0x1210C548
+ .long 0x1231C548
+ .long 0x1252C548
+ .long 0x1273C548
+ .long 0x1294C548
+ .long 0x12B5C548
+ lvx 24,26,11
+ addi 11,11,0x20
+
+ .long 0x11EFCD48
+ .long 0x1210CD48
+ .long 0x1231CD48
+ .long 0x1252CD48
+ .long 0x1273CD48
+ .long 0x1294CD48
+ .long 0x12B5CD48
+ lvx 25,8,11
+ bdnz .Loop_cbc_dec8x_tail
+
+ .long 0x11EFC548
+ .long 0x1210C548
+ .long 0x1231C548
+ .long 0x1252C548
+ .long 0x1273C548
+ .long 0x1294C548
+ .long 0x12B5C548
+
+ .long 0x11EFCD48
+ .long 0x1210CD48
+ .long 0x1231CD48
+ .long 0x1252CD48
+ .long 0x1273CD48
+ .long 0x1294CD48
+ .long 0x12B5CD48
+
+ .long 0x11EFD548
+ .long 0x1210D548
+ .long 0x1231D548
+ .long 0x1252D548
+ .long 0x1273D548
+ .long 0x1294D548
+ .long 0x12B5D548
+
+ .long 0x11EFDD48
+ .long 0x1210DD48
+ .long 0x1231DD48
+ .long 0x1252DD48
+ .long 0x1273DD48
+ .long 0x1294DD48
+ .long 0x12B5DD48
+
+ .long 0x11EFE548
+ .long 0x1210E548
+ .long 0x1231E548
+ .long 0x1252E548
+ .long 0x1273E548
+ .long 0x1294E548
+ .long 0x12B5E548
+
+ .long 0x11EFED48
+ .long 0x1210ED48
+ .long 0x1231ED48
+ .long 0x1252ED48
+ .long 0x1273ED48
+ .long 0x1294ED48
+ .long 0x12B5ED48
+
+ .long 0x11EFF548
+ vxor 4,4,31
+ .long 0x1210F548
+ vxor 1,1,31
+ .long 0x1231F548
+ vxor 2,2,31
+ .long 0x1252F548
+ vxor 3,3,31
+ .long 0x1273F548
+ vxor 10,10,31
+ .long 0x1294F548
+ vxor 11,11,31
+ .long 0x12B5F548
+ vxor 12,12,31
+
+ cmplwi 5,32
+ blt .Lcbc_dec8x_one
+ nop
+ beq .Lcbc_dec8x_two
+ cmplwi 5,64
+ blt .Lcbc_dec8x_three
+ nop
+ beq .Lcbc_dec8x_four
+ cmplwi 5,96
+ blt .Lcbc_dec8x_five
+ nop
+ beq .Lcbc_dec8x_six
+
+.Lcbc_dec8x_seven:
+ .long 0x11EF2549
+ .long 0x12100D49
+ .long 0x12311549
+ .long 0x12521D49
+ .long 0x12735549
+ .long 0x12945D49
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+
+ .long 0x7E5B2799
+
+ .long 0x7E7C2799
+
+ .long 0x7E9D2799
+ .long 0x7EBE2799
+ addi 4,4,0x70
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_six:
+ .long 0x12102549
+ .long 0x12311549
+ .long 0x12521D49
+ .long 0x12735549
+ .long 0x12945D49
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7E002799
+
+ .long 0x7E282799
+
+ .long 0x7E5A2799
+
+ .long 0x7E7B2799
+
+ .long 0x7E9C2799
+ .long 0x7EBD2799
+ addi 4,4,0x60
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_five:
+ .long 0x12312549
+ .long 0x12521D49
+ .long 0x12735549
+ .long 0x12945D49
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7E202799
+
+ .long 0x7E482799
+
+ .long 0x7E7A2799
+
+ .long 0x7E9B2799
+ .long 0x7EBC2799
+ addi 4,4,0x50
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_four:
+ .long 0x12522549
+ .long 0x12735549
+ .long 0x12945D49
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7E402799
+
+ .long 0x7E682799
+
+ .long 0x7E9A2799
+ .long 0x7EBB2799
+ addi 4,4,0x40
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_three:
+ .long 0x12732549
+ .long 0x12945D49
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7E602799
+
+ .long 0x7E882799
+ .long 0x7EBA2799
+ addi 4,4,0x30
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_two:
+ .long 0x12942549
+ .long 0x12B56549
+ vor 4,13,13
+
+
+
+ .long 0x7E802799
+ .long 0x7EA82799
+ addi 4,4,0x20
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lcbc_dec8x_one:
+ .long 0x12B52549
+ vor 4,13,13
+
+
+ .long 0x7EA02799
+ addi 4,4,0x10
+
+.Lcbc_dec8x_done:
+
+ .long 0x7C803F99
+
+ li 10,79
+ li 11,95
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+
+ mtspr 256,12
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ ld 26,400(1)
+ ld 27,408(1)
+ ld 28,416(1)
+ ld 29,424(1)
+ ld 30,432(1)
+ ld 31,440(1)
+ addi 1,1,448
+ blr
+.long 0
+.byte 0,12,0x04,0,0x80,6,6,0
+.long 0
+.size .aes_p8_cbc_encrypt,.-.aes_p8_cbc_encrypt
+.size aes_p8_cbc_encrypt,.-.aes_p8_cbc_encrypt
+.globl aes_p8_ctr32_encrypt_blocks
+.type aes_p8_ctr32_encrypt_blocks,@function
+.section ".opd","aw"
+.align 3
+aes_p8_ctr32_encrypt_blocks:
+.quad .aes_p8_ctr32_encrypt_blocks,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_ctr32_encrypt_blocks:
+ cmpldi 5,1
+ .long 0x4dc00020
+
+ lis 0,0xfff0
+ mfspr 12,256
+ mtspr 256,0
+
+ li 10,15
+ vxor 0,0,0
+
+
+ lvx 4,0,7
+ lvsl 6,0,7
+ lvx 5,10,7
+ vspltisb 11,1
+
+ vperm 4,4,5,6
+ vsldoi 11,0,11,1
+
+ neg 11,3
+ lvsl 10,0,6
+ lwz 9,240(6)
+
+ lvsr 6,0,11
+ lvx 5,0,3
+ addi 3,3,15
+
+
+ srwi 9,9,1
+ li 10,16
+ subi 9,9,1
+
+ cmpldi 5,8
+ bge _aesp8_ctr32_encrypt8x
+
+ lvsr 8,0,4
+ vspltisb 9,-1
+ lvx 7,0,4
+ vperm 9,0,9,8
+
+
+ lvx 0,0,6
+ mtctr 9
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ vxor 2,4,0
+ lvx 0,10,6
+ addi 10,10,16
+ b .Loop_ctr32_enc
+
+.align 5
+.Loop_ctr32_enc:
+ vperm 1,1,0,10
+ .long 0x10420D08
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 0,0,1,10
+ .long 0x10420508
+ lvx 0,10,6
+ addi 10,10,16
+ bdnz .Loop_ctr32_enc
+
+ vadduwm 4,4,11
+ vor 3,5,5
+ lvx 5,0,3
+ addi 3,3,16
+ subic. 5,5,1
+
+ vperm 1,1,0,10
+ .long 0x10420D08
+ lvx 1,10,6
+ vperm 3,3,5,6
+ li 10,16
+ vperm 1,0,1,10
+ lvx 0,0,6
+ vxor 3,3,1
+ .long 0x10421D09
+
+ lvx 1,10,6
+ addi 10,10,16
+ vperm 2,2,2,8
+ vsel 3,7,2,9
+ mtctr 9
+ vperm 0,0,1,10
+ vor 7,2,2
+ vxor 2,4,0
+ lvx 0,10,6
+ addi 10,10,16
+ stvx 3,0,4
+ addi 4,4,16
+ bne .Loop_ctr32_enc
+
+ addi 4,4,-1
+ lvx 2,0,4
+ vsel 2,7,2,9
+ stvx 2,0,4
+
+ mtspr 256,12
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,6,0
+.long 0
+.align 5
+_aesp8_ctr32_encrypt8x:
+ stdu 1,-448(1)
+ li 10,207
+ li 11,223
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ li 0,-1
+ stw 12,396(1)
+ li 8,0x10
+ std 26,400(1)
+ li 26,0x20
+ std 27,408(1)
+ li 27,0x30
+ std 28,416(1)
+ li 28,0x40
+ std 29,424(1)
+ li 29,0x50
+ std 30,432(1)
+ li 30,0x60
+ std 31,440(1)
+ li 31,0x70
+ mtspr 256,0
+
+ subi 9,9,3
+
+ lvx 23,0,6
+ lvx 30,8,6
+ addi 6,6,0x20
+ lvx 31,0,6
+ vperm 23,23,30,10
+ addi 11,1,64+15
+ mtctr 9
+
+.Load_ctr32_enc_key:
+ vperm 24,30,31,10
+ lvx 30,8,6
+ addi 6,6,0x20
+ stvx 24,0,11
+ vperm 25,31,30,10
+ lvx 31,0,6
+ stvx 25,8,11
+ addi 11,11,0x20
+ bdnz .Load_ctr32_enc_key
+
+ lvx 26,8,6
+ vperm 24,30,31,10
+ lvx 27,26,6
+ stvx 24,0,11
+ vperm 25,31,26,10
+ lvx 28,27,6
+ stvx 25,8,11
+ addi 11,1,64+15
+ vperm 26,26,27,10
+ lvx 29,28,6
+ vperm 27,27,28,10
+ lvx 30,29,6
+ vperm 28,28,29,10
+ lvx 31,30,6
+ vperm 29,29,30,10
+ lvx 15,31,6
+ vperm 30,30,31,10
+ lvx 24,0,11
+ vperm 31,31,15,10
+ lvx 25,8,11
+
+ vadduwm 7,11,11
+ subi 3,3,15
+ sldi 5,5,4
+
+ vadduwm 16,4,11
+ vadduwm 17,4,7
+ vxor 15,4,23
+
+ vadduwm 18,16,7
+ vxor 16,16,23
+
+ vadduwm 19,17,7
+ vxor 17,17,23
+
+ vadduwm 20,18,7
+ vxor 18,18,23
+
+ vadduwm 21,19,7
+ vxor 19,19,23
+ vadduwm 22,20,7
+ vxor 20,20,23
+ vadduwm 4,21,7
+ vxor 21,21,23
+ vxor 22,22,23
+
+ mtctr 9
+ b .Loop_ctr32_enc8x
+.align 5
+.Loop_ctr32_enc8x:
+ .long 0x11EFC508
+ .long 0x1210C508
+ .long 0x1231C508
+ .long 0x1252C508
+ .long 0x1273C508
+ .long 0x1294C508
+ .long 0x12B5C508
+ .long 0x12D6C508
+.Loop_ctr32_enc8x_middle:
+ lvx 24,26,11
+ addi 11,11,0x20
+
+ .long 0x11EFCD08
+ .long 0x1210CD08
+ .long 0x1231CD08
+ .long 0x1252CD08
+ .long 0x1273CD08
+ .long 0x1294CD08
+ .long 0x12B5CD08
+ .long 0x12D6CD08
+ lvx 25,8,11
+ bdnz .Loop_ctr32_enc8x
+
+ subic 11,5,256
+ .long 0x11EFC508
+ .long 0x1210C508
+ .long 0x1231C508
+ .long 0x1252C508
+ .long 0x1273C508
+ .long 0x1294C508
+ .long 0x12B5C508
+ .long 0x12D6C508
+
+ subfe 0,0,0
+ .long 0x11EFCD08
+ .long 0x1210CD08
+ .long 0x1231CD08
+ .long 0x1252CD08
+ .long 0x1273CD08
+ .long 0x1294CD08
+ .long 0x12B5CD08
+ .long 0x12D6CD08
+
+ and 0,0,11
+ addi 11,1,64+15
+ .long 0x11EFD508
+ .long 0x1210D508
+ .long 0x1231D508
+ .long 0x1252D508
+ .long 0x1273D508
+ .long 0x1294D508
+ .long 0x12B5D508
+ .long 0x12D6D508
+ lvx 24,0,11
+
+ subic 5,5,129
+ .long 0x11EFDD08
+ addi 5,5,1
+ .long 0x1210DD08
+ .long 0x1231DD08
+ .long 0x1252DD08
+ .long 0x1273DD08
+ .long 0x1294DD08
+ .long 0x12B5DD08
+ .long 0x12D6DD08
+ lvx 25,8,11
+
+ .long 0x11EFE508
+ .long 0x7C001E99
+ .long 0x1210E508
+ .long 0x7C281E99
+ .long 0x1231E508
+ .long 0x7C5A1E99
+ .long 0x1252E508
+ .long 0x7C7B1E99
+ .long 0x1273E508
+ .long 0x7D5C1E99
+ .long 0x1294E508
+ .long 0x7D9D1E99
+ .long 0x12B5E508
+ .long 0x7DBE1E99
+ .long 0x12D6E508
+ .long 0x7DDF1E99
+ addi 3,3,0x80
+
+ .long 0x11EFED08
+
+ .long 0x1210ED08
+
+ .long 0x1231ED08
+
+ .long 0x1252ED08
+
+ .long 0x1273ED08
+
+ .long 0x1294ED08
+
+ .long 0x12B5ED08
+
+ .long 0x12D6ED08
+
+
+ add 3,3,0
+
+
+
+ subfe. 0,0,0
+ .long 0x11EFF508
+ vxor 0,0,31
+ .long 0x1210F508
+ vxor 1,1,31
+ .long 0x1231F508
+ vxor 2,2,31
+ .long 0x1252F508
+ vxor 3,3,31
+ .long 0x1273F508
+ vxor 10,10,31
+ .long 0x1294F508
+ vxor 12,12,31
+ .long 0x12B5F508
+ vxor 13,13,31
+ .long 0x12D6F508
+ vxor 14,14,31
+
+ bne .Lctr32_enc8x_break
+
+ .long 0x100F0509
+ .long 0x10300D09
+ vadduwm 16,4,11
+ .long 0x10511509
+ vadduwm 17,4,7
+ vxor 15,4,23
+ .long 0x10721D09
+ vadduwm 18,16,7
+ vxor 16,16,23
+ .long 0x11535509
+ vadduwm 19,17,7
+ vxor 17,17,23
+ .long 0x11946509
+ vadduwm 20,18,7
+ vxor 18,18,23
+ .long 0x11B56D09
+ vadduwm 21,19,7
+ vxor 19,19,23
+ .long 0x11D67509
+ vadduwm 22,20,7
+ vxor 20,20,23
+
+ vadduwm 4,21,7
+ vxor 21,21,23
+
+ vxor 22,22,23
+ mtctr 9
+
+ .long 0x11EFC508
+ .long 0x7C002799
+
+ .long 0x1210C508
+ .long 0x7C282799
+
+ .long 0x1231C508
+ .long 0x7C5A2799
+
+ .long 0x1252C508
+ .long 0x7C7B2799
+
+ .long 0x1273C508
+ .long 0x7D5C2799
+
+ .long 0x1294C508
+ .long 0x7D9D2799
+
+ .long 0x12B5C508
+ .long 0x7DBE2799
+ .long 0x12D6C508
+ .long 0x7DDF2799
+ addi 4,4,0x80
+
+ b .Loop_ctr32_enc8x_middle
+
+.align 5
+.Lctr32_enc8x_break:
+ cmpwi 5,-0x60
+ blt .Lctr32_enc8x_one
+ nop
+ beq .Lctr32_enc8x_two
+ cmpwi 5,-0x40
+ blt .Lctr32_enc8x_three
+ nop
+ beq .Lctr32_enc8x_four
+ cmpwi 5,-0x20
+ blt .Lctr32_enc8x_five
+ nop
+ beq .Lctr32_enc8x_six
+ cmpwi 5,0x00
+ blt .Lctr32_enc8x_seven
+
+.Lctr32_enc8x_eight:
+ .long 0x11EF0509
+ .long 0x12100D09
+ .long 0x12311509
+ .long 0x12521D09
+ .long 0x12735509
+ .long 0x12946509
+ .long 0x12B56D09
+ .long 0x12D67509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+
+ .long 0x7E5B2799
+
+ .long 0x7E7C2799
+
+ .long 0x7E9D2799
+
+ .long 0x7EBE2799
+ .long 0x7EDF2799
+ addi 4,4,0x80
+ b .Lctr32_enc8x_done
+
+.align 5
+.Lctr32_enc8x_seven:
+ .long 0x11EF0D09
+ .long 0x12101509
+ .long 0x12311D09
+ .long 0x12525509
+ .long 0x12736509
+ .long 0x12946D09
+ .long 0x12B57509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+
+ .long 0x7E5B2799
+
+ .long 0x7E7C2799
+
+ .long 0x7E9D2799
+ .long 0x7EBE2799
+ addi 4,4,0x70
+ b .Lctr32_enc8x_done
+
+.align 5
+.Lctr32_enc8x_six:
+ .long 0x11EF1509
+ .long 0x12101D09
+ .long 0x12315509
+ .long 0x12526509
+ .long 0x12736D09
+ .long 0x12947509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+
+ .long 0x7E5B2799
+
+ .long 0x7E7C2799
+ .long 0x7E9D2799
+ addi 4,4,0x60
+ b .Lctr32_enc8x_done
+
+.align 5
+.Lctr32_enc8x_five:
+ .long 0x11EF1D09
+ .long 0x12105509
+ .long 0x12316509
+ .long 0x12526D09
+ .long 0x12737509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+
+ .long 0x7E5B2799
+ .long 0x7E7C2799
+ addi 4,4,0x50
+ b .Lctr32_enc8x_done
+
+.align 5
+.Lctr32_enc8x_four:
+ .long 0x11EF5509
+ .long 0x12106509
+ .long 0x12316D09
+ .long 0x12527509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+
+ .long 0x7E3A2799
+ .long 0x7E5B2799
+ addi 4,4,0x40
+ b .Lctr32_enc8x_done
+
+.align 5
+.Lctr32_enc8x_three:
+ .long 0x11EF6509
+ .long 0x12106D09
+ .long 0x12317509
+
+
+
+ .long 0x7DE02799
+
+ .long 0x7E082799
+ .long 0x7E3A2799
+ addi 4,4,0x30
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lctr32_enc8x_two:
+ .long 0x11EF6D09
+ .long 0x12107509
+
+
+
+ .long 0x7DE02799
+ .long 0x7E082799
+ addi 4,4,0x20
+ b .Lcbc_dec8x_done
+
+.align 5
+.Lctr32_enc8x_one:
+ .long 0x11EF7509
+
+
+ .long 0x7DE02799
+ addi 4,4,0x10
+
+.Lctr32_enc8x_done:
+ li 10,79
+ li 11,95
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+ stvx 6,10,1
+ addi 10,10,32
+ stvx 6,11,1
+ addi 11,11,32
+
+ mtspr 256,12
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ ld 26,400(1)
+ ld 27,408(1)
+ ld 28,416(1)
+ ld 29,424(1)
+ ld 30,432(1)
+ ld 31,440(1)
+ addi 1,1,448
+ blr
+.long 0
+.byte 0,12,0x04,0,0x80,6,6,0
+.long 0
+.size .aes_p8_ctr32_encrypt_blocks,.-.aes_p8_ctr32_encrypt_blocks
+.size aes_p8_ctr32_encrypt_blocks,.-.aes_p8_ctr32_encrypt_blocks
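+
+# aes_p8_xts_encrypt: AES-XTS encryption entry point.  Inputs shorter than
+# 16 bytes return -1 immediately; lengths of 96 bytes or more branch to the
+# six-way interleaved _aesp8_xts_encrypt6x path further down.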
+.globl aes_p8_xts_encrypt
+.type aes_p8_xts_encrypt,@function
+.section ".opd","aw"
+.align 3
+aes_p8_xts_encrypt:
+.quad .aes_p8_xts_encrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_xts_encrypt:
+ mr 10,3
+ li 3,-1
+ cmpldi 5,16
+ .long 0x4dc00020
+
+ lis 0,0xfff0
+ mfspr 12,256
+ li 11,0
+ mtspr 256,0
+
+ vspltisb 9,0x07
+
+
+
+
+ li 3,15
+ lvx 8,0,8
+ lvsl 5,0,8
+ lvx 4,3,8
+
+ vperm 8,8,4,5
+
+ neg 11,10
+ lvsr 5,0,11
+ lvx 2,0,10
+ addi 10,10,15
+
+
+ cmpldi 7,0
+ beq .Lxts_enc_no_key2
+
+ lvsl 7,0,7
+ lwz 9,240(7)
+ srwi 9,9,1
+ subi 9,9,1
+ li 3,16
+
+ lvx 0,0,7
+ lvx 1,3,7
+ addi 3,3,16
+ vperm 0,0,1,7
+ vxor 8,8,0
+ lvx 0,3,7
+ addi 3,3,16
+ mtctr 9
+
+.Ltweak_xts_enc:
+ vperm 1,1,0,7
+ .long 0x11080D08
+ lvx 1,3,7
+ addi 3,3,16
+ vperm 0,0,1,7
+ .long 0x11080508
+ lvx 0,3,7
+ addi 3,3,16
+ bdnz .Ltweak_xts_enc
+
+ vperm 1,1,0,7
+ .long 0x11080D08
+ lvx 1,3,7
+ vperm 0,0,1,7
+ .long 0x11080509
+
+ li 8,0
+ b .Lxts_enc
+
+.Lxts_enc_no_key2:
+ li 3,-16
+ and 5,5,3
+
+
+.Lxts_enc:
+ lvx 4,0,10
+ addi 10,10,16
+
+ lvsl 7,0,6
+ lwz 9,240(6)
+ srwi 9,9,1
+ subi 9,9,1
+ li 3,16
+
+ vslb 10,9,9
+ vor 10,10,9
+ vspltisb 11,1
+ vsldoi 10,10,11,15
+
+ cmpldi 5,96
+ bge _aesp8_xts_encrypt6x
+
+ andi. 7,5,15
+ subic 0,5,32
+ subi 7,7,16
+ subfe 0,0,0
+ and 0,0,7
+ add 10,10,0
+
+ lvx 0,0,6
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 2,2,4,5
+ vperm 0,0,1,7
+ vxor 2,2,8
+ vxor 2,2,0
+ lvx 0,3,6
+ addi 3,3,16
+ mtctr 9
+ b .Loop_xts_enc
+
+.align 5
+.Loop_xts_enc:
+ vperm 1,1,0,7
+ .long 0x10420D08
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 0,0,1,7
+ .long 0x10420508
+ lvx 0,3,6
+ addi 3,3,16
+ bdnz .Loop_xts_enc
+
+ vperm 1,1,0,7
+ .long 0x10420D08
+ lvx 1,3,6
+ li 3,16
+ vperm 0,0,1,7
+ vxor 0,0,8
+ .long 0x10620509
+
+
+ nop
+
+ .long 0x7C602799
+ addi 4,4,16
+
+ subic. 5,5,16
+ beq .Lxts_enc_done
+
+ vor 2,4,4
+ lvx 4,0,10
+ addi 10,10,16
+ lvx 0,0,6
+ lvx 1,3,6
+ addi 3,3,16
+
+ subic 0,5,32
+ subfe 0,0,0
+ and 0,0,7
+ add 10,10,0
+
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 8,8,11
+
+ vperm 2,2,4,5
+ vperm 0,0,1,7
+ vxor 2,2,8
+ vxor 3,3,0
+ vxor 2,2,0
+ lvx 0,3,6
+ addi 3,3,16
+
+ mtctr 9
+ cmpldi 5,16
+ bge .Loop_xts_enc
+
+ vxor 3,3,8
+ lvsr 5,0,5
+ vxor 4,4,4
+ vspltisb 11,-1
+ vperm 4,4,11,5
+ vsel 2,2,3,4
+
+ subi 11,4,17
+ subi 4,4,16
+ mtctr 5
+ li 5,16
+.Loop_xts_enc_steal:
+ lbzu 0,1(11)
+ stb 0,16(11)
+ bdnz .Loop_xts_enc_steal
+
+ mtctr 9
+ b .Loop_xts_enc
+
+.Lxts_enc_done:
+ cmpldi 8,0
+ beq .Lxts_enc_ret
+
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 8,8,11
+
+
+ .long 0x7D004799
+
+.Lxts_enc_ret:
+ mtspr 256,12
+ li 3,0
+ blr
+.long 0
+.byte 0,12,0x04,0,0x80,6,6,0
+.long 0
+.size .aes_p8_xts_encrypt,.-.aes_p8_xts_encrypt
+.size aes_p8_xts_encrypt,.-.aes_p8_xts_encrypt
+
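+# aes_p8_xts_decrypt: AES-XTS decryption entry point.  Like the encrypt path,
+# it returns -1 for inputs shorter than 16 bytes and hands 96 bytes or more to
+# _aesp8_xts_decrypt6x; a partial final block is handled with ciphertext
+# stealing in the .Ltail_xts_dec / .Loop_xts_dec_steal code.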
+.globl aes_p8_xts_decrypt
+.type aes_p8_xts_decrypt,@function
+.section ".opd","aw"
+.align 3
+aes_p8_xts_decrypt:
+.quad .aes_p8_xts_decrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.aes_p8_xts_decrypt:
+ mr 10,3
+ li 3,-1
+ cmpldi 5,16
+ .long 0x4dc00020
+
+ lis 0,0xfff8
+ mfspr 12,256
+ li 11,0
+ mtspr 256,0
+
+ andi. 0,5,15
+ neg 0,0
+ andi. 0,0,16
+ sub 5,5,0
+
+ vspltisb 9,0x07
+
+
+
+
+ li 3,15
+ lvx 8,0,8
+ lvsl 5,0,8
+ lvx 4,3,8
+
+ vperm 8,8,4,5
+
+ neg 11,10
+ lvsr 5,0,11
+ lvx 2,0,10
+ addi 10,10,15
+
+
+ cmpldi 7,0
+ beq .Lxts_dec_no_key2
+
+ lvsl 7,0,7
+ lwz 9,240(7)
+ srwi 9,9,1
+ subi 9,9,1
+ li 3,16
+
+ lvx 0,0,7
+ lvx 1,3,7
+ addi 3,3,16
+ vperm 0,0,1,7
+ vxor 8,8,0
+ lvx 0,3,7
+ addi 3,3,16
+ mtctr 9
+
+.Ltweak_xts_dec:
+ vperm 1,1,0,7
+ .long 0x11080D08
+ lvx 1,3,7
+ addi 3,3,16
+ vperm 0,0,1,7
+ .long 0x11080508
+ lvx 0,3,7
+ addi 3,3,16
+ bdnz .Ltweak_xts_dec
+
+ vperm 1,1,0,7
+ .long 0x11080D08
+ lvx 1,3,7
+ vperm 0,0,1,7
+ .long 0x11080509
+
+ li 8,0
+ b .Lxts_dec
+
+.Lxts_dec_no_key2:
+ neg 3,5
+ andi. 3,3,15
+ add 5,5,3
+
+
+.Lxts_dec:
+ lvx 4,0,10
+ addi 10,10,16
+
+ lvsl 7,0,6
+ lwz 9,240(6)
+ srwi 9,9,1
+ subi 9,9,1
+ li 3,16
+
+ vslb 10,9,9
+ vor 10,10,9
+ vspltisb 11,1
+ vsldoi 10,10,11,15
+
+ cmpldi 5,96
+ bge _aesp8_xts_decrypt6x
+
+ lvx 0,0,6
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 2,2,4,5
+ vperm 0,0,1,7
+ vxor 2,2,8
+ vxor 2,2,0
+ lvx 0,3,6
+ addi 3,3,16
+ mtctr 9
+
+ cmpldi 5,16
+ blt .Ltail_xts_dec
+ b .Loop_xts_dec
+
+.align 5
+.Loop_xts_dec:
+ vperm 1,1,0,7
+ .long 0x10420D48
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 0,0,1,7
+ .long 0x10420548
+ lvx 0,3,6
+ addi 3,3,16
+ bdnz .Loop_xts_dec
+
+ vperm 1,1,0,7
+ .long 0x10420D48
+ lvx 1,3,6
+ li 3,16
+ vperm 0,0,1,7
+ vxor 0,0,8
+ .long 0x10620549
+
+
+ nop
+
+ .long 0x7C602799
+ addi 4,4,16
+
+ subic. 5,5,16
+ beq .Lxts_dec_done
+
+ vor 2,4,4
+ lvx 4,0,10
+ addi 10,10,16
+ lvx 0,0,6
+ lvx 1,3,6
+ addi 3,3,16
+
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 8,8,11
+
+ vperm 2,2,4,5
+ vperm 0,0,1,7
+ vxor 2,2,8
+ vxor 2,2,0
+ lvx 0,3,6
+ addi 3,3,16
+
+ mtctr 9
+ cmpldi 5,16
+ bge .Loop_xts_dec
+
+.Ltail_xts_dec:
+ vsrab 11,8,9
+ vaddubm 12,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 12,12,11
+
+ subi 10,10,16
+ add 10,10,5
+
+ vxor 2,2,8
+ vxor 2,2,12
+
+.Loop_xts_dec_short:
+ vperm 1,1,0,7
+ .long 0x10420D48
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 0,0,1,7
+ .long 0x10420548
+ lvx 0,3,6
+ addi 3,3,16
+ bdnz .Loop_xts_dec_short
+
+ vperm 1,1,0,7
+ .long 0x10420D48
+ lvx 1,3,6
+ li 3,16
+ vperm 0,0,1,7
+ vxor 0,0,12
+ .long 0x10620549
+
+
+ nop
+
+ .long 0x7C602799
+
+ vor 2,4,4
+ lvx 4,0,10
+
+ lvx 0,0,6
+ lvx 1,3,6
+ addi 3,3,16
+ vperm 2,2,4,5
+ vperm 0,0,1,7
+
+ lvsr 5,0,5
+ vxor 4,4,4
+ vspltisb 11,-1
+ vperm 4,4,11,5
+ vsel 2,2,3,4
+
+ vxor 0,0,8
+ vxor 2,2,0
+ lvx 0,3,6
+ addi 3,3,16
+
+ subi 11,4,1
+ mtctr 5
+ li 5,16
+.Loop_xts_dec_steal:
+ lbzu 0,1(11)
+ stb 0,16(11)
+ bdnz .Loop_xts_dec_steal
+
+ mtctr 9
+ b .Loop_xts_dec
+
+.Lxts_dec_done:
+ cmpldi 8,0
+ beq .Lxts_dec_ret
+
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 8,8,11
+
+
+ .long 0x7D004799
+
+.Lxts_dec_ret:
+ mtspr 256,12
+ li 3,0
+ blr
+.long 0
+.byte 0,12,0x04,0,0x80,6,6,0
+.long 0
+.size .aes_p8_xts_decrypt,.-.aes_p8_xts_decrypt
+.size aes_p8_xts_decrypt,.-.aes_p8_xts_decrypt
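+
+# _aesp8_xts_encrypt6x: six-way interleaved XTS encrypt loop.  It saves
+# v20-v31, r26-r31 and vrsave in a 448-byte frame and derives six consecutive
+# tweaks per iteration with the vsrab/vaddubm/vsldoi/vand/vxor sequence
+# (multiplication by x in GF(2^128)).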
+.align 5
+_aesp8_xts_encrypt6x:
+ stdu 1,-448(1)
+ mflr 11
+ li 7,207
+ li 3,223
+ std 11,464(1)
+ stvx 20,7,1
+ addi 7,7,32
+ stvx 21,3,1
+ addi 3,3,32
+ stvx 22,7,1
+ addi 7,7,32
+ stvx 23,3,1
+ addi 3,3,32
+ stvx 24,7,1
+ addi 7,7,32
+ stvx 25,3,1
+ addi 3,3,32
+ stvx 26,7,1
+ addi 7,7,32
+ stvx 27,3,1
+ addi 3,3,32
+ stvx 28,7,1
+ addi 7,7,32
+ stvx 29,3,1
+ addi 3,3,32
+ stvx 30,7,1
+ stvx 31,3,1
+ li 0,-1
+ stw 12,396(1)
+ li 3,0x10
+ std 26,400(1)
+ li 26,0x20
+ std 27,408(1)
+ li 27,0x30
+ std 28,416(1)
+ li 28,0x40
+ std 29,424(1)
+ li 29,0x50
+ std 30,432(1)
+ li 30,0x60
+ std 31,440(1)
+ li 31,0x70
+ mtspr 256,0
+
+ subi 9,9,3
+
+ lvx 23,0,6
+ lvx 30,3,6
+ addi 6,6,0x20
+ lvx 31,0,6
+ vperm 23,23,30,7
+ addi 7,1,64+15
+ mtctr 9
+
+.Load_xts_enc_key:
+ vperm 24,30,31,7
+ lvx 30,3,6
+ addi 6,6,0x20
+ stvx 24,0,7
+ vperm 25,31,30,7
+ lvx 31,0,6
+ stvx 25,3,7
+ addi 7,7,0x20
+ bdnz .Load_xts_enc_key
+
+ lvx 26,3,6
+ vperm 24,30,31,7
+ lvx 27,26,6
+ stvx 24,0,7
+ vperm 25,31,26,7
+ lvx 28,27,6
+ stvx 25,3,7
+ addi 7,1,64+15
+ vperm 26,26,27,7
+ lvx 29,28,6
+ vperm 27,27,28,7
+ lvx 30,29,6
+ vperm 28,28,29,7
+ lvx 31,30,6
+ vperm 29,29,30,7
+ lvx 22,31,6
+ vperm 30,30,31,7
+ lvx 24,0,7
+ vperm 31,31,22,7
+ lvx 25,3,7
+
+ vperm 0,2,4,5
+ subi 10,10,31
+ vxor 17,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 7,0,17
+ vxor 8,8,11
+
+ .long 0x7C235699
+ vxor 18,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 12,1,18
+ vxor 8,8,11
+
+ .long 0x7C5A5699
+ andi. 31,5,15
+ vxor 19,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 13,2,19
+ vxor 8,8,11
+
+ .long 0x7C7B5699
+ sub 5,5,31
+ vxor 20,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 14,3,20
+ vxor 8,8,11
+
+ .long 0x7C9C5699
+ subi 5,5,0x60
+ vxor 21,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 15,4,21
+ vxor 8,8,11
+
+ .long 0x7CBD5699
+ addi 10,10,0x60
+ vxor 22,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 16,5,22
+ vxor 8,8,11
+
+ vxor 31,31,23
+ mtctr 9
+ b .Loop_xts_enc6x
+
+.align 5
+.Loop_xts_enc6x:
+ .long 0x10E7C508
+ .long 0x118CC508
+ .long 0x11ADC508
+ .long 0x11CEC508
+ .long 0x11EFC508
+ .long 0x1210C508
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD08
+ .long 0x118CCD08
+ .long 0x11ADCD08
+ .long 0x11CECD08
+ .long 0x11EFCD08
+ .long 0x1210CD08
+ lvx 25,3,7
+ bdnz .Loop_xts_enc6x
+
+ subic 5,5,96
+ vxor 0,17,31
+ .long 0x10E7C508
+ .long 0x118CC508
+ vsrab 11,8,9
+ vxor 17,8,23
+ vaddubm 8,8,8
+ .long 0x11ADC508
+ .long 0x11CEC508
+ vsldoi 11,11,11,15
+ .long 0x11EFC508
+ .long 0x1210C508
+
+ subfe. 0,0,0
+ vand 11,11,10
+ .long 0x10E7CD08
+ .long 0x118CCD08
+ vxor 8,8,11
+ .long 0x11ADCD08
+ .long 0x11CECD08
+ vxor 1,18,31
+ vsrab 11,8,9
+ vxor 18,8,23
+ .long 0x11EFCD08
+ .long 0x1210CD08
+
+ and 0,0,5
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x10E7D508
+ .long 0x118CD508
+ vand 11,11,10
+ .long 0x11ADD508
+ .long 0x11CED508
+ vxor 8,8,11
+ .long 0x11EFD508
+ .long 0x1210D508
+
+ add 10,10,0
+
+
+
+ vxor 2,19,31
+ vsrab 11,8,9
+ vxor 19,8,23
+ vaddubm 8,8,8
+ .long 0x10E7DD08
+ .long 0x118CDD08
+ vsldoi 11,11,11,15
+ .long 0x11ADDD08
+ .long 0x11CEDD08
+ vand 11,11,10
+ .long 0x11EFDD08
+ .long 0x1210DD08
+
+ addi 7,1,64+15
+ vxor 8,8,11
+ .long 0x10E7E508
+ .long 0x118CE508
+ vxor 3,20,31
+ vsrab 11,8,9
+ vxor 20,8,23
+ .long 0x11ADE508
+ .long 0x11CEE508
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x11EFE508
+ .long 0x1210E508
+ lvx 24,0,7
+ vand 11,11,10
+
+ .long 0x10E7ED08
+ .long 0x118CED08
+ vxor 8,8,11
+ .long 0x11ADED08
+ .long 0x11CEED08
+ vxor 4,21,31
+ vsrab 11,8,9
+ vxor 21,8,23
+ .long 0x11EFED08
+ .long 0x1210ED08
+ lvx 25,3,7
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ .long 0x10E7F508
+ .long 0x118CF508
+ vand 11,11,10
+ .long 0x11ADF508
+ .long 0x11CEF508
+ vxor 8,8,11
+ .long 0x11EFF508
+ .long 0x1210F508
+ vxor 5,22,31
+ vsrab 11,8,9
+ vxor 22,8,23
+
+ .long 0x10E70509
+ .long 0x7C005699
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x118C0D09
+ .long 0x7C235699
+ .long 0x11AD1509
+
+ .long 0x7C5A5699
+ vand 11,11,10
+ .long 0x11CE1D09
+
+ .long 0x7C7B5699
+ .long 0x11EF2509
+
+ .long 0x7C9C5699
+ vxor 8,8,11
+ .long 0x11702D09
+
+
+ .long 0x7CBD5699
+ addi 10,10,0x60
+
+
+
+
+
+ .long 0x7CE02799
+ vxor 7,0,17
+
+ .long 0x7D832799
+ vxor 12,1,18
+
+ .long 0x7DBA2799
+ vxor 13,2,19
+
+ .long 0x7DDB2799
+ vxor 14,3,20
+
+ .long 0x7DFC2799
+ vxor 15,4,21
+
+ .long 0x7D7D2799
+ vxor 16,5,22
+ addi 4,4,0x60
+
+ mtctr 9
+ beq .Loop_xts_enc6x
+
+ addic. 5,5,0x60
+ beq .Lxts_enc6x_zero
+ cmpwi 5,0x20
+ blt .Lxts_enc6x_one
+ nop
+ beq .Lxts_enc6x_two
+ cmpwi 5,0x40
+ blt .Lxts_enc6x_three
+ nop
+ beq .Lxts_enc6x_four
+
+.Lxts_enc6x_five:
+ vxor 7,1,17
+ vxor 12,2,18
+ vxor 13,3,19
+ vxor 14,4,20
+ vxor 15,5,21
+
+ bl _aesp8_xts_enc5x
+
+
+ vor 17,22,22
+
+ .long 0x7CE02799
+
+ .long 0x7D832799
+
+ .long 0x7DBA2799
+ vxor 11,15,22
+
+ .long 0x7DDB2799
+ .long 0x7DFC2799
+ addi 4,4,0x50
+ bne .Lxts_enc6x_steal
+ b .Lxts_enc6x_done
+
+.align 4
+.Lxts_enc6x_four:
+ vxor 7,2,17
+ vxor 12,3,18
+ vxor 13,4,19
+ vxor 14,5,20
+ vxor 15,15,15
+
+ bl _aesp8_xts_enc5x
+
+
+ vor 17,21,21
+
+ .long 0x7CE02799
+
+ .long 0x7D832799
+ vxor 11,14,21
+
+ .long 0x7DBA2799
+ .long 0x7DDB2799
+ addi 4,4,0x40
+ bne .Lxts_enc6x_steal
+ b .Lxts_enc6x_done
+
+.align 4
+.Lxts_enc6x_three:
+ vxor 7,3,17
+ vxor 12,4,18
+ vxor 13,5,19
+ vxor 14,14,14
+ vxor 15,15,15
+
+ bl _aesp8_xts_enc5x
+
+
+ vor 17,20,20
+
+ .long 0x7CE02799
+ vxor 11,13,20
+
+ .long 0x7D832799
+ .long 0x7DBA2799
+ addi 4,4,0x30
+ bne .Lxts_enc6x_steal
+ b .Lxts_enc6x_done
+
+.align 4
+.Lxts_enc6x_two:
+ vxor 7,4,17
+ vxor 12,5,18
+ vxor 13,13,13
+ vxor 14,14,14
+ vxor 15,15,15
+
+ bl _aesp8_xts_enc5x
+
+
+ vor 17,19,19
+ vxor 11,12,19
+
+ .long 0x7CE02799
+ .long 0x7D832799
+ addi 4,4,0x20
+ bne .Lxts_enc6x_steal
+ b .Lxts_enc6x_done
+
+.align 4
+.Lxts_enc6x_one:
+ vxor 7,5,17
+ nop
+.Loop_xts_enc1x:
+ .long 0x10E7C508
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD08
+ lvx 25,3,7
+ bdnz .Loop_xts_enc1x
+
+ add 10,10,31
+ cmpwi 31,0
+ .long 0x10E7C508
+
+ subi 10,10,16
+ .long 0x10E7CD08
+
+ lvsr 5,0,31
+ .long 0x10E7D508
+
+ .long 0x7C005699
+ .long 0x10E7DD08
+
+ addi 7,1,64+15
+ .long 0x10E7E508
+ lvx 24,0,7
+
+ .long 0x10E7ED08
+ lvx 25,3,7
+ vxor 17,17,31
+
+
+ .long 0x10E7F508
+
+ vperm 0,0,0,5
+ .long 0x10E78D09
+
+ vor 17,18,18
+ vxor 11,7,18
+
+ .long 0x7CE02799
+ addi 4,4,0x10
+ bne .Lxts_enc6x_steal
+ b .Lxts_enc6x_done
+
+.align 4
+.Lxts_enc6x_zero:
+ cmpwi 31,0
+ beq .Lxts_enc6x_done
+
+ add 10,10,31
+ subi 10,10,16
+ .long 0x7C005699
+ lvsr 5,0,31
+
+ vperm 0,0,0,5
+ vxor 11,11,17
+.Lxts_enc6x_steal:
+ vxor 0,0,17
+ vxor 7,7,7
+ vspltisb 12,-1
+ vperm 7,7,12,5
+ vsel 7,0,11,7
+
+ subi 30,4,17
+ subi 4,4,16
+ mtctr 31
+.Loop_xts_enc6x_steal:
+ lbzu 0,1(30)
+ stb 0,16(30)
+ bdnz .Loop_xts_enc6x_steal
+
+ li 31,0
+ mtctr 9
+ b .Loop_xts_enc1x
+
+.align 4
+.Lxts_enc6x_done:
+ cmpldi 8,0
+ beq .Lxts_enc6x_ret
+
+ vxor 8,17,23
+
+ .long 0x7D004799
+
+.Lxts_enc6x_ret:
+ mtlr 11
+ li 10,79
+ li 11,95
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+
+ mtspr 256,12
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ ld 26,400(1)
+ ld 27,408(1)
+ ld 28,416(1)
+ ld 29,424(1)
+ ld 30,432(1)
+ ld 31,440(1)
+ addi 1,1,448
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,6,6,0
+.long 0
+
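+# _aesp8_xts_enc5x: tail helper for the six-way path; it always runs five
+# cipher streams, with unused block registers zeroed by the
+# .Lxts_enc6x_{five,four,three,two} callers above.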
+.align 5
+_aesp8_xts_enc5x:
+ .long 0x10E7C508
+ .long 0x118CC508
+ .long 0x11ADC508
+ .long 0x11CEC508
+ .long 0x11EFC508
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD08
+ .long 0x118CCD08
+ .long 0x11ADCD08
+ .long 0x11CECD08
+ .long 0x11EFCD08
+ lvx 25,3,7
+ bdnz _aesp8_xts_enc5x
+
+ add 10,10,31
+ cmpwi 31,0
+ .long 0x10E7C508
+ .long 0x118CC508
+ .long 0x11ADC508
+ .long 0x11CEC508
+ .long 0x11EFC508
+
+ subi 10,10,16
+ .long 0x10E7CD08
+ .long 0x118CCD08
+ .long 0x11ADCD08
+ .long 0x11CECD08
+ .long 0x11EFCD08
+ vxor 17,17,31
+
+ .long 0x10E7D508
+ lvsr 5,0,31
+ .long 0x118CD508
+ .long 0x11ADD508
+ .long 0x11CED508
+ .long 0x11EFD508
+ vxor 1,18,31
+
+ .long 0x10E7DD08
+ .long 0x7C005699
+ .long 0x118CDD08
+ .long 0x11ADDD08
+ .long 0x11CEDD08
+ .long 0x11EFDD08
+ vxor 2,19,31
+
+ addi 7,1,64+15
+ .long 0x10E7E508
+ .long 0x118CE508
+ .long 0x11ADE508
+ .long 0x11CEE508
+ .long 0x11EFE508
+ lvx 24,0,7
+ vxor 3,20,31
+
+ .long 0x10E7ED08
+
+ .long 0x118CED08
+ .long 0x11ADED08
+ .long 0x11CEED08
+ .long 0x11EFED08
+ lvx 25,3,7
+ vxor 4,21,31
+
+ .long 0x10E7F508
+ vperm 0,0,0,5
+ .long 0x118CF508
+ .long 0x11ADF508
+ .long 0x11CEF508
+ .long 0x11EFF508
+
+ .long 0x10E78D09
+ .long 0x118C0D09
+ .long 0x11AD1509
+ .long 0x11CE1D09
+ .long 0x11EF2509
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
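+# _aesp8_xts_decrypt6x: six-way interleaved XTS decrypt loop, mirroring
+# _aesp8_xts_encrypt6x; the .long words ending in ...548/...549 are the
+# hand-encoded vncipher/vncipherlast opcodes.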
+.align 5
+_aesp8_xts_decrypt6x:
+ stdu 1,-448(1)
+ mflr 11
+ li 7,207
+ li 3,223
+ std 11,464(1)
+ stvx 20,7,1
+ addi 7,7,32
+ stvx 21,3,1
+ addi 3,3,32
+ stvx 22,7,1
+ addi 7,7,32
+ stvx 23,3,1
+ addi 3,3,32
+ stvx 24,7,1
+ addi 7,7,32
+ stvx 25,3,1
+ addi 3,3,32
+ stvx 26,7,1
+ addi 7,7,32
+ stvx 27,3,1
+ addi 3,3,32
+ stvx 28,7,1
+ addi 7,7,32
+ stvx 29,3,1
+ addi 3,3,32
+ stvx 30,7,1
+ stvx 31,3,1
+ li 0,-1
+ stw 12,396(1)
+ li 3,0x10
+ std 26,400(1)
+ li 26,0x20
+ std 27,408(1)
+ li 27,0x30
+ std 28,416(1)
+ li 28,0x40
+ std 29,424(1)
+ li 29,0x50
+ std 30,432(1)
+ li 30,0x60
+ std 31,440(1)
+ li 31,0x70
+ mtspr 256,0
+
+ subi 9,9,3
+
+ lvx 23,0,6
+ lvx 30,3,6
+ addi 6,6,0x20
+ lvx 31,0,6
+ vperm 23,23,30,7
+ addi 7,1,64+15
+ mtctr 9
+
+.Load_xts_dec_key:
+ vperm 24,30,31,7
+ lvx 30,3,6
+ addi 6,6,0x20
+ stvx 24,0,7
+ vperm 25,31,30,7
+ lvx 31,0,6
+ stvx 25,3,7
+ addi 7,7,0x20
+ bdnz .Load_xts_dec_key
+
+ lvx 26,3,6
+ vperm 24,30,31,7
+ lvx 27,26,6
+ stvx 24,0,7
+ vperm 25,31,26,7
+ lvx 28,27,6
+ stvx 25,3,7
+ addi 7,1,64+15
+ vperm 26,26,27,7
+ lvx 29,28,6
+ vperm 27,27,28,7
+ lvx 30,29,6
+ vperm 28,28,29,7
+ lvx 31,30,6
+ vperm 29,29,30,7
+ lvx 22,31,6
+ vperm 30,30,31,7
+ lvx 24,0,7
+ vperm 31,31,22,7
+ lvx 25,3,7
+
+ vperm 0,2,4,5
+ subi 10,10,31
+ vxor 17,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ vand 11,11,10
+ vxor 7,0,17
+ vxor 8,8,11
+
+ .long 0x7C235699
+ vxor 18,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 12,1,18
+ vxor 8,8,11
+
+ .long 0x7C5A5699
+ andi. 31,5,15
+ vxor 19,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 13,2,19
+ vxor 8,8,11
+
+ .long 0x7C7B5699
+ sub 5,5,31
+ vxor 20,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 14,3,20
+ vxor 8,8,11
+
+ .long 0x7C9C5699
+ subi 5,5,0x60
+ vxor 21,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 15,4,21
+ vxor 8,8,11
+
+ .long 0x7CBD5699
+ addi 10,10,0x60
+ vxor 22,8,23
+ vsrab 11,8,9
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ vand 11,11,10
+ vxor 16,5,22
+ vxor 8,8,11
+
+ vxor 31,31,23
+ mtctr 9
+ b .Loop_xts_dec6x
+
+.align 5
+.Loop_xts_dec6x:
+ .long 0x10E7C548
+ .long 0x118CC548
+ .long 0x11ADC548
+ .long 0x11CEC548
+ .long 0x11EFC548
+ .long 0x1210C548
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD48
+ .long 0x118CCD48
+ .long 0x11ADCD48
+ .long 0x11CECD48
+ .long 0x11EFCD48
+ .long 0x1210CD48
+ lvx 25,3,7
+ bdnz .Loop_xts_dec6x
+
+ subic 5,5,96
+ vxor 0,17,31
+ .long 0x10E7C548
+ .long 0x118CC548
+ vsrab 11,8,9
+ vxor 17,8,23
+ vaddubm 8,8,8
+ .long 0x11ADC548
+ .long 0x11CEC548
+ vsldoi 11,11,11,15
+ .long 0x11EFC548
+ .long 0x1210C548
+
+ subfe. 0,0,0
+ vand 11,11,10
+ .long 0x10E7CD48
+ .long 0x118CCD48
+ vxor 8,8,11
+ .long 0x11ADCD48
+ .long 0x11CECD48
+ vxor 1,18,31
+ vsrab 11,8,9
+ vxor 18,8,23
+ .long 0x11EFCD48
+ .long 0x1210CD48
+
+ and 0,0,5
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x10E7D548
+ .long 0x118CD548
+ vand 11,11,10
+ .long 0x11ADD548
+ .long 0x11CED548
+ vxor 8,8,11
+ .long 0x11EFD548
+ .long 0x1210D548
+
+ add 10,10,0
+
+
+
+ vxor 2,19,31
+ vsrab 11,8,9
+ vxor 19,8,23
+ vaddubm 8,8,8
+ .long 0x10E7DD48
+ .long 0x118CDD48
+ vsldoi 11,11,11,15
+ .long 0x11ADDD48
+ .long 0x11CEDD48
+ vand 11,11,10
+ .long 0x11EFDD48
+ .long 0x1210DD48
+
+ addi 7,1,64+15
+ vxor 8,8,11
+ .long 0x10E7E548
+ .long 0x118CE548
+ vxor 3,20,31
+ vsrab 11,8,9
+ vxor 20,8,23
+ .long 0x11ADE548
+ .long 0x11CEE548
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x11EFE548
+ .long 0x1210E548
+ lvx 24,0,7
+ vand 11,11,10
+
+ .long 0x10E7ED48
+ .long 0x118CED48
+ vxor 8,8,11
+ .long 0x11ADED48
+ .long 0x11CEED48
+ vxor 4,21,31
+ vsrab 11,8,9
+ vxor 21,8,23
+ .long 0x11EFED48
+ .long 0x1210ED48
+ lvx 25,3,7
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+
+ .long 0x10E7F548
+ .long 0x118CF548
+ vand 11,11,10
+ .long 0x11ADF548
+ .long 0x11CEF548
+ vxor 8,8,11
+ .long 0x11EFF548
+ .long 0x1210F548
+ vxor 5,22,31
+ vsrab 11,8,9
+ vxor 22,8,23
+
+ .long 0x10E70549
+ .long 0x7C005699
+ vaddubm 8,8,8
+ vsldoi 11,11,11,15
+ .long 0x118C0D49
+ .long 0x7C235699
+ .long 0x11AD1549
+
+ .long 0x7C5A5699
+ vand 11,11,10
+ .long 0x11CE1D49
+
+ .long 0x7C7B5699
+ .long 0x11EF2549
+
+ .long 0x7C9C5699
+ vxor 8,8,11
+ .long 0x12102D49
+
+ .long 0x7CBD5699
+ addi 10,10,0x60
+
+
+
+
+
+ .long 0x7CE02799
+ vxor 7,0,17
+
+ .long 0x7D832799
+ vxor 12,1,18
+
+ .long 0x7DBA2799
+ vxor 13,2,19
+
+ .long 0x7DDB2799
+ vxor 14,3,20
+
+ .long 0x7DFC2799
+ vxor 15,4,21
+ .long 0x7E1D2799
+ vxor 16,5,22
+ addi 4,4,0x60
+
+ mtctr 9
+ beq .Loop_xts_dec6x
+
+ addic. 5,5,0x60
+ beq .Lxts_dec6x_zero
+ cmpwi 5,0x20
+ blt .Lxts_dec6x_one
+ nop
+ beq .Lxts_dec6x_two
+ cmpwi 5,0x40
+ blt .Lxts_dec6x_three
+ nop
+ beq .Lxts_dec6x_four
+
+.Lxts_dec6x_five:
+ vxor 7,1,17
+ vxor 12,2,18
+ vxor 13,3,19
+ vxor 14,4,20
+ vxor 15,5,21
+
+ bl _aesp8_xts_dec5x
+
+
+ vor 17,22,22
+ vxor 18,8,23
+
+ .long 0x7CE02799
+ vxor 7,0,18
+
+ .long 0x7D832799
+
+ .long 0x7DBA2799
+
+ .long 0x7DDB2799
+ .long 0x7DFC2799
+ addi 4,4,0x50
+ bne .Lxts_dec6x_steal
+ b .Lxts_dec6x_done
+
+.align 4
+.Lxts_dec6x_four:
+ vxor 7,2,17
+ vxor 12,3,18
+ vxor 13,4,19
+ vxor 14,5,20
+ vxor 15,15,15
+
+ bl _aesp8_xts_dec5x
+
+
+ vor 17,21,21
+ vor 18,22,22
+
+ .long 0x7CE02799
+ vxor 7,0,22
+
+ .long 0x7D832799
+
+ .long 0x7DBA2799
+ .long 0x7DDB2799
+ addi 4,4,0x40
+ bne .Lxts_dec6x_steal
+ b .Lxts_dec6x_done
+
+.align 4
+.Lxts_dec6x_three:
+ vxor 7,3,17
+ vxor 12,4,18
+ vxor 13,5,19
+ vxor 14,14,14
+ vxor 15,15,15
+
+ bl _aesp8_xts_dec5x
+
+
+ vor 17,20,20
+ vor 18,21,21
+
+ .long 0x7CE02799
+ vxor 7,0,21
+
+ .long 0x7D832799
+ .long 0x7DBA2799
+ addi 4,4,0x30
+ bne .Lxts_dec6x_steal
+ b .Lxts_dec6x_done
+
+.align 4
+.Lxts_dec6x_two:
+ vxor 7,4,17
+ vxor 12,5,18
+ vxor 13,13,13
+ vxor 14,14,14
+ vxor 15,15,15
+
+ bl _aesp8_xts_dec5x
+
+
+ vor 17,19,19
+ vor 18,20,20
+
+ .long 0x7CE02799
+ vxor 7,0,20
+ .long 0x7D832799
+ addi 4,4,0x20
+ bne .Lxts_dec6x_steal
+ b .Lxts_dec6x_done
+
+.align 4
+.Lxts_dec6x_one:
+ vxor 7,5,17
+ nop
+.Loop_xts_dec1x:
+ .long 0x10E7C548
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD48
+ lvx 25,3,7
+ bdnz .Loop_xts_dec1x
+
+ subi 0,31,1
+ .long 0x10E7C548
+
+ andi. 0,0,16
+ cmpwi 31,0
+ .long 0x10E7CD48
+
+ sub 10,10,0
+ .long 0x10E7D548
+
+ .long 0x7C005699
+ .long 0x10E7DD48
+
+ addi 7,1,64+15
+ .long 0x10E7E548
+ lvx 24,0,7
+
+ .long 0x10E7ED48
+ lvx 25,3,7
+ vxor 17,17,31
+
+
+ .long 0x10E7F548
+
+ mtctr 9
+ .long 0x10E78D49
+
+ vor 17,18,18
+ vor 18,19,19
+
+ .long 0x7CE02799
+ addi 4,4,0x10
+ vxor 7,0,19
+ bne .Lxts_dec6x_steal
+ b .Lxts_dec6x_done
+
+.align 4
+.Lxts_dec6x_zero:
+ cmpwi 31,0
+ beq .Lxts_dec6x_done
+
+ .long 0x7C005699
+
+ vxor 7,0,18
+.Lxts_dec6x_steal:
+ .long 0x10E7C548
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD48
+ lvx 25,3,7
+ bdnz .Lxts_dec6x_steal
+
+ add 10,10,31
+ .long 0x10E7C548
+
+ cmpwi 31,0
+ .long 0x10E7CD48
+
+ .long 0x7C005699
+ .long 0x10E7D548
+
+ lvsr 5,0,31
+ .long 0x10E7DD48
+
+ addi 7,1,64+15
+ .long 0x10E7E548
+ lvx 24,0,7
+
+ .long 0x10E7ED48
+ lvx 25,3,7
+ vxor 18,18,31
+
+
+ .long 0x10E7F548
+
+ vperm 0,0,0,5
+ .long 0x11679549
+
+
+
+ .long 0x7D602799
+
+ vxor 7,7,7
+ vspltisb 12,-1
+ vperm 7,7,12,5
+ vsel 7,0,11,7
+ vxor 7,7,17
+
+ subi 30,4,1
+ mtctr 31
+.Loop_xts_dec6x_steal:
+ lbzu 0,1(30)
+ stb 0,16(30)
+ bdnz .Loop_xts_dec6x_steal
+
+ li 31,0
+ mtctr 9
+ b .Loop_xts_dec1x
+
+.align 4
+.Lxts_dec6x_done:
+ cmpldi 8,0
+ beq .Lxts_dec6x_ret
+
+ vxor 8,17,23
+
+ .long 0x7D004799
+
+.Lxts_dec6x_ret:
+ mtlr 11
+ li 10,79
+ li 11,95
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+ stvx 9,10,1
+ addi 10,10,32
+ stvx 9,11,1
+ addi 11,11,32
+
+ mtspr 256,12
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ ld 26,400(1)
+ ld 27,408(1)
+ ld 28,416(1)
+ ld 29,424(1)
+ ld 30,432(1)
+ ld 31,440(1)
+ addi 1,1,448
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,6,6,0
+.long 0
+
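+# _aesp8_xts_dec5x: decrypt-side counterpart of _aesp8_xts_enc5x, called by the
+# .Lxts_dec6x_{five,four,three,two} tail cases above.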
+.align 5
+_aesp8_xts_dec5x:
+ .long 0x10E7C548
+ .long 0x118CC548
+ .long 0x11ADC548
+ .long 0x11CEC548
+ .long 0x11EFC548
+ lvx 24,26,7
+ addi 7,7,0x20
+
+ .long 0x10E7CD48
+ .long 0x118CCD48
+ .long 0x11ADCD48
+ .long 0x11CECD48
+ .long 0x11EFCD48
+ lvx 25,3,7
+ bdnz _aesp8_xts_dec5x
+
+ subi 0,31,1
+ .long 0x10E7C548
+ .long 0x118CC548
+ .long 0x11ADC548
+ .long 0x11CEC548
+ .long 0x11EFC548
+
+ andi. 0,0,16
+ cmpwi 31,0
+ .long 0x10E7CD48
+ .long 0x118CCD48
+ .long 0x11ADCD48
+ .long 0x11CECD48
+ .long 0x11EFCD48
+ vxor 17,17,31
+
+ sub 10,10,0
+ .long 0x10E7D548
+ .long 0x118CD548
+ .long 0x11ADD548
+ .long 0x11CED548
+ .long 0x11EFD548
+ vxor 1,18,31
+
+ .long 0x10E7DD48
+ .long 0x7C005699
+ .long 0x118CDD48
+ .long 0x11ADDD48
+ .long 0x11CEDD48
+ .long 0x11EFDD48
+ vxor 2,19,31
+
+ addi 7,1,64+15
+ .long 0x10E7E548
+ .long 0x118CE548
+ .long 0x11ADE548
+ .long 0x11CEE548
+ .long 0x11EFE548
+ lvx 24,0,7
+ vxor 3,20,31
+
+ .long 0x10E7ED48
+
+ .long 0x118CED48
+ .long 0x11ADED48
+ .long 0x11CEED48
+ .long 0x11EFED48
+ lvx 25,3,7
+ vxor 4,21,31
+
+ .long 0x10E7F548
+ .long 0x118CF548
+ .long 0x11ADF548
+ .long 0x11CEF548
+ .long 0x11EFF548
+
+ .long 0x10E78D49
+ .long 0x118C0D49
+ .long 0x11AD1549
+ .long 0x11CE1D49
+ .long 0x11EF2549
+ mtctr 9
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
diff --git a/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/vpaes-ppc.s b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/vpaes-ppc.s
new file mode 100644
index 0000000000..439da0fe81
--- /dev/null
+++ b/deps/openssl/config/archs/linux-ppc64/asm_avx2/crypto/aes/vpaes-ppc.s
@@ -0,0 +1,1497 @@
+.machine "any"
+
+.text
+
+.align 7
+_vpaes_consts:
+.Lk_mc_forward:
+.byte 0x01,0x02,0x03,0x00,0x05,0x06,0x07,0x04,0x09,0x0a,0x0b,0x08,0x0d,0x0e,0x0f,0x0c
+.byte 0x05,0x06,0x07,0x04,0x09,0x0a,0x0b,0x08,0x0d,0x0e,0x0f,0x0c,0x01,0x02,0x03,0x00
+.byte 0x09,0x0a,0x0b,0x08,0x0d,0x0e,0x0f,0x0c,0x01,0x02,0x03,0x00,0x05,0x06,0x07,0x04
+.byte 0x0d,0x0e,0x0f,0x0c,0x01,0x02,0x03,0x00,0x05,0x06,0x07,0x04,0x09,0x0a,0x0b,0x08
+.Lk_mc_backward:
+.byte 0x03,0x00,0x01,0x02,0x07,0x04,0x05,0x06,0x0b,0x08,0x09,0x0a,0x0f,0x0c,0x0d,0x0e
+.byte 0x0f,0x0c,0x0d,0x0e,0x03,0x00,0x01,0x02,0x07,0x04,0x05,0x06,0x0b,0x08,0x09,0x0a
+.byte 0x0b,0x08,0x09,0x0a,0x0f,0x0c,0x0d,0x0e,0x03,0x00,0x01,0x02,0x07,0x04,0x05,0x06
+.byte 0x07,0x04,0x05,0x06,0x0b,0x08,0x09,0x0a,0x0f,0x0c,0x0d,0x0e,0x03,0x00,0x01,0x02
+.Lk_sr:
+.byte 0x00,0x01,0x02,0x03,0x04,0x05,0x06,0x07,0x08,0x09,0x0a,0x0b,0x0c,0x0d,0x0e,0x0f
+.byte 0x00,0x05,0x0a,0x0f,0x04,0x09,0x0e,0x03,0x08,0x0d,0x02,0x07,0x0c,0x01,0x06,0x0b
+.byte 0x00,0x09,0x02,0x0b,0x04,0x0d,0x06,0x0f,0x08,0x01,0x0a,0x03,0x0c,0x05,0x0e,0x07
+.byte 0x00,0x0d,0x0a,0x07,0x04,0x01,0x0e,0x0b,0x08,0x05,0x02,0x0f,0x0c,0x09,0x06,0x03
+
+
+
+
+.Lk_inv:
+.byte 0xf0,0x01,0x08,0x0d,0x0f,0x06,0x05,0x0e,0x02,0x0c,0x0b,0x0a,0x09,0x03,0x07,0x04
+.byte 0xf0,0x07,0x0b,0x0f,0x06,0x0a,0x04,0x01,0x09,0x08,0x05,0x02,0x0c,0x0e,0x0d,0x03
+.Lk_ipt:
+.byte 0x00,0x70,0x2a,0x5a,0x98,0xe8,0xb2,0xc2,0x08,0x78,0x22,0x52,0x90,0xe0,0xba,0xca
+.byte 0x00,0x4d,0x7c,0x31,0x7d,0x30,0x01,0x4c,0x81,0xcc,0xfd,0xb0,0xfc,0xb1,0x80,0xcd
+.Lk_sbo:
+.byte 0x00,0xc7,0xbd,0x6f,0x17,0x6d,0xd2,0xd0,0x78,0xa8,0x02,0xc5,0x7a,0xbf,0xaa,0x15
+.byte 0x00,0x6a,0xbb,0x5f,0xa5,0x74,0xe4,0xcf,0xfa,0x35,0x2b,0x41,0xd1,0x90,0x1e,0x8e
+.Lk_sb1:
+.byte 0x00,0x23,0xe2,0xfa,0x15,0xd4,0x18,0x36,0xef,0xd9,0x2e,0x0d,0xc1,0xcc,0xf7,0x3b
+.byte 0x00,0x3e,0x50,0xcb,0x8f,0xe1,0x9b,0xb1,0x44,0xf5,0x2a,0x14,0x6e,0x7a,0xdf,0xa5
+.Lk_sb2:
+.byte 0x00,0x29,0xe1,0x0a,0x40,0x88,0xeb,0x69,0x4a,0x23,0x82,0xab,0xc8,0x63,0xa1,0xc2
+.byte 0x00,0x24,0x71,0x0b,0xc6,0x93,0x7a,0xe2,0xcd,0x2f,0x98,0xbc,0x55,0xe9,0xb7,0x5e
+
+
+
+
+.Lk_dipt:
+.byte 0x00,0x5f,0x54,0x0b,0x04,0x5b,0x50,0x0f,0x1a,0x45,0x4e,0x11,0x1e,0x41,0x4a,0x15
+.byte 0x00,0x65,0x05,0x60,0xe6,0x83,0xe3,0x86,0x94,0xf1,0x91,0xf4,0x72,0x17,0x77,0x12
+.Lk_dsbo:
+.byte 0x00,0x40,0xf9,0x7e,0x53,0xea,0x87,0x13,0x2d,0x3e,0x94,0xd4,0xb9,0x6d,0xaa,0xc7
+.byte 0x00,0x1d,0x44,0x93,0x0f,0x56,0xd7,0x12,0x9c,0x8e,0xc5,0xd8,0x59,0x81,0x4b,0xca
+.Lk_dsb9:
+.byte 0x00,0xd6,0x86,0x9a,0x53,0x03,0x1c,0x85,0xc9,0x4c,0x99,0x4f,0x50,0x1f,0xd5,0xca
+.byte 0x00,0x49,0xd7,0xec,0x89,0x17,0x3b,0xc0,0x65,0xa5,0xfb,0xb2,0x9e,0x2c,0x5e,0x72
+.Lk_dsbd:
+.byte 0x00,0xa2,0xb1,0xe6,0xdf,0xcc,0x57,0x7d,0x39,0x44,0x2a,0x88,0x13,0x9b,0x6e,0xf5
+.byte 0x00,0xcb,0xc6,0x24,0xf7,0xfa,0xe2,0x3c,0xd3,0xef,0xde,0x15,0x0d,0x18,0x31,0x29
+.Lk_dsbb:
+.byte 0x00,0x42,0xb4,0x96,0x92,0x64,0x22,0xd0,0x04,0xd4,0xf2,0xb0,0xf6,0x46,0x26,0x60
+.byte 0x00,0x67,0x59,0xcd,0xa6,0x98,0x94,0xc1,0x6b,0xaa,0x55,0x32,0x3e,0x0c,0xff,0xf3
+.Lk_dsbe:
+.byte 0x00,0xd0,0xd4,0x26,0x96,0x92,0xf2,0x46,0xb0,0xf6,0xb4,0x64,0x04,0x60,0x42,0x22
+.byte 0x00,0xc1,0xaa,0xff,0xcd,0xa6,0x55,0x0c,0x32,0x3e,0x59,0x98,0x6b,0xf3,0x67,0x94
+
+
+
+
+.Lk_dksd:
+.byte 0x00,0x47,0xe4,0xa3,0x5d,0x1a,0xb9,0xfe,0xf9,0xbe,0x1d,0x5a,0xa4,0xe3,0x40,0x07
+.byte 0x00,0x83,0x36,0xb5,0xf4,0x77,0xc2,0x41,0x1e,0x9d,0x28,0xab,0xea,0x69,0xdc,0x5f
+.Lk_dksb:
+.byte 0x00,0xd5,0x50,0x85,0x1f,0xca,0x4f,0x9a,0x99,0x4c,0xc9,0x1c,0x86,0x53,0xd6,0x03
+.byte 0x00,0x4a,0xfc,0xb6,0xa7,0xed,0x5b,0x11,0xc8,0x82,0x34,0x7e,0x6f,0x25,0x93,0xd9
+.Lk_dkse:
+.byte 0x00,0xd6,0xc9,0x1f,0xca,0x1c,0x03,0xd5,0x86,0x50,0x4f,0x99,0x4c,0x9a,0x85,0x53
+.byte 0xe8,0x7b,0xdc,0x4f,0x05,0x96,0x31,0xa2,0x87,0x14,0xb3,0x20,0x6a,0xf9,0x5e,0xcd
+.Lk_dks9:
+.byte 0x00,0xa7,0xd9,0x7e,0xc8,0x6f,0x11,0xb6,0xfc,0x5b,0x25,0x82,0x34,0x93,0xed,0x4a
+.byte 0x00,0x33,0x14,0x27,0x62,0x51,0x76,0x45,0xce,0xfd,0xda,0xe9,0xac,0x9f,0xb8,0x8b
+
+.Lk_rcon:
+.byte 0xb6,0xee,0x9d,0xaf,0xb9,0x91,0x83,0x1f,0x81,0x7d,0x7c,0x4d,0x08,0x98,0x2a,0x70
+.Lk_s63:
+.byte 0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b,0x5b
+
+.Lk_opt:
+.byte 0x00,0x60,0xb6,0xd6,0x29,0x49,0x9f,0xff,0x08,0x68,0xbe,0xde,0x21,0x41,0x97,0xf7
+.byte 0x00,0xec,0xbc,0x50,0x51,0xbd,0xed,0x01,0xe0,0x0c,0x5c,0xb0,0xb1,0x5d,0x0d,0xe1
+.Lk_deskew:
+.byte 0x00,0xe3,0xa4,0x47,0x40,0xa3,0xe4,0x07,0x1a,0xf9,0xbe,0x5d,0x5a,0xb9,0xfe,0x1d
+.byte 0x00,0x69,0xea,0x83,0xdc,0xb5,0x36,0x5f,0x77,0x1e,0x9d,0xf4,0xab,0xc2,0x41,0x28
+.align 5
+.Lconsts:
+ mflr 0
+ bcl 20,31,$+4
+ mflr 12
+ addi 12,12,-0x308
+ mtlr 0
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105,111,110,32,65,69,83,32,102,111,114,32,65,108,116,105,86,101,99,44,77,105,107,101,32,72,97,109,98,117,114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105,118,101,114,115,105,116,121,41,0
+.align 2
+.align 6
+
+
+
+
+
+
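+# _vpaes_encrypt_preheat: zeroes v7, sets the nibble-extraction constants in
+# v8/v9 and loads the encryption lookup tables from the .Lconsts pool into
+# v10-v19.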
+.align 4
+_vpaes_encrypt_preheat:
+ mflr 8
+ bl .Lconsts
+ mtlr 8
+ li 11, 0xc0
+ li 10, 0xd0
+ li 9, 0xe0
+ li 8, 0xf0
+ vxor 7, 7, 7
+ vspltisb 8,4
+ vspltisb 9,0x0f
+ lvx 10, 12, 11
+ li 11, 0x100
+ lvx 11, 12, 10
+ li 10, 0x110
+ lvx 12, 12, 9
+ li 9, 0x120
+ lvx 13, 12, 8
+ li 8, 0x130
+ lvx 14, 12, 11
+ li 11, 0x140
+ lvx 15, 12, 10
+ li 10, 0x150
+ lvx 16, 12, 9
+ lvx 17, 12, 8
+ lvx 18, 12, 11
+ lvx 19, 12, 10
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
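+# _vpaes_encrypt_core: encrypts the single block in v0 with the
+# vector-permutation AES technique credited in the .byte string near the top
+# of this file; the round count is read from 240(r5) and the schedule is
+# walked via r9.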
+.align 5
+_vpaes_encrypt_core:
+ lwz 8, 240(5)
+ li 9, 16
+ lvx 5, 0, 5
+ li 11, 0x10
+ lvx 6, 9, 5
+ addi 9, 9, 16
+ vperm 5, 5, 6, 31
+ addi 10, 11, 0x40
+ vsrb 1, 0, 8
+ vperm 0, 12, 12, 0
+ vperm 1, 13, 13, 1
+ vxor 0, 0, 5
+ vxor 0, 0, 1
+ mtctr 8
+ b .Lenc_entry
+
+.align 4
+.Lenc_loop:
+
+ vperm 4, 17, 7, 2
+ lvx 1, 12, 11
+ addi 11, 11, 16
+ vperm 0, 16, 7, 3
+ vxor 4, 4, 5
+ andi. 11, 11, 0x30
+ vperm 5, 19, 7, 2
+ vxor 0, 0, 4
+ vperm 2, 18, 7, 3
+ lvx 4, 12, 10
+ addi 10, 11, 0x40
+ vperm 3, 0, 7, 1
+ vxor 2, 2, 5
+ vperm 0, 0, 7, 4
+ vxor 3, 3, 2
+ vperm 4, 3, 7, 1
+ vxor 0, 0, 3
+ vxor 0, 0, 4
+
+.Lenc_entry:
+
+ vsrb 1, 0, 8
+ vperm 5, 11, 11, 0
+ vxor 0, 0, 1
+ vperm 3, 10, 10, 1
+ vperm 4, 10, 10, 0
+ vand 0, 0, 9
+ vxor 3, 3, 5
+ vxor 4, 4, 5
+ vperm 2, 10, 7, 3
+ vor 5,6,6
+ lvx 6, 9, 5
+ vperm 3, 10, 7, 4
+ addi 9, 9, 16
+ vxor 2, 2, 0
+ vperm 5, 5, 6, 31
+ vxor 3, 3, 1
+ bdnz .Lenc_loop
+
+
+ addi 10, 11, 0x80
+
+
+ vperm 4, 14, 7, 2
+ lvx 1, 12, 10
+ vperm 0, 15, 7, 3
+ vxor 4, 4, 5
+ vxor 0, 0, 4
+ vperm 0, 0, 7, 1
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+.globl vpaes_encrypt
+.type vpaes_encrypt,@function
+.section ".opd","aw"
+.align 3
+vpaes_encrypt:
+.quad .vpaes_encrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.vpaes_encrypt:
+ stdu 1,-256(1)
+ li 10,63
+ li 11,79
+ mflr 6
+ mfspr 7,256
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ stw 7,252(1)
+ li 0, -1
+ std 6,272(1)
+ mtspr 256,0
+
+ bl _vpaes_encrypt_preheat
+
+ lvsl 27, 0, 3
+ lvx 0, 0, 3
+ addi 3, 3, 15
+ lvsr 29, 0, 4
+ lvsl 31, 0, 5
+ lvx 26, 0, 3
+ vperm 0, 0, 26, 27
+
+ bl _vpaes_encrypt_core
+
+ andi. 8, 4, 15
+ li 9, 16
+ beq .Lenc_out_aligned
+
+ vperm 0, 0, 0, 29
+ mtctr 9
+.Lenc_out_unaligned:
+ stvebx 0, 0, 4
+ addi 4, 4, 1
+ bdnz .Lenc_out_unaligned
+ b .Lenc_done
+
+.align 4
+.Lenc_out_aligned:
+ stvx 0, 0, 4
+.Lenc_done:
+
+ li 10,63
+ li 11,79
+ mtlr 6
+ mtspr 256,7
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,0,3,0
+.long 0
+.size .vpaes_encrypt,.-.vpaes_encrypt
+.size vpaes_encrypt,.-.vpaes_encrypt
+
+.align 4
+_vpaes_decrypt_preheat:
+ mflr 8
+ bl .Lconsts
+ mtlr 8
+ li 11, 0xc0
+ li 10, 0xd0
+ li 9, 0x160
+ li 8, 0x170
+ vxor 7, 7, 7
+ vspltisb 8,4
+ vspltisb 9,0x0f
+ lvx 10, 12, 11
+ li 11, 0x180
+ lvx 11, 12, 10
+ li 10, 0x190
+ lvx 12, 12, 9
+ li 9, 0x1a0
+ lvx 13, 12, 8
+ li 8, 0x1b0
+ lvx 14, 12, 11
+ li 11, 0x1c0
+ lvx 15, 12, 10
+ li 10, 0x1d0
+ lvx 16, 12, 9
+ li 9, 0x1e0
+ lvx 17, 12, 8
+ li 8, 0x1f0
+ lvx 18, 12, 11
+ li 11, 0x200
+ lvx 19, 12, 10
+ li 10, 0x210
+ lvx 20, 12, 9
+ lvx 21, 12, 8
+ lvx 22, 12, 11
+ lvx 23, 12, 10
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
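+# _vpaes_decrypt_core: single-block decryption counterpart of
+# _vpaes_encrypt_core, driven by the tables loaded in _vpaes_decrypt_preheat.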
+.align 4
+_vpaes_decrypt_core:
+ lwz 8, 240(5)
+ li 9, 16
+ lvx 5, 0, 5
+ li 11, 0x30
+ lvx 6, 9, 5
+ addi 9, 9, 16
+ vperm 5, 5, 6, 31
+ vsrb 1, 0, 8
+ vperm 0, 12, 12, 0
+ vperm 1, 13, 13, 1
+ vxor 0, 0, 5
+ vxor 0, 0, 1
+ mtctr 8
+ b .Ldec_entry
+
+.align 4
+.Ldec_loop:
+
+
+
+ lvx 0, 12, 11
+
+
+ vperm 4, 16, 7, 2
+ subi 11, 11, 16
+ vperm 1, 17, 7, 3
+ andi. 11, 11, 0x30
+ vxor 5, 5, 4
+
+ vxor 5, 5, 1
+
+
+ vperm 4, 18, 7, 2
+ vperm 5, 5, 7, 0
+ vperm 1, 19, 7, 3
+ vxor 5, 5, 4
+
+ vxor 5, 5, 1
+
+
+ vperm 4, 20, 7, 2
+ vperm 5, 5, 7, 0
+ vperm 1, 21, 7, 3
+ vxor 5, 5, 4
+
+ vxor 5, 5, 1
+
+
+ vperm 4, 22, 7, 2
+ vperm 5, 5, 7, 0
+ vperm 1, 23, 7, 3
+ vxor 0, 5, 4
+ vxor 0, 0, 1
+
+.Ldec_entry:
+
+ vsrb 1, 0, 8
+ vperm 2, 11, 11, 0
+ vxor 0, 0, 1
+ vperm 3, 10, 10, 1
+ vperm 4, 10, 10, 0
+ vand 0, 0, 9
+ vxor 3, 3, 2
+ vxor 4, 4, 2
+ vperm 2, 10, 7, 3
+ vor 5,6,6
+ lvx 6, 9, 5
+ vperm 3, 10, 7, 4
+ addi 9, 9, 16
+ vxor 2, 2, 0
+ vperm 5, 5, 6, 31
+ vxor 3, 3, 1
+ bdnz .Ldec_loop
+
+
+ addi 10, 11, 0x80
+
+ vperm 4, 14, 7, 2
+
+ lvx 2, 12, 10
+ vperm 1, 15, 7, 3
+ vxor 4, 4, 5
+ vxor 0, 1, 4
+ vperm 0, 0, 7, 2
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+.globl vpaes_decrypt
+.type vpaes_decrypt,@function
+.section ".opd","aw"
+.align 3
+vpaes_decrypt:
+.quad .vpaes_decrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.vpaes_decrypt:
+ stdu 1,-256(1)
+ li 10,63
+ li 11,79
+ mflr 6
+ mfspr 7,256
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ stw 7,252(1)
+ li 0, -1
+ std 6,272(1)
+ mtspr 256,0
+
+ bl _vpaes_decrypt_preheat
+
+ lvsl 27, 0, 3
+ lvx 0, 0, 3
+ addi 3, 3, 15
+ lvsr 29, 0, 4
+ lvsl 31, 0, 5
+ lvx 26, 0, 3
+ vperm 0, 0, 26, 27
+
+ bl _vpaes_decrypt_core
+
+ andi. 8, 4, 15
+ li 9, 16
+ beq .Ldec_out_aligned
+
+ vperm 0, 0, 0, 29
+ mtctr 9
+.Ldec_out_unaligned:
+ stvebx 0, 0, 4
+ addi 4, 4, 1
+ bdnz .Ldec_out_unaligned
+ b .Ldec_done
+
+.align 4
+.Ldec_out_aligned:
+ stvx 0, 0, 4
+.Ldec_done:
+
+ li 10,63
+ li 11,79
+ mtlr 6
+ mtspr 256,7
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,0,3,0
+.long 0
+.size .vpaes_decrypt,.-.vpaes_decrypt
+.size vpaes_decrypt,.-.vpaes_decrypt
+
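+# vpaes_cbc_encrypt: CBC encryption/decryption.  Inputs shorter than 16 bytes
+# return immediately, the direction flag in r8 selects .Lcbc_decrypt when zero,
+# and misaligned output is written byte-by-byte in the _head and .Lcbc_tail
+# loops.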
+.globl vpaes_cbc_encrypt
+.type vpaes_cbc_encrypt,@function
+.section ".opd","aw"
+.align 3
+vpaes_cbc_encrypt:
+.quad .vpaes_cbc_encrypt,.TOC.@tocbase,0
+.previous
+.align 5
+.vpaes_cbc_encrypt:
+ cmpldi 5,16
+ .long 0x4dc00020
+
+ stdu 1,-272(1)
+ mflr 0
+ li 10,63
+ li 11,79
+ mfspr 12,256
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ stw 12,252(1)
+ std 30,256(1)
+ std 31,264(1)
+ li 9, -16
+ std 0, 288(1)
+
+ and 30, 5, 9
+ andi. 9, 4, 15
+ mr 5, 6
+ mr 31, 7
+ li 6, -1
+ mcrf 1, 0
+ mr 7, 12
+ mtspr 256,6
+
+ lvx 24, 0, 31
+ li 9, 15
+ lvsl 27, 0, 31
+ lvx 25, 9, 31
+ vperm 24, 24, 25, 27
+
+ cmpwi 8, 0
+ neg 8, 3
+ vxor 7, 7, 7
+ lvsl 31, 0, 5
+ lvsr 29, 0, 4
+ lvsr 27, 0, 8
+ vnor 30, 7, 7
+ lvx 26, 0, 3
+ vperm 30, 7, 30, 29
+ addi 3, 3, 15
+
+ beq .Lcbc_decrypt
+
+ bl _vpaes_encrypt_preheat
+ li 0, 16
+
+ beq 1, .Lcbc_enc_loop
+
+ vor 0,26,26
+ lvx 26, 0, 3
+ addi 3, 3, 16
+ vperm 0, 0, 26, 27
+ vxor 0, 0, 24
+
+ bl _vpaes_encrypt_core
+
+ andi. 8, 4, 15
+ vor 24,0,0
+ sub 9, 4, 8
+ vperm 28, 0, 0, 29
+
+.Lcbc_enc_head:
+ stvebx 28, 8, 9
+ cmpwi 8, 15
+ addi 8, 8, 1
+ bne .Lcbc_enc_head
+
+ sub. 30, 30, 0
+ addi 4, 4, 16
+ beq .Lcbc_unaligned_done
+
+.Lcbc_enc_loop:
+ vor 0,26,26
+ lvx 26, 0, 3
+ addi 3, 3, 16
+ vperm 0, 0, 26, 27
+ vxor 0, 0, 24
+
+ bl _vpaes_encrypt_core
+
+ vor 24,0,0
+ sub. 30, 30, 0
+ vperm 0, 0, 0, 29
+ vsel 1, 28, 0, 30
+ vor 28,0,0
+ stvx 1, 0, 4
+ addi 4, 4, 16
+ bne .Lcbc_enc_loop
+
+ b .Lcbc_done
+
+.align 5
+.Lcbc_decrypt:
+ bl _vpaes_decrypt_preheat
+ li 0, 16
+
+ beq 1, .Lcbc_dec_loop
+
+ vor 0,26,26
+ lvx 26, 0, 3
+ addi 3, 3, 16
+ vperm 0, 0, 26, 27
+ vor 25,0,0
+
+ bl _vpaes_decrypt_core
+
+ andi. 8, 4, 15
+ vxor 0, 0, 24
+ vor 24,25,25
+ sub 9, 4, 8
+ vperm 28, 0, 0, 29
+
+.Lcbc_dec_head:
+ stvebx 28, 8, 9
+ cmpwi 8, 15
+ addi 8, 8, 1
+ bne .Lcbc_dec_head
+
+ sub. 30, 30, 0
+ addi 4, 4, 16
+ beq .Lcbc_unaligned_done
+
+.Lcbc_dec_loop:
+ vor 0,26,26
+ lvx 26, 0, 3
+ addi 3, 3, 16
+ vperm 0, 0, 26, 27
+ vor 25,0,0
+
+ bl _vpaes_decrypt_core
+
+ vxor 0, 0, 24
+ vor 24,25,25
+ sub. 30, 30, 0
+ vperm 0, 0, 0, 29
+ vsel 1, 28, 0, 30
+ vor 28,0,0
+ stvx 1, 0, 4
+ addi 4, 4, 16
+ bne .Lcbc_dec_loop
+
+.Lcbc_done:
+ beq 1, .Lcbc_write_iv
+
+.Lcbc_unaligned_done:
+ andi. 8, 4, 15
+ sub 4, 4, 8
+ li 9, 0
+.Lcbc_tail:
+ stvebx 28, 9, 4
+ addi 9, 9, 1
+ cmpw 9, 8
+ bne .Lcbc_tail
+
+.Lcbc_write_iv:
+ neg 8, 31
+ li 10, 4
+ lvsl 29, 0, 8
+ li 11, 8
+ li 12, 12
+ vperm 24, 24, 24, 29
+ stvewx 24, 0, 31
+ stvewx 24, 10, 31
+ stvewx 24, 11, 31
+ stvewx 24, 12, 31
+
+ mtspr 256,7
+ li 10,63
+ li 11,79
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+.Lcbc_abort:
+ ld 0, 288(1)
+ ld 30,256(1)
+ ld 31,264(1)
+ mtlr 0
+ addi 1,1,272
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,2,6,0
+.long 0
+.size .vpaes_cbc_encrypt,.-.vpaes_cbc_encrypt
+.size vpaes_cbc_encrypt,.-.vpaes_cbc_encrypt
+
+
+
+
+
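+# _vpaes_key_preheat: zeroes v9, sets the nibble mask in v8 and loads the
+# key-schedule constant tables from the .Lconsts pool into v10-v26.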
+.align 4
+_vpaes_key_preheat:
+ mflr 8
+ bl .Lconsts
+ mtlr 8
+ li 11, 0xc0
+ li 10, 0xd0
+ li 9, 0xe0
+ li 8, 0xf0
+
+ vspltisb 8,4
+ vxor 9,9,9
+ lvx 10, 12, 11
+ li 11, 0x120
+ lvx 11, 12, 10
+ li 10, 0x130
+ lvx 12, 12, 9
+ li 9, 0x220
+ lvx 13, 12, 8
+ li 8, 0x230
+
+ lvx 14, 12, 11
+ li 11, 0x240
+ lvx 15, 12, 10
+ li 10, 0x250
+
+ lvx 16, 12, 9
+ li 9, 0x260
+ lvx 17, 12, 8
+ li 8, 0x270
+ lvx 18, 12, 11
+ li 11, 0x280
+ lvx 19, 12, 10
+ li 10, 0x290
+ lvx 20, 12, 9
+ li 9, 0x2a0
+ lvx 21, 12, 8
+ li 8, 0x2b0
+ lvx 22, 12, 11
+ lvx 23, 12, 10
+
+ lvx 24, 12, 9
+ lvx 25, 0, 12
+ lvx 26, 12, 8
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
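+# _vpaes_schedule_core: common key-schedule generator shared by
+# vpaes_set_encrypt_key and vpaes_set_decrypt_key; it dispatches on the key
+# size to .Lschedule_128, .Lschedule_192 or .Lschedule_256 and stores each
+# round key through _vpaes_schedule_mangle.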
+.align 4
+_vpaes_schedule_core:
+ mflr 7
+
+ bl _vpaes_key_preheat
+
+
+ neg 8, 3
+ lvx 0, 0, 3
+ addi 3, 3, 15
+ lvsr 27, 0, 8
+ lvx 6, 0, 3
+ addi 3, 3, 8
+ vperm 0, 0, 6, 27
+
+
+ vor 3,0,0
+ bl _vpaes_schedule_transform
+ vor 7,0,0
+
+ bne 1, .Lschedule_am_decrypting
+
+
+ li 8, 0x30
+ li 9, 4
+ li 10, 8
+ li 11, 12
+
+ lvsr 29, 0, 5
+ vnor 30, 9, 9
+ vperm 30, 9, 30, 29
+
+
+ vperm 28, 0, 0, 29
+ stvewx 28, 0, 5
+ stvewx 28, 9, 5
+ stvewx 28, 10, 5
+ addi 10, 12, 0x80
+ stvewx 28, 11, 5
+ b .Lschedule_go
+
+.Lschedule_am_decrypting:
+ srwi 8, 4, 1
+ andi. 8, 8, 32
+ xori 8, 8, 32
+ addi 10, 12, 0x80
+
+ lvx 1, 8, 10
+ li 9, 4
+ li 10, 8
+ li 11, 12
+ vperm 4, 3, 3, 1
+
+ neg 0, 5
+ lvsl 29, 0, 0
+ vnor 30, 9, 9
+ vperm 30, 30, 9, 29
+
+
+ vperm 28, 4, 4, 29
+ stvewx 28, 0, 5
+ stvewx 28, 9, 5
+ stvewx 28, 10, 5
+ addi 10, 12, 0x80
+ stvewx 28, 11, 5
+ addi 5, 5, 15
+ xori 8, 8, 0x30
+
+.Lschedule_go:
+ cmplwi 4, 192
+ bgt .Lschedule_256
+ beq .Lschedule_192
+
+
+
+
+
+
+
+
+
+
+.Lschedule_128:
+ li 0, 10
+ mtctr 0
+
+.Loop_schedule_128:
+ bl _vpaes_schedule_round
+ bdz .Lschedule_mangle_last
+ bl _vpaes_schedule_mangle
+ b .Loop_schedule_128
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.align 4
+.Lschedule_192:
+ li 0, 4
+ lvx 0, 0, 3
+ vperm 0, 6, 0, 27
+ vsldoi 0, 3, 0, 8
+ bl _vpaes_schedule_transform
+ vsldoi 6, 0, 9, 8
+ vsldoi 6, 9, 6, 8
+ mtctr 0
+
+.Loop_schedule_192:
+ bl _vpaes_schedule_round
+ vsldoi 0, 6, 0, 8
+ bl _vpaes_schedule_mangle
+ bl _vpaes_schedule_192_smear
+ bl _vpaes_schedule_mangle
+ bl _vpaes_schedule_round
+ bdz .Lschedule_mangle_last
+ bl _vpaes_schedule_mangle
+ bl _vpaes_schedule_192_smear
+ b .Loop_schedule_192
+
+
+
+
+
+
+
+
+
+
+
+.align 4
+.Lschedule_256:
+ li 0, 7
+ addi 3, 3, 8
+ lvx 0, 0, 3
+ vperm 0, 6, 0, 27
+ bl _vpaes_schedule_transform
+ mtctr 0
+
+.Loop_schedule_256:
+ bl _vpaes_schedule_mangle
+ vor 6,0,0
+
+
+ bl _vpaes_schedule_round
+ bdz .Lschedule_mangle_last
+ bl _vpaes_schedule_mangle
+
+
+ vspltw 0, 0, 3
+ vor 5,7,7
+ vor 7,6,6
+ bl _vpaes_schedule_low_round
+ vor 7,5,5
+
+ b .Loop_schedule_256
+
+
+
+
+
+
+
+
+
+
+.align 4
+.Lschedule_mangle_last:
+
+ li 11, 0x2e0
+ li 9, 0x2f0
+ bne 1, .Lschedule_mangle_last_dec
+
+
+ lvx 1, 8, 10
+ li 11, 0x2c0
+ li 9, 0x2d0
+ vperm 0, 0, 0, 1
+
+ lvx 12, 11, 12
+ lvx 13, 9, 12
+ addi 5, 5, 16
+ vxor 0, 0, 26
+ bl _vpaes_schedule_transform
+
+
+ vperm 0, 0, 0, 29
+ li 10, 4
+ vsel 2, 28, 0, 30
+ li 11, 8
+ stvx 2, 0, 5
+ li 12, 12
+ stvewx 0, 0, 5
+ stvewx 0, 10, 5
+ stvewx 0, 11, 5
+ stvewx 0, 12, 5
+ b .Lschedule_mangle_done
+
+.align 4
+.Lschedule_mangle_last_dec:
+ lvx 12, 11, 12
+ lvx 13, 9, 12
+ addi 5, 5, -16
+ vxor 0, 0, 26
+ bl _vpaes_schedule_transform
+
+
+ addi 9, 5, -15
+ vperm 0, 0, 0, 29
+ li 10, 4
+ vsel 2, 28, 0, 30
+ li 11, 8
+ stvx 2, 0, 5
+ li 12, 12
+ stvewx 0, 0, 9
+ stvewx 0, 10, 9
+ stvewx 0, 11, 9
+ stvewx 0, 12, 9
+
+
+.Lschedule_mangle_done:
+ mtlr 7
+
+ vxor 0, 0, 0
+ vxor 1, 1, 1
+ vxor 2, 2, 2
+ vxor 3, 3, 3
+ vxor 4, 4, 4
+ vxor 5, 5, 5
+ vxor 6, 6, 6
+ vxor 7, 7, 7
+
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.align 4
+_vpaes_schedule_192_smear:
+ vspltw 0, 7, 3
+ vsldoi 1, 9, 6, 12
+ vsldoi 0, 7, 0, 8
+ vxor 6, 6, 1
+ vxor 6, 6, 0
+ vor 0,6,6
+ vsldoi 6, 6, 9, 8
+ vsldoi 6, 9, 6, 8
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
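+# _vpaes_schedule_round: one full key-schedule round (round-constant injection
+# from v24 plus the rotate/subword step); it falls through into
+# _vpaes_schedule_low_round, which the 256-bit schedule also calls directly.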
+.align 4
+_vpaes_schedule_round:
+
+
+ vsldoi 1, 24, 9, 15
+ vsldoi 24, 24, 24, 15
+ vxor 7, 7, 1
+
+
+ vspltw 0, 0, 3
+ vsldoi 0, 0, 0, 1
+
+
+
+
+_vpaes_schedule_low_round:
+
+ vsldoi 1, 9, 7, 12
+ vxor 7, 7, 1
+ vspltisb 1, 0x0f
+ vsldoi 4, 9, 7, 8
+
+
+ vand 1, 1, 0
+ vsrb 0, 0, 8
+ vxor 7, 7, 4
+ vperm 2, 11, 9, 1
+ vxor 1, 1, 0
+ vperm 3, 10, 9, 0
+ vxor 3, 3, 2
+ vperm 4, 10, 9, 1
+ vxor 7, 7, 26
+ vperm 3, 10, 9, 3
+ vxor 4, 4, 2
+ vperm 2, 10, 9, 4
+ vxor 3, 3, 1
+ vxor 2, 2, 0
+ vperm 4, 15, 9, 3
+ vperm 1, 14, 9, 2
+ vxor 1, 1, 4
+
+
+ vxor 0, 1, 7
+ vxor 7, 1, 7
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
+
+
+
+
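+# _vpaes_schedule_transform: applies the linear input/output transform to v0
+# using the lo/hi nibble lookup tables currently held in v12/v13.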
+.align 4
+_vpaes_schedule_transform:
+
+ vsrb 2, 0, 8
+
+ vperm 0, 12, 12, 0
+
+ vperm 2, 13, 13, 2
+ vxor 0, 0, 2
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+.align 4
+_vpaes_schedule_mangle:
+
+
+ bne 1, .Lschedule_mangle_dec
+
+
+ vxor 4, 0, 26
+ addi 5, 5, 16
+ vperm 4, 4, 4, 25
+ vperm 1, 4, 4, 25
+ vperm 3, 1, 1, 25
+ vxor 4, 4, 1
+ lvx 1, 8, 10
+ vxor 3, 3, 4
+
+ vperm 3, 3, 3, 1
+ addi 8, 8, -16
+ andi. 8, 8, 0x30
+
+
+ vperm 1, 3, 3, 29
+ vsel 2, 28, 1, 30
+ vor 28,1,1
+ stvx 2, 0, 5
+ blr
+
+.align 4
+.Lschedule_mangle_dec:
+
+
+ vsrb 1, 0, 8
+
+
+
+ vperm 2, 16, 16, 0
+
+ vperm 3, 17, 17, 1
+ vxor 3, 3, 2
+ vperm 3, 3, 9, 25
+
+
+ vperm 2, 18, 18, 0
+ vxor 2, 2, 3
+
+ vperm 3, 19, 19, 1
+ vxor 3, 3, 2
+ vperm 3, 3, 9, 25
+
+
+ vperm 2, 20, 20, 0
+ vxor 2, 2, 3
+
+ vperm 3, 21, 21, 1
+ vxor 3, 3, 2
+
+
+ vperm 2, 22, 22, 0
+ vperm 3, 3, 9, 25
+
+ vperm 4, 23, 23, 1
+ lvx 1, 8, 10
+ vxor 2, 2, 3
+ vxor 3, 4, 2
+
+ addi 5, 5, -16
+
+ vperm 3, 3, 3, 1
+ addi 8, 8, -16
+ andi. 8, 8, 0x30
+
+
+ vperm 1, 3, 3, 29
+ vsel 2, 28, 1, 30
+ vor 28,1,1
+ stvx 2, 0, 5
+ blr
+.long 0
+.byte 0,12,0x14,0,0,0,0,0
+
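+# vpaes_set_encrypt_key: computes rounds = bits/32 + 6, stores it at 240(key)
+# and runs _vpaes_schedule_core in the encryption direction.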
+.globl vpaes_set_encrypt_key
+.type vpaes_set_encrypt_key,@function
+.section ".opd","aw"
+.align 3
+vpaes_set_encrypt_key:
+.quad .vpaes_set_encrypt_key,.TOC.@tocbase,0
+.previous
+.align 5
+.vpaes_set_encrypt_key:
+ stdu 1,-256(1)
+ li 10,63
+ li 11,79
+ mflr 0
+ mfspr 6,256
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ stw 6,252(1)
+ li 7, -1
+ std 0, 272(1)
+ mtspr 256,7
+
+ srwi 9, 4, 5
+ addi 9, 9, 6
+ stw 9, 240(5)
+
+ cmplw 1,4,4
+ li 8, 0x30
+ bl _vpaes_schedule_core
+
+ ld 0, 272(1)
+ li 10,63
+ li 11,79
+ mtspr 256,6
+ mtlr 0
+ xor 3, 3, 3
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,0,3,0
+.long 0
+.size .vpaes_set_encrypt_key,.-.vpaes_set_encrypt_key
+.size vpaes_set_encrypt_key,.-.vpaes_set_encrypt_key
+
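+# vpaes_set_decrypt_key: same round-count computation, but the schedule pointer
+# is advanced by rounds*16 bytes and _vpaes_schedule_core runs in the
+# decryption direction, writing the round keys back-to-front.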
+.globl vpaes_set_decrypt_key
+.type vpaes_set_decrypt_key,@function
+.section ".opd","aw"
+.align 3
+vpaes_set_decrypt_key:
+.quad .vpaes_set_decrypt_key,.TOC.@tocbase,0
+.previous
+.align 4
+.vpaes_set_decrypt_key:
+ stdu 1,-256(1)
+ li 10,63
+ li 11,79
+ mflr 0
+ mfspr 6,256
+ stvx 20,10,1
+ addi 10,10,32
+ stvx 21,11,1
+ addi 11,11,32
+ stvx 22,10,1
+ addi 10,10,32
+ stvx 23,11,1
+ addi 11,11,32
+ stvx 24,10,1
+ addi 10,10,32
+ stvx 25,11,1
+ addi 11,11,32
+ stvx 26,10,1
+ addi 10,10,32
+ stvx 27,11,1
+ addi 11,11,32
+ stvx 28,10,1
+ addi 10,10,32
+ stvx 29,11,1
+ addi 11,11,32
+ stvx 30,10,1
+ stvx 31,11,1
+ stw 6,252(1)
+ li 7, -1
+ std 0, 272(1)
+ mtspr 256,7
+
+ srwi 9, 4, 5
+ addi 9, 9, 6
+ stw 9, 240(5)
+
+ slwi 9, 9, 4
+ add 5, 5, 9
+
+ cmplwi 1, 4, 0
+ srwi 8, 4, 1
+ andi. 8, 8, 32
+ xori 8, 8, 32
+ bl _vpaes_schedule_core
+
+ ld 0, 272(1)
+ li 10,63
+ li 11,79
+ mtspr 256,6
+ mtlr 0
+ xor 3, 3, 3
+ lvx 20,10,1
+ addi 10,10,32
+ lvx 21,11,1
+ addi 11,11,32
+ lvx 22,10,1
+ addi 10,10,32
+ lvx 23,11,1
+ addi 11,11,32
+ lvx 24,10,1
+ addi 10,10,32
+ lvx 25,11,1
+ addi 11,11,32
+ lvx 26,10,1
+ addi 10,10,32
+ lvx 27,11,1
+ addi 11,11,32
+ lvx 28,10,1
+ addi 10,10,32
+ lvx 29,11,1
+ addi 11,11,32
+ lvx 30,10,1
+ lvx 31,11,1
+ addi 1,1,256
+ blr
+.long 0
+.byte 0,12,0x04,1,0x80,0,3,0
+.long 0
+.size .vpaes_set_decrypt_key,.-.vpaes_set_decrypt_key
+.size vpaes_set_decrypt_key,.-.vpaes_set_decrypt_key