# Copyright (c) 2011-2013, Andy Polyakov <appro@openssl.org>
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# * Redistributions of source code must retain copyright notices,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
# * Neither the name of the Andy Polyakov nor the names of its
# copyright holder and contributors may be used to endorse or
# promote products derived from this software without specific
# prior written permission.
# ALTERNATIVELY, provided that this notice is retained in full, this
# product may be distributed under the terms of the GNU General Public
# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *** This file is auto-generated ***
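#
# GHASH for x86_64: 4-bit table-driven and PCLMULQDQ-based multiplication
# routines generated from the CRYPTOGAMS ghash-x86_64 perlasm module.
# The .def/.scl directives and the .LSEH_*/.pdata/.xdata machinery below
# indicate the Win64 (mingw-style) flavor, so arguments arrive in
# %rcx, %rdx, %r8 and %r9.
#
# gcm_gmult_4bit: one GHASH multiplication, Xi <- Xi * H, driven by the
# 4-bit lookup table Htable and the .Lrem_4bit reduction constants.
# Presumed C prototype (an assumption taken from OpenSSL's generic GCM
# code, not stated in this file):
#   void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]);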
.def gcm_gmult_4bit; .scl 2; .type 32; .endef
.LSEH_begin_gcm_gmult_4bit:
leaq .Lrem_4bit(%rip),%r11
movq 8(%rsi,%rax,1),%r8
movq (%rsi,%rax,1),%r9
movb (%rdi,%rcx,1),%al
xorq 8(%rsi,%rbx,1),%r8
xorq (%rsi,%rbx,1),%r9
xorq (%r11,%rdx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
xorq (%r11,%rdx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
xorq (%r11,%rdx,8),%r9
xorq 8(%rsi,%rbx,1),%r8
xorq (%rsi,%rbx,1),%r9
xorq (%r11,%rdx,8),%r9
.LSEH_end_gcm_gmult_4bit:
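#
# gcm_ghash_4bit: GHASH over a whole buffer; each 16-byte block is
# xor-ed into Xi and the result multiplied by H via the 4-bit Htable,
# with the .Lrem_8bit table used for the reduction step.  Presumed
# prototype (same assumption as above):
#   void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
#                       const u8 *inp, size_t len);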
.globl gcm_ghash_4bit
.def gcm_ghash_4bit; .scl 2; .type 32; .endef
.LSEH_begin_gcm_ghash_4bit:
leaq 16+128(%rsp),%rbp
movq 0+0-128(%rsi),%r8
movq 0+8-128(%rsi),%rax
movq 16+0-128(%rsi),%r9
movq 16+8-128(%rsi),%rbx
movq 32+0-128(%rsi),%r8
movq %rax,0-128(%rbp)
movq 32+8-128(%rsi),%rax
movq 48+0-128(%rsi),%r9
movq %rbx,8-128(%rbp)
movq 48+8-128(%rsi),%rbx
movq 64+0-128(%rsi),%r8
movq %rax,16-128(%rbp)
movq 64+8-128(%rsi),%rax
movq 80+0-128(%rsi),%r9
movq %rbx,24-128(%rbp)
movq 80+8-128(%rsi),%rbx
movq 96+0-128(%rsi),%r8
movq %rax,32-128(%rbp)
movq 96+8-128(%rsi),%rax
movq 112+0-128(%rsi),%r9
movq %rbx,40-128(%rbp)
movq 112+8-128(%rsi),%rbx
movq 128+0-128(%rsi),%r8
movq %rax,48-128(%rbp)
movq 128+8-128(%rsi),%rax
movq 144+0-128(%rsi),%r9
movq %rbx,56-128(%rbp)
movq 144+8-128(%rsi),%rbx
movq 160+0-128(%rsi),%r8
movq %rax,64-128(%rbp)
movq 160+8-128(%rsi),%rax
movq 176+0-128(%rsi),%r9
movq %rbx,72-128(%rbp)
movq 176+8-128(%rsi),%rbx
movq 192+0-128(%rsi),%r8
movq %rax,80-128(%rbp)
movq 192+8-128(%rsi),%rax
movq 208+0-128(%rsi),%r9
movq %rbx,88-128(%rbp)
movq 208+8-128(%rsi),%rbx
movq 224+0-128(%rsi),%r8
movq %rax,96-128(%rbp)
movq 224+8-128(%rsi),%rax
movq 240+0-128(%rsi),%r9
movq %rbx,104-128(%rbp)
movq 240+8-128(%rsi),%rbx
movq %rax,112-128(%rbp)
movq %rbx,120-128(%rbp)
leaq .Lrem_8bit(%rip),%r11
movq 8(%rsi,%rax,1),%r8
movq (%rsi,%rax,1),%r9
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r12,2),%r12
movzbq (%rsp,%rcx,1),%r13
xorq -128(%rbp,%rcx,8),%r8
xorq (%rbp,%rcx,8),%r9
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
movzwq (%r11,%r13,2),%r13
movzbq (%rsp,%rbx,1),%r12
xorq -128(%rbp,%rbx,8),%r8
xorq (%rbp,%rbx,8),%r9
movzwq (%r11,%r12,2),%r12
xorq 8(%rsi,%rax,1),%r8
xorq (%rsi,%rax,1),%r9
xorq 8(%rsi,%rcx,1),%r8
movzwq (%r11,%r13,2),%r13
xorq (%rsi,%rcx,1),%r9
.LSEH_end_gcm_ghash_4bit:
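#
# gcm_init_clmul: precompute the powers of the hash key H used by the
# carry-less-multiplication (PCLMULQDQ) paths and store them in Htable.
# The ".byte 102,15,58,68,..." sequences are pclmulqdq instructions
# emitted as raw opcode bytes, and ".byte 102,15,58,15,..." is palignr,
# as is customary in perlasm output aimed at older assemblers.
# Presumed prototype (assumption):
#   void gcm_init_clmul(u128 Htable[16], const u64 Xi[2]);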
.globl gcm_init_clmul
.def gcm_init_clmul; .scl 2; .type 32; .endef
.LSEH_begin_gcm_init_clmul:
.byte 0x48,0x83,0xec,0x18
.byte 0x0f,0x29,0x34,0x24
pshufd $78,%xmm2,%xmm2
pshufd $255,%xmm2,%xmm4
pand .L0x1c2_polynomial(%rip),%xmm5
pshufd $78,%xmm2,%xmm6
pshufd $78,%xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,222,0
pshufd $78,%xmm2,%xmm3
pshufd $78,%xmm0,%xmm4
movdqu %xmm0,16(%rcx)
.byte 102,15,58,15,227,8
movdqu %xmm4,32(%rcx)
pshufd $78,%xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,222,0
pshufd $78,%xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,222,0
pshufd $78,%xmm5,%xmm3
pshufd $78,%xmm0,%xmm4
movdqu %xmm5,48(%rcx)
movdqu %xmm0,64(%rcx)
.byte 102,15,58,15,227,8
movdqu %xmm4,80(%rcx)
.LSEH_end_gcm_init_clmul:
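#
# gcm_gmult_clmul: one GHASH multiplication using pclmulqdq; the
# ".byte 102,15,56,0,..." sequences are pshufb instructions that
# byte-swap Xi through .Lbswap_mask.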
.globl gcm_gmult_clmul
.def gcm_gmult_clmul; .scl 2; .type 32; .endef
movdqa .Lbswap_mask(%rip),%xmm5
movdqu 32(%rdx),%xmm4
.byte 102,15,56,0,197
pshufd $78,%xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,15,58,68,220,0
.byte 102,15,56,0,197
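#
# gcm_ghash_clmul: GHASH over a whole buffer using pclmulqdq, processing
# several blocks per iteration with the precomputed key powers from
# Htable.  The .byte-encoded prologue below saves %xmm6-%xmm15 on the
# stack as the Win64 ABI requires; the trailing movaps loads restore
# them before returning.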
.globl gcm_ghash_clmul
.def gcm_ghash_clmul; .scl 2; .type 32; .endef
.LSEH_begin_gcm_ghash_clmul:
.byte 0x48,0x8d,0x60,0xe0
.byte 0x0f,0x29,0x70,0xe0
.byte 0x0f,0x29,0x78,0xf0
.byte 0x44,0x0f,0x29,0x00
.byte 0x44,0x0f,0x29,0x48,0x10
.byte 0x44,0x0f,0x29,0x50,0x20
.byte 0x44,0x0f,0x29,0x58,0x30
.byte 0x44,0x0f,0x29,0x60,0x40
.byte 0x44,0x0f,0x29,0x68,0x50
.byte 0x44,0x0f,0x29,0x70,0x60
.byte 0x44,0x0f,0x29,0x78,0x70
movdqa .Lbswap_mask(%rip),%xmm5
movq $11547335547999543296,%rax
movdqu 32(%rdx),%xmm10
.byte 102,15,56,0,197
movdqu 16(%rdx),%xmm9
movdqu 48(%rdx),%xmm14
movdqu 64(%rdx),%xmm15
movdqu 32(%r8),%xmm11
.byte 102,15,56,0,245
.byte 102,68,15,56,0,221
pshufd $78,%xmm6,%xmm7
.byte 102,15,58,68,242,0
.byte 102,68,15,58,68,194,17
.byte 102,65,15,58,68,250,0
pshufd $78,%xmm11,%xmm12
.byte 102,69,15,58,68,217,0
.byte 102,69,15,58,68,233,17
.byte 102,69,15,58,68,226,16
movups 80(%rdx),%xmm10
movdqu 16(%r8),%xmm11
.byte 102,68,15,56,0,221
.byte 102,15,56,0,221
pshufd $78,%xmm11,%xmm12
.byte 102,69,15,58,68,222,0
pshufd $78,%xmm0,%xmm3
.byte 102,69,15,58,68,238,17
.byte 102,69,15,58,68,226,0
.byte 102,65,15,58,68,199,0
movdqu 48(%r8),%xmm11
.byte 102,68,15,56,0,221
.byte 102,65,15,58,68,207,17
movdqu 32(%r8),%xmm6
movdqa %xmm11,%xmm13
pshufd $78,%xmm11,%xmm12
.byte 102,65,15,58,68,218,16
.byte 102,15,56,0,245
movups 32(%rdx),%xmm10
.byte 102,68,15,58,68,218,0
pshufd $78,%xmm6,%xmm7
.byte 102,68,15,58,68,234,17
movdqa .L7_mask(%rip),%xmm3
.byte 102,72,15,110,224
.byte 102,15,56,0,227
.byte 102,69,15,58,68,226,0
.byte 102,65,15,58,68,241,0
.byte 102,69,15,58,68,193,17
movdqu 16(%r8),%xmm11
.byte 102,68,15,56,0,221
.byte 102,65,15,58,68,250,16
movups 80(%rdx),%xmm10
.byte 102,15,56,0,221
movdqa %xmm11,%xmm13
pshufd $78,%xmm11,%xmm12
.byte 102,69,15,58,68,222,0
.byte 102,69,15,58,68,238,17
.byte 102,69,15,58,68,226,0
pshufd $78,%xmm0,%xmm3
.byte 102,65,15,58,68,199,0
.byte 102,65,15,58,68,207,17
.byte 102,65,15,58,68,218,16
movdqu 32(%rdx),%xmm10
movdqu 16(%r8),%xmm6
.byte 102,15,56,0,221
.byte 102,15,56,0,245
pshufd $78,%xmm6,%xmm3
.byte 102,15,58,68,242,0
.byte 102,68,15,58,68,194,17
.byte 102,65,15,58,68,218,0
pshufd $78,%xmm0,%xmm4
.byte 102,65,15,58,68,193,0
.byte 102,65,15,58,68,201,17
.byte 102,65,15,58,68,226,16
.byte 102,68,15,56,0,197
movdqu 16(%r8),%xmm6
.byte 102,15,56,0,245
.byte 102,15,58,68,242,0
pshufd $78,%xmm8,%xmm3
.byte 102,68,15,58,68,194,17
.byte 102,65,15,58,68,218,0
pshufd $78,%xmm0,%xmm4
.byte 102,65,15,58,68,193,0
.byte 102,65,15,58,68,201,17
.byte 102,65,15,58,68,226,16
.byte 102,15,56,0,221
pshufd $78,%xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
.byte 102,65,15,58,68,218,0
.byte 102,15,56,0,197
movaps 16(%rsp),%xmm7
movaps 32(%rsp),%xmm8
movaps 48(%rsp),%xmm9
movaps 64(%rsp),%xmm10
movaps 80(%rsp),%xmm11
movaps 96(%rsp),%xmm12
movaps 112(%rsp),%xmm13
movaps 128(%rsp),%xmm14
movaps 144(%rsp),%xmm15
.LSEH_end_gcm_ghash_clmul:
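#
# gcm_init_avx / gcm_gmult_avx / gcm_ghash_avx: AVX-named entry points.
# In builds generated without AVX support these are normally just thin
# stubs that jump to the clmul routines above (an assumption based on
# the upstream CRYPTOGAMS source).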
.def gcm_init_avx; .scl 2; .type 32; .endef
.globl gcm_gmult_avx
.def gcm_gmult_avx; .scl 2; .type 32; .endef
.globl gcm_ghash_avx
.def gcm_ghash_avx; .scl 2; .type 32; .endef
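#
# Constant data: the .byte/.long/.value blocks below appear to be, in
# order, the pshufb byte-swap mask, the 0x1C2 reduction-polynomial
# constant, and the .Lrem_4bit and .Lrem_8bit reduction tables
# referenced by the routines above.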
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
.long 0,0,0,471859200,0,943718400,0,610271232
.long 0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long 0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long 0,2441084928,0,2376073216,0,2847932416,0,3051356160
.value 0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
.value 0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
.value 0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
.value 0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
.value 0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
.value 0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
.value 0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
.value 0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
.value 0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
.value 0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
.value 0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
.value 0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
.value 0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
.value 0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
.value 0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
.value 0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
.value 0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
.value 0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
.value 0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
.value 0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
.value 0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
.value 0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
.value 0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
.value 0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
.value 0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
.value 0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
.value 0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
.value 0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
.value 0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
.value 0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
.value 0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
.value 0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE
.byte 71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
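#
# (The .byte string above spells the ASCII tag
# "GHASH for x86_64, CRYPTOGAMS by <appro@openssl.org>".)
#
# se_handler: custom Win64 structured-exception handler referenced by
# the unwind data below; it appears to restore the non-volatile
# registers saved by the prologues and then hands off to
# RtlVirtualUnwind to continue unwinding.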
.def se_handler; .scl 3; .type 32; .endef
leaq (%rsi,%r10,1),%r10
leaq (%rsi,%r10,1),%r10
call *__imp_RtlVirtualUnwind(%rip)
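#
# Win64 exception data: the .rva triplets form the .pdata function table
# (begin, end and unwind-info address for each public routine), and the
# .LSEH_info_* blocks are the matching unwind descriptors; the 4-bit
# routines appear to use the custom se_handler, while the clmul entries
# carry plain unwind codes.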
.rva .LSEH_begin_gcm_gmult_4bit
.rva .LSEH_end_gcm_gmult_4bit
.rva .LSEH_info_gcm_gmult_4bit
.rva .LSEH_begin_gcm_ghash_4bit
.rva .LSEH_end_gcm_ghash_4bit
.rva .LSEH_info_gcm_ghash_4bit
.rva .LSEH_begin_gcm_init_clmul
.rva .LSEH_end_gcm_init_clmul
.rva .LSEH_info_gcm_init_clmul
.rva .LSEH_begin_gcm_ghash_clmul
.rva .LSEH_end_gcm_ghash_clmul
.rva .LSEH_info_gcm_ghash_clmul
.LSEH_info_gcm_gmult_4bit:
.rva .Lgmult_prologue,.Lgmult_epilogue
.LSEH_info_gcm_ghash_4bit:
.rva .Lghash_prologue,.Lghash_epilogue
.LSEH_info_gcm_init_clmul:
.byte 0x01,0x08,0x03,0x00
.byte 0x08,0x68,0x00,0x00
.byte 0x04,0x22,0x00,0x00
.LSEH_info_gcm_ghash_clmul:
.byte 0x01,0x33,0x16,0x00
.byte 0x33,0xf8,0x09,0x00
.byte 0x2e,0xe8,0x08,0x00
.byte 0x29,0xd8,0x07,0x00
.byte 0x24,0xc8,0x06,0x00
.byte 0x1f,0xb8,0x05,0x00
.byte 0x1a,0xa8,0x04,0x00
.byte 0x15,0x98,0x03,0x00
.byte 0x10,0x88,0x02,0x00
.byte 0x0c,0x78,0x01,0x00
.byte 0x08,0x68,0x00,0x00
.byte 0x04,0x01,0x15,0x00
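#
# Mark the stack as non-executable for GNU toolchains.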
.section .note.GNU-stack,"",%progbits