Use C-style comments so the file compiles on old Mac OS X systems. Reported by Ryan Schmidt.
gnutls.git: lib/accelerated/x86/macosx/appro-aes-gcm-x86-64-macosx.s
/*
# Copyright (c) 2011-2012, Andy Polyakov <appro@openssl.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#     * Redistributions of source code must retain copyright notices,
#      this list of conditions and the following disclaimer.
#
#     * Redistributions in binary form must reproduce the above
#      copyright notice, this list of conditions and the following
#      disclaimer in the documentation and/or other materials
#      provided with the distribution.
#
#     * Neither the name of the Andy Polyakov nor the names of its
#      copyright holder and contributors may be used to endorse or
#      promote products derived from this software without specific
#      prior written permission.
#
# ALTERNATIVELY, provided that this notice is retained in full, this
# product may be distributed under the terms of the GNU General Public
# License (GPL), in which case the provisions of the GPL apply INSTEAD OF
# those given above.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# *** This file is auto-generated ***
#
*/
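/*
 * GHASH (the GF(2^128) hash of AES-GCM) for x86-64 Mach-O; the
 * trailing .byte string at the end of the file spells
 * "GHASH for x86_64, CRYPTOGAMS by <appro@openssl.org>".
 * The _4bit entry points use 4-bit table lookups; the _clmul entry
 * points use PCLMULQDQ, hand-encoded as .byte sequences so that
 * assemblers predating the instruction still accept the file
 * (.byte 0xf3,0xc3 is likewise an encoded "rep ret"). Per the commit
 * note above, comments are C-style because '#' comments are not
 * accepted by the assembler on old Mac OS X systems.
 */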
.text

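/*
 * Multiply the hash value Xi by H in GF(2^128), one 4-bit nibble at a
 * time, reducing through L$rem_4bit. Assumed C prototype (OpenSSL
 * gcm128.c convention; annotation only, not from the generated file):
 *   void gcm_gmult_4bit(u64 Xi[2], const u128 Htable[16]);
 * with %rdi = Xi and %rsi = Htable.
 */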
.globl  _gcm_gmult_4bit

.p2align        4
_gcm_gmult_4bit:
        pushq   %rbx
        pushq   %rbp
        pushq   %r12
L$gmult_prologue:

        movzbq  15(%rdi),%r8
        leaq    L$rem_4bit(%rip),%r11
        xorq    %rax,%rax
        xorq    %rbx,%rbx
        movb    %r8b,%al
        movb    %r8b,%bl
        shlb    $4,%al
        movq    $14,%rcx
        movq    8(%rsi,%rax,1),%r8
        movq    (%rsi,%rax,1),%r9
        andb    $240,%bl
        movq    %r8,%rdx
        jmp     L$oop1

.p2align        4
L$oop1:
        shrq    $4,%r8
        andq    $15,%rdx
        movq    %r9,%r10
        movb    (%rdi,%rcx,1),%al
        shrq    $4,%r9
        xorq    8(%rsi,%rbx,1),%r8
        shlq    $60,%r10
        xorq    (%rsi,%rbx,1),%r9
        movb    %al,%bl
        xorq    (%r11,%rdx,8),%r9
        movq    %r8,%rdx
        shlb    $4,%al
        xorq    %r10,%r8
        decq    %rcx
        js      L$break1

        shrq    $4,%r8
        andq    $15,%rdx
        movq    %r9,%r10
        shrq    $4,%r9
        xorq    8(%rsi,%rax,1),%r8
        shlq    $60,%r10
        xorq    (%rsi,%rax,1),%r9
        andb    $240,%bl
        xorq    (%r11,%rdx,8),%r9
        movq    %r8,%rdx
        xorq    %r10,%r8
        jmp     L$oop1

.p2align        4
L$break1:
        shrq    $4,%r8
        andq    $15,%rdx
        movq    %r9,%r10
        shrq    $4,%r9
        xorq    8(%rsi,%rax,1),%r8
        shlq    $60,%r10
        xorq    (%rsi,%rax,1),%r9
        andb    $240,%bl
        xorq    (%r11,%rdx,8),%r9
        movq    %r8,%rdx
        xorq    %r10,%r8

        shrq    $4,%r8
        andq    $15,%rdx
        movq    %r9,%r10
        shrq    $4,%r9
        xorq    8(%rsi,%rbx,1),%r8
        shlq    $60,%r10
        xorq    (%rsi,%rbx,1),%r9
        xorq    %r10,%r8
        xorq    (%r11,%rdx,8),%r9

        bswapq  %r8
        bswapq  %r9
        movq    %r8,8(%rdi)
        movq    %r9,(%rdi)

        movq    16(%rsp),%rbx
        leaq    24(%rsp),%rsp
L$gmult_epilogue:
        .byte   0xf3,0xc3

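/*
 * Hash a whole buffer into Xi. The 280-byte frame holds a re-sliced
 * copy of Htable (low nibbles at (%rsp), a 256-byte table reached via
 * %rbp); each L$outer_loop iteration consumes one 16-byte block,
 * reducing through L$rem_8bit. Assumed C prototype (OpenSSL gcm128.c
 * convention; annotation only, not from the generated file):
 *   void gcm_ghash_4bit(u64 Xi[2], const u128 Htable[16],
 *                       const u8 *inp, size_t len);
 * with %rdi = Xi, %rsi = Htable, %rdx = inp, %rcx = len.
 */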
.globl  _gcm_ghash_4bit

.p2align        4
_gcm_ghash_4bit:
        pushq   %rbx
        pushq   %rbp
        pushq   %r12
        pushq   %r13
        pushq   %r14
        pushq   %r15
        subq    $280,%rsp
L$ghash_prologue:
        movq    %rdx,%r14
        movq    %rcx,%r15
        subq    $-128,%rsi
        leaq    16+128(%rsp),%rbp
        xorl    %edx,%edx
        movq    0+0-128(%rsi),%r8
        movq    0+8-128(%rsi),%rax
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    16+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    16+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,0(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,0(%rbp)
        movq    32+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,0-128(%rbp)
        movq    32+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,1(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,8(%rbp)
        movq    48+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,8-128(%rbp)
        movq    48+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,2(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,16(%rbp)
        movq    64+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,16-128(%rbp)
        movq    64+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,3(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,24(%rbp)
        movq    80+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,24-128(%rbp)
        movq    80+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,4(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,32(%rbp)
        movq    96+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,32-128(%rbp)
        movq    96+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,5(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,40(%rbp)
        movq    112+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,40-128(%rbp)
        movq    112+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,6(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,48(%rbp)
        movq    128+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,48-128(%rbp)
        movq    128+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,7(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,56(%rbp)
        movq    144+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,56-128(%rbp)
        movq    144+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,8(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,64(%rbp)
        movq    160+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,64-128(%rbp)
        movq    160+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,9(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,72(%rbp)
        movq    176+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,72-128(%rbp)
        movq    176+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,10(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,80(%rbp)
        movq    192+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,80-128(%rbp)
        movq    192+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,11(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,88(%rbp)
        movq    208+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,88-128(%rbp)
        movq    208+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,12(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,96(%rbp)
        movq    224+0-128(%rsi),%r8
        shlb    $4,%dl
        movq    %rax,96-128(%rbp)
        movq    224+8-128(%rsi),%rax
        shlq    $60,%r10
        movb    %dl,13(%rsp)
        orq     %r10,%rbx
        movb    %al,%dl
        shrq    $4,%rax
        movq    %r8,%r10
        shrq    $4,%r8
        movq    %r9,104(%rbp)
        movq    240+0-128(%rsi),%r9
        shlb    $4,%dl
        movq    %rbx,104-128(%rbp)
        movq    240+8-128(%rsi),%rbx
        shlq    $60,%r10
        movb    %dl,14(%rsp)
        orq     %r10,%rax
        movb    %bl,%dl
        shrq    $4,%rbx
        movq    %r9,%r10
        shrq    $4,%r9
        movq    %r8,112(%rbp)
        shlb    $4,%dl
        movq    %rax,112-128(%rbp)
        shlq    $60,%r10
        movb    %dl,15(%rsp)
        orq     %r10,%rbx
        movq    %r9,120(%rbp)
        movq    %rbx,120-128(%rbp)
        addq    $-128,%rsi
        movq    8(%rdi),%r8
        movq    0(%rdi),%r9
        addq    %r14,%r15
        leaq    L$rem_8bit(%rip),%r11
        jmp     L$outer_loop
.p2align        4
L$outer_loop:
        xorq    (%r14),%r9
        movq    8(%r14),%rdx
        leaq    16(%r14),%r14
        xorq    %r8,%rdx
        movq    %r9,(%rdi)
        movq    %rdx,8(%rdi)
        shrq    $32,%rdx
        xorq    %rax,%rax
        roll    $8,%edx
        movb    %dl,%al
        movzbl  %dl,%ebx
        shlb    $4,%al
        shrl    $4,%ebx
        roll    $8,%edx
        movq    8(%rsi,%rax,1),%r8
        movq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        xorq    %r8,%r12
        movq    %r9,%r10
        shrq    $8,%r8
        movzbq  %r12b,%r12
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        movl    8(%rdi),%edx
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        movl    4(%rdi),%edx
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        movl    0(%rdi),%edx
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        shrl    $4,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r12,2),%r12
        movzbl  %dl,%ebx
        shlb    $4,%al
        movzbq  (%rsp,%rcx,1),%r13
        shrl    $4,%ebx
        shlq    $48,%r12
        xorq    %r8,%r13
        movq    %r9,%r10
        xorq    %r12,%r9
        shrq    $8,%r8
        movzbq  %r13b,%r13
        shrq    $8,%r9
        xorq    -128(%rbp,%rcx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rcx,8),%r9
        roll    $8,%edx
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        movb    %dl,%al
        xorq    %r10,%r8
        movzwq  (%r11,%r13,2),%r13
        movzbl  %dl,%ecx
        shlb    $4,%al
        movzbq  (%rsp,%rbx,1),%r12
        andl    $240,%ecx
        shlq    $48,%r13
        xorq    %r8,%r12
        movq    %r9,%r10
        xorq    %r13,%r9
        shrq    $8,%r8
        movzbq  %r12b,%r12
        movl    -4(%rdi),%edx
        shrq    $8,%r9
        xorq    -128(%rbp,%rbx,8),%r8
        shlq    $56,%r10
        xorq    (%rbp,%rbx,8),%r9
        movzwq  (%r11,%r12,2),%r12
        xorq    8(%rsi,%rax,1),%r8
        xorq    (%rsi,%rax,1),%r9
        shlq    $48,%r12
        xorq    %r10,%r8
        xorq    %r12,%r9
        movzbq  %r8b,%r13
        shrq    $4,%r8
        movq    %r9,%r10
        shlb    $4,%r13b
        shrq    $4,%r9
        xorq    8(%rsi,%rcx,1),%r8
        movzwq  (%r11,%r13,2),%r13
        shlq    $60,%r10
        xorq    (%rsi,%rcx,1),%r9
        xorq    %r10,%r8
        shlq    $48,%r13
        bswapq  %r8
        xorq    %r13,%r9
        bswapq  %r9
        cmpq    %r15,%r14
        jb      L$outer_loop
        movq    %r8,8(%rdi)
        movq    %r9,(%rdi)

        leaq    280(%rsp),%rsi
        movq    0(%rsi),%r15
        movq    8(%rsi),%r14
        movq    16(%rsi),%r13
        movq    24(%rsi),%r12
        movq    32(%rsi),%rbp
        movq    40(%rsi),%rbx
        leaq    48(%rsi),%rsp
L$ghash_epilogue:
        .byte   0xf3,0xc3

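/*
 * Precompute key material for the CLMUL path: H shifted left one bit
 * and reduced via L$0x1c2_polynomial is stored at Htable[0], and its
 * carry-less square (H^2) at Htable[1]. Assumed C prototype (OpenSSL
 * gcm128.c convention; annotation only, not from the generated file):
 *   void gcm_init_clmul(u128 Htable[16], const u64 Xi[2]);
 * The .byte sequences encode pclmulqdq for old assemblers, e.g.
 * .byte 102,15,58,68,194,0 is pclmulqdq $0x00,%xmm2,%xmm0.
 */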
.globl  _gcm_init_clmul

.p2align        4
_gcm_init_clmul:
        movdqu  (%rsi),%xmm2
        pshufd  $78,%xmm2,%xmm2


        pshufd  $255,%xmm2,%xmm4
        movdqa  %xmm2,%xmm3
        psllq   $1,%xmm2
        pxor    %xmm5,%xmm5
        psrlq   $63,%xmm3
        pcmpgtd %xmm4,%xmm5
        pslldq  $8,%xmm3
        por     %xmm3,%xmm2


        pand    L$0x1c2_polynomial(%rip),%xmm5
        pxor    %xmm5,%xmm2


        movdqa  %xmm2,%xmm0
        movdqa  %xmm0,%xmm1
        pshufd  $78,%xmm0,%xmm3
        pshufd  $78,%xmm2,%xmm4
        pxor    %xmm0,%xmm3
        pxor    %xmm2,%xmm4
.byte   102,15,58,68,194,0
.byte   102,15,58,68,202,17
.byte   102,15,58,68,220,0
        pxor    %xmm0,%xmm3
        pxor    %xmm1,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm1
        pxor    %xmm4,%xmm0

        movdqa  %xmm0,%xmm3
        psllq   $1,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $5,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $57,%xmm0
        movdqa  %xmm0,%xmm4
        pslldq  $8,%xmm0
        psrldq  $8,%xmm4
        pxor    %xmm3,%xmm0
        pxor    %xmm4,%xmm1


        movdqa  %xmm0,%xmm4
        psrlq   $5,%xmm0
        pxor    %xmm4,%xmm0
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        pxor    %xmm1,%xmm4
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        movdqu  %xmm2,(%rdi)
        movdqu  %xmm0,16(%rdi)
        .byte   0xf3,0xc3

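/*
 * One GF(2^128) multiplication of Xi by H using PCLMULQDQ (Karatsuba
 * on the 64-bit halves, set up by the pshufd/pxor pairs). Assumed C
 * prototype (OpenSSL gcm128.c convention; annotation only):
 *   void gcm_gmult_clmul(u64 Xi[2], const u128 Htable[16]);
 * .byte 102,15,56,0,197 encodes pshufb %xmm5,%xmm0, the byte swap
 * through L$bswap_mask on entry and exit.
 */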
.globl  _gcm_gmult_clmul

.p2align        4
_gcm_gmult_clmul:
        movdqu  (%rdi),%xmm0
        movdqa  L$bswap_mask(%rip),%xmm5
        movdqu  (%rsi),%xmm2
.byte   102,15,56,0,197
        movdqa  %xmm0,%xmm1
        pshufd  $78,%xmm0,%xmm3
        pshufd  $78,%xmm2,%xmm4
        pxor    %xmm0,%xmm3
        pxor    %xmm2,%xmm4
.byte   102,15,58,68,194,0
.byte   102,15,58,68,202,17
.byte   102,15,58,68,220,0
        pxor    %xmm0,%xmm3
        pxor    %xmm1,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm1
        pxor    %xmm4,%xmm0

        movdqa  %xmm0,%xmm3
        psllq   $1,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $5,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $57,%xmm0
        movdqa  %xmm0,%xmm4
        pslldq  $8,%xmm0
        psrldq  $8,%xmm4
        pxor    %xmm3,%xmm0
        pxor    %xmm4,%xmm1


        movdqa  %xmm0,%xmm4
        psrlq   $5,%xmm0
        pxor    %xmm4,%xmm0
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        pxor    %xmm1,%xmm4
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
.byte   102,15,56,0,197
        movdqu  %xmm0,(%rdi)
        .byte   0xf3,0xc3

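/*
 * Hash a buffer into Xi with PCLMULQDQ, two blocks per L$mod_loop
 * iteration using H (%xmm2) and H^2 (%xmm8, loaded from Htable[1]);
 * L$even_tail/L$odd_tail handle the remainder. Assumed C prototype
 * (OpenSSL gcm128.c convention; annotation only):
 *   void gcm_ghash_clmul(u64 Xi[2], const u128 Htable[16],
 *                        const u8 *inp, size_t len);
 * with %rdi = Xi, %rsi = Htable, %rdx = inp, %rcx = len. The REX-
 * prefixed .byte 102,65,15,58,68,192,0 is pclmulqdq $0x00,%xmm8,%xmm0.
 */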
.globl  _gcm_ghash_clmul

.p2align        4
_gcm_ghash_clmul:
        movdqa  L$bswap_mask(%rip),%xmm5

        movdqu  (%rdi),%xmm0
        movdqu  (%rsi),%xmm2
.byte   102,15,56,0,197

        subq    $16,%rcx
        jz      L$odd_tail

        movdqu  16(%rsi),%xmm8





        movdqu  (%rdx),%xmm3
        movdqu  16(%rdx),%xmm6
.byte   102,15,56,0,221
.byte   102,15,56,0,245
        pxor    %xmm3,%xmm0
        movdqa  %xmm6,%xmm7
        pshufd  $78,%xmm6,%xmm3
        pshufd  $78,%xmm2,%xmm4
        pxor    %xmm6,%xmm3
        pxor    %xmm2,%xmm4
.byte   102,15,58,68,242,0
.byte   102,15,58,68,250,17
.byte   102,15,58,68,220,0
        pxor    %xmm6,%xmm3
        pxor    %xmm7,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm7
        pxor    %xmm4,%xmm6
        movdqa  %xmm0,%xmm1
        pshufd  $78,%xmm0,%xmm3
        pshufd  $78,%xmm8,%xmm4
        pxor    %xmm0,%xmm3
        pxor    %xmm8,%xmm4

        leaq    32(%rdx),%rdx
        subq    $32,%rcx
        jbe     L$even_tail

L$mod_loop:
.byte   102,65,15,58,68,192,0
.byte   102,65,15,58,68,200,17
.byte   102,15,58,68,220,0
        pxor    %xmm0,%xmm3
        pxor    %xmm1,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm1
        pxor    %xmm4,%xmm0
        movdqu  (%rdx),%xmm3
        pxor    %xmm6,%xmm0
        pxor    %xmm7,%xmm1

        movdqu  16(%rdx),%xmm6
.byte   102,15,56,0,221
.byte   102,15,56,0,245

        movdqa  %xmm6,%xmm7
        pshufd  $78,%xmm6,%xmm9
        pshufd  $78,%xmm2,%xmm10
        pxor    %xmm6,%xmm9
        pxor    %xmm2,%xmm10
        pxor    %xmm3,%xmm1

        movdqa  %xmm0,%xmm3
        psllq   $1,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $5,%xmm0
        pxor    %xmm3,%xmm0
.byte   102,15,58,68,242,0
        psllq   $57,%xmm0
        movdqa  %xmm0,%xmm4
        pslldq  $8,%xmm0
        psrldq  $8,%xmm4
        pxor    %xmm3,%xmm0
        pxor    %xmm4,%xmm1

.byte   102,15,58,68,250,17
        movdqa  %xmm0,%xmm4
        psrlq   $5,%xmm0
        pxor    %xmm4,%xmm0
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        pxor    %xmm1,%xmm4
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0

.byte   102,69,15,58,68,202,0
        movdqa  %xmm0,%xmm1
        pshufd  $78,%xmm0,%xmm3
        pshufd  $78,%xmm8,%xmm4
        pxor    %xmm0,%xmm3
        pxor    %xmm8,%xmm4

        pxor    %xmm6,%xmm9
        pxor    %xmm7,%xmm9
        movdqa  %xmm9,%xmm10
        psrldq  $8,%xmm9
        pslldq  $8,%xmm10
        pxor    %xmm9,%xmm7
        pxor    %xmm10,%xmm6

        leaq    32(%rdx),%rdx
        subq    $32,%rcx
        ja      L$mod_loop

L$even_tail:
.byte   102,65,15,58,68,192,0
.byte   102,65,15,58,68,200,17
.byte   102,15,58,68,220,0
        pxor    %xmm0,%xmm3
        pxor    %xmm1,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm1
        pxor    %xmm4,%xmm0
        pxor    %xmm6,%xmm0
        pxor    %xmm7,%xmm1

        movdqa  %xmm0,%xmm3
        psllq   $1,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $5,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $57,%xmm0
        movdqa  %xmm0,%xmm4
        pslldq  $8,%xmm0
        psrldq  $8,%xmm4
        pxor    %xmm3,%xmm0
        pxor    %xmm4,%xmm1


        movdqa  %xmm0,%xmm4
        psrlq   $5,%xmm0
        pxor    %xmm4,%xmm0
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        pxor    %xmm1,%xmm4
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        testq   %rcx,%rcx
        jnz     L$done

L$odd_tail:
        movdqu  (%rdx),%xmm3
.byte   102,15,56,0,221
        pxor    %xmm3,%xmm0
        movdqa  %xmm0,%xmm1
        pshufd  $78,%xmm0,%xmm3
        pshufd  $78,%xmm2,%xmm4
        pxor    %xmm0,%xmm3
        pxor    %xmm2,%xmm4
.byte   102,15,58,68,194,0
.byte   102,15,58,68,202,17
.byte   102,15,58,68,220,0
        pxor    %xmm0,%xmm3
        pxor    %xmm1,%xmm3

        movdqa  %xmm3,%xmm4
        psrldq  $8,%xmm3
        pslldq  $8,%xmm4
        pxor    %xmm3,%xmm1
        pxor    %xmm4,%xmm0

        movdqa  %xmm0,%xmm3
        psllq   $1,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $5,%xmm0
        pxor    %xmm3,%xmm0
        psllq   $57,%xmm0
        movdqa  %xmm0,%xmm4
        pslldq  $8,%xmm0
        psrldq  $8,%xmm4
        pxor    %xmm3,%xmm0
        pxor    %xmm4,%xmm1


        movdqa  %xmm0,%xmm4
        psrlq   $5,%xmm0
        pxor    %xmm4,%xmm0
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
        pxor    %xmm1,%xmm4
        psrlq   $1,%xmm0
        pxor    %xmm4,%xmm0
L$done:
.byte   102,15,56,0,197
        movdqu  %xmm0,(%rdi)
        .byte   0xf3,0xc3
L$SEH_end_gcm_ghash_clmul:

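/*
 * Constants: L$bswap_mask reverses the 16 bytes of a block (GHASH is
 * defined big-endian); L$0x1c2_polynomial is the reduction constant
 * used by gcm_init_clmul (0xc2 in the top byte once loaded).
 */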
.p2align        6
L$bswap_mask:
.byte   15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
L$0x1c2_polynomial:
.byte   1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
.p2align        6

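/*
 * L$rem_4bit and L$rem_8bit hold precomputed remainders for folding
 * the 4 or 8 bits shifted out of the accumulator back in, modulo the
 * GHASH polynomial x^128 + x^7 + x^2 + x + 1.
 */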
L$rem_4bit:
.long   0,0,0,471859200,0,943718400,0,610271232
.long   0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long   0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long   0,2441084928,0,2376073216,0,2847932416,0,3051356160

L$rem_8bit:
.value  0x0000,0x01C2,0x0384,0x0246,0x0708,0x06CA,0x048C,0x054E
.value  0x0E10,0x0FD2,0x0D94,0x0C56,0x0918,0x08DA,0x0A9C,0x0B5E
.value  0x1C20,0x1DE2,0x1FA4,0x1E66,0x1B28,0x1AEA,0x18AC,0x196E
.value  0x1230,0x13F2,0x11B4,0x1076,0x1538,0x14FA,0x16BC,0x177E
.value  0x3840,0x3982,0x3BC4,0x3A06,0x3F48,0x3E8A,0x3CCC,0x3D0E
.value  0x3650,0x3792,0x35D4,0x3416,0x3158,0x309A,0x32DC,0x331E
.value  0x2460,0x25A2,0x27E4,0x2626,0x2368,0x22AA,0x20EC,0x212E
.value  0x2A70,0x2BB2,0x29F4,0x2836,0x2D78,0x2CBA,0x2EFC,0x2F3E
.value  0x7080,0x7142,0x7304,0x72C6,0x7788,0x764A,0x740C,0x75CE
.value  0x7E90,0x7F52,0x7D14,0x7CD6,0x7998,0x785A,0x7A1C,0x7BDE
.value  0x6CA0,0x6D62,0x6F24,0x6EE6,0x6BA8,0x6A6A,0x682C,0x69EE
.value  0x62B0,0x6372,0x6134,0x60F6,0x65B8,0x647A,0x663C,0x67FE
.value  0x48C0,0x4902,0x4B44,0x4A86,0x4FC8,0x4E0A,0x4C4C,0x4D8E
.value  0x46D0,0x4712,0x4554,0x4496,0x41D8,0x401A,0x425C,0x439E
.value  0x54E0,0x5522,0x5764,0x56A6,0x53E8,0x522A,0x506C,0x51AE
.value  0x5AF0,0x5B32,0x5974,0x58B6,0x5DF8,0x5C3A,0x5E7C,0x5FBE
.value  0xE100,0xE0C2,0xE284,0xE346,0xE608,0xE7CA,0xE58C,0xE44E
.value  0xEF10,0xEED2,0xEC94,0xED56,0xE818,0xE9DA,0xEB9C,0xEA5E
.value  0xFD20,0xFCE2,0xFEA4,0xFF66,0xFA28,0xFBEA,0xF9AC,0xF86E
.value  0xF330,0xF2F2,0xF0B4,0xF176,0xF438,0xF5FA,0xF7BC,0xF67E
.value  0xD940,0xD882,0xDAC4,0xDB06,0xDE48,0xDF8A,0xDDCC,0xDC0E
.value  0xD750,0xD692,0xD4D4,0xD516,0xD058,0xD19A,0xD3DC,0xD21E
.value  0xC560,0xC4A2,0xC6E4,0xC726,0xC268,0xC3AA,0xC1EC,0xC02E
.value  0xCB70,0xCAB2,0xC8F4,0xC936,0xCC78,0xCDBA,0xCFFC,0xCE3E
.value  0x9180,0x9042,0x9204,0x93C6,0x9688,0x974A,0x950C,0x94CE
.value  0x9F90,0x9E52,0x9C14,0x9DD6,0x9898,0x995A,0x9B1C,0x9ADE
.value  0x8DA0,0x8C62,0x8E24,0x8FE6,0x8AA8,0x8B6A,0x892C,0x88EE
.value  0x83B0,0x8272,0x8034,0x81F6,0x84B8,0x857A,0x873C,0x86FE
.value  0xA9C0,0xA802,0xAA44,0xAB86,0xAEC8,0xAF0A,0xAD4C,0xAC8E
.value  0xA7D0,0xA612,0xA454,0xA596,0xA0D8,0xA11A,0xA35C,0xA29E
.value  0xB5E0,0xB422,0xB664,0xB7A6,0xB2E8,0xB32A,0xB16C,0xB0AE
.value  0xBBF0,0xBA32,0xB874,0xB9B6,0xBCF8,0xBD3A,0xBF7C,0xBEBE

.byte   71,72,65,83,72,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align        6