# Copyright (c) 2011-2012, Andy Polyakov # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain copyright notices, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials # provided with the distribution. # # * Neither the name of the Andy Polyakov nor the names of its # copyright holder and contributors may be used to endorse or # promote products derived from this software without specific # prior written permission. # # ALTERNATIVELY, provided that this notice is retained in full, this # product may be distributed under the terms of the GNU General Public # License (GPL), in which case the provisions of the GPL apply INSTEAD OF # those given above. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#
# *** This file is auto-generated ***
#
# AES-NI primitives, x86-64 AT&T syntax, Windows/COFF target
# (.def/.scl COFF symbol records; Win64 argument registers).
# The .byte sequences are AES-NI instructions emitted numerically
# for old assemblers; each is decoded in a trailing comment.
.text

# void aesni_encrypt(const u8 *in, u8 *out, const AES_KEY *key)
# Win64 args: rcx = in, rdx = out, r8 = key schedule.
# Encrypts one 16-byte block in xmm2.
.globl	aesni_encrypt
.def	aesni_encrypt;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_encrypt:
	movups	(%rcx),%xmm2		# load one block of input
	movl	240(%r8),%eax		# eax = round count (key->rounds at offset 240)
	movups	(%r8),%xmm0		# round key 0
	movups	16(%r8),%xmm1		# round key 1
	leaq	32(%r8),%r8
	xorps	%xmm0,%xmm2		# initial whitening
.Loop_enc1_1:				# one AES round per iteration
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
	movups	(%r8),%xmm1		# next round key
	leaq	16(%r8),%r8
	jnz	.Loop_enc1_1
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
	movups	%xmm2,(%rdx)		# store ciphertext
.byte	0xf3,0xc3			# rep ret

# void aesni_decrypt(const u8 *in, u8 *out, const AES_KEY *key)
# Same shape as aesni_encrypt, using aesdec/aesdeclast.
.globl	aesni_decrypt
.def	aesni_decrypt;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_decrypt:
	movups	(%rcx),%xmm2
	movl	240(%r8),%eax
	movups	(%r8),%xmm0
	movups	16(%r8),%xmm1
	leaq	32(%r8),%r8
	xorps	%xmm0,%xmm2
.Loop_dec1_2:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
	decl	%eax
	movups	(%r8),%xmm1
	leaq	16(%r8),%r8
	jnz	.Loop_dec1_2
.byte	102,15,56,223,209		# aesdeclast %xmm1,%xmm2
	movups	%xmm2,(%rdx)
.byte	0xf3,0xc3			# rep ret

# _aesni_encrypt3: encrypt 3 blocks (xmm2-xmm4) in parallel.
# In: rcx = key schedule, eax = round count; clobbers xmm0/xmm1.
# shrl $1,%eax because the loop below does two rounds per iteration.
.def	_aesni_encrypt3;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_encrypt3:
	movups	(%rcx),%xmm0		# round key 0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2		# whiten all three blocks
	xorps	%xmm0,%xmm3
	xorps	%xmm0,%xmm4
	movups	(%rcx),%xmm0
.Lenc_loop3:				# two rounds per pass, keys in xmm1/xmm0
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	movups	16(%rcx),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	movups	(%rcx),%xmm0
	jnz	.Lenc_loop3
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
.byte	102,15,56,221,224		# aesenclast %xmm0,%xmm4
.byte	0xf3,0xc3			# rep ret

# _aesni_decrypt3: decrypt 3 blocks (xmm2-xmm4) in parallel.
# Mirror of _aesni_encrypt3 with aesdec/aesdeclast.
.def	_aesni_decrypt3;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_decrypt3:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	xorps	%xmm0,%xmm3
	xorps	%xmm0,%xmm4
	movups	(%rcx),%xmm0
.Ldec_loop3:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
	movups	16(%rcx),%xmm1
.byte	102,15,56,222,208		# aesdec %xmm0,%xmm2
.byte	102,15,56,222,216		# aesdec %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,222,224		# aesdec %xmm0,%xmm4
	movups	(%rcx),%xmm0
	jnz	.Ldec_loop3
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,223,208		# aesdeclast %xmm0,%xmm2
.byte	102,15,56,223,216		# aesdeclast %xmm0,%xmm3
.byte	102,15,56,223,224		# aesdeclast %xmm0,%xmm4 (end of _aesni_decrypt3)
.byte	0xf3,0xc3			# rep ret

# _aesni_encrypt4: encrypt 4 blocks (xmm2-xmm5) in parallel.
# In: rcx = key schedule, eax = round count; clobbers xmm0/xmm1.
.def	_aesni_encrypt4;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_encrypt4:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2		# whiten all four blocks
	xorps	%xmm0,%xmm3
	xorps	%xmm0,%xmm4
	xorps	%xmm0,%xmm5
	movups	(%rcx),%xmm0
.Lenc_loop4:				# two rounds per pass
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movups	16(%rcx),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	movups	(%rcx),%xmm0
	jnz	.Lenc_loop4
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
.byte	102,15,56,221,224		# aesenclast %xmm0,%xmm4
.byte	102,15,56,221,232		# aesenclast %xmm0,%xmm5
.byte	0xf3,0xc3			# rep ret

# _aesni_decrypt4: decrypt 4 blocks (xmm2-xmm5) in parallel.
.def	_aesni_decrypt4;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_decrypt4:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	xorps	%xmm0,%xmm3
	xorps	%xmm0,%xmm4
	xorps	%xmm0,%xmm5
	movups	(%rcx),%xmm0
.Ldec_loop4:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
	movups	16(%rcx),%xmm1
.byte	102,15,56,222,208		# aesdec %xmm0,%xmm2
.byte	102,15,56,222,216		# aesdec %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,222,224		# aesdec %xmm0,%xmm4
.byte	102,15,56,222,232		# aesdec %xmm0,%xmm5
	movups	(%rcx),%xmm0
	jnz	.Ldec_loop4
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
.byte	102,15,56,223,208		# aesdeclast %xmm0,%xmm2
.byte	102,15,56,223,216		# aesdeclast %xmm0,%xmm3
.byte	102,15,56,223,224		# aesdeclast %xmm0,%xmm4
.byte	102,15,56,223,232		# aesdeclast %xmm0,%xmm5
.byte	0xf3,0xc3			# rep ret

# _aesni_encrypt6: encrypt 6 blocks (xmm2-xmm7) in parallel.
# First round is interleaved with the key whitening of later blocks.
.def	_aesni_encrypt6;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_encrypt6:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	pxor	%xmm0,%xmm4
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	pxor	%xmm0,%xmm5
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	pxor	%xmm0,%xmm6
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	pxor	%xmm0,%xmm7
	decl	%eax			# first round already done above
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	movups	(%rcx),%xmm0
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	jmp	.Lenc_loop6_enter
.p2align	4
.Lenc_loop6:
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.Lenc_loop6_enter:
	movups	16(%rcx),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
	movups	(%rcx),%xmm0
	jnz	.Lenc_loop6
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
.byte	102,15,56,221,224		# aesenclast %xmm0,%xmm4
.byte	102,15,56,221,232		# aesenclast %xmm0,%xmm5
.byte	102,15,56,221,240		# aesenclast %xmm0,%xmm6
.byte	102,15,56,221,248		# aesenclast %xmm0,%xmm7
.byte	0xf3,0xc3			# rep ret

# _aesni_decrypt6: decrypt 6 blocks (xmm2-xmm7) in parallel.
.def	_aesni_decrypt6;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_decrypt6:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	pxor	%xmm0,%xmm3
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
	pxor	%xmm0,%xmm4
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	pxor	%xmm0,%xmm5
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
	pxor	%xmm0,%xmm6
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
	pxor	%xmm0,%xmm7
	decl	%eax
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
	movups	(%rcx),%xmm0
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
	jmp	.Ldec_loop6_enter
.p2align	4
.Ldec_loop6:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
.Ldec_loop6_enter:
	movups	16(%rcx),%xmm1
.byte	102,15,56,222,208		# aesdec %xmm0,%xmm2
.byte	102,15,56,222,216		# aesdec %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,222,224		# aesdec %xmm0,%xmm4
.byte	102,15,56,222,232		# aesdec %xmm0,%xmm5
.byte	102,15,56,222,240		# aesdec %xmm0,%xmm6
.byte	102,15,56,222,248		# aesdec %xmm0,%xmm7
	movups	(%rcx),%xmm0
	jnz	.Ldec_loop6
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
.byte	102,15,56,223,208		# aesdeclast %xmm0,%xmm2
.byte	102,15,56,223,216		# aesdeclast %xmm0,%xmm3
.byte	102,15,56,223,224		# aesdeclast %xmm0,%xmm4
.byte	102,15,56,223,232		# aesdeclast %xmm0,%xmm5
.byte	102,15,56,223,240		# aesdeclast %xmm0,%xmm6
.byte	102,15,56,223,248		# aesdeclast %xmm0,%xmm7
.byte	0xf3,0xc3			# rep ret

# _aesni_encrypt8: encrypt 8 blocks (xmm2-xmm9) in parallel.
# .Lenc_loop8_enter is also tail-called from the CTR32 code below.
.def	_aesni_encrypt8;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_encrypt8:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	xorps	%xmm0,%xmm3
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	pxor	%xmm0,%xmm4
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	pxor	%xmm0,%xmm5
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	pxor	%xmm0,%xmm6
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	pxor	%xmm0,%xmm7
	decl	%eax
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	pxor	%xmm0,%xmm8
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	pxor	%xmm0,%xmm9
	movups	(%rcx),%xmm0
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	16(%rcx),%xmm1
	jmp	.Lenc_loop8_enter
.p2align	4
.Lenc_loop8:
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	16(%rcx),%xmm1
.Lenc_loop8_enter:
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	(%rcx),%xmm0
	jnz	.Lenc_loop8
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
.byte	102,15,56,221,224		# aesenclast %xmm0,%xmm4
.byte	102,15,56,221,232		# aesenclast %xmm0,%xmm5
.byte	102,15,56,221,240		# aesenclast %xmm0,%xmm6
.byte	102,15,56,221,248		# aesenclast %xmm0,%xmm7
.byte	102,68,15,56,221,192		# aesenclast %xmm0,%xmm8
.byte	102,68,15,56,221,200		# aesenclast %xmm0,%xmm9
.byte	0xf3,0xc3			# rep ret

# _aesni_decrypt8: decrypt 8 blocks (xmm2-xmm9) in parallel.
.def	_aesni_decrypt8;	.scl 3;	.type 32;	.endef
.p2align	4
_aesni_decrypt8:
	movups	(%rcx),%xmm0
	shrl	$1,%eax			# rounds/2: loop is 2x unrolled
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
	xorps	%xmm0,%xmm3
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
	pxor	%xmm0,%xmm4
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	pxor	%xmm0,%xmm5
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
	pxor	%xmm0,%xmm6
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
	pxor	%xmm0,%xmm7
	decl	%eax
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
	pxor	%xmm0,%xmm8
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
	pxor	%xmm0,%xmm9
	movups	(%rcx),%xmm0
.byte	102,68,15,56,222,193		# aesdec %xmm1,%xmm8
.byte	102,68,15,56,222,201		# aesdec %xmm1,%xmm9
	movups	16(%rcx),%xmm1
	jmp	.Ldec_loop8_enter
.p2align	4
.Ldec_loop8:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
	decl	%eax
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
.byte	102,68,15,56,222,193		# aesdec %xmm1,%xmm8
.byte	102,68,15,56,222,201		# aesdec %xmm1,%xmm9
	movups	16(%rcx),%xmm1
.Ldec_loop8_enter:
.byte	102,15,56,222,208		# aesdec %xmm0,%xmm2
.byte	102,15,56,222,216		# aesdec %xmm0,%xmm3
	leaq	32(%rcx),%rcx
.byte	102,15,56,222,224		# aesdec %xmm0,%xmm4
.byte	102,15,56,222,232		# aesdec %xmm0,%xmm5
.byte	102,15,56,222,240		# aesdec %xmm0,%xmm6
.byte	102,15,56,222,248		# aesdec %xmm0,%xmm7
.byte	102,68,15,56,222,192		# aesdec %xmm0,%xmm8
.byte	102,68,15,56,222,200		# aesdec %xmm0,%xmm9
	movups	(%rcx),%xmm0
	jnz	.Ldec_loop8
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
.byte	102,15,56,222,217		# aesdec %xmm1,%xmm3
.byte	102,15,56,222,225		# aesdec %xmm1,%xmm4
.byte	102,15,56,222,233		# aesdec %xmm1,%xmm5
.byte	102,15,56,222,241		# aesdec %xmm1,%xmm6
.byte	102,15,56,222,249		# aesdec %xmm1,%xmm7
.byte	102,68,15,56,222,193		# aesdec %xmm1,%xmm8
.byte	102,68,15,56,222,201		# aesdec %xmm1,%xmm9
.byte	102,15,56,223,208		# aesdeclast %xmm0,%xmm2
.byte	102,15,56,223,216		# aesdeclast %xmm0,%xmm3
.byte	102,15,56,223,224		# aesdeclast %xmm0,%xmm4
.byte	102,15,56,223,232		# aesdeclast %xmm0,%xmm5
.byte	102,15,56,223,240		# aesdeclast %xmm0,%xmm6
.byte	102,15,56,223,248		# aesdeclast %xmm0,%xmm7
.byte	102,68,15,56,223,192		# aesdeclast %xmm0,%xmm8
.byte	102,68,15,56,223,200		# aesdeclast %xmm0,%xmm9
.byte	0xf3,0xc3			# rep ret

# void aesni_ecb_encrypt(const u8 *in, u8 *out, size_t len,
#                        const AES_KEY *key, int enc)
# Win64 entry: args arrive in rcx,rdx,r8,r9 + 40(%rsp) and are
# remapped onto rdi,rsi,rdx,rcx,r8; rdi/rsi are preserved in the
# caller's shadow space (8/16(%rsp)) and restored at .Lecb_ret.
.globl	aesni_ecb_encrypt
.def	aesni_ecb_encrypt;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_ecb_encrypt:
	movq	%rdi,8(%rsp)		# save rdi/rsi (callee-saved on Win64)
	movq	%rsi,16(%rsp)
	movq	%rsp,%rax
.LSEH_begin_aesni_ecb_encrypt:
	movq	%rcx,%rdi		# in
	movq	%rdx,%rsi		# out
	movq	%r8,%rdx		# len
	movq	%r9,%rcx		# key
	movq	40(%rsp),%r8		# enc flag (5th arg on stack)
	andq	$-16,%rdx		# round length down to whole blocks
	jz	.Lecb_ret
	movl	240(%rcx),%eax		# round count
	movups	(%rcx),%xmm0
	movq	%rcx,%r11		# r11 = key, r10d = rounds (reloaded per batch)
	movl	%eax,%r10d
	testl	%r8d,%r8d		# enc==0 -> decrypt path
	jz	.Lecb_decrypt
	cmpq	$128,%rdx		# at least 8 blocks? use 8-way loop
	jb	.Lecb_enc_tail
	movdqu	(%rdi),%xmm2		# preload first 8 blocks
	movdqu	16(%rdi),%xmm3
	movdqu	32(%rdi),%xmm4
	movdqu	48(%rdi),%xmm5
	movdqu	64(%rdi),%xmm6
	movdqu	80(%rdi),%xmm7
	movdqu	96(%rdi),%xmm8
	movdqu	112(%rdi),%xmm9
	leaq	128(%rdi),%rdi
	subq	$128,%rdx
	jmp	.Lecb_enc_loop8_enter
.p2align	4
.Lecb_enc_loop8:			# store previous batch, load next, interleaved
	movups	%xmm2,(%rsi)
	movq	%r11,%rcx		# reset key pointer/rounds clobbered by helper
	movdqu	(%rdi),%xmm2
	movl	%r10d,%eax
	movups	%xmm3,16(%rsi)
	movdqu	16(%rdi),%xmm3
	movups	%xmm4,32(%rsi)
	movdqu	32(%rdi),%xmm4
	movups	%xmm5,48(%rsi)
	movdqu	48(%rdi),%xmm5
	movups	%xmm6,64(%rsi)
	movdqu	64(%rdi),%xmm6
	movups	%xmm7,80(%rsi)
	movdqu	80(%rdi),%xmm7
	movups	%xmm8,96(%rsi)
	movdqu	96(%rdi),%xmm8
	movups	%xmm9,112(%rsi)
	leaq	128(%rsi),%rsi
	movdqu	112(%rdi),%xmm9
	leaq	128(%rdi),%rdi
.Lecb_enc_loop8_enter:
	call	_aesni_encrypt8		# encrypt the 8 blocks in xmm2-xmm9
	subq	$128,%rdx
	jnc	.Lecb_enc_loop8
	movups	%xmm2,(%rsi)		# flush last full 8-block batch
	movq	%r11,%rcx
	movups	%xmm3,16(%rsi)
	movl	%r10d,%eax
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
	movups	%xmm8,96(%rsi)
	movups	%xmm9,112(%rsi)
	leaq	128(%rsi),%rsi
	addq	$128,%rdx		# undo bias; rdx = remaining blocks*16
	jz	.Lecb_ret
.Lecb_enc_tail:				# 1..7 remaining blocks: dispatch on count
	movups	(%rdi),%xmm2
	cmpq	$32,%rdx
	jb	.Lecb_enc_one
	movups	16(%rdi),%xmm3
	je	.Lecb_enc_two
	movups	32(%rdi),%xmm4
	cmpq	$64,%rdx
	jb	.Lecb_enc_three
	movups	48(%rdi),%xmm5
	je	.Lecb_enc_four
	movups	64(%rdi),%xmm6
	cmpq	$96,%rdx
	jb	.Lecb_enc_five
	movups	80(%rdi),%xmm7
	je	.Lecb_enc_six
	movdqu	96(%rdi),%xmm8		# seven blocks: run the 8-way helper
	call	_aesni_encrypt8
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
	movups	%xmm8,96(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_one:				# single block, inline round loop
	movups	(%rcx),%xmm0
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
.Loop_enc1_3:
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
	movups	(%rcx),%xmm1
	leaq	16(%rcx),%rcx
	jnz	.Loop_enc1_3
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
	movups	%xmm2,(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_two:
	xorps	%xmm4,%xmm4		# dummy third block for 3-way helper
	call	_aesni_encrypt3
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_three:
	call	_aesni_encrypt3
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_four:
	call	_aesni_encrypt4
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_five:
	xorps	%xmm7,%xmm7		# dummy sixth block for 6-way helper
	call	_aesni_encrypt6
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_enc_six:
	call	_aesni_encrypt6
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_decrypt:				# ECB decrypt: same structure, dec helpers
	cmpq	$128,%rdx
	jb	.Lecb_dec_tail
	movdqu	(%rdi),%xmm2
	movdqu	16(%rdi),%xmm3
	movdqu	32(%rdi),%xmm4
	movdqu	48(%rdi),%xmm5
	movdqu	64(%rdi),%xmm6		# continue preloading first 8 blocks
	movdqu	80(%rdi),%xmm7
	movdqu	96(%rdi),%xmm8
	movdqu	112(%rdi),%xmm9
	leaq	128(%rdi),%rdi
	subq	$128,%rdx
	jmp	.Lecb_dec_loop8_enter
.p2align	4
.Lecb_dec_loop8:			# store previous batch, load next, interleaved
	movups	%xmm2,(%rsi)
	movq	%r11,%rcx		# reset key pointer/rounds for next call
	movdqu	(%rdi),%xmm2
	movl	%r10d,%eax
	movups	%xmm3,16(%rsi)
	movdqu	16(%rdi),%xmm3
	movups	%xmm4,32(%rsi)
	movdqu	32(%rdi),%xmm4
	movups	%xmm5,48(%rsi)
	movdqu	48(%rdi),%xmm5
	movups	%xmm6,64(%rsi)
	movdqu	64(%rdi),%xmm6
	movups	%xmm7,80(%rsi)
	movdqu	80(%rdi),%xmm7
	movups	%xmm8,96(%rsi)
	movdqu	96(%rdi),%xmm8
	movups	%xmm9,112(%rsi)
	leaq	128(%rsi),%rsi
	movdqu	112(%rdi),%xmm9
	leaq	128(%rdi),%rdi
.Lecb_dec_loop8_enter:
	call	_aesni_decrypt8
	movups	(%r11),%xmm0
	subq	$128,%rdx
	jnc	.Lecb_dec_loop8
	movups	%xmm2,(%rsi)		# flush last full batch
	movq	%r11,%rcx
	movups	%xmm3,16(%rsi)
	movl	%r10d,%eax
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
	movups	%xmm8,96(%rsi)
	movups	%xmm9,112(%rsi)
	leaq	128(%rsi),%rsi
	addq	$128,%rdx		# rdx = remaining bytes (1..7 blocks)
	jz	.Lecb_ret
.Lecb_dec_tail:				# dispatch on remaining block count
	movups	(%rdi),%xmm2
	cmpq	$32,%rdx
	jb	.Lecb_dec_one
	movups	16(%rdi),%xmm3
	je	.Lecb_dec_two
	movups	32(%rdi),%xmm4
	cmpq	$64,%rdx
	jb	.Lecb_dec_three
	movups	48(%rdi),%xmm5
	je	.Lecb_dec_four
	movups	64(%rdi),%xmm6
	cmpq	$96,%rdx
	jb	.Lecb_dec_five
	movups	80(%rdi),%xmm7
	je	.Lecb_dec_six
	movups	96(%rdi),%xmm8		# seven blocks
	movups	(%rcx),%xmm0
	call	_aesni_decrypt8
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
	movups	%xmm8,96(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_one:				# single block, inline round loop
	movups	(%rcx),%xmm0
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
.Loop_dec1_4:
.byte	102,15,56,222,209		# aesdec %xmm1,%xmm2
	decl	%eax
	movups	(%rcx),%xmm1
	leaq	16(%rcx),%rcx
	jnz	.Loop_dec1_4
.byte	102,15,56,223,209		# aesdeclast %xmm1,%xmm2
	movups	%xmm2,(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_two:
	xorps	%xmm4,%xmm4		# dummy third block for 3-way helper
	call	_aesni_decrypt3
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_three:
	call	_aesni_decrypt3
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_four:
	call	_aesni_decrypt4
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_five:
	xorps	%xmm7,%xmm7		# dummy sixth block for 6-way helper
	call	_aesni_decrypt6
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	jmp	.Lecb_ret
.p2align	4
.Lecb_dec_six:
	call	_aesni_decrypt6
	movups	%xmm2,(%rsi)
	movups	%xmm3,16(%rsi)
	movups	%xmm4,32(%rsi)
	movups	%xmm5,48(%rsi)
	movups	%xmm6,64(%rsi)
	movups	%xmm7,80(%rsi)
.Lecb_ret:
	movq	8(%rsp),%rdi		# restore Win64 callee-saved rdi/rsi
	movq	16(%rsp),%rsi
.byte	0xf3,0xc3			# rep ret
.LSEH_end_aesni_ecb_encrypt:

# void aesni_ccm64_encrypt_blocks(const u8 *in, u8 *out, size_t blocks,
#                                 const AES_KEY *key, const u8 *ivec, u8 *cmac)
# CCM with 64-bit counter: per block, encrypt the counter and fold the
# plaintext into the running CMAC (xmm3) in the same round loop.
# xmm6 = .Lincrement64 constant, xmm7 = byte-swap mask, xmm9 = counter.
.globl	aesni_ccm64_encrypt_blocks
.def	aesni_ccm64_encrypt_blocks;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_ccm64_encrypt_blocks:
	movq	%rdi,8(%rsp)		# save rdi/rsi (Win64)
	movq	%rsi,16(%rsp)
	movq	%rsp,%rax
.LSEH_begin_aesni_ccm64_encrypt_blocks:
	movq	%rcx,%rdi		# remap Win64 args
	movq	%rdx,%rsi
	movq	%r8,%rdx
	movq	%r9,%rcx
	movq	40(%rsp),%r8		# ivec
	movq	48(%rsp),%r9		# cmac
	leaq	-88(%rsp),%rsp		# spill area for xmm6-xmm9 (Win64 callee-saved)
	movaps	%xmm6,(%rsp)
	movaps	%xmm7,16(%rsp)
	movaps	%xmm8,32(%rsp)
	movaps	%xmm9,48(%rsp)
.Lccm64_enc_body:
	movl	240(%rcx),%eax		# round count
	movdqu	(%r8),%xmm9		# counter block
	movdqa	.Lincrement64(%rip),%xmm6
	movdqa	.Lbswap_mask(%rip),%xmm7
	shrl	$1,%eax			# rounds/2: 2x-unrolled loop below
	leaq	0(%rcx),%r11		# r11 = key, r10d = rounds/2
	movdqu	(%r9),%xmm3		# running CMAC
	movdqa	%xmm9,%xmm2
	movl	%eax,%r10d
.byte	102,68,15,56,0,207		# pshufb %xmm7,%xmm9 (counter to LE for paddq)
	jmp	.Lccm64_enc_outer
.p2align	4
.Lccm64_enc_outer:			# one plaintext block per iteration
	movups	(%r11),%xmm0
	movl	%r10d,%eax
	movups	(%rdi),%xmm8		# load plaintext
	xorps	%xmm0,%xmm2		# whiten counter
	movups	16(%r11),%xmm1
	xorps	%xmm8,%xmm0
	leaq	32(%r11),%rcx
	xorps	%xmm0,%xmm3		# CMAC ^= plaintext (whitened)
	movups	(%rcx),%xmm0
.Lccm64_enc2_loop:			# run counter (xmm2) and CMAC (xmm3) together
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	movups	16(%rcx),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	movups	0(%rcx),%xmm0
	jnz	.Lccm64_enc2_loop
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	paddq	%xmm6,%xmm9		# bump 64-bit counter
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
	decq	%rdx
	leaq	16(%rdi),%rdi
	xorps	%xmm2,%xmm8		# ciphertext = plaintext ^ E(counter)
	movdqa	%xmm9,%xmm2
	movups	%xmm8,(%rsi)
	leaq	16(%rsi),%rsi
.byte	102,15,56,0,215			# pshufb %xmm7,%xmm2 (counter back to BE)
	jnz	.Lccm64_enc_outer
	movups	%xmm3,(%r9)		# write back final CMAC
	movaps	(%rsp),%xmm6		# restore xmm6-xmm9
	movaps	16(%rsp),%xmm7
	movaps	32(%rsp),%xmm8
	movaps	48(%rsp),%xmm9
	leaq	88(%rsp),%rsp
.Lccm64_enc_ret:
	movq	8(%rsp),%rdi
	movq	16(%rsp),%rsi
.byte	0xf3,0xc3			# rep ret
.LSEH_end_aesni_ccm64_encrypt_blocks:

# void aesni_ccm64_decrypt_blocks(const u8 *in, u8 *out, size_t blocks,
#                                 const AES_KEY *key, const u8 *ivec, u8 *cmac)
# CCM decrypt: keystream for block N is computed together with the CMAC
# update for block N-1 (the CMAC must absorb the *plaintext*, which is
# only known after decryption, hence the one-block lag).
.globl	aesni_ccm64_decrypt_blocks
.def	aesni_ccm64_decrypt_blocks;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_ccm64_decrypt_blocks:
	movq	%rdi,8(%rsp)		# save rdi/rsi (Win64)
	movq	%rsi,16(%rsp)
	movq	%rsp,%rax
.LSEH_begin_aesni_ccm64_decrypt_blocks:
	movq	%rcx,%rdi		# remap Win64 args
	movq	%rdx,%rsi
	movq	%r8,%rdx
	movq	%r9,%rcx
	movq	40(%rsp),%r8		# ivec
	movq	48(%rsp),%r9		# cmac
	leaq	-88(%rsp),%rsp		# spill area for xmm6-xmm9
	movaps	%xmm6,(%rsp)
	movaps	%xmm7,16(%rsp)
	movaps	%xmm8,32(%rsp)
	movaps	%xmm9,48(%rsp)
.Lccm64_dec_body:
	movl	240(%rcx),%eax		# round count
	movups	(%r8),%xmm9		# counter block
	movdqu	(%r9),%xmm3		# running CMAC
	movdqa	.Lincrement64(%rip),%xmm6
	movdqa	.Lbswap_mask(%rip),%xmm7
	movaps	%xmm9,%xmm2
	movl	%eax,%r10d		# r10d = full round count here
	movq	%rcx,%r11
.byte	102,68,15,56,0,207		# pshufb %xmm7,%xmm9
	movups	(%rcx),%xmm0		# encrypt first counter block inline
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
.Loop_enc1_5:
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
	movups	(%rcx),%xmm1
	leaq	16(%rcx),%rcx
	jnz	.Loop_enc1_5
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
	movups	(%rdi),%xmm8		# first ciphertext block
	paddq	%xmm6,%xmm9
	leaq	16(%rdi),%rdi
	jmp	.Lccm64_dec_outer
.p2align	4
.Lccm64_dec_outer:
	xorps	%xmm2,%xmm8		# plaintext = ciphertext ^ keystream
	movdqa	%xmm9,%xmm2
	movl	%r10d,%eax
	movups	%xmm8,(%rsi)		# emit plaintext
	leaq	16(%rsi),%rsi
.byte	102,15,56,0,215			# pshufb %xmm7,%xmm2
	subq	$1,%rdx
	jz	.Lccm64_dec_break	# last block: CMAC-only finish below
	movups	(%r11),%xmm0
	shrl	$1,%eax			# rounds/2 for the 2x-unrolled loop
	movups	16(%r11),%xmm1
	xorps	%xmm0,%xmm8
	leaq	32(%r11),%rcx
	xorps	%xmm0,%xmm2
	xorps	%xmm8,%xmm3		# CMAC ^= recovered plaintext
	movups	(%rcx),%xmm0
.Lccm64_dec2_loop:			# next keystream (xmm2) + CMAC (xmm3) together
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	movups	16(%rcx),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	movups	0(%rcx),%xmm0
	jnz	.Lccm64_dec2_loop
	movups	(%rdi),%xmm8		# next ciphertext block
	paddq	%xmm6,%xmm9
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	leaq	16(%rdi),%rdi
.byte	102,15,56,221,208		# aesenclast %xmm0,%xmm2
.byte	102,15,56,221,216		# aesenclast %xmm0,%xmm3
	jmp	.Lccm64_dec_outer
.p2align	4
.Lccm64_dec_break:			# final CMAC update for the last plaintext
	movups	(%r11),%xmm0
	movups	16(%r11),%xmm1
	xorps	%xmm0,%xmm8
	leaq	32(%r11),%r11
	xorps	%xmm8,%xmm3
.Loop_enc1_6:
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	decl	%eax
	movups	(%r11),%xmm1		# finish CMAC round loop (.Loop_enc1_6)
	leaq	16(%r11),%r11
	jnz	.Loop_enc1_6
.byte	102,15,56,221,217		# aesenclast %xmm1,%xmm3
	movups	%xmm3,(%r9)		# write back final CMAC
	movaps	(%rsp),%xmm6		# restore xmm6-xmm9 and unwind
	movaps	16(%rsp),%xmm7
	movaps	32(%rsp),%xmm8
	movaps	48(%rsp),%xmm9
	leaq	88(%rsp),%rsp
.Lccm64_dec_ret:
	movq	8(%rsp),%rdi
	movq	16(%rsp),%rsi
.byte	0xf3,0xc3			# rep ret
.LSEH_end_aesni_ccm64_decrypt_blocks:

# void aesni_ctr32_encrypt_blocks(const u8 *in, u8 *out, size_t blocks,
#                                 const AES_KEY *key, const u8 *ivec)
# CTR mode with a 32-bit big-endian counter in the last dword of the IV.
# Eight counter blocks are kept pre-whitened in 0..112(%rsp); only their
# last dword (the counter) is patched per batch. r11d caches the last
# dword of round key 0 so counters can be stored already-whitened.
.globl	aesni_ctr32_encrypt_blocks
.def	aesni_ctr32_encrypt_blocks;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_ctr32_encrypt_blocks:
	movq	%rdi,8(%rsp)		# save rdi/rsi (Win64)
	movq	%rsi,16(%rsp)
	movq	%rsp,%rax
.LSEH_begin_aesni_ctr32_encrypt_blocks:
	movq	%rcx,%rdi		# remap Win64 args
	movq	%rdx,%rsi
	movq	%r8,%rdx
	movq	%r9,%rcx
	movq	40(%rsp),%r8		# ivec
	leaq	(%rsp),%rax
	pushq	%rbp
	subq	$288,%rsp		# frame: 8 counter blocks + xmm save area
	andq	$-16,%rsp		# 16-align for movaps/movdqa
	movaps	%xmm6,-168(%rax)	# save xmm6-xmm15 (Win64 callee-saved)
	movaps	%xmm7,-152(%rax)
	movaps	%xmm8,-136(%rax)
	movaps	%xmm9,-120(%rax)
	movaps	%xmm10,-104(%rax)
	movaps	%xmm11,-88(%rax)
	movaps	%xmm12,-72(%rax)
	movaps	%xmm13,-56(%rax)
	movaps	%xmm14,-40(%rax)
	movaps	%xmm15,-24(%rax)
.Lctr32_body:
	leaq	-8(%rax),%rbp		# rbp anchors the save area for the epilogue
	cmpq	$1,%rdx
	je	.Lctr32_one_shortcut
	movdqu	(%r8),%xmm2		# IV/counter block
	movdqu	(%rcx),%xmm0		# round key 0
	movl	12(%r8),%r8d		# r8d = BE counter dword
	pxor	%xmm0,%xmm2		# pre-whiten the counter block
	movl	12(%rcx),%r11d		# r11d = last dword of round key 0
	movdqa	%xmm2,0(%rsp)		# replicate whitened block into 8 slots
	bswapl	%r8d			# counter to host order for arithmetic
	movdqa	%xmm2,%xmm3
	movdqa	%xmm2,%xmm4
	movdqa	%xmm2,%xmm5
	movdqa	%xmm2,64(%rsp)
	movdqa	%xmm2,80(%rsp)
	movdqa	%xmm2,96(%rsp)
	movdqa	%xmm2,112(%rsp)
	movl	240(%rcx),%eax		# round count
	leaq	1(%r8),%r9		# counters +1 and +2
	leaq	2(%r8),%r10
	bswapl	%r9d
	bswapl	%r10d
	xorl	%r11d,%r9d		# whiten the counter dword itself
	xorl	%r11d,%r10d
.byte	102,65,15,58,34,217,3		# pinsrd $3,%r9d,%xmm3
	leaq	3(%r8),%r9
	movdqa	%xmm3,16(%rsp)
.byte	102,65,15,58,34,226,3		# pinsrd $3,%r10d,%xmm4
	bswapl	%r9d
	leaq	4(%r8),%r10
	movdqa	%xmm4,32(%rsp)
	xorl	%r11d,%r9d
	bswapl	%r10d
.byte	102,65,15,58,34,233,3		# pinsrd $3,%r9d,%xmm5
	xorl	%r11d,%r10d
	movdqa	%xmm5,48(%rsp)
	leaq	5(%r8),%r9
	movl	%r10d,64+12(%rsp)	# patch counters +4..+7 directly in memory
	bswapl	%r9d
	leaq	6(%r8),%r10
	xorl	%r11d,%r9d
	bswapl	%r10d
	movl	%r9d,80+12(%rsp)
	xorl	%r11d,%r10d
	leaq	7(%r8),%r9
	movl	%r10d,96+12(%rsp)
	bswapl	%r9d
	xorl	%r11d,%r9d
	movl	%r9d,112+12(%rsp)
	movups	16(%rcx),%xmm1		# round key 1
	movdqa	64(%rsp),%xmm6		# blocks 4 and 5 into registers
	movdqa	80(%rsp),%xmm7
	cmpq	$8,%rdx
	jb	.Lctr32_tail		# fewer than 8 blocks: small-batch path
	leaq	128(%rcx),%rcx		# bias key pointer (loop uses N-128 offsets)
	subq	$8,%rdx
	jmp	.Lctr32_loop8
.p2align	5
# Main 8-way CTR loop: AES rounds for the current batch are interleaved
# with byte-swapping/whitening the NEXT batch of eight counters into
# 0..112(%rsp) (one counter prepared per pair of round-key fetches).
.Lctr32_loop8:
	addl	$8,%r8d			# advance base counter by batch size
	movdqa	96(%rsp),%xmm8		# blocks 6 and 7 into registers
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	movl	%r8d,%r9d
	movdqa	112(%rsp),%xmm9
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	bswapl	%r9d
	movups	32-128(%rcx),%xmm0	# round key 2 (key pointer biased by 128)
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	xorl	%r11d,%r9d		# whiten next counter +0
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movl	%r9d,0+12(%rsp)
	leaq	1(%r8),%r9
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	48-128(%rcx),%xmm1	# round key 3
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	xorl	%r11d,%r9d		# next counter +1
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	movl	%r9d,16+12(%rsp)
	leaq	2(%r8),%r9
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	64-128(%rcx),%xmm0	# round key 4
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	xorl	%r11d,%r9d		# next counter +2
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movl	%r9d,32+12(%rsp)
	leaq	3(%r8),%r9
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	80-128(%rcx),%xmm1	# round key 5
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	xorl	%r11d,%r9d		# next counter +3
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	movl	%r9d,48+12(%rsp)
	leaq	4(%r8),%r9
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	96-128(%rcx),%xmm0	# round key 6
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	xorl	%r11d,%r9d		# next counter +4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movl	%r9d,64+12(%rsp)
	leaq	5(%r8),%r9
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	112-128(%rcx),%xmm1	# round key 7
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	xorl	%r11d,%r9d		# next counter +5
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	movl	%r9d,80+12(%rsp)
	leaq	6(%r8),%r9
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	128-128(%rcx),%xmm0	# round key 8
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	xorl	%r11d,%r9d		# next counter +6
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movl	%r9d,96+12(%rsp)
	leaq	7(%r8),%r9
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	144-128(%rcx),%xmm1	# round key 9
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	bswapl	%r9d
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	xorl	%r11d,%r9d		# next counter +7
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	movl	%r9d,112+12(%rsp)
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
	movdqu	0(%rdi),%xmm10		# start streaming in plaintext
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	160-128(%rcx),%xmm0	# round key 10
	cmpl	$11,%eax		# 10 rounds (AES-128)? finish now
	jb	.Lctr32_enc_done
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2 (rounds 10-11, AES-192+)
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	176-128(%rcx),%xmm1	# round key 11
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	192-128(%rcx),%xmm0	# round key 12
	je	.Lctr32_enc_done	# 12 rounds (AES-192)
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2 (rounds 12-13, AES-256)
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movups	208-128(%rcx),%xmm1	# round key 13
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
.byte	102,68,15,56,220,192		# aesenc %xmm0,%xmm8
.byte	102,68,15,56,220,200		# aesenc %xmm0,%xmm9
	movups	224-128(%rcx),%xmm0	# round key 14
.Lctr32_enc_done:			# final round fused with plaintext XOR
	movdqu	16(%rdi),%xmm11		# load plaintext, pre-XOR with last key
	pxor	%xmm0,%xmm10
	movdqu	32(%rdi),%xmm12
	pxor	%xmm0,%xmm11
	movdqu	48(%rdi),%xmm13
	pxor	%xmm0,%xmm12
	movdqu	64(%rdi),%xmm14
	pxor	%xmm0,%xmm13
	movdqu	80(%rdi),%xmm15
	pxor	%xmm0,%xmm14
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2 (penultimate round)
	pxor	%xmm0,%xmm15
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
.byte	102,68,15,56,220,201		# aesenc %xmm1,%xmm9
	movdqu	96(%rdi),%xmm1
.byte	102,65,15,56,221,210		# aesenclast %xmm10,%xmm2 (last round keyed
	pxor	%xmm0,%xmm1		#  by key^plaintext -> ciphertext directly)
	movdqu	112(%rdi),%xmm10
	leaq	128(%rdi),%rdi
.byte	102,65,15,56,221,219		# aesenclast %xmm11,%xmm3
	pxor	%xmm0,%xmm10
	movdqa	0(%rsp),%xmm11		# reload next batch's whitened counters
.byte	102,65,15,56,221,228		# aesenclast %xmm12,%xmm4
	movdqa	16(%rsp),%xmm12
.byte	102,65,15,56,221,237		# aesenclast %xmm13,%xmm5
	movdqa	32(%rsp),%xmm13
.byte	102,65,15,56,221,246		# aesenclast %xmm14,%xmm6
	movdqa	48(%rsp),%xmm14
.byte	102,65,15,56,221,255		# aesenclast %xmm15,%xmm7
	movdqa	64(%rsp),%xmm15
.byte	102,68,15,56,221,193		# aesenclast %xmm1,%xmm8
	movdqa	80(%rsp),%xmm0
.byte	102,69,15,56,221,202		# aesenclast %xmm10,%xmm9
	movups	16-128(%rcx),%xmm1	# round key 1 for the next batch
	movups	%xmm2,(%rsi)		# store 8 ciphertext blocks, rotate in
	movdqa	%xmm11,%xmm2		#  the next batch of counter blocks
	movups	%xmm3,16(%rsi)
	movdqa	%xmm12,%xmm3
	movups	%xmm4,32(%rsi)
	movdqa	%xmm13,%xmm4
	movups	%xmm5,48(%rsi)
	movdqa	%xmm14,%xmm5
	movups	%xmm6,64(%rsi)
	movdqa	%xmm15,%xmm6
	movups	%xmm7,80(%rsi)
	movdqa	%xmm0,%xmm7
	movups	%xmm8,96(%rsi)
	movups	%xmm9,112(%rsi)
	leaq	128(%rsi),%rsi
	subq	$8,%rdx
	jnc	.Lctr32_loop8
	addq	$8,%rdx			# rdx = remaining blocks (0..7)
	jz	.Lctr32_done
	leaq	-128(%rcx),%rcx		# un-bias key pointer
.Lctr32_tail:				# 1..7 remaining blocks
	leaq	16(%rcx),%rcx
	cmpq	$4,%rdx
	jb	.Lctr32_loop3		# 1-3 blocks
	je	.Lctr32_loop4		# exactly 4
	movdqa	96(%rsp),%xmm8		# 5-7 blocks: use the 8-way round loop
	pxor	%xmm9,%xmm9		#  with a zero eighth block
	movups	16(%rcx),%xmm0
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	leaq	16(%rcx),%rcx
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	shrl	$1,%eax			# rounds/2 minus the round done here
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	decl	%eax
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movups	(%rdi),%xmm10		# preload plaintext
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	movups	16(%rdi),%xmm11
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	movups	32(%rdi),%xmm12
.byte	102,68,15,56,220,193		# aesenc %xmm1,%xmm8
	movups	16(%rcx),%xmm1
	call	.Lenc_loop8_enter	# reuse _aesni_encrypt8's round loop
	movdqu	48(%rdi),%xmm13
	pxor	%xmm10,%xmm2		# XOR keystream with plaintext
	movdqu	64(%rdi),%xmm10
	pxor	%xmm11,%xmm3
	movdqu	%xmm2,(%rsi)
	pxor	%xmm12,%xmm4
	movdqu	%xmm3,16(%rsi)
	pxor	%xmm13,%xmm5
	movdqu	%xmm4,32(%rsi)
	pxor	%xmm10,%xmm6
	movdqu	%xmm5,48(%rsi)
	movdqu	%xmm6,64(%rsi)
	cmpq	$6,%rdx
	jb	.Lctr32_done		# exactly 5 blocks
	movups	80(%rdi),%xmm11
	xorps	%xmm11,%xmm7
	movups	%xmm7,80(%rsi)
	je	.Lctr32_done		# exactly 6 blocks
	movups	96(%rdi),%xmm12		# 7 blocks
	xorps	%xmm12,%xmm8
	movups	%xmm8,96(%rsi)
	jmp	.Lctr32_done
.p2align	5
.Lctr32_loop4:				# 4-block round loop
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	leaq	16(%rcx),%rcx
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movups	(%rcx),%xmm1
	decl	%eax
	jnz	.Lctr32_loop4
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
	movups	(%rdi),%xmm10
.byte	102,15,56,221,217		# aesenclast %xmm1,%xmm3
	movups	16(%rdi),%xmm11
.byte	102,15,56,221,225		# aesenclast %xmm1,%xmm4
	movups	32(%rdi),%xmm12
.byte	102,15,56,221,233		# aesenclast %xmm1,%xmm5
	movups	48(%rdi),%xmm13
	xorps	%xmm10,%xmm2		# XOR keystream with plaintext
	movups	%xmm2,(%rsi)
	xorps	%xmm11,%xmm3
	movups	%xmm3,16(%rsi)
	pxor	%xmm12,%xmm4
	movdqu	%xmm4,32(%rsi)
	pxor	%xmm13,%xmm5
	movdqu	%xmm5,48(%rsi)
	jmp	.Lctr32_done
.p2align	5
.Lctr32_loop3:				# 1-3 block round loop
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	leaq	16(%rcx),%rcx
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	movups	(%rcx),%xmm1
	decl	%eax
	jnz	.Lctr32_loop3
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
.byte	102,15,56,221,217		# aesenclast %xmm1,%xmm3
.byte	102,15,56,221,225		# aesenclast %xmm1,%xmm4
	movups	(%rdi),%xmm10
	xorps	%xmm10,%xmm2
	movups	%xmm2,(%rsi)
	cmpq	$2,%rdx
	jb	.Lctr32_done		# 1 block
	movups	16(%rdi),%xmm11
	xorps	%xmm11,%xmm3
	movups	%xmm3,16(%rsi)
	je	.Lctr32_done		# 2 blocks
	movups	32(%rdi),%xmm12		# 3 blocks
	xorps	%xmm12,%xmm4
	movups	%xmm4,32(%rsi)
	jmp	.Lctr32_done
.p2align	4
.Lctr32_one_shortcut:			# single-block fast path
	movups	(%r8),%xmm2		# counter block
	movups	(%rdi),%xmm10		# plaintext
	movl	240(%rcx),%eax
	movups	(%rcx),%xmm0
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
	xorps	%xmm0,%xmm2
.Loop_enc1_7:
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	decl	%eax
	movups	(%rcx),%xmm1
	leaq	16(%rcx),%rcx
	jnz	.Loop_enc1_7
.byte	102,15,56,221,209		# aesenclast %xmm1,%xmm2
	xorps	%xmm10,%xmm2
	movups	%xmm2,(%rsi)
	jmp	.Lctr32_done
.p2align	4
.Lctr32_done:				# restore xmm6-xmm15 and unwind frame
	movaps	-160(%rbp),%xmm6
	movaps	-144(%rbp),%xmm7
	movaps	-128(%rbp),%xmm8
	movaps	-112(%rbp),%xmm9
	movaps	-96(%rbp),%xmm10
	movaps	-80(%rbp),%xmm11
	movaps	-64(%rbp),%xmm12
	movaps	-48(%rbp),%xmm13
	movaps	-32(%rbp),%xmm14
	movaps	-16(%rbp),%xmm15
	leaq	(%rbp),%rsp
	popq	%rbp
.Lctr32_epilogue:
	movq	8(%rsp),%rdi
	movq	16(%rsp),%rsi
.byte	0xf3,0xc3			# rep ret
.LSEH_end_aesni_ctr32_encrypt_blocks:

# void aesni_xts_encrypt(const u8 *in, u8 *out, size_t len,
#                        const AES_KEY *key1, const AES_KEY *key2,
#                        const u8 *iv)
# XTS encryption. key2 encrypts the IV into the first tweak (xmm15);
# tweaks for 6-block batches are derived by GF(2^128) doubling using
# .Lxts_magic and kept in xmm10-xmm14 (+ a spill at 96(%rsp)).
.globl	aesni_xts_encrypt
.def	aesni_xts_encrypt;	.scl 2;	.type 32;	.endef
.p2align	4
aesni_xts_encrypt:
	movq	%rdi,8(%rsp)		# save rdi/rsi (Win64)
	movq	%rsi,16(%rsp)
	movq	%rsp,%rax
.LSEH_begin_aesni_xts_encrypt:
	movq	%rcx,%rdi		# remap Win64 args
	movq	%rdx,%rsi
	movq	%r8,%rdx
	movq	%r9,%rcx
	movq	40(%rsp),%r8		# key2 (tweak key)
	movq	48(%rsp),%r9		# iv
	leaq	(%rsp),%rax
	pushq	%rbp
	subq	$272,%rsp		# frame: tweak spills + xmm save area
	andq	$-16,%rsp
	movaps	%xmm6,-168(%rax)	# save xmm6-xmm15 (Win64 callee-saved)
	movaps	%xmm7,-152(%rax)
	movaps	%xmm8,-136(%rax)
	movaps	%xmm9,-120(%rax)
	movaps	%xmm10,-104(%rax)
	movaps	%xmm11,-88(%rax)
	movaps	%xmm12,-72(%rax)
	movaps	%xmm13,-56(%rax)
	movaps	%xmm14,-40(%rax)
	movaps	%xmm15,-24(%rax)
.Lxts_enc_body:
	leaq	-8(%rax),%rbp		# rbp anchors the save area for the epilogue
	movups	(%r9),%xmm15		# iv
	movl	240(%r8),%eax		# key2 round count
	movl	240(%rcx),%r10d		# key1 round count
	movups	(%r8),%xmm0		# encrypt iv with key2 -> initial tweak
	movups	16(%r8),%xmm1
	leaq	32(%r8),%r8
	xorps	%xmm0,%xmm15
.Loop_enc1_8:
.byte	102,68,15,56,220,249		# aesenc %xmm1,%xmm15
	decl	%eax
	movups	(%r8),%xmm1
	leaq	16(%r8),%r8
	jnz	.Loop_enc1_8
.byte	102,68,15,56,221,249		# aesenclast %xmm1,%xmm15
	movups	(%rcx),%xmm0		# key1 round key 0
	movq	%rcx,%r11		# r11 = key1
	movl	%r10d,%eax
	shll	$4,%r10d		# rounds*16 = byte offset of last round key
	movq	%rdx,%r9		# r9 = original length (for tail stealing)
	andq	$-16,%rdx		# whole blocks only
	movups	16(%rcx,%r10,1),%xmm1	# last round key
	movl	%eax,%r10d
	movdqa	.Lxts_magic(%rip),%xmm8	# GF(2^128) reduction constant
	pshufd	$95,%xmm15,%xmm9	# replicate tweak's high dwords for carry calc
	pxor	%xmm0,%xmm1
	movdqa	%xmm9,%xmm14		# tweak 0: double xmm15, whiten into xmm10
	paddd	%xmm9,%xmm9
	movdqa	%xmm15,%xmm10
	psrad	$31,%xmm14		# carry mask from sign bits
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
	pxor	%xmm0,%xmm10
	pxor	%xmm14,%xmm15
	movdqa	%xmm9,%xmm14		# tweak 1 -> xmm11
	paddd	%xmm9,%xmm9
	movdqa	%xmm15,%xmm11
	psrad	$31,%xmm14
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
	pxor	%xmm0,%xmm11
	pxor	%xmm14,%xmm15
	movdqa	%xmm9,%xmm14		# tweak 2 -> xmm12
	paddd	%xmm9,%xmm9
	movdqa	%xmm15,%xmm12
	psrad	$31,%xmm14
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
	pxor	%xmm0,%xmm12
	pxor	%xmm14,%xmm15
	movdqa	%xmm9,%xmm14		# tweak 3 -> xmm13
	paddd	%xmm9,%xmm9
	movdqa	%xmm15,%xmm13
	psrad	$31,%xmm14
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
	pxor	%xmm0,%xmm13
	pxor	%xmm14,%xmm15
	movdqa	%xmm15,%xmm14		# tweak 4 -> xmm14; xmm15 = tweak 5
	psrad	$31,%xmm9
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm9
	pxor	%xmm0,%xmm14
	pxor	%xmm9,%xmm15
	movaps	%xmm1,96(%rsp)		# spill whitened last round key
	subq	$96,%rdx
	jc	.Lxts_enc_short		# fewer than 6 blocks
	shrl	$1,%eax			# loop count for 2x-unrolled round loop
	subl	$3,%eax
	movups	16(%r11),%xmm1
	movl	%eax,%r10d
	leaq	.Lxts_magic(%rip),%r8
	jmp	.Lxts_enc_grandloop
.p2align	5
.Lxts_enc_grandloop:			# 6 blocks per iteration, tweak-XOR fused
	movdqu	0(%rdi),%xmm2
	movdqa	%xmm0,%xmm8
	movdqu	16(%rdi),%xmm3
	pxor	%xmm10,%xmm2		# plaintext ^ tweak (pre-whitened)
	movdqu	32(%rdi),%xmm4
	pxor	%xmm11,%xmm3
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	movdqu	48(%rdi),%xmm5
	pxor	%xmm12,%xmm4
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	movdqu	64(%rdi),%xmm6
	pxor	%xmm13,%xmm5
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	movdqu	80(%rdi),%xmm7
	pxor	%xmm15,%xmm8
	movdqa	96(%rsp),%xmm9
	pxor	%xmm14,%xmm6
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	movups	32(%r11),%xmm0
	leaq	96(%rdi),%rdi
	pxor	%xmm8,%xmm7
	pxor	%xmm9,%xmm10		# prepare tweak^lastkey for final rounds
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	pxor	%xmm9,%xmm11
	movdqa	%xmm10,0(%rsp)		# spill output masks for aesenclast-from-mem
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	movups	48(%r11),%xmm1
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
	pxor	%xmm9,%xmm12
	movdqa	%xmm11,16(%rsp)
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	pxor	%xmm9,%xmm13
	movdqa	%xmm12,32(%rsp)
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
	pxor	%xmm9,%xmm14
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	pxor	%xmm9,%xmm8
	movdqa	%xmm14,64(%rsp)
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
	movdqa	%xmm8,80(%rsp)
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
	movups	64(%r11),%xmm0
	leaq	64(%r11),%rcx
	pshufd	$95,%xmm15,%xmm9
	jmp	.Lxts_enc_loop6
.p2align	5
.Lxts_enc_loop6:			# middle AES rounds for all 6 blocks
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	movups	16(%rcx),%xmm1
	leaq	32(%rcx),%rcx
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
	movups	(%rcx),%xmm0
	decl	%eax
	jnz	.Lxts_enc_loop6
	movdqa	(%r8),%xmm8		# reload .Lxts_magic; final rounds below are
	movdqa	%xmm9,%xmm14		#  interleaved with next-batch tweak doubling
	paddd	%xmm9,%xmm9
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	paddq	%xmm15,%xmm15
	psrad	$31,%xmm14
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	pand	%xmm8,%xmm14
	movups	(%r11),%xmm10
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	pxor	%xmm14,%xmm15
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	movaps	%xmm10,%xmm11
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	movups	16(%rcx),%xmm1
	movdqa	%xmm9,%xmm14		# next tweak 1
	paddd	%xmm9,%xmm9
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
	pxor	%xmm15,%xmm10
	psrad	$31,%xmm14
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte	102,15,56,220,232		# aesenc %xmm0,%xmm5
	pxor	%xmm14,%xmm15
.byte	102,15,56,220,240		# aesenc %xmm0,%xmm6
	movaps	%xmm11,%xmm12
.byte	102,15,56,220,248		# aesenc %xmm0,%xmm7
	movups	32(%rcx),%xmm0
	movdqa	%xmm9,%xmm14		# next tweak 2
	paddd	%xmm9,%xmm9
.byte	102,15,56,220,209		# aesenc %xmm1,%xmm2
	pxor	%xmm15,%xmm11
	psrad	$31,%xmm14
.byte	102,15,56,220,217		# aesenc %xmm1,%xmm3
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
.byte	102,15,56,220,225		# aesenc %xmm1,%xmm4
	movdqa	%xmm13,48(%rsp)
.byte	102,15,56,220,233		# aesenc %xmm1,%xmm5
	pxor	%xmm14,%xmm15
.byte	102,15,56,220,241		# aesenc %xmm1,%xmm6
	movaps	%xmm12,%xmm13
.byte	102,15,56,220,249		# aesenc %xmm1,%xmm7
	movups	48(%rcx),%xmm1
	movdqa	%xmm9,%xmm14		# next tweak 3
	paddd	%xmm9,%xmm9
.byte	102,15,56,220,208		# aesenc %xmm0,%xmm2
	pxor	%xmm15,%xmm12
	psrad	$31,%xmm14
.byte	102,15,56,220,216		# aesenc %xmm0,%xmm3
	paddq	%xmm15,%xmm15
	pand	%xmm8,%xmm14
.byte	102,15,56,220,224		# aesenc %xmm0,%xmm4
.byte 102,15,56,220,232 pxor %xmm14,%xmm15 .byte 102,15,56,220,240 movaps %xmm13,%xmm14 .byte 102,15,56,220,248 movdqa %xmm9,%xmm0 paddd %xmm9,%xmm9 .byte 102,15,56,220,209 pxor %xmm15,%xmm13 psrad $31,%xmm0 .byte 102,15,56,220,217 paddq %xmm15,%xmm15 pand %xmm8,%xmm0 .byte 102,15,56,220,225 .byte 102,15,56,220,233 pxor %xmm0,%xmm15 movups (%r11),%xmm0 .byte 102,15,56,220,241 .byte 102,15,56,220,249 movups 16(%r11),%xmm1 pxor %xmm15,%xmm14 psrad $31,%xmm9 .byte 102,15,56,221,84,36,0 paddq %xmm15,%xmm15 pand %xmm8,%xmm9 .byte 102,15,56,221,92,36,16 .byte 102,15,56,221,100,36,32 pxor %xmm9,%xmm15 .byte 102,15,56,221,108,36,48 .byte 102,15,56,221,116,36,64 .byte 102,15,56,221,124,36,80 movl %r10d,%eax leaq 96(%rsi),%rsi movups %xmm2,-96(%rsi) movups %xmm3,-80(%rsi) movups %xmm4,-64(%rsi) movups %xmm5,-48(%rsi) movups %xmm6,-32(%rsi) movups %xmm7,-16(%rsi) subq $96,%rdx jnc .Lxts_enc_grandloop leal 7(%rax,%rax,1),%eax movq %r11,%rcx movl %eax,%r10d .Lxts_enc_short: pxor %xmm0,%xmm10 addq $96,%rdx jz .Lxts_enc_done pxor %xmm0,%xmm11 cmpq $32,%rdx jb .Lxts_enc_one pxor %xmm0,%xmm12 je .Lxts_enc_two pxor %xmm0,%xmm13 cmpq $64,%rdx jb .Lxts_enc_three pxor %xmm0,%xmm14 je .Lxts_enc_four movdqu (%rdi),%xmm2 movdqu 16(%rdi),%xmm3 movdqu 32(%rdi),%xmm4 pxor %xmm10,%xmm2 movdqu 48(%rdi),%xmm5 pxor %xmm11,%xmm3 movdqu 64(%rdi),%xmm6 leaq 80(%rdi),%rdi pxor %xmm12,%xmm4 pxor %xmm13,%xmm5 pxor %xmm14,%xmm6 call _aesni_encrypt6 xorps %xmm10,%xmm2 movdqa %xmm15,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movdqu %xmm2,(%rsi) xorps %xmm13,%xmm5 movdqu %xmm3,16(%rsi) xorps %xmm14,%xmm6 movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) movdqu %xmm6,64(%rsi) leaq 80(%rsi),%rsi jmp .Lxts_enc_done .p2align 4 .Lxts_enc_one: movups (%rdi),%xmm2 leaq 16(%rdi),%rdi xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_9: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_9 .byte 102,15,56,221,209 xorps 
%xmm10,%xmm2 movdqa %xmm11,%xmm10 movups %xmm2,(%rsi) leaq 16(%rsi),%rsi jmp .Lxts_enc_done .p2align 4 .Lxts_enc_two: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 leaq 32(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 call _aesni_encrypt3 xorps %xmm10,%xmm2 movdqa %xmm12,%xmm10 xorps %xmm11,%xmm3 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) leaq 32(%rsi),%rsi jmp .Lxts_enc_done .p2align 4 .Lxts_enc_three: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 leaq 48(%rdi),%rdi xorps %xmm10,%xmm2 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 call _aesni_encrypt3 xorps %xmm10,%xmm2 movdqa %xmm13,%xmm10 xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 movups %xmm2,(%rsi) movups %xmm3,16(%rsi) movups %xmm4,32(%rsi) leaq 48(%rsi),%rsi jmp .Lxts_enc_done .p2align 4 .Lxts_enc_four: movups (%rdi),%xmm2 movups 16(%rdi),%xmm3 movups 32(%rdi),%xmm4 xorps %xmm10,%xmm2 movups 48(%rdi),%xmm5 leaq 64(%rdi),%rdi xorps %xmm11,%xmm3 xorps %xmm12,%xmm4 xorps %xmm13,%xmm5 call _aesni_encrypt4 pxor %xmm10,%xmm2 movdqa %xmm14,%xmm10 pxor %xmm11,%xmm3 pxor %xmm12,%xmm4 movdqu %xmm2,(%rsi) pxor %xmm13,%xmm5 movdqu %xmm3,16(%rsi) movdqu %xmm4,32(%rsi) movdqu %xmm5,48(%rsi) leaq 64(%rsi),%rsi jmp .Lxts_enc_done .p2align 4 .Lxts_enc_done: andq $15,%r9 jz .Lxts_enc_ret movq %r9,%rdx .Lxts_enc_steal: movzbl (%rdi),%eax movzbl -16(%rsi),%ecx leaq 1(%rdi),%rdi movb %al,-16(%rsi) movb %cl,0(%rsi) leaq 1(%rsi),%rsi subq $1,%rdx jnz .Lxts_enc_steal subq %r9,%rsi movq %r11,%rcx movl %r10d,%eax movups -16(%rsi),%xmm2 xorps %xmm10,%xmm2 movups (%rcx),%xmm0 movups 16(%rcx),%xmm1 leaq 32(%rcx),%rcx xorps %xmm0,%xmm2 .Loop_enc1_10: .byte 102,15,56,220,209 decl %eax movups (%rcx),%xmm1 leaq 16(%rcx),%rcx jnz .Loop_enc1_10 .byte 102,15,56,221,209 xorps %xmm10,%xmm2 movups %xmm2,-16(%rsi) .Lxts_enc_ret: movaps -160(%rbp),%xmm6 movaps -144(%rbp),%xmm7 movaps -128(%rbp),%xmm8 movaps -112(%rbp),%xmm9 movaps -96(%rbp),%xmm10 movaps -80(%rbp),%xmm11 movaps -64(%rbp),%xmm12 movaps -48(%rbp),%xmm13 movaps -32(%rbp),%xmm14 
# --- tail of aesni_xts_encrypt: restore non-volatile XMM regs (Win64 ABI:
# xmm6-xmm15 are callee-saved), unwind the frame and return.
movaps -16(%rbp),%xmm15
leaq (%rbp),%rsp
popq %rbp
.Lxts_enc_epilogue:
movq 8(%rsp),%rdi
movq 16(%rsp),%rsi
.byte 0xf3,0xc3
.LSEH_end_aesni_xts_encrypt:
#-----------------------------------------------------------------------
# void aesni_xts_decrypt(const void *in, void *out, size_t len,
#                        const AES_KEY *key1, const AES_KEY *key2,
#                        const unsigned char iv[16])   -- NOTE(review):
# signature inferred from the matching OpenSSL perlasm source; confirm
# against aesni-x86_64.pl.  Win64 entry: args arrive in rcx/rdx/r8/r9 +
# stack and are remapped below to the rdi/rsi/rdx/rcx/r8/r9 layout the
# (SysV-style) body was generated for.
# This file is auto-generated; fix bugs in the perlasm script, not here.
#-----------------------------------------------------------------------
.globl aesni_xts_decrypt
.def aesni_xts_decrypt; .scl 2; .type 32; .endef
.p2align 4
aesni_xts_decrypt:
# Save rdi/rsi in the caller's home space (callee-saved on Win64).
movq %rdi,8(%rsp)
movq %rsi,16(%rsp)
movq %rsp,%rax
.LSEH_begin_aesni_xts_decrypt:
# Remap Win64 argument registers to the SysV positions used by the body.
movq %rcx,%rdi
movq %rdx,%rsi
movq %r8,%rdx
movq %r9,%rcx
movq 40(%rsp),%r8
movq 48(%rsp),%r9
# Frame: push rbp, carve 272 bytes, 16-align rsp, spill xmm6-xmm15
# (Win64 callee-saved) relative to the original rsp kept in rax.
leaq (%rsp),%rax
pushq %rbp
subq $272,%rsp
andq $-16,%rsp
movaps %xmm6,-168(%rax)
movaps %xmm7,-152(%rax)
movaps %xmm8,-136(%rax)
movaps %xmm9,-120(%rax)
movaps %xmm10,-104(%rax)
movaps %xmm11,-88(%rax)
movaps %xmm12,-72(%rax)
movaps %xmm13,-56(%rax)
movaps %xmm14,-40(%rax)
movaps %xmm15,-24(%rax)
.Lxts_dec_body:
leaq -8(%rax),%rbp
# Encrypt the IV with key2 to form the initial XTS tweak in xmm15.
# The .byte sequences are hand-encoded aesenc/aesenclast on xmm15
# (emitted as raw bytes for pre-AES-NI assemblers).
movups (%r9),%xmm15
movl 240(%r8),%eax
movl 240(%rcx),%r10d
movups (%r8),%xmm0
movups 16(%r8),%xmm1
leaq 32(%r8),%r8
xorps %xmm0,%xmm15
.Loop_enc1_11:
.byte 102,68,15,56,220,249
decl %eax
movups (%r8),%xmm1
leaq 16(%r8),%r8
jnz .Loop_enc1_11
.byte 102,68,15,56,221,249
# If len is not a multiple of 16, hold back one full block for
# ciphertext stealing: rdx -= 16 when (len & 15) != 0.
xorl %eax,%eax
testq $15,%rdx
setnz %al
shlq $4,%rax
subq %rax,%rdx
movups (%rcx),%xmm0
movq %rcx,%r11
movl %r10d,%eax
shll $4,%r10d
movq %rdx,%r9
andq $-16,%rdx
movups 16(%rcx,%r10,1),%xmm1
movl %eax,%r10d
# Precompute six consecutive tweaks (xmm10..xmm14 + running xmm15).
# .Lxts_magic holds 0x87, the GF(2^128) reduction constant; each step
# doubles the tweak (paddq) and conditionally folds in 0x87 via the
# psrad/pand mask trick.
movdqa .Lxts_magic(%rip),%xmm8
pshufd $95,%xmm15,%xmm9
pxor %xmm0,%xmm1
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
movdqa %xmm15,%xmm10
psrad $31,%xmm14
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
pxor %xmm0,%xmm10
pxor %xmm14,%xmm15
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
movdqa %xmm15,%xmm11
psrad $31,%xmm14
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
pxor %xmm0,%xmm11
pxor %xmm14,%xmm15
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
movdqa %xmm15,%xmm12
psrad $31,%xmm14
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
pxor %xmm0,%xmm12
pxor %xmm14,%xmm15
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
movdqa %xmm15,%xmm13
psrad $31,%xmm14
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
pxor %xmm0,%xmm13
pxor %xmm14,%xmm15
movdqa %xmm15,%xmm14
psrad $31,%xmm9
paddq %xmm15,%xmm15
pand %xmm8,%xmm9
pxor %xmm0,%xmm14
pxor %xmm9,%xmm15
movaps %xmm1,96(%rsp)
# Main path: 6 blocks (96 bytes) per iteration; short tail otherwise.
subq $96,%rdx
jc .Lxts_dec_short
shrl $1,%eax
subl $3,%eax
movups 16(%r11),%xmm1
movl %eax,%r10d
leaq .Lxts_magic(%rip),%r8
jmp .Lxts_dec_grandloop
.p2align 5
.Lxts_dec_grandloop:
# Load 6 ciphertext blocks, XOR in their tweaks, and start the first
# aesdec round (.byte 102,15,56,222,* = aesdec) interleaved with the
# stores of the tweaks to the stack for the final aesdeclast-from-memory.
movdqu 0(%rdi),%xmm2
movdqa %xmm0,%xmm8
movdqu 16(%rdi),%xmm3
pxor %xmm10,%xmm2
movdqu 32(%rdi),%xmm4
pxor %xmm11,%xmm3
.byte 102,15,56,222,209
movdqu 48(%rdi),%xmm5
pxor %xmm12,%xmm4
.byte 102,15,56,222,217
movdqu 64(%rdi),%xmm6
pxor %xmm13,%xmm5
.byte 102,15,56,222,225
movdqu 80(%rdi),%xmm7
pxor %xmm15,%xmm8
movdqa 96(%rsp),%xmm9
pxor %xmm14,%xmm6
.byte 102,15,56,222,233
movups 32(%r11),%xmm0
leaq 96(%rdi),%rdi
pxor %xmm8,%xmm7
pxor %xmm9,%xmm10
.byte 102,15,56,222,241
pxor %xmm9,%xmm11
movdqa %xmm10,0(%rsp)
.byte 102,15,56,222,249
movups 48(%r11),%xmm1
.byte 102,15,56,222,208
pxor %xmm9,%xmm12
movdqa %xmm11,16(%rsp)
.byte 102,15,56,222,216
pxor %xmm9,%xmm13
movdqa %xmm12,32(%rsp)
.byte 102,15,56,222,224
pxor %xmm9,%xmm14
.byte 102,15,56,222,232
pxor %xmm9,%xmm8
movdqa %xmm14,64(%rsp)
.byte 102,15,56,222,240
movdqa %xmm8,80(%rsp)
.byte 102,15,56,222,248
movups 64(%r11),%xmm0
leaq 64(%r11),%rcx
pshufd $95,%xmm15,%xmm9
jmp .Lxts_dec_loop6
.p2align 5
# Inner round loop: two aesdec rounds on all six blocks per iteration.
.Lxts_dec_loop6:
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
movups 16(%rcx),%xmm1
leaq 32(%rcx),%rcx
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
movups (%rcx),%xmm0
decl %eax
jnz .Lxts_dec_loop6
# Final rounds, interleaved with generation of the next six tweaks.
movdqa (%r8),%xmm8
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
.byte 102,15,56,222,209
paddq %xmm15,%xmm15
psrad $31,%xmm14
.byte 102,15,56,222,217
pand %xmm8,%xmm14
movups (%r11),%xmm10
.byte 102,15,56,222,225
.byte 102,15,56,222,233
pxor %xmm14,%xmm15
.byte 102,15,56,222,241
movaps %xmm10,%xmm11
.byte 102,15,56,222,249
movups 16(%rcx),%xmm1
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
.byte 102,15,56,222,208
pxor %xmm15,%xmm10
psrad $31,%xmm14
.byte 102,15,56,222,216
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
.byte 102,15,56,222,224
.byte 102,15,56,222,232
pxor %xmm14,%xmm15
.byte 102,15,56,222,240
movaps %xmm11,%xmm12
.byte 102,15,56,222,248
movups 32(%rcx),%xmm0
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
.byte 102,15,56,222,209
pxor %xmm15,%xmm11
psrad $31,%xmm14
.byte 102,15,56,222,217
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
.byte 102,15,56,222,225
movdqa %xmm13,48(%rsp)
.byte 102,15,56,222,233
pxor %xmm14,%xmm15
.byte 102,15,56,222,241
movaps %xmm12,%xmm13
.byte 102,15,56,222,249
movups 48(%rcx),%xmm1
movdqa %xmm9,%xmm14
paddd %xmm9,%xmm9
.byte 102,15,56,222,208
pxor %xmm15,%xmm12
psrad $31,%xmm14
.byte 102,15,56,222,216
paddq %xmm15,%xmm15
pand %xmm8,%xmm14
.byte 102,15,56,222,224
.byte 102,15,56,222,232
pxor %xmm14,%xmm15
.byte 102,15,56,222,240
movaps %xmm13,%xmm14
.byte 102,15,56,222,248
movdqa %xmm9,%xmm0
paddd %xmm9,%xmm9
.byte 102,15,56,222,209
pxor %xmm15,%xmm13
psrad $31,%xmm0
.byte 102,15,56,222,217
paddq %xmm15,%xmm15
pand %xmm8,%xmm0
.byte 102,15,56,222,225
.byte 102,15,56,222,233
pxor %xmm0,%xmm15
movups (%r11),%xmm0
.byte 102,15,56,222,241
.byte 102,15,56,222,249
movups 16(%r11),%xmm1
pxor %xmm15,%xmm14
psrad $31,%xmm9
# aesdeclast with the saved (tweak ^ last-round-key) values on the
# stack (.byte 102,15,56,223,… = aesdeclast xmmN,mem).
.byte 102,15,56,223,84,36,0
paddq %xmm15,%xmm15
pand %xmm8,%xmm9
.byte 102,15,56,223,92,36,16
.byte 102,15,56,223,100,36,32
pxor %xmm9,%xmm15
.byte 102,15,56,223,108,36,48
.byte 102,15,56,223,116,36,64
.byte 102,15,56,223,124,36,80
movl %r10d,%eax
leaq 96(%rsi),%rsi
movups %xmm2,-96(%rsi)
movups %xmm3,-80(%rsi)
movups %xmm4,-64(%rsi)
movups %xmm5,-48(%rsi)
movups %xmm6,-32(%rsi)
movups %xmm7,-16(%rsi)
subq $96,%rdx
jnc .Lxts_dec_grandloop
leal 7(%rax,%rax,1),%eax
movq %r11,%rcx
movl %eax,%r10d
# Tail: 1..5 remaining whole blocks.  The staggered pxor %xmm0 lines
# re-whiten the precomputed tweaks for the short paths below.
.Lxts_dec_short:
pxor %xmm0,%xmm10
pxor %xmm0,%xmm11
addq $96,%rdx
jz .Lxts_dec_done
pxor %xmm0,%xmm12
cmpq $32,%rdx
jb .Lxts_dec_one
pxor %xmm0,%xmm13
je .Lxts_dec_two
pxor %xmm0,%xmm14
cmpq $64,%rdx
jb .Lxts_dec_three
je .Lxts_dec_four
# Five blocks: decrypt with the 6-wide helper (one lane unused).
movdqu (%rdi),%xmm2
movdqu 16(%rdi),%xmm3
movdqu 32(%rdi),%xmm4
pxor %xmm10,%xmm2
movdqu 48(%rdi),%xmm5
pxor %xmm11,%xmm3
movdqu 64(%rdi),%xmm6
leaq 80(%rdi),%rdi
pxor %xmm12,%xmm4
pxor %xmm13,%xmm5
pxor %xmm14,%xmm6
call _aesni_decrypt6
xorps %xmm10,%xmm2
xorps %xmm11,%xmm3
xorps %xmm12,%xmm4
movdqu %xmm2,(%rsi)
xorps %xmm13,%xmm5
movdqu %xmm3,16(%rsi)
xorps %xmm14,%xmm6
movdqu %xmm4,32(%rsi)
pxor %xmm14,%xmm14
movdqu %xmm5,48(%rsi)
pcmpgtd %xmm15,%xmm14
movdqu %xmm6,64(%rsi)
leaq 80(%rsi),%rsi
pshufd $19,%xmm14,%xmm11
andq $15,%r9
jz .Lxts_dec_ret
# Compute one more tweak for the ciphertext-stealing block.
movdqa %xmm15,%xmm10
paddq %xmm15,%xmm15
pand %xmm8,%xmm11
pxor %xmm15,%xmm11
jmp .Lxts_dec_done2
.p2align 4
# One block: inline single-block aesdec loop.
.Lxts_dec_one:
movups (%rdi),%xmm2
leaq 16(%rdi),%rdi
xorps %xmm10,%xmm2
movups (%rcx),%xmm0
movups 16(%rcx),%xmm1
leaq 32(%rcx),%rcx
xorps %xmm0,%xmm2
.Loop_dec1_12:
.byte 102,15,56,222,209
decl %eax
movups (%rcx),%xmm1
leaq 16(%rcx),%rcx
jnz .Loop_dec1_12
.byte 102,15,56,223,209
xorps %xmm10,%xmm2
movdqa %xmm11,%xmm10
movups %xmm2,(%rsi)
movdqa %xmm12,%xmm11
leaq 16(%rsi),%rsi
jmp .Lxts_dec_done
.p2align 4
.Lxts_dec_two:
movups (%rdi),%xmm2
movups 16(%rdi),%xmm3
leaq 32(%rdi),%rdi
xorps %xmm10,%xmm2
xorps %xmm11,%xmm3
call _aesni_decrypt3
xorps %xmm10,%xmm2
movdqa %xmm12,%xmm10
xorps %xmm11,%xmm3
movdqa %xmm13,%xmm11
movups %xmm2,(%rsi)
movups %xmm3,16(%rsi)
leaq 32(%rsi),%rsi
jmp .Lxts_dec_done
.p2align 4
.Lxts_dec_three:
movups (%rdi),%xmm2
movups 16(%rdi),%xmm3
movups 32(%rdi),%xmm4
leaq 48(%rdi),%rdi
xorps %xmm10,%xmm2
xorps %xmm11,%xmm3
xorps %xmm12,%xmm4
call _aesni_decrypt3
xorps %xmm10,%xmm2
movdqa %xmm13,%xmm10
xorps %xmm11,%xmm3
movdqa %xmm14,%xmm11
xorps %xmm12,%xmm4
movups %xmm2,(%rsi)
movups %xmm3,16(%rsi)
movups %xmm4,32(%rsi)
leaq 48(%rsi),%rsi
jmp .Lxts_dec_done
.p2align 4
.Lxts_dec_four:
movups (%rdi),%xmm2
movups 16(%rdi),%xmm3
movups 32(%rdi),%xmm4
xorps %xmm10,%xmm2
movups 48(%rdi),%xmm5
leaq 64(%rdi),%rdi
xorps %xmm11,%xmm3
xorps %xmm12,%xmm4
xorps %xmm13,%xmm5
call _aesni_decrypt4
pxor %xmm10,%xmm2
movdqa %xmm14,%xmm10
pxor %xmm11,%xmm3
movdqa %xmm15,%xmm11
pxor %xmm12,%xmm4
movdqu %xmm2,(%rsi)
pxor %xmm13,%xmm5
movdqu %xmm3,16(%rsi)
movdqu %xmm4,32(%rsi)
movdqu %xmm5,48(%rsi)
leaq 64(%rsi),%rsi
jmp .Lxts_dec_done
.p2align 4
# Ciphertext stealing for a final partial block (r9 = len & 15).
.Lxts_dec_done:
andq $15,%r9
jz .Lxts_dec_ret
.Lxts_dec_done2:
movq %r9,%rdx
movq %r11,%rcx
movl %r10d,%eax
# Decrypt the last full block with the *next* tweak (xmm11)...
movups (%rdi),%xmm2
xorps %xmm11,%xmm2
movups (%rcx),%xmm0
movups 16(%rcx),%xmm1
leaq 32(%rcx),%rcx
xorps %xmm0,%xmm2
.Loop_dec1_13:
.byte 102,15,56,222,209
decl %eax
movups (%rcx),%xmm1
leaq 16(%rcx),%rcx
jnz .Loop_dec1_13
.byte 102,15,56,223,209
xorps %xmm11,%xmm2
movups %xmm2,(%rsi)
# ...swap its tail bytes with the partial ciphertext block...
.Lxts_dec_steal:
movzbl 16(%rdi),%eax
movzbl (%rsi),%ecx
leaq 1(%rdi),%rdi
movb %al,(%rsi)
movb %cl,16(%rsi)
leaq 1(%rsi),%rsi
subq $1,%rdx
jnz .Lxts_dec_steal
subq %r9,%rsi
movq %r11,%rcx
movl %r10d,%eax
# ...then decrypt the stitched block with the current tweak (xmm10).
movups (%rsi),%xmm2
xorps %xmm10,%xmm2
movups (%rcx),%xmm0
movups 16(%rcx),%xmm1
leaq 32(%rcx),%rcx
xorps %xmm0,%xmm2
.Loop_dec1_14:
.byte 102,15,56,222,209
decl %eax
movups (%rcx),%xmm1
leaq 16(%rcx),%rcx
jnz .Loop_dec1_14
.byte 102,15,56,223,209
xorps %xmm10,%xmm2
movups %xmm2,(%rsi)
# Epilogue: restore callee-saved xmm6-xmm15 and rdi/rsi, return.
.Lxts_dec_ret:
movaps -160(%rbp),%xmm6
movaps -144(%rbp),%xmm7
movaps -128(%rbp),%xmm8
movaps -112(%rbp),%xmm9
movaps -96(%rbp),%xmm10
movaps -80(%rbp),%xmm11
movaps -64(%rbp),%xmm12
movaps -48(%rbp),%xmm13
movaps -32(%rbp),%xmm14
movaps -16(%rbp),%xmm15
leaq (%rbp),%rsp
popq %rbp
.Lxts_dec_epilogue:
movq 8(%rsp),%rdi
movq 16(%rsp),%rsi
.byte 0xf3,0xc3
.LSEH_end_aesni_xts_decrypt:
#-----------------------------------------------------------------------
# void aesni_cbc_encrypt(const void *in, void *out, size_t len,
#                        const AES_KEY *key, unsigned char iv[16],
#                        int enc)   -- NOTE(review): signature inferred
# from the matching OpenSSL perlasm source; confirm against
# aesni-x86_64.pl.  enc==0 selects the decrypt path (.Lcbc_decrypt).
#-----------------------------------------------------------------------
.globl aesni_cbc_encrypt
.def aesni_cbc_encrypt; .scl 2; .type 32; .endef
.p2align 4
aesni_cbc_encrypt:
movq %rdi,8(%rsp)
movq %rsi,16(%rsp)
movq %rsp,%rax
.LSEH_begin_aesni_cbc_encrypt:
# Remap Win64 args to the SysV register layout used by the body.
movq %rcx,%rdi
movq %rdx,%rsi
movq %r8,%rdx
movq %r9,%rcx
movq 40(%rsp),%r8
movq 48(%rsp),%r9
testq %rdx,%rdx
jz .Lcbc_ret
movl 240(%rcx),%r10d
movq %rcx,%r11
testl %r9d,%r9d
jz .Lcbc_decrypt
# CBC encryption (inherently serial: each block chains on the last).
movups (%r8),%xmm2
movl %r10d,%eax
cmpq $16,%rdx
jb .Lcbc_enc_tail
subq $16,%rdx
jmp .Lcbc_enc_loop
.p2align 4
# CBC encrypt main loop: one block per iteration (xmm2 carries the
# chaining value; inline single-block aesenc loop).
.Lcbc_enc_loop:
movups (%rdi),%xmm3
leaq 16(%rdi),%rdi
movups (%rcx),%xmm0
movups 16(%rcx),%xmm1
xorps %xmm0,%xmm3
leaq 32(%rcx),%rcx
xorps %xmm3,%xmm2
.Loop_enc1_15:
.byte 102,15,56,220,209
decl %eax
movups (%rcx),%xmm1
leaq 16(%rcx),%rcx
jnz .Loop_enc1_15
.byte 102,15,56,221,209
movl %r10d,%eax
movq %r11,%rcx
movups %xmm2,0(%rsi)
leaq 16(%rsi),%rsi
subq $16,%rdx
jnc .Lcbc_enc_loop
addq $16,%rdx
jnz .Lcbc_enc_tail
movups %xmm2,(%r8)
jmp .Lcbc_ret
# Partial last block: copy it in place and zero-pad to 16 bytes.
# .long 0x9066A4F3 / 0x9066AAF3 are hand-encoded "rep movsb; nop" /
# "rep stosb; nop" (byte order F3 A4/AA 66 90).
.Lcbc_enc_tail:
movq %rdx,%rcx
xchgq %rdi,%rsi
.long 0x9066A4F3
movl $16,%ecx
subq %rdx,%rcx
xorl %eax,%eax
.long 0x9066AAF3
leaq -16(%rdi),%rdi
movl %r10d,%eax
movq %rdi,%rsi
movq %r11,%rcx
xorq %rdx,%rdx
jmp .Lcbc_enc_loop
.p2align 4
# CBC decryption: parallelizable, processed up to 8 blocks at a time.
# Frame + spill of callee-saved xmm6-xmm15 (Win64 ABI).
.Lcbc_decrypt:
leaq (%rsp),%rax
pushq %rbp
subq $176,%rsp
andq $-16,%rsp
movaps %xmm6,16(%rsp)
movaps %xmm7,32(%rsp)
movaps %xmm8,48(%rsp)
movaps %xmm9,64(%rsp)
movaps %xmm10,80(%rsp)
movaps %xmm11,96(%rsp)
movaps %xmm12,112(%rsp)
movaps %xmm13,128(%rsp)
movaps %xmm14,144(%rsp)
movaps %xmm15,160(%rsp)
.Lcbc_decrypt_body:
leaq -8(%rax),%rbp
movups (%r8),%xmm10
movl %r10d,%eax
cmpq $80,%rdx
jbe .Lcbc_dec_tail
# Preload 6 blocks; keep copies (xmm11-xmm15) as next-round IVs.
movups (%rcx),%xmm0
movdqu 0(%rdi),%xmm2
movdqu 16(%rdi),%xmm3
movdqa %xmm2,%xmm11
movdqu 32(%rdi),%xmm4
movdqa %xmm3,%xmm12
movdqu 48(%rdi),%xmm5
movdqa %xmm4,%xmm13
movdqu 64(%rdi),%xmm6
movdqa %xmm5,%xmm14
movdqu 80(%rdi),%xmm7
movdqa %xmm6,%xmm15
cmpq $112,%rdx
jbe .Lcbc_dec_six_or_seven
subq $112,%rdx
leaq 112(%rcx),%rcx
jmp .Lcbc_dec_loop8_enter
.p2align 4
# 8-blocks-at-a-time decrypt loop; fully unrolled round schedule.
# r11 is set to rdi (via setnc/shl $7) only when at least another full
# 8-block chunk follows, so the speculative loads at 0..80(%r11) stay
# in bounds on the final iteration.
.Lcbc_dec_loop8:
movups %xmm9,(%rsi)
leaq 16(%rsi),%rsi
.Lcbc_dec_loop8_enter:
movdqu 96(%rdi),%xmm8
pxor %xmm0,%xmm2
movdqu 112(%rdi),%xmm9
pxor %xmm0,%xmm3
movups 16-112(%rcx),%xmm1
pxor %xmm0,%xmm4
xorq %r11,%r11
cmpq $112,%rdx
pxor %xmm0,%xmm5
pxor %xmm0,%xmm6
pxor %xmm0,%xmm7
pxor %xmm0,%xmm8
.byte 102,15,56,222,209
pxor %xmm0,%xmm9
movups 32-112(%rcx),%xmm0
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
setnc %r11b
.byte 102,68,15,56,222,193
shlq $7,%r11
.byte 102,68,15,56,222,201
addq %rdi,%r11
movups 48-112(%rcx),%xmm1
# Rounds 2..9 (aesdec on all eight blocks per round-key load).
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 64-112(%rcx),%xmm0
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movups 80-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 96-112(%rcx),%xmm0
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movups 112-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 128-112(%rcx),%xmm0
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movups 144-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 160-112(%rcx),%xmm0
# Extra rounds for AES-192 (eax >= 11) and AES-256 (eax > 12).
cmpl $11,%eax
jb .Lcbc_dec_done
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movups 176-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 192-112(%rcx),%xmm0
je .Lcbc_dec_done
.byte 102,15,56,222,209
.byte 102,15,56,222,217
.byte 102,15,56,222,225
.byte 102,15,56,222,233
.byte 102,15,56,222,241
.byte 102,15,56,222,249
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movups 208-112(%rcx),%xmm1
.byte 102,15,56,222,208
.byte 102,15,56,222,216
.byte 102,15,56,222,224
.byte 102,15,56,222,232
.byte 102,15,56,222,240
.byte 102,15,56,222,248
.byte 102,68,15,56,222,192
.byte 102,68,15,56,222,200
movups 224-112(%rcx),%xmm0
# Last round: aesdeclast, XOR previous-ciphertext/IV, reload next chunk.
.Lcbc_dec_done:
.byte 102,15,56,222,209
pxor %xmm0,%xmm10
.byte 102,15,56,222,217
pxor %xmm0,%xmm11
.byte 102,15,56,222,225
pxor %xmm0,%xmm12
.byte 102,15,56,222,233
pxor %xmm0,%xmm13
.byte 102,15,56,222,241
pxor %xmm0,%xmm14
.byte 102,15,56,222,249
pxor %xmm0,%xmm15
.byte 102,68,15,56,222,193
.byte 102,68,15,56,222,201
movdqu 80(%rdi),%xmm1
.byte 102,65,15,56,223,210
movdqu 96(%rdi),%xmm10
pxor %xmm0,%xmm1
.byte 102,65,15,56,223,219
pxor %xmm0,%xmm10
movdqu 112(%rdi),%xmm0
leaq 128(%rdi),%rdi
.byte 102,65,15,56,223,228
movdqu 0(%r11),%xmm11
.byte 102,65,15,56,223,237
movdqu 16(%r11),%xmm12
.byte 102,65,15,56,223,246
movdqu 32(%r11),%xmm13
.byte 102,65,15,56,223,255
movdqu 48(%r11),%xmm14
.byte 102,68,15,56,223,193
movdqu 64(%r11),%xmm15
.byte 102,69,15,56,223,202
movdqa %xmm0,%xmm10
movdqu 80(%r11),%xmm1
movups -112(%rcx),%xmm0
movups %xmm2,(%rsi)
movdqa %xmm11,%xmm2
movups %xmm3,16(%rsi)
movdqa %xmm12,%xmm3
movups %xmm4,32(%rsi)
movdqa %xmm13,%xmm4
movups %xmm5,48(%rsi)
movdqa %xmm14,%xmm5
movups %xmm6,64(%rsi)
movdqa %xmm15,%xmm6
movups %xmm7,80(%rsi)
movdqa %xmm1,%xmm7
movups %xmm8,96(%rsi)
leaq 112(%rsi),%rsi
subq $128,%rdx
ja .Lcbc_dec_loop8
movaps %xmm9,%xmm2
leaq -112(%rcx),%rcx
addq $112,%rdx
jle .Lcbc_dec_tail_collected
movups %xmm9,(%rsi)
leaq 16(%rsi),%rsi
cmpq $80,%rdx
jbe .Lcbc_dec_tail
movaps %xmm11,%xmm2
# Exactly six or seven remaining blocks.
.Lcbc_dec_six_or_seven:
cmpq $96,%rdx
ja .Lcbc_dec_seven
movaps %xmm7,%xmm8
call _aesni_decrypt6
pxor %xmm10,%xmm2
movaps %xmm8,%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
pxor %xmm12,%xmm4
movdqu %xmm3,16(%rsi)
pxor %xmm13,%xmm5
movdqu %xmm4,32(%rsi)
pxor %xmm14,%xmm6
movdqu %xmm5,48(%rsi)
pxor %xmm15,%xmm7
movdqu %xmm6,64(%rsi)
leaq 80(%rsi),%rsi
movdqa %xmm7,%xmm2
jmp .Lcbc_dec_tail_collected
.p2align 4
.Lcbc_dec_seven:
movups 96(%rdi),%xmm8
xorps %xmm9,%xmm9
call _aesni_decrypt8
movups 80(%rdi),%xmm9
pxor %xmm10,%xmm2
movups 96(%rdi),%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
pxor %xmm12,%xmm4
movdqu %xmm3,16(%rsi)
pxor %xmm13,%xmm5
movdqu %xmm4,32(%rsi)
pxor %xmm14,%xmm6
movdqu %xmm5,48(%rsi)
pxor %xmm15,%xmm7
movdqu %xmm6,64(%rsi)
pxor %xmm9,%xmm8
movdqu %xmm7,80(%rsi)
leaq 96(%rsi),%rsi
movdqa %xmm8,%xmm2
jmp .Lcbc_dec_tail_collected
# 1..5 (possibly partial-trailing) blocks left.
.Lcbc_dec_tail:
movups (%rdi),%xmm2
subq $16,%rdx
jbe .Lcbc_dec_one
movups 16(%rdi),%xmm3
movaps %xmm2,%xmm11
subq $16,%rdx
jbe .Lcbc_dec_two
movups 32(%rdi),%xmm4
movaps %xmm3,%xmm12
subq $16,%rdx
jbe .Lcbc_dec_three
movups 48(%rdi),%xmm5
movaps %xmm4,%xmm13
subq $16,%rdx
jbe .Lcbc_dec_four
movups 64(%rdi),%xmm6
movaps %xmm5,%xmm14
movaps %xmm6,%xmm15
xorps %xmm7,%xmm7
call _aesni_decrypt6
pxor %xmm10,%xmm2
movaps %xmm15,%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
pxor %xmm12,%xmm4
movdqu %xmm3,16(%rsi)
pxor %xmm13,%xmm5
movdqu %xmm4,32(%rsi)
pxor %xmm14,%xmm6
movdqu %xmm5,48(%rsi)
leaq 64(%rsi),%rsi
movdqa %xmm6,%xmm2
subq $16,%rdx
jmp .Lcbc_dec_tail_collected
.p2align 4
.Lcbc_dec_one:
movaps %xmm2,%xmm11
movups (%rcx),%xmm0
movups 16(%rcx),%xmm1
leaq 32(%rcx),%rcx
xorps %xmm0,%xmm2
.Loop_dec1_16:
.byte 102,15,56,222,209
decl %eax
movups (%rcx),%xmm1
leaq 16(%rcx),%rcx
jnz .Loop_dec1_16
.byte 102,15,56,223,209
xorps %xmm10,%xmm2
movaps %xmm11,%xmm10
jmp .Lcbc_dec_tail_collected
.p2align 4
.Lcbc_dec_two:
movaps %xmm3,%xmm12
xorps %xmm4,%xmm4
call _aesni_decrypt3
pxor %xmm10,%xmm2
movaps %xmm12,%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
movdqa %xmm3,%xmm2
leaq 16(%rsi),%rsi
jmp .Lcbc_dec_tail_collected
.p2align 4
.Lcbc_dec_three:
movaps %xmm4,%xmm13
call _aesni_decrypt3
pxor %xmm10,%xmm2
movaps %xmm13,%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
pxor %xmm12,%xmm4
movdqu %xmm3,16(%rsi)
movdqa %xmm4,%xmm2
leaq 32(%rsi),%rsi
jmp .Lcbc_dec_tail_collected
.p2align 4
.Lcbc_dec_four:
movaps %xmm5,%xmm14
call _aesni_decrypt4
pxor %xmm10,%xmm2
movaps %xmm14,%xmm10
pxor %xmm11,%xmm3
movdqu %xmm2,(%rsi)
pxor %xmm12,%xmm4
movdqu %xmm3,16(%rsi)
pxor %xmm13,%xmm5
movdqu %xmm4,32(%rsi)
movdqa %xmm5,%xmm2
leaq 48(%rsi),%rsi
jmp .Lcbc_dec_tail_collected
.p2align 4
# Write back updated IV; handle a trailing partial block via a bounce
# buffer on the stack (.long 0x9066A4F3 = rep movsb; nop).
.Lcbc_dec_tail_collected:
movups %xmm10,(%r8)
andq $15,%rdx
jnz .Lcbc_dec_tail_partial
movups %xmm2,(%rsi)
jmp .Lcbc_dec_ret
.p2align 4
.Lcbc_dec_tail_partial:
movaps %xmm2,(%rsp)
movq $16,%rcx
movq %rsi,%rdi
subq %rdx,%rcx
leaq (%rsp),%rsi
.long 0x9066A4F3
# Epilogue: restore callee-saved xmm6-xmm15 and rdi/rsi.
.Lcbc_dec_ret:
movaps 16(%rsp),%xmm6
movaps 32(%rsp),%xmm7
movaps 48(%rsp),%xmm8
movaps 64(%rsp),%xmm9
movaps 80(%rsp),%xmm10
movaps 96(%rsp),%xmm11
movaps 112(%rsp),%xmm12
movaps 128(%rsp),%xmm13
movaps 144(%rsp),%xmm14
movaps 160(%rsp),%xmm15
leaq (%rbp),%rsp
popq %rbp
.Lcbc_ret:
movq 8(%rsp),%rdi
movq 16(%rsp),%rsi
.byte 0xf3,0xc3
.LSEH_end_aesni_cbc_encrypt:
#-----------------------------------------------------------------------
# int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
#                           AES_KEY *key) -- NOTE(review): signature
# inferred from OpenSSL; confirm against aesni-x86_64.pl.  Builds the
# encrypt schedule, then inverts it in place: swaps first/last round
# keys and applies aesimc (.byte 102,15,56,219,* ) to the middle ones.
# 0x48,0x83,0xEC,0x08 / addq $8 = sub rsp,8 frame for the nested call.
#-----------------------------------------------------------------------
.globl aesni_set_decrypt_key
.def aesni_set_decrypt_key; .scl 2; .type 32; .endef
.p2align 4
aesni_set_decrypt_key:
.byte 0x48,0x83,0xEC,0x08
call __aesni_set_encrypt_key
shll $4,%edx
testl %eax,%eax
jnz .Ldec_key_ret
leaq 16(%r8,%rdx,1),%rcx
movups (%r8),%xmm0
movups (%rcx),%xmm1
movups %xmm0,(%rcx)
movups %xmm1,(%r8)
leaq 16(%r8),%r8
leaq -16(%rcx),%rcx
.Ldec_key_inverse:
movups (%r8),%xmm0
movups (%rcx),%xmm1
.byte 102,15,56,219,192
.byte 102,15,56,219,201
leaq 16(%r8),%r8
leaq -16(%rcx),%rcx
movups %xmm0,16(%rcx)
movups %xmm1,-16(%r8)
cmpq %r8,%rcx
ja .Ldec_key_inverse
movups (%r8),%xmm0
.byte 102,15,56,219,192
movups %xmm0,(%rcx)
.Ldec_key_ret:
addq $8,%rsp
.byte 0xf3,0xc3
.LSEH_end_set_decrypt_key:
#-----------------------------------------------------------------------
# int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
#                           AES_KEY *key) -- NOTE(review): signature
# inferred from OpenSSL; confirm.  Returns 0 on success, -1 on NULL
# args, -2 on unsupported key size.  The .byte 102,15,58,223,…,rcon
# sequences are hand-encoded aeskeygenassist with the round constant
# as the last byte.
#-----------------------------------------------------------------------
.globl aesni_set_encrypt_key
.def aesni_set_encrypt_key; .scl 2; .type 32; .endef
.p2align 4
aesni_set_encrypt_key:
__aesni_set_encrypt_key:
.byte 0x48,0x83,0xEC,0x08
movq $-1,%rax
testq %rcx,%rcx
jz .Lenc_key_ret
testq %r8,%r8
jz .Lenc_key_ret
movups (%rcx),%xmm0
xorps %xmm4,%xmm4
leaq 16(%r8),%rax
cmpl $256,%edx
je .L14rounds
cmpl $192,%edx
je .L12rounds
cmpl $128,%edx
jne .Lbad_keybits
# AES-128: 10 round keys, rounds field = 9 stored at 80(%rax).
.L10rounds:
movl $9,%edx
movups %xmm0,(%r8)
.byte 102,15,58,223,200,1
call .Lkey_expansion_128_cold
.byte 102,15,58,223,200,2
call .Lkey_expansion_128
.byte 102,15,58,223,200,4
call .Lkey_expansion_128
.byte 102,15,58,223,200,8
call .Lkey_expansion_128
.byte 102,15,58,223,200,16
call .Lkey_expansion_128
.byte 102,15,58,223,200,32
call .Lkey_expansion_128
.byte 102,15,58,223,200,64
call .Lkey_expansion_128
.byte 102,15,58,223,200,128
call .Lkey_expansion_128
.byte 102,15,58,223,200,27
call .Lkey_expansion_128
.byte 102,15,58,223,200,54
call .Lkey_expansion_128
movups %xmm0,(%rax)
movl %edx,80(%rax)
xorl %eax,%eax
jmp .Lenc_key_ret
.p2align 4
# AES-192 key schedule.
.L12rounds:
movq 16(%rcx),%xmm2
movl $11,%edx
movups %xmm0,(%r8)
.byte 102,15,58,223,202,1
call .Lkey_expansion_192a_cold
.byte 102,15,58,223,202,2
call .Lkey_expansion_192b
.byte 102,15,58,223,202,4
call .Lkey_expansion_192a
.byte 102,15,58,223,202,8
call .Lkey_expansion_192b
.byte 102,15,58,223,202,16
call .Lkey_expansion_192a
.byte 102,15,58,223,202,32
call .Lkey_expansion_192b
.byte 102,15,58,223,202,64
call .Lkey_expansion_192a
.byte 102,15,58,223,202,128
call .Lkey_expansion_192b
movups %xmm0,(%rax)
movl %edx,48(%rax)
xorq %rax,%rax
jmp .Lenc_key_ret
.p2align 4
# AES-256 key schedule (alternating a/b expansion steps).
.L14rounds:
movups 16(%rcx),%xmm2
movl $13,%edx
leaq 16(%rax),%rax
movups %xmm0,(%r8)
movups %xmm2,16(%r8)
.byte 102,15,58,223,202,1
call .Lkey_expansion_256a_cold
.byte 102,15,58,223,200,1
call .Lkey_expansion_256b
.byte 102,15,58,223,202,2
call .Lkey_expansion_256a
.byte 102,15,58,223,200,2
call .Lkey_expansion_256b
.byte 102,15,58,223,202,4
call .Lkey_expansion_256a
.byte 102,15,58,223,200,4
call .Lkey_expansion_256b
.byte 102,15,58,223,202,8
call .Lkey_expansion_256a
.byte 102,15,58,223,200,8
call .Lkey_expansion_256b
.byte 102,15,58,223,202,16
call .Lkey_expansion_256a
.byte 102,15,58,223,200,16
call .Lkey_expansion_256b
.byte 102,15,58,223,202,32
call .Lkey_expansion_256a
.byte 102,15,58,223,200,32
call .Lkey_expansion_256b
.byte 102,15,58,223,202,64
call .Lkey_expansion_256a
movups %xmm0,(%rax)
movl %edx,16(%rax)
xorq %rax,%rax
jmp .Lenc_key_ret
.p2align 4
.Lbad_keybits:
movq $-2,%rax
.Lenc_key_ret:
addq $8,%rsp
.byte 0xf3,0xc3
.LSEH_end_set_encrypt_key:
# Key-expansion helpers: fold the aeskeygenassist result (xmm1) into
# the running key words via the classic shufps/xorps ladder.
.p2align 4
.Lkey_expansion_128:
movups %xmm0,(%rax)
leaq 16(%rax),%rax
.Lkey_expansion_128_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
.byte 0xf3,0xc3
.p2align 4
.Lkey_expansion_192a:
movups %xmm0,(%rax)
leaq 16(%rax),%rax
.Lkey_expansion_192a_cold:
movaps %xmm2,%xmm5
.Lkey_expansion_192b_warm:
shufps $16,%xmm0,%xmm4
movdqa %xmm2,%xmm3
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
pslldq $4,%xmm3
xorps %xmm4,%xmm0
pshufd $85,%xmm1,%xmm1
pxor %xmm3,%xmm2
pxor %xmm1,%xmm0
pshufd $255,%xmm0,%xmm3
pxor %xmm3,%xmm2
.byte 0xf3,0xc3
.p2align 4
.Lkey_expansion_192b:
movaps %xmm0,%xmm3
shufps $68,%xmm0,%xmm5
movups %xmm5,(%rax)
shufps $78,%xmm2,%xmm3
movups %xmm3,16(%rax)
leaq 32(%rax),%rax
jmp .Lkey_expansion_192b_warm
.p2align 4
.Lkey_expansion_256a:
movups %xmm2,(%rax)
leaq 16(%rax),%rax
.Lkey_expansion_256a_cold:
shufps $16,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $140,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps $255,%xmm1,%xmm1
xorps %xmm1,%xmm0
.byte 0xf3,0xc3
.p2align 4
.Lkey_expansion_256b:
movups %xmm0,(%rax)
leaq 16(%rax),%rax
shufps $16,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $140,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps $170,%xmm1,%xmm1
xorps %xmm1,%xmm2
.byte 0xf3,0xc3
# Constant pool: byte-swap mask, CTR increment vectors, the XTS
# GF(2^128) reduction constant 0x87, and an ASCII credit string.
.p2align 6
.Lbswap_mask:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.Lincrement32:
.long 6,6,6,0
.Lincrement64:
.long 1,0,0,0
.Lxts_magic:
.long 0x87,0,1,0
.Lincrement1:
.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
.byte 65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69,83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.p2align 6
# Win64 SEH handlers.  In all of them: r8 = DISPATCHER_CONTEXT-style
# record with CONTEXT fields at fixed offsets (120=Rax, 152=Rsp,
# 160=Rbp, 168=Rsi, 176=Rdi, 248=Rip — NOTE(review): offsets inferred
# from usage; confirm against winnt.h CONTEXT layout), r9 = dispatcher
# data.  .long 0xa548f3fc encodes "cld; rep movsq" for the CONTEXT /
# XMM-register block copies.
.def ecb_se_handler; .scl 3; .type 32; .endef
.p2align 4
ecb_se_handler:
pushq %rsi
pushq %rdi
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
pushfq
subq $64,%rsp
movq 152(%r8),%rax
jmp .Lcommon_seh_tail
.def ccm64_se_handler; .scl 3; .type 32; .endef
.p2align 4
ccm64_se_handler:
pushq %rsi
pushq %rdi
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
pushfq
subq $64,%rsp
movq 120(%r8),%rax
movq 248(%r8),%rbx
movq 8(%r9),%rsi
movq 56(%r9),%r11
# Only restore registers when the fault RIP lies inside the region
# bounded by the two RVAs stored in the handler data.
movl 0(%r11),%r10d
leaq (%rsi,%r10,1),%r10
cmpq %r10,%rbx
jb .Lcommon_seh_tail
movq 152(%r8),%rax
movl 4(%r11),%r10d
leaq (%rsi,%r10,1),%r10
cmpq %r10,%rbx
jae .Lcommon_seh_tail
leaq 0(%rax),%rsi
leaq 512(%r8),%rdi
movl $8,%ecx
.long 0xa548f3fc
leaq 88(%rax),%rax
jmp .Lcommon_seh_tail
.def ctr_xts_se_handler; .scl 3; .type 32; .endef
.p2align 4
ctr_xts_se_handler:
pushq %rsi
pushq %rdi
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
pushfq
subq $64,%rsp
movq 120(%r8),%rax
movq 248(%r8),%rbx
movq 8(%r9),%rsi
movq 56(%r9),%r11
movl 0(%r11),%r10d
leaq (%rsi,%r10,1),%r10
cmpq %r10,%rbx
jb .Lcommon_seh_tail
movq 152(%r8),%rax
movl 4(%r11),%r10d
leaq (%rsi,%r10,1),%r10
cmpq %r10,%rbx
jae .Lcommon_seh_tail
# Copy the ten saved xmm6-xmm15 (20 qwords) back into the CONTEXT.
movq 160(%r8),%rax
leaq -160(%rax),%rsi
leaq 512(%r8),%rdi
movl $20,%ecx
.long 0xa548f3fc
jmp .Lcommon_rbp_tail
.def cbc_se_handler; .scl 3; .type 32; .endef
.p2align 4
cbc_se_handler:
pushq %rsi
pushq %rdi
pushq %rbx
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
pushfq
subq $64,%rsp
movq 152(%r8),%rax
movq 248(%r8),%rbx
# Dispatch on which part of aesni_cbc_encrypt faulted.
leaq .Lcbc_decrypt(%rip),%r10
cmpq %r10,%rbx
jb .Lcommon_seh_tail
leaq .Lcbc_decrypt_body(%rip),%r10
cmpq %r10,%rbx
jb .Lrestore_cbc_rax
leaq .Lcbc_ret(%rip),%r10
cmpq %r10,%rbx
jae .Lcommon_seh_tail
leaq 16(%rax),%rsi
# Continuation of cbc_se_handler: copy saved xmm6-xmm15 (20 qwords)
# from the frame back into the CONTEXT record at 512(%r8)
# (.long 0xa548f3fc = cld; rep movsq).
leaq 512(%r8),%rdi
movl $20,%ecx
.long 0xa548f3fc
# Shared tail: pop saved rbp from the frame and fall through.
.Lcommon_rbp_tail:
movq 160(%r8),%rax
movq (%rax),%rbp
leaq 8(%rax),%rax
movq %rbp,160(%r8)
jmp .Lcommon_seh_tail
.Lrestore_cbc_rax:
movq 120(%r8),%rax
# Common SEH epilogue: restore rdi/rsi from the home slots, publish the
# unwound Rsp/Rsi/Rdi into the CONTEXT, copy the DISPATCHER_CONTEXT
# (154 qwords) and chain to RtlVirtualUnwind, then return
# ExceptionContinueSearch (eax = 1).
.Lcommon_seh_tail:
movq 8(%rax),%rdi
movq 16(%rax),%rsi
movq %rax,152(%r8)
movq %rsi,168(%r8)
movq %rdi,176(%r8)
movq 40(%r9),%rdi
movq %r8,%rsi
movl $154,%ecx
.long 0xa548f3fc
movq %r9,%rsi
xorq %rcx,%rcx
movq 8(%rsi),%rdx
movq 0(%rsi),%r8
movq 16(%rsi),%r9
movq 40(%rsi),%r10
leaq 56(%rsi),%r11
leaq 24(%rsi),%r12
movq %r10,32(%rsp)
movq %r11,40(%rsp)
movq %r12,48(%rsp)
movq %rcx,56(%rsp)
call *__imp_RtlVirtualUnwind(%rip)
movl $1,%eax
addq $64,%rsp
popfq
popq %r15
popq %r14
popq %r13
popq %r12
popq %rbp
popq %rbx
popq %rdi
popq %rsi
.byte 0xf3,0xc3
# Win64 unwind tables: .pdata maps each function's begin/end RVAs to
# its UNWIND_INFO entry in .xdata.
.section .pdata
.p2align 2
.rva .LSEH_begin_aesni_ecb_encrypt
.rva .LSEH_end_aesni_ecb_encrypt
.rva .LSEH_info_ecb
.rva .LSEH_begin_aesni_ccm64_encrypt_blocks
.rva .LSEH_end_aesni_ccm64_encrypt_blocks
.rva .LSEH_info_ccm64_enc
.rva .LSEH_begin_aesni_ccm64_decrypt_blocks
.rva .LSEH_end_aesni_ccm64_decrypt_blocks
.rva .LSEH_info_ccm64_dec
.rva .LSEH_begin_aesni_ctr32_encrypt_blocks
.rva .LSEH_end_aesni_ctr32_encrypt_blocks
.rva .LSEH_info_ctr32
.rva .LSEH_begin_aesni_xts_encrypt
.rva .LSEH_end_aesni_xts_encrypt
.rva .LSEH_info_xts_enc
.rva .LSEH_begin_aesni_xts_decrypt
.rva .LSEH_end_aesni_xts_decrypt
.rva .LSEH_info_xts_dec
.rva .LSEH_begin_aesni_cbc_encrypt
.rva .LSEH_end_aesni_cbc_encrypt
.rva .LSEH_info_cbc
.rva aesni_set_decrypt_key
.rva .LSEH_end_set_decrypt_key
.rva .LSEH_info_key
.rva aesni_set_encrypt_key
.rva .LSEH_end_set_encrypt_key
.rva .LSEH_info_key
# UNWIND_INFO records: flag byte 9 = version 1 + UNW_FLAG_EHANDLER
# (language-specific handler follows); handler data here is the
# body/epilogue RVA pair checked by the handlers above.  The key-setup
# functions use plain prolog unwind codes (8-byte stack alloc) instead.
.section .xdata
.p2align 3
.LSEH_info_ecb:
.byte 9,0,0,0
.rva ecb_se_handler
.LSEH_info_ccm64_enc:
.byte 9,0,0,0
.rva ccm64_se_handler
.rva .Lccm64_enc_body,.Lccm64_enc_ret
.LSEH_info_ccm64_dec:
.byte 9,0,0,0
.rva ccm64_se_handler
.rva .Lccm64_dec_body,.Lccm64_dec_ret
.LSEH_info_ctr32:
.byte 9,0,0,0
.rva ctr_xts_se_handler
.rva .Lctr32_body,.Lctr32_epilogue
.LSEH_info_xts_enc:
.byte 9,0,0,0
.rva ctr_xts_se_handler
.rva .Lxts_enc_body,.Lxts_enc_epilogue
.LSEH_info_xts_dec:
.byte 9,0,0,0
.rva ctr_xts_se_handler
.rva .Lxts_dec_body,.Lxts_dec_epilogue
.LSEH_info_cbc:
.byte 9,0,0,0
.rva cbc_se_handler
.LSEH_info_key:
.byte 0x01,0x04,0x01,0x00
.byte 0x04,0x02,0x00,0x00