Commit 2441feef authored by Maximiliano Korp

Bump

parent 35cd5649
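
This bump regenerates the vendored OpenSSL-style AES assembly, and every hunk below makes the same mechanical change: an immediate operand is rewritten from decimal to its hexadecimal equivalent ($65280 to $0x0000ff00, $2155905152 to $0x80808080, $4095 to $0xFFF, and so on) with no functional difference. A minimal sketch of that decimal-to-hex rewrite in Python, assuming plain AT&T-syntax input on stdin; the script (and any name such as dec2hex.py) is illustrative only, since the committed .s files are regenerated perlasm output rather than post-processed text:

    import re
    import sys

    # Decimal immediates in AT&T syntax look like "$65280"; hex ("$0x...")
    # and negative ("$-64") immediates do not match and are left alone.
    IMM = re.compile(r"\$(\d+)\b")

    def to_hex(m: re.Match) -> str:
        # Rewrite every decimal immediate as hex. The actual commit is
        # selective (shift counts like "shrl $16" stay decimal) and pads
        # some masks to 32 bits ($0x0000ff00); both differences come from
        # the perlasm sources, which this sketch does not model.
        return f"$0x{int(m.group(1)):x}"

    for line in sys.stdin:
        sys.stdout.write(IMM.sub(to_hex, line))

Running, say, python dec2hex.py < aes-x86_64.s and diffing the result against the original produces the same shape of change as the hunks below.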
@@ -81,8 +81,8 @@ _x86_64_AES_encrypt:
 movl 0(%r14,%rdi,8),%edi
 movl 0(%r14,%rbp,8),%ebp
-andl $65280,%edi
-andl $65280,%ebp
+andl $0x0000ff00,%edi
+andl $0x0000ff00,%ebp
 xorl %edi,%r10d
 xorl %ebp,%r11d
@@ -94,8 +94,8 @@ _x86_64_AES_encrypt:
 movl 0(%r14,%rsi,8),%esi
 movl 0(%r14,%rdi,8),%edi
-andl $65280,%esi
-andl $65280,%edi
+andl $0x0000ff00,%esi
+andl $0x0000ff00,%edi
 shrl $16,%ebx
 xorl %esi,%r12d
 xorl %edi,%r8d
@@ -108,9 +108,9 @@ _x86_64_AES_encrypt:
 movl 0(%r14,%rdi,8),%edi
 movl 0(%r14,%rbp,8),%ebp
-andl $16711680,%esi
-andl $16711680,%edi
-andl $16711680,%ebp
+andl $0x00ff0000,%esi
+andl $0x00ff0000,%edi
+andl $0x00ff0000,%ebp
 xorl %esi,%r10d
 xorl %edi,%r11d
@@ -123,9 +123,9 @@ _x86_64_AES_encrypt:
 movl 2(%r14,%rdi,8),%edi
 movl 2(%r14,%rbp,8),%ebp
-andl $16711680,%esi
-andl $4278190080,%edi
-andl $4278190080,%ebp
+andl $0x00ff0000,%esi
+andl $0xff000000,%edi
+andl $0xff000000,%ebp
 xorl %esi,%r8d
 xorl %edi,%r10d
@@ -138,8 +138,8 @@ _x86_64_AES_encrypt:
 movl 2(%r14,%rdi,8),%edi
 movl 16+0(%r15),%eax
-andl $4278190080,%esi
-andl $4278190080,%edi
+andl $0xff000000,%esi
+andl $0xff000000,%edi
 xorl %esi,%r12d
 xorl %edi,%r8d
@@ -241,8 +241,8 @@ _x86_64_AES_encrypt_compact:
 xorl %r8d,%edx
 cmpq 16(%rsp),%r15
 je .Lenc_compact_done
-movl $2155905152,%r10d
-movl $2155905152,%r11d
+movl $0x80808080,%r10d
+movl $0x80808080,%r11d
 andl %eax,%r10d
 andl %ebx,%r11d
 movl %r10d,%esi
@@ -253,10 +253,10 @@ _x86_64_AES_encrypt_compact:
 leal (%rbx,%rbx,1),%r9d
 subl %r10d,%esi
 subl %r11d,%edi
-andl $4278124286,%r8d
-andl $4278124286,%r9d
-andl $454761243,%esi
-andl $454761243,%edi
+andl $0xfefefefe,%r8d
+andl $0xfefefefe,%r9d
+andl $0x1b1b1b1b,%esi
+andl $0x1b1b1b1b,%edi
 movl %eax,%r10d
 movl %ebx,%r11d
 xorl %esi,%r8d
@@ -264,9 +264,9 @@ _x86_64_AES_encrypt_compact:
 xorl %r8d,%eax
 xorl %r9d,%ebx
-movl $2155905152,%r12d
+movl $0x80808080,%r12d
 roll $24,%eax
-movl $2155905152,%ebp
+movl $0x80808080,%ebp
 roll $24,%ebx
 andl %ecx,%r12d
 andl %edx,%ebp
@@ -289,10 +289,10 @@ _x86_64_AES_encrypt_compact:
 xorl %r10d,%eax
 xorl %r11d,%ebx
-andl $4278124286,%r8d
-andl $4278124286,%r9d
-andl $454761243,%esi
-andl $454761243,%edi
+andl $0xfefefefe,%r8d
+andl $0xfefefefe,%r9d
+andl $0x1b1b1b1b,%esi
+andl $0x1b1b1b1b,%edi
 movl %ecx,%r12d
 movl %edx,%ebp
 xorl %esi,%r8d
@@ -345,7 +345,7 @@ AES_encrypt:
 andq $-64,%rsp
 subq %rsp,%rcx
 negq %rcx
-andq $960,%rcx
+andq $0x3c0,%rcx
 subq %rcx,%rsp
 subq $32,%rsp
@@ -370,7 +370,7 @@ AES_encrypt:
 leaq .LAES_Te+2048(%rip),%r14
 leaq 768(%rsp),%rbp
 subq %r14,%rbp
-andq $768,%rbp
+andq $0x300,%rbp
 leaq (%r14,%rbp,1),%r14
 call _x86_64_AES_encrypt_compact
@@ -792,7 +792,7 @@ AES_decrypt:
 andq $-64,%rsp
 subq %rsp,%rcx
 negq %rcx
-andq $960,%rcx
+andq $0x3c0,%rcx
 subq %rcx,%rsp
 subq $32,%rsp
@@ -817,7 +817,7 @@ AES_decrypt:
 leaq .LAES_Td+2048(%rip),%r14
 leaq 768(%rsp),%rbp
 subq %r14,%rbp
-andq $768,%rbp
+andq $0x300,%rbp
 leaq (%r14,%rbp,1),%r14
 shrq $3,%rbp
 addq %rbp,%r14
@@ -1333,9 +1333,9 @@ AES_cbc_encrypt:
 movq %r14,%r10
 leaq 2304(%r14),%r11
 movq %r15,%r12
-andq $4095,%r10
-andq $4095,%r11
-andq $4095,%r12
+andq $0xFFF,%r10
+andq $0xFFF,%r11
+andq $0xFFF,%r12
 cmpq %r11,%r12
 jb .Lcbc_te_break_out
@@ -1344,7 +1344,7 @@ AES_cbc_encrypt:
 jmp .Lcbc_te_ok
 .Lcbc_te_break_out:
 subq %r10,%r12
-andq $4095,%r12
+andq $0xFFF,%r12
 addq $320,%r12
 subq %r12,%r15
 .align 4
@@ -1370,7 +1370,7 @@ AES_cbc_encrypt:
 movq %r15,%r10
 subq %r14,%r10
-andq $4095,%r10
+andq $0xfff,%r10
 cmpq $2304,%r10
 jb .Lcbc_do_ecopy
 cmpq $4096-248,%r10
@@ -1557,7 +1557,7 @@ AES_cbc_encrypt:
 leaq -88-63(%rcx),%r10
 subq %rbp,%r10
 negq %r10
-andq $960,%r10
+andq $0x3c0,%r10
 subq %r10,%rbp
 xchgq %rsp,%rbp
@@ -1586,7 +1586,7 @@ AES_cbc_encrypt:
 leaq 2048(%r14),%r14
 leaq 768-8(%rsp),%rax
 subq %r14,%rax
-andq $768,%rax
+andq $0x300,%rax
 leaq (%r14,%rax,1),%r14
 cmpq $0,%rbx
@@ -1120,7 +1120,7 @@ _avx_cbc_dec_shortcut:
 vpxor %xmm15,%xmm8,%xmm8
 vmovdqu %xmm9,112(%rbp)
 vpxor %xmm15,%xmm9,%xmm9
-xorq $128,%rbp
+xorq $0x80,%rbp
 movl $1,%ecx
 jmp .Loop_dec8x
@@ -4015,9 +4015,9 @@ aesni_cbc_sha256_enc_shaext:
 movups 16(%rcx),%xmm4
 leaq 112(%rcx),%rcx
-pshufd $27,%xmm1,%xmm0
-pshufd $177,%xmm1,%xmm1
-pshufd $27,%xmm2,%xmm2
+pshufd $0x1b,%xmm1,%xmm0
+pshufd $0xb1,%xmm1,%xmm1
+pshufd $0x1b,%xmm2,%xmm2
 movdqa %xmm3,%xmm7
 .byte 102,15,58,15,202,8
 punpcklqdq %xmm0,%xmm2
@@ -4043,7 +4043,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -80(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movups -64(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,202
@@ -4055,7 +4055,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -48(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movups -32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,202
@@ -4067,7 +4067,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -16(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm13,%xmm3
 .byte 102,65,15,58,15,220,4
 paddd %xmm3,%xmm10
@@ -4082,7 +4082,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 16(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movups 32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 movdqa %xmm10,%xmm3
@@ -4096,7 +4096,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 48(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm11,%xmm3
 .byte 102,65,15,58,15,218,4
 paddd %xmm3,%xmm12
@@ -4129,7 +4129,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -64(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm12,%xmm3
 .byte 102,65,15,58,15,219,4
 paddd %xmm3,%xmm13
@@ -4143,7 +4143,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm13,%xmm3
 .byte 102,65,15,58,15,220,4
 paddd %xmm3,%xmm10
@@ -4157,7 +4157,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 0(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm10,%xmm3
 .byte 102,65,15,58,15,221,4
 paddd %xmm3,%xmm11
@@ -4171,7 +4171,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm11,%xmm3
 .byte 102,65,15,58,15,218,4
 paddd %xmm3,%xmm12
@@ -4206,7 +4206,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -64(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm12,%xmm3
 .byte 102,65,15,58,15,219,4
 paddd %xmm3,%xmm13
@@ -4220,7 +4220,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm13,%xmm3
 .byte 102,65,15,58,15,220,4
 paddd %xmm3,%xmm10
@@ -4234,7 +4234,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 0(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm10,%xmm3
 .byte 102,65,15,58,15,221,4
 paddd %xmm3,%xmm11
@@ -4248,7 +4248,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm11,%xmm3
 .byte 102,65,15,58,15,218,4
 paddd %xmm3,%xmm12
@@ -4275,7 +4275,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 16-112(%rcx),%xmm4
 nop
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movdqa %xmm12,%xmm3
 .byte 102,65,15,58,15,219,4
 paddd %xmm3,%xmm13
@@ -4296,7 +4296,7 @@ aesni_cbc_sha256_enc_shaext:
 movups -48(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movups -32(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,202
@@ -4308,7 +4308,7 @@ aesni_cbc_sha256_enc_shaext:
 movups 0(%rcx),%xmm4
 aesenc %xmm5,%xmm6
 .byte 15,56,203,209
-pshufd $14,%xmm0,%xmm0
+pshufd $0x0e,%xmm0,%xmm0
 movups 16(%rcx),%xmm5
 aesenc %xmm4,%xmm6
 .byte 15,56,203,202
@@ -4341,9 +4341,9 @@ aesni_cbc_sha256_enc_shaext:
 leaq 64(%rdi),%rdi
 jnz .Loop_shaext
-pshufd $177,%xmm2,%xmm2
-pshufd $27,%xmm1,%xmm3
-pshufd $177,%xmm1,%xmm1
+pshufd $0xb1,%xmm2,%xmm2
+pshufd $0x1b,%xmm1,%xmm3
+pshufd $0xb1,%xmm1,%xmm1
 punpckhqdq %xmm2,%xmm1
 .byte 102,15,58,15,211,8
@@ -503,7 +503,7 @@ aesni_ecb_encrypt:
 testl %r8d,%r8d
 jz .Lecb_decrypt
-cmpq $128,%rdx
+cmpq $0x80,%rdx
 jb .Lecb_enc_tail
 movdqu (%rdi),%xmm2
@@ -515,7 +515,7 @@ aesni_ecb_encrypt:
 movdqu 96(%rdi),%xmm8
 movdqu 112(%rdi),%xmm9
 leaq 128(%rdi),%rdi
-subq $128,%rdx
+subq $0x80,%rdx
 jmp .Lecb_enc_loop8_enter
 .align 16
 .Lecb_enc_loop8:
@@ -543,7 +543,7 @@ aesni_ecb_encrypt:
 call _aesni_encrypt8
-subq $128,%rdx
+subq $0x80,%rdx
 jnc .Lecb_enc_loop8
 movups %xmm2,(%rsi)
@@ -557,22 +557,22 @@ aesni_ecb_encrypt:
 movups %xmm8,96(%rsi)
 movups %xmm9,112(%rsi)
 leaq 128(%rsi),%rsi
-addq $128,%rdx
+addq $0x80,%rdx
 jz .Lecb_ret
 .Lecb_enc_tail:
 movups (%rdi),%xmm2
-cmpq $32,%rdx
+cmpq $0x20,%rdx
 jb .Lecb_enc_one
 movups 16(%rdi),%xmm3
 je .Lecb_enc_two
 movups 32(%rdi),%xmm4
-cmpq $64,%rdx
+cmpq $0x40,%rdx
 jb .Lecb_enc_three
 movups 48(%rdi),%xmm5
 je .Lecb_enc_four
 movups 64(%rdi),%xmm6
-cmpq $96,%rdx
+cmpq $0x60,%rdx
 jb .Lecb_enc_five
 movups 80(%rdi),%xmm7
 je .Lecb_enc_six
@@ -646,7 +646,7 @@ aesni_ecb_encrypt:
 .align 16
 .Lecb_decrypt:
-cmpq $128,%rdx
+cmpq $0x80,%rdx
 jb .Lecb_dec_tail
 movdqu (%rdi),%xmm2
@@ -658,7 +658,7 @@ aesni_ecb_encrypt:
 movdqu 96(%rdi),%xmm8
 movdqu 112(%rdi),%xmm9
 leaq 128(%rdi),%rdi
-subq $128,%rdx
+subq $0x80,%rdx
 jmp .Lecb_dec_loop8_enter
 .align 16
 .Lecb_dec_loop8:
@@ -687,7 +687,7 @@ aesni_ecb_encrypt:
 call _aesni_decrypt8
 movups (%r11),%xmm0
-subq $128,%rdx
+subq $0x80,%rdx
 jnc .Lecb_dec_loop8
 movups %xmm2,(%rsi)
@@ -709,22 +709,22 @@ aesni_ecb_encrypt:
 movups %xmm9,112(%rsi)
 pxor %xmm9,%xmm9
 leaq 128(%rsi),%rsi
-addq $128,%rdx
+addq $0x80,%rdx
 jz .Lecb_ret
 .Lecb_dec_tail:
 movups (%rdi),%xmm2
-cmpq $32,%rdx
+cmpq $0x20,%rdx
 jb .Lecb_dec_one
 movups 16(%rdi),%xmm3
 je .Lecb_dec_two
 movups 32(%rdi),%xmm4
-cmpq $64,%rdx
+cmpq $0x40,%rdx
 jb .Lecb_dec_three
 movups 48(%rdi),%xmm5
 je .Lecb_dec_four
 movups 64(%rdi),%xmm6
-cmpq $96,%rdx
+cmpq $0x60,%rdx
 jb .Lecb_dec_five
 movups 80(%rdi),%xmm7
 je .Lecb_dec_six
@@ -1598,7 +1598,7 @@ aesni_xts_encrypt:
 movdqa .Lxts_magic(%rip),%xmm8
 movdqa %xmm2,%xmm15
-pshufd $95,%xmm2,%xmm9
+pshufd $0x5f,%xmm2,%xmm9
 pxor %xmm0,%xmm1
 movdqa %xmm9,%xmm14
 paddd %xmm9,%xmm9
@@ -1697,7 +1697,7 @@ aesni_xts_encrypt:
 .byte 102,15,56,220,248
 movups 64(%r11),%xmm0
 movdqa %xmm8,80(%rsp)
-pshufd $95,%xmm15,%xmm9
+pshufd $0x5f,%xmm15,%xmm9
 jmp .Lxts_enc_loop6
 .align 32
 .Lxts_enc_loop6:
@@ -1836,13 +1836,13 @@ aesni_xts_encrypt:
 jz .Lxts_enc_done
 pxor %xmm0,%xmm11
-cmpq $32,%rdx
+cmpq $0x20,%rdx
 jb .Lxts_enc_one
 pxor %xmm0,%xmm12
 je .Lxts_enc_two
 pxor %xmm0,%xmm13
-cmpq $64,%rdx
+cmpq $0x40,%rdx
 jb .Lxts_enc_three
 pxor %xmm0,%xmm14
 je .Lxts_enc_four
@@ -2069,7 +2069,7 @@ aesni_xts_decrypt:
 movdqa .Lxts_magic(%rip),%xmm8
 movdqa %xmm2,%xmm15
-pshufd $95,%xmm2,%xmm9
+pshufd $0x5f,%xmm2,%xmm9
 pxor %xmm0,%xmm1
 movdqa %xmm9,%xmm14
 paddd %xmm9,%xmm9
@@ -2168,7 +2168,7 @@ aesni_xts_decrypt:
 .byte 102,15,56,222,248
 movups 64(%r11),%xmm0
 movdqa %xmm8,80(%rsp)
-pshufd $95,%xmm15,%xmm9
+pshufd $0x5f,%xmm15,%xmm9
 jmp .Lxts_dec_loop6
 .align 32
 .Lxts_dec_loop6:
@@ -2308,13 +2308,13 @@ aesni_xts_decrypt:
 jz .Lxts_dec_done
 pxor %xmm0,%xmm12
-cmpq $32,%rdx
+cmpq $0x20,%rdx