| default rel |
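| ; SHA-1 block transform for x86-64 in NASM syntax, Win64 ABI. This listing |
| ; matches the output of OpenSSL's sha1-x86_64.pl perlasm generator: |
| ; sha1_block_data_order(state, data, nblocks) dispatches at run time to an |
| ; integer-only, an SSSE3 or an AVX implementation. |
| ; The empty XMMWORD/YMMWORD/ZMMWORD defines below make NASM ignore the |
| ; MASM-style operand-size keywords that appear in the memory operands. |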
| %define XMMWORD |
| %define YMMWORD |
| %define ZMMWORD |
| section .text code align=64 |
| |
| EXTERN OPENSSL_ia32cap_P |
| |
| global sha1_block_data_order |
| |
| ALIGN 16 |
| sha1_block_data_order: |
| mov QWORD[8+rsp],rdi ;WIN64 prologue |
| mov QWORD[16+rsp],rsi |
| mov rax,rsp |
| $L$SEH_begin_sha1_block_data_order: |
| mov rdi,rcx |
| mov rsi,rdx |
| mov rdx,r8 |
| |
| |
| |
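| ; Probe OPENSSL_ia32cap_P and pick a code path: bit 9 of word 1 (SSSE3, mask |
| ; 512) is required for either SIMD path; the AVX path additionally needs |
| ; bit 28 of word 1 (AVX, 0x10000000) together with the Intel-CPU marker |
| ; bit 30 of word 0 (0x40000000), i.e. the combined value 0x50000000. |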
| lea r10,[OPENSSL_ia32cap_P] |
| mov r9d,DWORD[r10] |
| mov r8d,DWORD[4+r10] |
| mov r10d,DWORD[8+r10] |
| test r8d,512 |
| jz NEAR $L$ialu |
| and r8d,268435456 |
| and r9d,1073741824 |
| or r8d,r9d |
| cmp r8d,1342177280 |
| je NEAR _avx_shortcut |
| jmp NEAR _ssse3_shortcut |
| |
| ALIGN 16 |
| $L$ialu: |
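| ; Integer-only path: save the callee-saved GPRs, then carve out a 64-byte |
| ; aligned scratch area at rsp for the 16-word message schedule W[0..15]; |
| ; the caller's stack pointer is preserved at [rsp+64]. |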
| mov rax,rsp |
| |
| push rbx |
| |
| push rbp |
| |
| push r12 |
| |
| push r13 |
| |
| push r14 |
| |
| mov r8,rdi |
| sub rsp,72 |
| mov r9,rsi |
| and rsp,-64 |
| mov r10,rdx |
| mov QWORD[64+rsp],rax |
| |
| $L$prologue: |
| |
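| ; Load the chaining state h0..h4 into esi, edi, r11d, r12d, r13d (a..e). |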
| mov esi,DWORD[r8] |
| mov edi,DWORD[4+r8] |
| mov r11d,DWORD[8+r8] |
| mov r12d,DWORD[12+r8] |
| mov r13d,DWORD[16+r8] |
| jmp NEAR $L$loop |
| |
| ALIGN 16 |
| $L$loop: |
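| ; Rounds 0..15: e += rol(a,5) + Ch(b,c,d) + K1 + W[t], with Ch(b,c,d) |
| ; computed as d ^ (b & (c ^ d)) and K1 = 0x5A827999 (1518500249). |
| ; Each round byte-swaps the next big-endian input word and caches it at |
| ; [rsp+4*t]; b is rotated left by 30 and the working variables rotate |
| ; through the register set from round to round. |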
| mov edx,DWORD[r9] |
| bswap edx |
| mov ebp,DWORD[4+r9] |
| mov eax,r12d |
| mov DWORD[rsp],edx |
| mov ecx,esi |
| bswap ebp |
| xor eax,r11d |
| rol ecx,5 |
| and eax,edi |
| lea r13d,[1518500249+r13*1+rdx] |
| add r13d,ecx |
| xor eax,r12d |
| rol edi,30 |
| add r13d,eax |
| mov r14d,DWORD[8+r9] |
| mov eax,r11d |
| mov DWORD[4+rsp],ebp |
| mov ecx,r13d |
| bswap r14d |
| xor eax,edi |
| rol ecx,5 |
| and eax,esi |
| lea r12d,[1518500249+r12*1+rbp] |
| add r12d,ecx |
| xor eax,r11d |
| rol esi,30 |
| add r12d,eax |
| mov edx,DWORD[12+r9] |
| mov eax,edi |
| mov DWORD[8+rsp],r14d |
| mov ecx,r12d |
| bswap edx |
| xor eax,esi |
| rol ecx,5 |
| and eax,r13d |
| lea r11d,[1518500249+r11*1+r14] |
| add r11d,ecx |
| xor eax,edi |
| rol r13d,30 |
| add r11d,eax |
| mov ebp,DWORD[16+r9] |
| mov eax,esi |
| mov DWORD[12+rsp],edx |
| mov ecx,r11d |
| bswap ebp |
| xor eax,r13d |
| rol ecx,5 |
| and eax,r12d |
| lea edi,[1518500249+rdi*1+rdx] |
| add edi,ecx |
| xor eax,esi |
| rol r12d,30 |
| add edi,eax |
| mov r14d,DWORD[20+r9] |
| mov eax,r13d |
| mov DWORD[16+rsp],ebp |
| mov ecx,edi |
| bswap r14d |
| xor eax,r12d |
| rol ecx,5 |
| and eax,r11d |
| lea esi,[1518500249+rsi*1+rbp] |
| add esi,ecx |
| xor eax,r13d |
| rol r11d,30 |
| add esi,eax |
| mov edx,DWORD[24+r9] |
| mov eax,r12d |
| mov DWORD[20+rsp],r14d |
| mov ecx,esi |
| bswap edx |
| xor eax,r11d |
| rol ecx,5 |
| and eax,edi |
| lea r13d,[1518500249+r13*1+r14] |
| add r13d,ecx |
| xor eax,r12d |
| rol edi,30 |
| add r13d,eax |
| mov ebp,DWORD[28+r9] |
| mov eax,r11d |
| mov DWORD[24+rsp],edx |
| mov ecx,r13d |
| bswap ebp |
| xor eax,edi |
| rol ecx,5 |
| and eax,esi |
| lea r12d,[1518500249+r12*1+rdx] |
| add r12d,ecx |
| xor eax,r11d |
| rol esi,30 |
| add r12d,eax |
| mov r14d,DWORD[32+r9] |
| mov eax,edi |
| mov DWORD[28+rsp],ebp |
| mov ecx,r12d |
| bswap r14d |
| xor eax,esi |
| rol ecx,5 |
| and eax,r13d |
| lea r11d,[1518500249+r11*1+rbp] |
| add r11d,ecx |
| xor eax,edi |
| rol r13d,30 |
| add r11d,eax |
| mov edx,DWORD[36+r9] |
| mov eax,esi |
| mov DWORD[32+rsp],r14d |
| mov ecx,r11d |
| bswap edx |
| xor eax,r13d |
| rol ecx,5 |
| and eax,r12d |
| lea edi,[1518500249+rdi*1+r14] |
| add edi,ecx |
| xor eax,esi |
| rol r12d,30 |
| add edi,eax |
| mov ebp,DWORD[40+r9] |
| mov eax,r13d |
| mov DWORD[36+rsp],edx |
| mov ecx,edi |
| bswap ebp |
| xor eax,r12d |
| rol ecx,5 |
| and eax,r11d |
| lea esi,[1518500249+rsi*1+rdx] |
| add esi,ecx |
| xor eax,r13d |
| rol r11d,30 |
| add esi,eax |
| mov r14d,DWORD[44+r9] |
| mov eax,r12d |
| mov DWORD[40+rsp],ebp |
| mov ecx,esi |
| bswap r14d |
| xor eax,r11d |
| rol ecx,5 |
| and eax,edi |
| lea r13d,[1518500249+r13*1+rbp] |
| add r13d,ecx |
| xor eax,r12d |
| rol edi,30 |
| add r13d,eax |
| mov edx,DWORD[48+r9] |
| mov eax,r11d |
| mov DWORD[44+rsp],r14d |
| mov ecx,r13d |
| bswap edx |
| xor eax,edi |
| rol ecx,5 |
| and eax,esi |
| lea r12d,[1518500249+r12*1+r14] |
| add r12d,ecx |
| xor eax,r11d |
| rol esi,30 |
| add r12d,eax |
| mov ebp,DWORD[52+r9] |
| mov eax,edi |
| mov DWORD[48+rsp],edx |
| mov ecx,r12d |
| bswap ebp |
| xor eax,esi |
| rol ecx,5 |
| and eax,r13d |
| lea r11d,[1518500249+r11*1+rdx] |
| add r11d,ecx |
| xor eax,edi |
| rol r13d,30 |
| add r11d,eax |
| mov r14d,DWORD[56+r9] |
| mov eax,esi |
| mov DWORD[52+rsp],ebp |
| mov ecx,r11d |
| bswap r14d |
| xor eax,r13d |
| rol ecx,5 |
| and eax,r12d |
| lea edi,[1518500249+rdi*1+rbp] |
| add edi,ecx |
| xor eax,esi |
| rol r12d,30 |
| add edi,eax |
| mov edx,DWORD[60+r9] |
| mov eax,r13d |
| mov DWORD[56+rsp],r14d |
| mov ecx,edi |
| bswap edx |
| xor eax,r12d |
| rol ecx,5 |
| and eax,r11d |
| lea esi,[1518500249+rsi*1+r14] |
| add esi,ecx |
| xor eax,r13d |
| rol r11d,30 |
| add esi,eax |
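| ; From here on the per-round prefetch switches from loading input words to |
| ; expanding the schedule in place: W[t] = rol(W[t-3]^W[t-8]^W[t-14]^W[t-16],1), |
| ; kept in the 16-word ring buffer at [rsp]. |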
| xor ebp,DWORD[rsp] |
| mov eax,r12d |
| mov DWORD[60+rsp],edx |
| mov ecx,esi |
| xor ebp,DWORD[8+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor ebp,DWORD[32+rsp] |
| and eax,edi |
| lea r13d,[1518500249+r13*1+rdx] |
| rol edi,30 |
| xor eax,r12d |
| add r13d,ecx |
| rol ebp,1 |
| add r13d,eax |
| xor r14d,DWORD[4+rsp] |
| mov eax,r11d |
| mov DWORD[rsp],ebp |
| mov ecx,r13d |
| xor r14d,DWORD[12+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor r14d,DWORD[36+rsp] |
| and eax,esi |
| lea r12d,[1518500249+r12*1+rbp] |
| rol esi,30 |
| xor eax,r11d |
| add r12d,ecx |
| rol r14d,1 |
| add r12d,eax |
| xor edx,DWORD[8+rsp] |
| mov eax,edi |
| mov DWORD[4+rsp],r14d |
| mov ecx,r12d |
| xor edx,DWORD[16+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor edx,DWORD[40+rsp] |
| and eax,r13d |
| lea r11d,[1518500249+r11*1+r14] |
| rol r13d,30 |
| xor eax,edi |
| add r11d,ecx |
| rol edx,1 |
| add r11d,eax |
| xor ebp,DWORD[12+rsp] |
| mov eax,esi |
| mov DWORD[8+rsp],edx |
| mov ecx,r11d |
| xor ebp,DWORD[20+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor ebp,DWORD[44+rsp] |
| and eax,r12d |
| lea edi,[1518500249+rdi*1+rdx] |
| rol r12d,30 |
| xor eax,esi |
| add edi,ecx |
| rol ebp,1 |
| add edi,eax |
| xor r14d,DWORD[16+rsp] |
| mov eax,r13d |
| mov DWORD[12+rsp],ebp |
| mov ecx,edi |
| xor r14d,DWORD[24+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor r14d,DWORD[48+rsp] |
| and eax,r11d |
| lea esi,[1518500249+rsi*1+rbp] |
| rol r11d,30 |
| xor eax,r13d |
| add esi,ecx |
| rol r14d,1 |
| add esi,eax |
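| ; Rounds 20..39: F = Parity(b,c,d) = b ^ c ^ d, K2 = 0x6ED9EBA1 (1859775393). |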
| xor edx,DWORD[20+rsp] |
| mov eax,edi |
| mov DWORD[16+rsp],r14d |
| mov ecx,esi |
| xor edx,DWORD[28+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor edx,DWORD[52+rsp] |
| lea r13d,[1859775393+r13*1+r14] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol edx,1 |
| xor ebp,DWORD[24+rsp] |
| mov eax,esi |
| mov DWORD[20+rsp],edx |
| mov ecx,r13d |
| xor ebp,DWORD[32+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor ebp,DWORD[56+rsp] |
| lea r12d,[1859775393+r12*1+rdx] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol ebp,1 |
| xor r14d,DWORD[28+rsp] |
| mov eax,r13d |
| mov DWORD[24+rsp],ebp |
| mov ecx,r12d |
| xor r14d,DWORD[36+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor r14d,DWORD[60+rsp] |
| lea r11d,[1859775393+r11*1+rbp] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol r14d,1 |
| xor edx,DWORD[32+rsp] |
| mov eax,r12d |
| mov DWORD[28+rsp],r14d |
| mov ecx,r11d |
| xor edx,DWORD[40+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor edx,DWORD[rsp] |
| lea edi,[1859775393+rdi*1+r14] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol edx,1 |
| xor ebp,DWORD[36+rsp] |
| mov eax,r11d |
| mov DWORD[32+rsp],edx |
| mov ecx,edi |
| xor ebp,DWORD[44+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor ebp,DWORD[4+rsp] |
| lea esi,[1859775393+rsi*1+rdx] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol ebp,1 |
| xor r14d,DWORD[40+rsp] |
| mov eax,edi |
| mov DWORD[36+rsp],ebp |
| mov ecx,esi |
| xor r14d,DWORD[48+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor r14d,DWORD[8+rsp] |
| lea r13d,[1859775393+r13*1+rbp] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol r14d,1 |
| xor edx,DWORD[44+rsp] |
| mov eax,esi |
| mov DWORD[40+rsp],r14d |
| mov ecx,r13d |
| xor edx,DWORD[52+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor edx,DWORD[12+rsp] |
| lea r12d,[1859775393+r12*1+r14] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol edx,1 |
| xor ebp,DWORD[48+rsp] |
| mov eax,r13d |
| mov DWORD[44+rsp],edx |
| mov ecx,r12d |
| xor ebp,DWORD[56+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor ebp,DWORD[16+rsp] |
| lea r11d,[1859775393+r11*1+rdx] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol ebp,1 |
| xor r14d,DWORD[52+rsp] |
| mov eax,r12d |
| mov DWORD[48+rsp],ebp |
| mov ecx,r11d |
| xor r14d,DWORD[60+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor r14d,DWORD[20+rsp] |
| lea edi,[1859775393+rdi*1+rbp] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol r14d,1 |
| xor edx,DWORD[56+rsp] |
| mov eax,r11d |
| mov DWORD[52+rsp],r14d |
| mov ecx,edi |
| xor edx,DWORD[rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor edx,DWORD[24+rsp] |
| lea esi,[1859775393+rsi*1+r14] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol edx,1 |
| xor ebp,DWORD[60+rsp] |
| mov eax,edi |
| mov DWORD[56+rsp],edx |
| mov ecx,esi |
| xor ebp,DWORD[4+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor ebp,DWORD[28+rsp] |
| lea r13d,[1859775393+r13*1+rdx] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol ebp,1 |
| xor r14d,DWORD[rsp] |
| mov eax,esi |
| mov DWORD[60+rsp],ebp |
| mov ecx,r13d |
| xor r14d,DWORD[8+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor r14d,DWORD[32+rsp] |
| lea r12d,[1859775393+r12*1+rbp] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol r14d,1 |
| xor edx,DWORD[4+rsp] |
| mov eax,r13d |
| mov DWORD[rsp],r14d |
| mov ecx,r12d |
| xor edx,DWORD[12+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor edx,DWORD[36+rsp] |
| lea r11d,[1859775393+r11*1+r14] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol edx,1 |
| xor ebp,DWORD[8+rsp] |
| mov eax,r12d |
| mov DWORD[4+rsp],edx |
| mov ecx,r11d |
| xor ebp,DWORD[16+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor ebp,DWORD[40+rsp] |
| lea edi,[1859775393+rdi*1+rdx] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol ebp,1 |
| xor r14d,DWORD[12+rsp] |
| mov eax,r11d |
| mov DWORD[8+rsp],ebp |
| mov ecx,edi |
| xor r14d,DWORD[20+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor r14d,DWORD[44+rsp] |
| lea esi,[1859775393+rsi*1+rbp] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol r14d,1 |
| xor edx,DWORD[16+rsp] |
| mov eax,edi |
| mov DWORD[12+rsp],r14d |
| mov ecx,esi |
| xor edx,DWORD[24+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor edx,DWORD[48+rsp] |
| lea r13d,[1859775393+r13*1+r14] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol edx,1 |
| xor ebp,DWORD[20+rsp] |
| mov eax,esi |
| mov DWORD[16+rsp],edx |
| mov ecx,r13d |
| xor ebp,DWORD[28+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor ebp,DWORD[52+rsp] |
| lea r12d,[1859775393+r12*1+rdx] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol ebp,1 |
| xor r14d,DWORD[24+rsp] |
| mov eax,r13d |
| mov DWORD[20+rsp],ebp |
| mov ecx,r12d |
| xor r14d,DWORD[32+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor r14d,DWORD[56+rsp] |
| lea r11d,[1859775393+r11*1+rbp] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol r14d,1 |
| xor edx,DWORD[28+rsp] |
| mov eax,r12d |
| mov DWORD[24+rsp],r14d |
| mov ecx,r11d |
| xor edx,DWORD[36+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor edx,DWORD[60+rsp] |
| lea edi,[1859775393+rdi*1+r14] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol edx,1 |
| xor ebp,DWORD[32+rsp] |
| mov eax,r11d |
| mov DWORD[28+rsp],edx |
| mov ecx,edi |
| xor ebp,DWORD[40+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor ebp,DWORD[rsp] |
| lea esi,[1859775393+rsi*1+rdx] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol ebp,1 |
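| ; Rounds 40..59: F = Maj(b,c,d), computed as (c & d) + (b & (c ^ d)); |
| ; K3 = 0x8F1BBCDC (-1894007588). ebx serves as a second scratch register |
| ; for the two halves of Maj. |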
| xor r14d,DWORD[36+rsp] |
| mov eax,r12d |
| mov DWORD[32+rsp],ebp |
| mov ebx,r12d |
| xor r14d,DWORD[44+rsp] |
| and eax,r11d |
| mov ecx,esi |
| xor r14d,DWORD[4+rsp] |
| lea r13d,[((-1894007588))+r13*1+rbp] |
| xor ebx,r11d |
| rol ecx,5 |
| add r13d,eax |
| rol r14d,1 |
| and ebx,edi |
| add r13d,ecx |
| rol edi,30 |
| add r13d,ebx |
| xor edx,DWORD[40+rsp] |
| mov eax,r11d |
| mov DWORD[36+rsp],r14d |
| mov ebx,r11d |
| xor edx,DWORD[48+rsp] |
| and eax,edi |
| mov ecx,r13d |
| xor edx,DWORD[8+rsp] |
| lea r12d,[((-1894007588))+r12*1+r14] |
| xor ebx,edi |
| rol ecx,5 |
| add r12d,eax |
| rol edx,1 |
| and ebx,esi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,ebx |
| xor ebp,DWORD[44+rsp] |
| mov eax,edi |
| mov DWORD[40+rsp],edx |
| mov ebx,edi |
| xor ebp,DWORD[52+rsp] |
| and eax,esi |
| mov ecx,r12d |
| xor ebp,DWORD[12+rsp] |
| lea r11d,[((-1894007588))+r11*1+rdx] |
| xor ebx,esi |
| rol ecx,5 |
| add r11d,eax |
| rol ebp,1 |
| and ebx,r13d |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,ebx |
| xor r14d,DWORD[48+rsp] |
| mov eax,esi |
| mov DWORD[44+rsp],ebp |
| mov ebx,esi |
| xor r14d,DWORD[56+rsp] |
| and eax,r13d |
| mov ecx,r11d |
| xor r14d,DWORD[16+rsp] |
| lea edi,[((-1894007588))+rdi*1+rbp] |
| xor ebx,r13d |
| rol ecx,5 |
| add edi,eax |
| rol r14d,1 |
| and ebx,r12d |
| add edi,ecx |
| rol r12d,30 |
| add edi,ebx |
| xor edx,DWORD[52+rsp] |
| mov eax,r13d |
| mov DWORD[48+rsp],r14d |
| mov ebx,r13d |
| xor edx,DWORD[60+rsp] |
| and eax,r12d |
| mov ecx,edi |
| xor edx,DWORD[20+rsp] |
| lea esi,[((-1894007588))+rsi*1+r14] |
| xor ebx,r12d |
| rol ecx,5 |
| add esi,eax |
| rol edx,1 |
| and ebx,r11d |
| add esi,ecx |
| rol r11d,30 |
| add esi,ebx |
| xor ebp,DWORD[56+rsp] |
| mov eax,r12d |
| mov DWORD[52+rsp],edx |
| mov ebx,r12d |
| xor ebp,DWORD[rsp] |
| and eax,r11d |
| mov ecx,esi |
| xor ebp,DWORD[24+rsp] |
| lea r13d,[((-1894007588))+r13*1+rdx] |
| xor ebx,r11d |
| rol ecx,5 |
| add r13d,eax |
| rol ebp,1 |
| and ebx,edi |
| add r13d,ecx |
| rol edi,30 |
| add r13d,ebx |
| xor r14d,DWORD[60+rsp] |
| mov eax,r11d |
| mov DWORD[56+rsp],ebp |
| mov ebx,r11d |
| xor r14d,DWORD[4+rsp] |
| and eax,edi |
| mov ecx,r13d |
| xor r14d,DWORD[28+rsp] |
| lea r12d,[((-1894007588))+r12*1+rbp] |
| xor ebx,edi |
| rol ecx,5 |
| add r12d,eax |
| rol r14d,1 |
| and ebx,esi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,ebx |
| xor edx,DWORD[rsp] |
| mov eax,edi |
| mov DWORD[60+rsp],r14d |
| mov ebx,edi |
| xor edx,DWORD[8+rsp] |
| and eax,esi |
| mov ecx,r12d |
| xor edx,DWORD[32+rsp] |
| lea r11d,[((-1894007588))+r11*1+r14] |
| xor ebx,esi |
| rol ecx,5 |
| add r11d,eax |
| rol edx,1 |
| and ebx,r13d |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,ebx |
| xor ebp,DWORD[4+rsp] |
| mov eax,esi |
| mov DWORD[rsp],edx |
| mov ebx,esi |
| xor ebp,DWORD[12+rsp] |
| and eax,r13d |
| mov ecx,r11d |
| xor ebp,DWORD[36+rsp] |
| lea edi,[((-1894007588))+rdi*1+rdx] |
| xor ebx,r13d |
| rol ecx,5 |
| add edi,eax |
| rol ebp,1 |
| and ebx,r12d |
| add edi,ecx |
| rol r12d,30 |
| add edi,ebx |
| xor r14d,DWORD[8+rsp] |
| mov eax,r13d |
| mov DWORD[4+rsp],ebp |
| mov ebx,r13d |
| xor r14d,DWORD[16+rsp] |
| and eax,r12d |
| mov ecx,edi |
| xor r14d,DWORD[40+rsp] |
| lea esi,[((-1894007588))+rsi*1+rbp] |
| xor ebx,r12d |
| rol ecx,5 |
| add esi,eax |
| rol r14d,1 |
| and ebx,r11d |
| add esi,ecx |
| rol r11d,30 |
| add esi,ebx |
| xor edx,DWORD[12+rsp] |
| mov eax,r12d |
| mov DWORD[8+rsp],r14d |
| mov ebx,r12d |
| xor edx,DWORD[20+rsp] |
| and eax,r11d |
| mov ecx,esi |
| xor edx,DWORD[44+rsp] |
| lea r13d,[((-1894007588))+r13*1+r14] |
| xor ebx,r11d |
| rol ecx,5 |
| add r13d,eax |
| rol edx,1 |
| and ebx,edi |
| add r13d,ecx |
| rol edi,30 |
| add r13d,ebx |
| xor ebp,DWORD[16+rsp] |
| mov eax,r11d |
| mov DWORD[12+rsp],edx |
| mov ebx,r11d |
| xor ebp,DWORD[24+rsp] |
| and eax,edi |
| mov ecx,r13d |
| xor ebp,DWORD[48+rsp] |
| lea r12d,[((-1894007588))+r12*1+rdx] |
| xor ebx,edi |
| rol ecx,5 |
| add r12d,eax |
| rol ebp,1 |
| and ebx,esi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,ebx |
| xor r14d,DWORD[20+rsp] |
| mov eax,edi |
| mov DWORD[16+rsp],ebp |
| mov ebx,edi |
| xor r14d,DWORD[28+rsp] |
| and eax,esi |
| mov ecx,r12d |
| xor r14d,DWORD[52+rsp] |
| lea r11d,[((-1894007588))+r11*1+rbp] |
| xor ebx,esi |
| rol ecx,5 |
| add r11d,eax |
| rol r14d,1 |
| and ebx,r13d |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,ebx |
| xor edx,DWORD[24+rsp] |
| mov eax,esi |
| mov DWORD[20+rsp],r14d |
| mov ebx,esi |
| xor edx,DWORD[32+rsp] |
| and eax,r13d |
| mov ecx,r11d |
| xor edx,DWORD[56+rsp] |
| lea edi,[((-1894007588))+rdi*1+r14] |
| xor ebx,r13d |
| rol ecx,5 |
| add edi,eax |
| rol edx,1 |
| and ebx,r12d |
| add edi,ecx |
| rol r12d,30 |
| add edi,ebx |
| xor ebp,DWORD[28+rsp] |
| mov eax,r13d |
| mov DWORD[24+rsp],edx |
| mov ebx,r13d |
| xor ebp,DWORD[36+rsp] |
| and eax,r12d |
| mov ecx,edi |
| xor ebp,DWORD[60+rsp] |
| lea esi,[((-1894007588))+rsi*1+rdx] |
| xor ebx,r12d |
| rol ecx,5 |
| add esi,eax |
| rol ebp,1 |
| and ebx,r11d |
| add esi,ecx |
| rol r11d,30 |
| add esi,ebx |
| xor r14d,DWORD[32+rsp] |
| mov eax,r12d |
| mov DWORD[28+rsp],ebp |
| mov ebx,r12d |
| xor r14d,DWORD[40+rsp] |
| and eax,r11d |
| mov ecx,esi |
| xor r14d,DWORD[rsp] |
| lea r13d,[((-1894007588))+r13*1+rbp] |
| xor ebx,r11d |
| rol ecx,5 |
| add r13d,eax |
| rol r14d,1 |
| and ebx,edi |
| add r13d,ecx |
| rol edi,30 |
| add r13d,ebx |
| xor edx,DWORD[36+rsp] |
| mov eax,r11d |
| mov DWORD[32+rsp],r14d |
| mov ebx,r11d |
| xor edx,DWORD[44+rsp] |
| and eax,edi |
| mov ecx,r13d |
| xor edx,DWORD[4+rsp] |
| lea r12d,[((-1894007588))+r12*1+r14] |
| xor ebx,edi |
| rol ecx,5 |
| add r12d,eax |
| rol edx,1 |
| and ebx,esi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,ebx |
| xor ebp,DWORD[40+rsp] |
| mov eax,edi |
| mov DWORD[36+rsp],edx |
| mov ebx,edi |
| xor ebp,DWORD[48+rsp] |
| and eax,esi |
| mov ecx,r12d |
| xor ebp,DWORD[8+rsp] |
| lea r11d,[((-1894007588))+r11*1+rdx] |
| xor ebx,esi |
| rol ecx,5 |
| add r11d,eax |
| rol ebp,1 |
| and ebx,r13d |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,ebx |
| xor r14d,DWORD[44+rsp] |
| mov eax,esi |
| mov DWORD[40+rsp],ebp |
| mov ebx,esi |
| xor r14d,DWORD[52+rsp] |
| and eax,r13d |
| mov ecx,r11d |
| xor r14d,DWORD[12+rsp] |
| lea edi,[((-1894007588))+rdi*1+rbp] |
| xor ebx,r13d |
| rol ecx,5 |
| add edi,eax |
| rol r14d,1 |
| and ebx,r12d |
| add edi,ecx |
| rol r12d,30 |
| add edi,ebx |
| xor edx,DWORD[48+rsp] |
| mov eax,r13d |
| mov DWORD[44+rsp],r14d |
| mov ebx,r13d |
| xor edx,DWORD[56+rsp] |
| and eax,r12d |
| mov ecx,edi |
| xor edx,DWORD[16+rsp] |
| lea esi,[((-1894007588))+rsi*1+r14] |
| xor ebx,r12d |
| rol ecx,5 |
| add esi,eax |
| rol edx,1 |
| and ebx,r11d |
| add esi,ecx |
| rol r11d,30 |
| add esi,ebx |
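| ; Rounds 60..79: F = Parity again, K4 = 0xCA62C1D6 (-899497514). The last |
| ; few expanded schedule words are not stored back, since they are never |
| ; read again (hence the missing stores near the end of this group). |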
| xor ebp,DWORD[52+rsp] |
| mov eax,edi |
| mov DWORD[48+rsp],edx |
| mov ecx,esi |
| xor ebp,DWORD[60+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor ebp,DWORD[20+rsp] |
| lea r13d,[((-899497514))+r13*1+rdx] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol ebp,1 |
| xor r14d,DWORD[56+rsp] |
| mov eax,esi |
| mov DWORD[52+rsp],ebp |
| mov ecx,r13d |
| xor r14d,DWORD[rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor r14d,DWORD[24+rsp] |
| lea r12d,[((-899497514))+r12*1+rbp] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol r14d,1 |
| xor edx,DWORD[60+rsp] |
| mov eax,r13d |
| mov DWORD[56+rsp],r14d |
| mov ecx,r12d |
| xor edx,DWORD[4+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor edx,DWORD[28+rsp] |
| lea r11d,[((-899497514))+r11*1+r14] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol edx,1 |
| xor ebp,DWORD[rsp] |
| mov eax,r12d |
| mov DWORD[60+rsp],edx |
| mov ecx,r11d |
| xor ebp,DWORD[8+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor ebp,DWORD[32+rsp] |
| lea edi,[((-899497514))+rdi*1+rdx] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol ebp,1 |
| xor r14d,DWORD[4+rsp] |
| mov eax,r11d |
| mov DWORD[rsp],ebp |
| mov ecx,edi |
| xor r14d,DWORD[12+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor r14d,DWORD[36+rsp] |
| lea esi,[((-899497514))+rsi*1+rbp] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol r14d,1 |
| xor edx,DWORD[8+rsp] |
| mov eax,edi |
| mov DWORD[4+rsp],r14d |
| mov ecx,esi |
| xor edx,DWORD[16+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor edx,DWORD[40+rsp] |
| lea r13d,[((-899497514))+r13*1+r14] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol edx,1 |
| xor ebp,DWORD[12+rsp] |
| mov eax,esi |
| mov DWORD[8+rsp],edx |
| mov ecx,r13d |
| xor ebp,DWORD[20+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor ebp,DWORD[44+rsp] |
| lea r12d,[((-899497514))+r12*1+rdx] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol ebp,1 |
| xor r14d,DWORD[16+rsp] |
| mov eax,r13d |
| mov DWORD[12+rsp],ebp |
| mov ecx,r12d |
| xor r14d,DWORD[24+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor r14d,DWORD[48+rsp] |
| lea r11d,[((-899497514))+r11*1+rbp] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol r14d,1 |
| xor edx,DWORD[20+rsp] |
| mov eax,r12d |
| mov DWORD[16+rsp],r14d |
| mov ecx,r11d |
| xor edx,DWORD[28+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor edx,DWORD[52+rsp] |
| lea edi,[((-899497514))+rdi*1+r14] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol edx,1 |
| xor ebp,DWORD[24+rsp] |
| mov eax,r11d |
| mov DWORD[20+rsp],edx |
| mov ecx,edi |
| xor ebp,DWORD[32+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor ebp,DWORD[56+rsp] |
| lea esi,[((-899497514))+rsi*1+rdx] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol ebp,1 |
| xor r14d,DWORD[28+rsp] |
| mov eax,edi |
| mov DWORD[24+rsp],ebp |
| mov ecx,esi |
| xor r14d,DWORD[36+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor r14d,DWORD[60+rsp] |
| lea r13d,[((-899497514))+r13*1+rbp] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol r14d,1 |
| xor edx,DWORD[32+rsp] |
| mov eax,esi |
| mov DWORD[28+rsp],r14d |
| mov ecx,r13d |
| xor edx,DWORD[40+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor edx,DWORD[rsp] |
| lea r12d,[((-899497514))+r12*1+r14] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol edx,1 |
| xor ebp,DWORD[36+rsp] |
| mov eax,r13d |
| |
| mov ecx,r12d |
| xor ebp,DWORD[44+rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor ebp,DWORD[4+rsp] |
| lea r11d,[((-899497514))+r11*1+rdx] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol ebp,1 |
| xor r14d,DWORD[40+rsp] |
| mov eax,r12d |
| |
| mov ecx,r11d |
| xor r14d,DWORD[48+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor r14d,DWORD[8+rsp] |
| lea edi,[((-899497514))+rdi*1+rbp] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol r14d,1 |
| xor edx,DWORD[44+rsp] |
| mov eax,r11d |
| |
| mov ecx,edi |
| xor edx,DWORD[52+rsp] |
| xor eax,r13d |
| rol ecx,5 |
| xor edx,DWORD[12+rsp] |
| lea esi,[((-899497514))+rsi*1+r14] |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
| rol edx,1 |
| xor ebp,DWORD[48+rsp] |
| mov eax,edi |
| |
| mov ecx,esi |
| xor ebp,DWORD[56+rsp] |
| xor eax,r12d |
| rol ecx,5 |
| xor ebp,DWORD[16+rsp] |
| lea r13d,[((-899497514))+r13*1+rdx] |
| xor eax,r11d |
| add r13d,ecx |
| rol edi,30 |
| add r13d,eax |
| rol ebp,1 |
| xor r14d,DWORD[52+rsp] |
| mov eax,esi |
| |
| mov ecx,r13d |
| xor r14d,DWORD[60+rsp] |
| xor eax,r11d |
| rol ecx,5 |
| xor r14d,DWORD[20+rsp] |
| lea r12d,[((-899497514))+r12*1+rbp] |
| xor eax,edi |
| add r12d,ecx |
| rol esi,30 |
| add r12d,eax |
| rol r14d,1 |
| xor edx,DWORD[56+rsp] |
| mov eax,r13d |
| |
| mov ecx,r12d |
| xor edx,DWORD[rsp] |
| xor eax,edi |
| rol ecx,5 |
| xor edx,DWORD[24+rsp] |
| lea r11d,[((-899497514))+r11*1+r14] |
| xor eax,esi |
| add r11d,ecx |
| rol r13d,30 |
| add r11d,eax |
| rol edx,1 |
| xor ebp,DWORD[60+rsp] |
| mov eax,r12d |
| |
| mov ecx,r11d |
| xor ebp,DWORD[4+rsp] |
| xor eax,esi |
| rol ecx,5 |
| xor ebp,DWORD[28+rsp] |
| lea edi,[((-899497514))+rdi*1+rdx] |
| xor eax,r13d |
| add edi,ecx |
| rol r12d,30 |
| add edi,eax |
| rol ebp,1 |
| mov eax,r11d |
| mov ecx,edi |
| xor eax,r13d |
| lea esi,[((-899497514))+rsi*1+rbp] |
| rol ecx,5 |
| xor eax,r12d |
| add esi,ecx |
| rol r11d,30 |
| add esi,eax |
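| ; Round 79 done: fold the working variables back into the hash state at [r8]. |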
| add esi,DWORD[r8] |
| add edi,DWORD[4+r8] |
| add r11d,DWORD[8+r8] |
| add r12d,DWORD[12+r8] |
| add r13d,DWORD[16+r8] |
| mov DWORD[r8],esi |
| mov DWORD[4+r8],edi |
| mov DWORD[8+r8],r11d |
| mov DWORD[12+r8],r12d |
| mov DWORD[16+r8],r13d |
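| ; Decrement the block counter, advance the data pointer by 64 bytes and loop |
| ; while blocks remain; afterwards restore the callee-saved registers and the |
| ; original stack pointer saved at [rsp+64]. |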
| |
| sub r10,1 |
| lea r9,[64+r9] |
| jnz NEAR $L$loop |
| |
| mov rsi,QWORD[64+rsp] |
| |
| mov r14,QWORD[((-40))+rsi] |
| |
| mov r13,QWORD[((-32))+rsi] |
| |
| mov r12,QWORD[((-24))+rsi] |
| |
| mov rbp,QWORD[((-16))+rsi] |
| |
| mov rbx,QWORD[((-8))+rsi] |
| |
| lea rsp,[rsi] |
| |
| $L$epilogue: |
| mov rdi,QWORD[8+rsp] ;WIN64 epilogue |
| mov rsi,QWORD[16+rsp] |
| DB 0F3h,0C3h ;repret |
| |
| $L$SEH_end_sha1_block_data_order: |
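| ; SSSE3 path. The message schedule is expanded four words at a time in XMM |
| ; registers (punpcklqdq/pxor/shift sequences implementing the 1-bit rotate), |
| ; interleaved with the scalar rounds; W[t]+K is parked on the stack so each |
| ; round needs only a single memory add. |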
| |
| ALIGN 16 |
| sha1_block_data_order_ssse3: |
| mov QWORD[8+rsp],rdi ;WIN64 prologue |
| mov QWORD[16+rsp],rsi |
| mov rax,rsp |
| $L$SEH_begin_sha1_block_data_order_ssse3: |
| mov rdi,rcx |
| mov rsi,rdx |
| mov rdx,r8 |
| |
| |
| _ssse3_shortcut: |
| |
| mov r11,rsp |
| |
| push rbx |
| |
| push rbp |
| |
| push r12 |
| |
| push r13 |
| |
| push r14 |
| |
| lea rsp,[((-160))+rsp] |
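| ; xmm6..xmm11 are callee-saved under the Win64 ABI, so spill them into the |
| ; 160-byte frame just reserved, addressed relative to r11 (the saved rsp). |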
| movaps XMMWORD[(-40-96)+r11],xmm6 |
| movaps XMMWORD[(-40-80)+r11],xmm7 |
| movaps XMMWORD[(-40-64)+r11],xmm8 |
| movaps XMMWORD[(-40-48)+r11],xmm9 |
| movaps XMMWORD[(-40-32)+r11],xmm10 |
| movaps XMMWORD[(-40-16)+r11],xmm11 |
| $L$prologue_ssse3: |
| and rsp,-64 |
| mov r8,rdi |
| mov r9,rsi |
| mov r10,rdx |
| |
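| ; r10 = data + nblocks*64 (end-of-input pointer). r14 points 64 bytes into |
| ; the K_XX_XX constant table, so the four round constants sit at -64, -32, |
| ; 0 and +32 bytes and the byte-swap mask at +64. |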
| shl r10,6 |
| add r10,r9 |
| lea r14,[((K_XX_XX+64))] |
| |
| mov eax,DWORD[r8] |
| mov ebx,DWORD[4+r8] |
| mov ecx,DWORD[8+r8] |
| mov edx,DWORD[12+r8] |
| mov esi,ebx |
| mov ebp,DWORD[16+r8] |
| mov edi,ecx |
| xor edi,edx |
| and esi,edi |
| |
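| ; Load the byte-swap mask (xmm6) and K_00_19 (xmm9), read the first 64-byte |
| ; block, byte-swap it, and stash W[0..11]+K on the stack; psubd undoes the |
| ; pre-add so the raw words stay in xmm0..xmm2 for the schedule expansion. |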
| movdqa xmm6,XMMWORD[64+r14] |
| movdqa xmm9,XMMWORD[((-64))+r14] |
| movdqu xmm0,XMMWORD[r9] |
| movdqu xmm1,XMMWORD[16+r9] |
| movdqu xmm2,XMMWORD[32+r9] |
| movdqu xmm3,XMMWORD[48+r9] |
| DB 102,15,56,0,198 ; pshufb xmm0,xmm6 (byte-encoded) |
| DB 102,15,56,0,206 ; pshufb xmm1,xmm6 |
| DB 102,15,56,0,214 ; pshufb xmm2,xmm6 |
| add r9,64 |
| paddd xmm0,xmm9 |
| DB 102,15,56,0,222 ; pshufb xmm3,xmm6 |
| paddd xmm1,xmm9 |
| paddd xmm2,xmm9 |
| movdqa XMMWORD[rsp],xmm0 |
| psubd xmm0,xmm9 |
| movdqa XMMWORD[16+rsp],xmm1 |
| psubd xmm1,xmm9 |
| movdqa XMMWORD[32+rsp],xmm2 |
| psubd xmm2,xmm9 |
| jmp NEAR $L$oop_ssse3 |
| ALIGN 16 |
| $L$oop_ssse3: |
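| ; Each pass through this loop runs the 80 rounds for one block, with the |
| ; vectorised schedule updates interleaved; the tail either byte-swaps the |
| ; next block in flight or falls through to $L$done_ssse3. |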
| ror ebx,2 |
| pshufd xmm4,xmm0,238 |
| xor esi,edx |
| movdqa xmm8,xmm3 |
| paddd xmm9,xmm3 |
| mov edi,eax |
| add ebp,DWORD[rsp] |
| punpcklqdq xmm4,xmm1 |
| xor ebx,ecx |
| rol eax,5 |
| add ebp,esi |
| psrldq xmm8,4 |
| and edi,ebx |
| xor ebx,ecx |
| pxor xmm4,xmm0 |
| add ebp,eax |
| ror eax,7 |
| pxor xmm8,xmm2 |
| xor edi,ecx |
| mov esi,ebp |
| add edx,DWORD[4+rsp] |
| pxor xmm4,xmm8 |
| xor eax,ebx |
| rol ebp,5 |
| movdqa XMMWORD[48+rsp],xmm9 |
| add edx,edi |
| and esi,eax |
| movdqa xmm10,xmm4 |
| xor eax,ebx |
| add edx,ebp |
| ror ebp,7 |
| movdqa xmm8,xmm4 |
| xor esi,ebx |
| pslldq xmm10,12 |
| paddd xmm4,xmm4 |
| mov edi,edx |
| add ecx,DWORD[8+rsp] |
| psrld xmm8,31 |
| xor ebp,eax |
| rol edx,5 |
| add ecx,esi |
| movdqa xmm9,xmm10 |
| and edi,ebp |
| xor ebp,eax |
| psrld xmm10,30 |
| add ecx,edx |
| ror edx,7 |
| por xmm4,xmm8 |
| xor edi,eax |
| mov esi,ecx |
| add ebx,DWORD[12+rsp] |
| pslld xmm9,2 |
| pxor xmm4,xmm10 |
| xor edx,ebp |
| movdqa xmm10,XMMWORD[((-64))+r14] |
| rol ecx,5 |
| add ebx,edi |
| and esi,edx |
| pxor xmm4,xmm9 |
| xor edx,ebp |
| add ebx,ecx |
| ror ecx,7 |
| pshufd xmm5,xmm1,238 |
| xor esi,ebp |
| movdqa xmm9,xmm4 |
| paddd xmm10,xmm4 |
| mov edi,ebx |
| add eax,DWORD[16+rsp] |
| punpcklqdq xmm5,xmm2 |
| xor ecx,edx |
| rol ebx,5 |
| add eax,esi |
| psrldq xmm9,4 |
| and edi,ecx |
| xor ecx,edx |
| pxor xmm5,xmm1 |
| add eax,ebx |
| ror ebx,7 |
| pxor xmm9,xmm3 |
| xor edi,edx |
| mov esi,eax |
| add ebp,DWORD[20+rsp] |
| pxor xmm5,xmm9 |
| xor ebx,ecx |
| rol eax,5 |
| movdqa XMMWORD[rsp],xmm10 |
| add ebp,edi |
| and esi,ebx |
| movdqa xmm8,xmm5 |
| xor ebx,ecx |
| add ebp,eax |
| ror eax,7 |
| movdqa xmm9,xmm5 |
| xor esi,ecx |
| pslldq xmm8,12 |
| paddd xmm5,xmm5 |
| mov edi,ebp |
| add edx,DWORD[24+rsp] |
| psrld xmm9,31 |
| xor eax,ebx |
| rol ebp,5 |
| add edx,esi |
| movdqa xmm10,xmm8 |
| and edi,eax |
| xor eax,ebx |
| psrld xmm8,30 |
| add edx,ebp |
| ror ebp,7 |
| por xmm5,xmm9 |
| xor edi,ebx |
| mov esi,edx |
| add ecx,DWORD[28+rsp] |
| pslld xmm10,2 |
| pxor xmm5,xmm8 |
| xor ebp,eax |
| movdqa xmm8,XMMWORD[((-32))+r14] |
| rol edx,5 |
| add ecx,edi |
| and esi,ebp |
| pxor xmm5,xmm10 |
| xor ebp,eax |
| add ecx,edx |
| ror edx,7 |
| pshufd xmm6,xmm2,238 |
| xor esi,eax |
| movdqa xmm10,xmm5 |
| paddd xmm8,xmm5 |
| mov edi,ecx |
| add ebx,DWORD[32+rsp] |
| punpcklqdq xmm6,xmm3 |
| xor edx,ebp |
| rol ecx,5 |
| add ebx,esi |
| psrldq xmm10,4 |
| and edi,edx |
| xor edx,ebp |
| pxor xmm6,xmm2 |
| add ebx,ecx |
| ror ecx,7 |
| pxor xmm10,xmm4 |
| xor edi,ebp |
| mov esi,ebx |
| add eax,DWORD[36+rsp] |
| pxor xmm6,xmm10 |
| xor ecx,edx |
| rol ebx,5 |
| movdqa XMMWORD[16+rsp],xmm8 |
| add eax,edi |
| and esi,ecx |
| movdqa xmm9,xmm6 |
| xor ecx,edx |
| add eax,ebx |
| ror ebx,7 |
| movdqa xmm10,xmm6 |
| xor esi,edx |
| pslldq xmm9,12 |
| paddd xmm6,xmm6 |
| mov edi,eax |
| add ebp,DWORD[40+rsp] |
| psrld xmm10,31 |
| xor ebx,ecx |
| rol eax,5 |
| add ebp,esi |
| movdqa xmm8,xmm9 |
| and edi,ebx |
| xor ebx,ecx |
| psrld xmm9,30 |
| add ebp,eax |
| ror eax,7 |
| por xmm6,xmm10 |
| xor edi,ecx |
| mov esi,ebp |
| add edx,DWORD[44+rsp] |
| pslld xmm8,2 |
| pxor xmm6,xmm9 |
| xor eax,ebx |
| movdqa xmm9,XMMWORD[((-32))+r14] |
| rol ebp,5 |
| add edx,edi |
| and esi,eax |
| pxor xmm6,xmm8 |
| xor eax,ebx |
| add edx,ebp |
| ror ebp,7 |
| pshufd xmm7,xmm3,238 |
| xor esi,ebx |
| movdqa xmm8,xmm6 |
| paddd xmm9,xmm6 |
| mov edi,edx |
| add ecx,DWORD[48+rsp] |
| punpcklqdq xmm7,xmm4 |
| xor ebp,eax |
| rol edx,5 |
| add ecx,esi |
| psrldq xmm8,4 |
| and edi,ebp |
| xor ebp,eax |
| pxor xmm7,xmm3 |
| add ecx,edx |
| ror edx,7 |
| pxor xmm8,xmm5 |
| xor edi,eax |
| mov esi,ecx |
| add ebx,DWORD[52+rsp] |
| pxor xmm7,xmm8 |
| xor edx,ebp |
| rol ecx,5 |
| movdqa XMMWORD[32+rsp],xmm9 |
| add ebx,edi |
| and esi,edx |
| movdqa xmm10,xmm7 |
| xor edx,ebp |
| add ebx,ecx |
| ror ecx,7 |
| movdqa xmm8,xmm7 |
| xor esi,ebp |
| pslldq xmm10,12 |
| paddd xmm7,xmm7 |
| mov edi,ebx |
| add eax,DWORD[56+rsp] |
| psrld xmm8,31 |
| xor ecx,edx |
| rol ebx,5 |
| add eax,esi |
| movdqa xmm9,xmm10 |
| and edi,ecx |
| xor ecx,edx |
| psrld xmm10,30 |
| add eax,ebx |
| ror ebx,7 |
| por xmm7,xmm8 |
| xor edi,edx |
| mov esi,eax |
| add ebp,DWORD[60+rsp] |
| pslld xmm9,2 |
| pxor xmm7,xmm10 |
| xor ebx,ecx |
| movdqa xmm10,XMMWORD[((-32))+r14] |
| rol eax,5 |
| add ebp,edi |
| and esi,ebx |
| pxor xmm7,xmm9 |
| pshufd xmm9,xmm6,238 |
| xor ebx,ecx |
| add ebp,eax |
| ror eax,7 |
| pxor xmm0,xmm4 |
| xor esi,ecx |
| mov edi,ebp |
| add edx,DWORD[rsp] |
| punpcklqdq xmm9,xmm7 |
| xor eax,ebx |
| rol ebp,5 |
| pxor xmm0,xmm1 |
| add edx,esi |
| and edi,eax |
| movdqa xmm8,xmm10 |
| xor eax,ebx |
| paddd xmm10,xmm7 |
| add edx,ebp |
| pxor xmm0,xmm9 |
| ror ebp,7 |
| xor edi,ebx |
| mov esi,edx |
| add ecx,DWORD[4+rsp] |
| movdqa xmm9,xmm0 |
| xor ebp,eax |
| rol edx,5 |
| movdqa XMMWORD[48+rsp],xmm10 |
| add ecx,edi |
| and esi,ebp |
| xor ebp,eax |
| pslld xmm0,2 |
| add ecx,edx |
| ror edx,7 |
| psrld xmm9,30 |
| xor esi,eax |
| mov edi,ecx |
| add ebx,DWORD[8+rsp] |
| por xmm0,xmm9 |
| xor edx,ebp |
| rol ecx,5 |
| pshufd xmm10,xmm7,238 |
| add ebx,esi |
| and edi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[12+rsp] |
| xor edi,ebp |
| mov esi,ebx |
| rol ebx,5 |
| add eax,edi |
| xor esi,edx |
| ror ecx,7 |
| add eax,ebx |
| pxor xmm1,xmm5 |
| add ebp,DWORD[16+rsp] |
| xor esi,ecx |
| punpcklqdq xmm10,xmm0 |
| mov edi,eax |
| rol eax,5 |
| pxor xmm1,xmm2 |
| add ebp,esi |
| xor edi,ecx |
| movdqa xmm9,xmm8 |
| ror ebx,7 |
| paddd xmm8,xmm0 |
| add ebp,eax |
| pxor xmm1,xmm10 |
| add edx,DWORD[20+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| rol ebp,5 |
| movdqa xmm10,xmm1 |
| add edx,edi |
| xor esi,ebx |
| movdqa XMMWORD[rsp],xmm8 |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[24+rsp] |
| pslld xmm1,2 |
| xor esi,eax |
| mov edi,edx |
| psrld xmm10,30 |
| rol edx,5 |
| add ecx,esi |
| xor edi,eax |
| ror ebp,7 |
| por xmm1,xmm10 |
| add ecx,edx |
| add ebx,DWORD[28+rsp] |
| pshufd xmm8,xmm0,238 |
| xor edi,ebp |
| mov esi,ecx |
| rol ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| ror edx,7 |
| add ebx,ecx |
| pxor xmm2,xmm6 |
| add eax,DWORD[32+rsp] |
| xor esi,edx |
| punpcklqdq xmm8,xmm1 |
| mov edi,ebx |
| rol ebx,5 |
| pxor xmm2,xmm3 |
| add eax,esi |
| xor edi,edx |
| movdqa xmm10,XMMWORD[r14] |
| ror ecx,7 |
| paddd xmm9,xmm1 |
| add eax,ebx |
| pxor xmm2,xmm8 |
| add ebp,DWORD[36+rsp] |
| xor edi,ecx |
| mov esi,eax |
| rol eax,5 |
| movdqa xmm8,xmm2 |
| add ebp,edi |
| xor esi,ecx |
| movdqa XMMWORD[16+rsp],xmm9 |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[40+rsp] |
| pslld xmm2,2 |
| xor esi,ebx |
| mov edi,ebp |
| psrld xmm8,30 |
| rol ebp,5 |
| add edx,esi |
| xor edi,ebx |
| ror eax,7 |
| por xmm2,xmm8 |
| add edx,ebp |
| add ecx,DWORD[44+rsp] |
| pshufd xmm9,xmm1,238 |
| xor edi,eax |
| mov esi,edx |
| rol edx,5 |
| add ecx,edi |
| xor esi,eax |
| ror ebp,7 |
| add ecx,edx |
| pxor xmm3,xmm7 |
| add ebx,DWORD[48+rsp] |
| xor esi,ebp |
| punpcklqdq xmm9,xmm2 |
| mov edi,ecx |
| rol ecx,5 |
| pxor xmm3,xmm4 |
| add ebx,esi |
| xor edi,ebp |
| movdqa xmm8,xmm10 |
| ror edx,7 |
| paddd xmm10,xmm2 |
| add ebx,ecx |
| pxor xmm3,xmm9 |
| add eax,DWORD[52+rsp] |
| xor edi,edx |
| mov esi,ebx |
| rol ebx,5 |
| movdqa xmm9,xmm3 |
| add eax,edi |
| xor esi,edx |
| movdqa XMMWORD[32+rsp],xmm10 |
| ror ecx,7 |
| add eax,ebx |
| add ebp,DWORD[56+rsp] |
| pslld xmm3,2 |
| xor esi,ecx |
| mov edi,eax |
| psrld xmm9,30 |
| rol eax,5 |
| add ebp,esi |
| xor edi,ecx |
| ror ebx,7 |
| por xmm3,xmm9 |
| add ebp,eax |
| add edx,DWORD[60+rsp] |
| pshufd xmm10,xmm2,238 |
| xor edi,ebx |
| mov esi,ebp |
| rol ebp,5 |
| add edx,edi |
| xor esi,ebx |
| ror eax,7 |
| add edx,ebp |
| pxor xmm4,xmm0 |
| add ecx,DWORD[rsp] |
| xor esi,eax |
| punpcklqdq xmm10,xmm3 |
| mov edi,edx |
| rol edx,5 |
| pxor xmm4,xmm5 |
| add ecx,esi |
| xor edi,eax |
| movdqa xmm9,xmm8 |
| ror ebp,7 |
| paddd xmm8,xmm3 |
| add ecx,edx |
| pxor xmm4,xmm10 |
| add ebx,DWORD[4+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| rol ecx,5 |
| movdqa xmm10,xmm4 |
| add ebx,edi |
| xor esi,ebp |
| movdqa XMMWORD[48+rsp],xmm8 |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[8+rsp] |
| pslld xmm4,2 |
| xor esi,edx |
| mov edi,ebx |
| psrld xmm10,30 |
| rol ebx,5 |
| add eax,esi |
| xor edi,edx |
| ror ecx,7 |
| por xmm4,xmm10 |
| add eax,ebx |
| add ebp,DWORD[12+rsp] |
| pshufd xmm8,xmm3,238 |
| xor edi,ecx |
| mov esi,eax |
| rol eax,5 |
| add ebp,edi |
| xor esi,ecx |
| ror ebx,7 |
| add ebp,eax |
| pxor xmm5,xmm1 |
| add edx,DWORD[16+rsp] |
| xor esi,ebx |
| punpcklqdq xmm8,xmm4 |
| mov edi,ebp |
| rol ebp,5 |
| pxor xmm5,xmm6 |
| add edx,esi |
| xor edi,ebx |
| movdqa xmm10,xmm9 |
| ror eax,7 |
| paddd xmm9,xmm4 |
| add edx,ebp |
| pxor xmm5,xmm8 |
| add ecx,DWORD[20+rsp] |
| xor edi,eax |
| mov esi,edx |
| rol edx,5 |
| movdqa xmm8,xmm5 |
| add ecx,edi |
| xor esi,eax |
| movdqa XMMWORD[rsp],xmm9 |
| ror ebp,7 |
| add ecx,edx |
| add ebx,DWORD[24+rsp] |
| pslld xmm5,2 |
| xor esi,ebp |
| mov edi,ecx |
| psrld xmm8,30 |
| rol ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| ror edx,7 |
| por xmm5,xmm8 |
| add ebx,ecx |
| add eax,DWORD[28+rsp] |
| pshufd xmm9,xmm4,238 |
| ror ecx,7 |
| mov esi,ebx |
| xor edi,edx |
| rol ebx,5 |
| add eax,edi |
| xor esi,ecx |
| xor ecx,edx |
| add eax,ebx |
| pxor xmm6,xmm2 |
| add ebp,DWORD[32+rsp] |
| and esi,ecx |
| xor ecx,edx |
| ror ebx,7 |
| punpcklqdq xmm9,xmm5 |
| mov edi,eax |
| xor esi,ecx |
| pxor xmm6,xmm7 |
| rol eax,5 |
| add ebp,esi |
| movdqa xmm8,xmm10 |
| xor edi,ebx |
| paddd xmm10,xmm5 |
| xor ebx,ecx |
| pxor xmm6,xmm9 |
| add ebp,eax |
| add edx,DWORD[36+rsp] |
| and edi,ebx |
| xor ebx,ecx |
| ror eax,7 |
| movdqa xmm9,xmm6 |
| mov esi,ebp |
| xor edi,ebx |
| movdqa XMMWORD[16+rsp],xmm10 |
| rol ebp,5 |
| add edx,edi |
| xor esi,eax |
| pslld xmm6,2 |
| xor eax,ebx |
| add edx,ebp |
| psrld xmm9,30 |
| add ecx,DWORD[40+rsp] |
| and esi,eax |
| xor eax,ebx |
| por xmm6,xmm9 |
| ror ebp,7 |
| mov edi,edx |
| xor esi,eax |
| rol edx,5 |
| pshufd xmm10,xmm5,238 |
| add ecx,esi |
| xor edi,ebp |
| xor ebp,eax |
| add ecx,edx |
| add ebx,DWORD[44+rsp] |
| and edi,ebp |
| xor ebp,eax |
| ror edx,7 |
| mov esi,ecx |
| xor edi,ebp |
| rol ecx,5 |
| add ebx,edi |
| xor esi,edx |
| xor edx,ebp |
| add ebx,ecx |
| pxor xmm7,xmm3 |
| add eax,DWORD[48+rsp] |
| and esi,edx |
| xor edx,ebp |
| ror ecx,7 |
| punpcklqdq xmm10,xmm6 |
| mov edi,ebx |
| xor esi,edx |
| pxor xmm7,xmm0 |
| rol ebx,5 |
| add eax,esi |
| movdqa xmm9,XMMWORD[32+r14] |
| xor edi,ecx |
| paddd xmm8,xmm6 |
| xor ecx,edx |
| pxor xmm7,xmm10 |
| add eax,ebx |
| add ebp,DWORD[52+rsp] |
| and edi,ecx |
| xor ecx,edx |
| ror ebx,7 |
| movdqa xmm10,xmm7 |
| mov esi,eax |
| xor edi,ecx |
| movdqa XMMWORD[32+rsp],xmm8 |
| rol eax,5 |
| add ebp,edi |
| xor esi,ebx |
| pslld xmm7,2 |
| xor ebx,ecx |
| add ebp,eax |
| psrld xmm10,30 |
| add edx,DWORD[56+rsp] |
| and esi,ebx |
| xor ebx,ecx |
| por xmm7,xmm10 |
| ror eax,7 |
| mov edi,ebp |
| xor esi,ebx |
| rol ebp,5 |
| pshufd xmm8,xmm6,238 |
| add edx,esi |
| xor edi,eax |
| xor eax,ebx |
| add edx,ebp |
| add ecx,DWORD[60+rsp] |
| and edi,eax |
| xor eax,ebx |
| ror ebp,7 |
| mov esi,edx |
| xor edi,eax |
| rol edx,5 |
| add ecx,edi |
| xor esi,ebp |
| xor ebp,eax |
| add ecx,edx |
| pxor xmm0,xmm4 |
| add ebx,DWORD[rsp] |
| and esi,ebp |
| xor ebp,eax |
| ror edx,7 |
| punpcklqdq xmm8,xmm7 |
| mov edi,ecx |
| xor esi,ebp |
| pxor xmm0,xmm1 |
| rol ecx,5 |
| add ebx,esi |
| movdqa xmm10,xmm9 |
| xor edi,edx |
| paddd xmm9,xmm7 |
| xor edx,ebp |
| pxor xmm0,xmm8 |
| add ebx,ecx |
| add eax,DWORD[4+rsp] |
| and edi,edx |
| xor edx,ebp |
| ror ecx,7 |
| movdqa xmm8,xmm0 |
| mov esi,ebx |
| xor edi,edx |
| movdqa XMMWORD[48+rsp],xmm9 |
| rol ebx,5 |
| add eax,edi |
| xor esi,ecx |
| pslld xmm0,2 |
| xor ecx,edx |
| add eax,ebx |
| psrld xmm8,30 |
| add ebp,DWORD[8+rsp] |
| and esi,ecx |
| xor ecx,edx |
| por xmm0,xmm8 |
| ror ebx,7 |
| mov edi,eax |
| xor esi,ecx |
| rol eax,5 |
| pshufd xmm9,xmm7,238 |
| add ebp,esi |
| xor edi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| add edx,DWORD[12+rsp] |
| and edi,ebx |
| xor ebx,ecx |
| ror eax,7 |
| mov esi,ebp |
| xor edi,ebx |
| rol ebp,5 |
| add edx,edi |
| xor esi,eax |
| xor eax,ebx |
| add edx,ebp |
| pxor xmm1,xmm5 |
| add ecx,DWORD[16+rsp] |
| and esi,eax |
| xor eax,ebx |
| ror ebp,7 |
| punpcklqdq xmm9,xmm0 |
| mov edi,edx |
| xor esi,eax |
| pxor xmm1,xmm2 |
| rol edx,5 |
| add ecx,esi |
| movdqa xmm8,xmm10 |
| xor edi,ebp |
| paddd xmm10,xmm0 |
| xor ebp,eax |
| pxor xmm1,xmm9 |
| add ecx,edx |
| add ebx,DWORD[20+rsp] |
| and edi,ebp |
| xor ebp,eax |
| ror edx,7 |
| movdqa xmm9,xmm1 |
| mov esi,ecx |
| xor edi,ebp |
| movdqa XMMWORD[rsp],xmm10 |
| rol ecx,5 |
| add ebx,edi |
| xor esi,edx |
| pslld xmm1,2 |
| xor edx,ebp |
| add ebx,ecx |
| psrld xmm9,30 |
| add eax,DWORD[24+rsp] |
| and esi,edx |
| xor edx,ebp |
| por xmm1,xmm9 |
| ror ecx,7 |
| mov edi,ebx |
| xor esi,edx |
| rol ebx,5 |
| pshufd xmm10,xmm0,238 |
| add eax,esi |
| xor edi,ecx |
| xor ecx,edx |
| add eax,ebx |
| add ebp,DWORD[28+rsp] |
| and edi,ecx |
| xor ecx,edx |
| ror ebx,7 |
| mov esi,eax |
| xor edi,ecx |
| rol eax,5 |
| add ebp,edi |
| xor esi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| pxor xmm2,xmm6 |
| add edx,DWORD[32+rsp] |
| and esi,ebx |
| xor ebx,ecx |
| ror eax,7 |
| punpcklqdq xmm10,xmm1 |
| mov edi,ebp |
| xor esi,ebx |
| pxor xmm2,xmm3 |
| rol ebp,5 |
| add edx,esi |
| movdqa xmm9,xmm8 |
| xor edi,eax |
| paddd xmm8,xmm1 |
| xor eax,ebx |
| pxor xmm2,xmm10 |
| add edx,ebp |
| add ecx,DWORD[36+rsp] |
| and edi,eax |
| xor eax,ebx |
| ror ebp,7 |
| movdqa xmm10,xmm2 |
| mov esi,edx |
| xor edi,eax |
| movdqa XMMWORD[16+rsp],xmm8 |
| rol edx,5 |
| add ecx,edi |
| xor esi,ebp |
| pslld xmm2,2 |
| xor ebp,eax |
| add ecx,edx |
| psrld xmm10,30 |
| add ebx,DWORD[40+rsp] |
| and esi,ebp |
| xor ebp,eax |
| por xmm2,xmm10 |
| ror edx,7 |
| mov edi,ecx |
| xor esi,ebp |
| rol ecx,5 |
| pshufd xmm8,xmm1,238 |
| add ebx,esi |
| xor edi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[44+rsp] |
| and edi,edx |
| xor edx,ebp |
| ror ecx,7 |
| mov esi,ebx |
| xor edi,edx |
| rol ebx,5 |
| add eax,edi |
| xor esi,edx |
| add eax,ebx |
| pxor xmm3,xmm7 |
| add ebp,DWORD[48+rsp] |
| xor esi,ecx |
| punpcklqdq xmm8,xmm2 |
| mov edi,eax |
| rol eax,5 |
| pxor xmm3,xmm4 |
| add ebp,esi |
| xor edi,ecx |
| movdqa xmm10,xmm9 |
| ror ebx,7 |
| paddd xmm9,xmm2 |
| add ebp,eax |
| pxor xmm3,xmm8 |
| add edx,DWORD[52+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| rol ebp,5 |
| movdqa xmm8,xmm3 |
| add edx,edi |
| xor esi,ebx |
| movdqa XMMWORD[32+rsp],xmm9 |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[56+rsp] |
| pslld xmm3,2 |
| xor esi,eax |
| mov edi,edx |
| psrld xmm8,30 |
| rol edx,5 |
| add ecx,esi |
| xor edi,eax |
| ror ebp,7 |
| por xmm3,xmm8 |
| add ecx,edx |
| add ebx,DWORD[60+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| rol ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[rsp] |
| xor esi,edx |
| mov edi,ebx |
| rol ebx,5 |
| paddd xmm10,xmm3 |
| add eax,esi |
| xor edi,edx |
| movdqa XMMWORD[48+rsp],xmm10 |
| ror ecx,7 |
| add eax,ebx |
| add ebp,DWORD[4+rsp] |
| xor edi,ecx |
| mov esi,eax |
| rol eax,5 |
| add ebp,edi |
| xor esi,ecx |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[8+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| rol ebp,5 |
| add edx,esi |
| xor edi,ebx |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[12+rsp] |
| xor edi,eax |
| mov esi,edx |
| rol edx,5 |
| add ecx,edi |
| xor esi,eax |
| ror ebp,7 |
| add ecx,edx |
| cmp r9,r10 |
| je NEAR $L$done_ssse3 |
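| ; More input remains: byte-swap the next block and pre-add K while the last |
| ; 16 rounds of the current block complete. |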
| movdqa xmm6,XMMWORD[64+r14] |
| movdqa xmm9,XMMWORD[((-64))+r14] |
| movdqu xmm0,XMMWORD[r9] |
| movdqu xmm1,XMMWORD[16+r9] |
| movdqu xmm2,XMMWORD[32+r9] |
| movdqu xmm3,XMMWORD[48+r9] |
| DB 102,15,56,0,198 ; pshufb xmm0,xmm6 |
| add r9,64 |
| add ebx,DWORD[16+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| DB 102,15,56,0,206 ; pshufb xmm1,xmm6 |
| rol ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| ror edx,7 |
| paddd xmm0,xmm9 |
| add ebx,ecx |
| add eax,DWORD[20+rsp] |
| xor edi,edx |
| mov esi,ebx |
| movdqa XMMWORD[rsp],xmm0 |
| rol ebx,5 |
| add eax,edi |
| xor esi,edx |
| ror ecx,7 |
| psubd xmm0,xmm9 |
| add eax,ebx |
| add ebp,DWORD[24+rsp] |
| xor esi,ecx |
| mov edi,eax |
| rol eax,5 |
| add ebp,esi |
| xor edi,ecx |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[28+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| rol ebp,5 |
| add edx,edi |
| xor esi,ebx |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[32+rsp] |
| xor esi,eax |
| mov edi,edx |
| DB 102,15,56,0,214 ; pshufb xmm2,xmm6 |
| rol edx,5 |
| add ecx,esi |
| xor edi,eax |
| ror ebp,7 |
| paddd xmm1,xmm9 |
| add ecx,edx |
| add ebx,DWORD[36+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| movdqa XMMWORD[16+rsp],xmm1 |
| rol ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| ror edx,7 |
| psubd xmm1,xmm9 |
| add ebx,ecx |
| add eax,DWORD[40+rsp] |
| xor esi,edx |
| mov edi,ebx |
| rol ebx,5 |
| add eax,esi |
| xor edi,edx |
| ror ecx,7 |
| add eax,ebx |
| add ebp,DWORD[44+rsp] |
| xor edi,ecx |
| mov esi,eax |
| rol eax,5 |
| add ebp,edi |
| xor esi,ecx |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[48+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| DB 102,15,56,0,222 ; pshufb xmm3,xmm6 |
| rol ebp,5 |
| add edx,esi |
| xor edi,ebx |
| ror eax,7 |
| paddd xmm2,xmm9 |
| add edx,ebp |
| add ecx,DWORD[52+rsp] |
| xor edi,eax |
| mov esi,edx |
| movdqa XMMWORD[32+rsp],xmm2 |
| rol edx,5 |
| add ecx,edi |
| xor esi,eax |
| ror ebp,7 |
| psubd xmm2,xmm9 |
| add ecx,edx |
| add ebx,DWORD[56+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| rol ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[60+rsp] |
| xor edi,edx |
| mov esi,ebx |
| rol ebx,5 |
| add eax,edi |
| ror ecx,7 |
| add eax,ebx |
| add eax,DWORD[r8] |
| add esi,DWORD[4+r8] |
| add ecx,DWORD[8+r8] |
| add edx,DWORD[12+r8] |
| mov DWORD[r8],eax |
| add ebp,DWORD[16+r8] |
| mov DWORD[4+r8],esi |
| mov ebx,esi |
| mov DWORD[8+r8],ecx |
| mov edi,ecx |
| mov DWORD[12+r8],edx |
| xor edi,edx |
| mov DWORD[16+r8],ebp |
| and esi,edi |
| jmp NEAR $L$oop_ssse3 |
| |
| ALIGN 16 |
| $L$done_ssse3: |
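| ; Final block: finish the last 16 rounds without scheduling further data, |
| ; add the working variables into the state, then restore xmm6..xmm11 and |
| ; the callee-saved GPRs. |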
| add ebx,DWORD[16+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| rol ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[20+rsp] |
| xor edi,edx |
| mov esi,ebx |
| rol ebx,5 |
| add eax,edi |
| xor esi,edx |
| ror ecx,7 |
| add eax,ebx |
| add ebp,DWORD[24+rsp] |
| xor esi,ecx |
| mov edi,eax |
| rol eax,5 |
| add ebp,esi |
| xor edi,ecx |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[28+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| rol ebp,5 |
| add edx,edi |
| xor esi,ebx |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[32+rsp] |
| xor esi,eax |
| mov edi,edx |
| rol edx,5 |
| add ecx,esi |
| xor edi,eax |
| ror ebp,7 |
| add ecx,edx |
| add ebx,DWORD[36+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| rol ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[40+rsp] |
| xor esi,edx |
| mov edi,ebx |
| rol ebx,5 |
| add eax,esi |
| xor edi,edx |
| ror ecx,7 |
| add eax,ebx |
| add ebp,DWORD[44+rsp] |
| xor edi,ecx |
| mov esi,eax |
| rol eax,5 |
| add ebp,edi |
| xor esi,ecx |
| ror ebx,7 |
| add ebp,eax |
| add edx,DWORD[48+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| rol ebp,5 |
| add edx,esi |
| xor edi,ebx |
| ror eax,7 |
| add edx,ebp |
| add ecx,DWORD[52+rsp] |
| xor edi,eax |
| mov esi,edx |
| rol edx,5 |
| add ecx,edi |
| xor esi,eax |
| ror ebp,7 |
| add ecx,edx |
| add ebx,DWORD[56+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| rol ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| ror edx,7 |
| add ebx,ecx |
| add eax,DWORD[60+rsp] |
| xor edi,edx |
| mov esi,ebx |
| rol ebx,5 |
| add eax,edi |
| ror ecx,7 |
| add eax,ebx |
| add eax,DWORD[r8] |
| add esi,DWORD[4+r8] |
| add ecx,DWORD[8+r8] |
| mov DWORD[r8],eax |
| add edx,DWORD[12+r8] |
| mov DWORD[4+r8],esi |
| add ebp,DWORD[16+r8] |
| mov DWORD[8+r8],ecx |
| mov DWORD[12+r8],edx |
| mov DWORD[16+r8],ebp |
| movaps xmm6,XMMWORD[((-40-96))+r11] |
| movaps xmm7,XMMWORD[((-40-80))+r11] |
| movaps xmm8,XMMWORD[((-40-64))+r11] |
| movaps xmm9,XMMWORD[((-40-48))+r11] |
| movaps xmm10,XMMWORD[((-40-32))+r11] |
| movaps xmm11,XMMWORD[((-40-16))+r11] |
| mov r14,QWORD[((-40))+r11] |
| |
| mov r13,QWORD[((-32))+r11] |
| |
| mov r12,QWORD[((-24))+r11] |
| |
| mov rbp,QWORD[((-16))+r11] |
| |
| mov rbx,QWORD[((-8))+r11] |
| |
| lea rsp,[r11] |
| |
| $L$epilogue_ssse3: |
| mov rdi,QWORD[8+rsp] ;WIN64 epilogue |
| mov rsi,QWORD[16+rsp] |
| DB 0F3h,0C3h ;repret |
| |
| $L$SEH_end_sha1_block_data_order_ssse3: |
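| ; AVX path: same round/schedule structure as the SSSE3 path, expressed with |
| ; three-operand VEX instructions (vpshufb, vpalignr, vpxor, ...); vzeroupper |
| ; is issued before the xmm spills to avoid AVX/SSE transition penalties. |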
| |
| ALIGN 16 |
| sha1_block_data_order_avx: |
| mov QWORD[8+rsp],rdi ;WIN64 prologue |
| mov QWORD[16+rsp],rsi |
| mov rax,rsp |
| $L$SEH_begin_sha1_block_data_order_avx: |
| mov rdi,rcx |
| mov rsi,rdx |
| mov rdx,r8 |
| |
| |
| _avx_shortcut: |
| |
| mov r11,rsp |
| |
| push rbx |
| |
| push rbp |
| |
| push r12 |
| |
| push r13 |
| |
| push r14 |
| |
| lea rsp,[((-160))+rsp] |
| vzeroupper |
| vmovaps XMMWORD[(-40-96)+r11],xmm6 |
| vmovaps XMMWORD[(-40-80)+r11],xmm7 |
| vmovaps XMMWORD[(-40-64)+r11],xmm8 |
| vmovaps XMMWORD[(-40-48)+r11],xmm9 |
| vmovaps XMMWORD[(-40-32)+r11],xmm10 |
| vmovaps XMMWORD[(-40-16)+r11],xmm11 |
| $L$prologue_avx: |
| and rsp,-64 |
| mov r8,rdi |
| mov r9,rsi |
| mov r10,rdx |
| |
| shl r10,6 |
| add r10,r9 |
| lea r14,[((K_XX_XX+64))] |
| |
| mov eax,DWORD[r8] |
| mov ebx,DWORD[4+r8] |
| mov ecx,DWORD[8+r8] |
| mov edx,DWORD[12+r8] |
| mov esi,ebx |
| mov ebp,DWORD[16+r8] |
| mov edi,ecx |
| xor edi,edx |
| and esi,edi |
| |
| vmovdqa xmm6,XMMWORD[64+r14] |
| vmovdqa xmm11,XMMWORD[((-64))+r14] |
| vmovdqu xmm0,XMMWORD[r9] |
| vmovdqu xmm1,XMMWORD[16+r9] |
| vmovdqu xmm2,XMMWORD[32+r9] |
| vmovdqu xmm3,XMMWORD[48+r9] |
| vpshufb xmm0,xmm0,xmm6 |
| add r9,64 |
| vpshufb xmm1,xmm1,xmm6 |
| vpshufb xmm2,xmm2,xmm6 |
| vpshufb xmm3,xmm3,xmm6 |
| vpaddd xmm4,xmm0,xmm11 |
| vpaddd xmm5,xmm1,xmm11 |
| vpaddd xmm6,xmm2,xmm11 |
| vmovdqa XMMWORD[rsp],xmm4 |
| vmovdqa XMMWORD[16+rsp],xmm5 |
| vmovdqa XMMWORD[32+rsp],xmm6 |
| jmp NEAR $L$oop_avx |
| ALIGN 16 |
| $L$oop_avx: |
| shrd ebx,ebx,2 |
| xor esi,edx |
| vpalignr xmm4,xmm1,xmm0,8 |
| mov edi,eax |
| add ebp,DWORD[rsp] |
| vpaddd xmm9,xmm11,xmm3 |
| xor ebx,ecx |
| shld eax,eax,5 |
| vpsrldq xmm8,xmm3,4 |
| add ebp,esi |
| and edi,ebx |
| vpxor xmm4,xmm4,xmm0 |
| xor ebx,ecx |
| add ebp,eax |
| vpxor xmm8,xmm8,xmm2 |
| shrd eax,eax,7 |
| xor edi,ecx |
| mov esi,ebp |
| add edx,DWORD[4+rsp] |
| vpxor xmm4,xmm4,xmm8 |
| xor eax,ebx |
| shld ebp,ebp,5 |
| vmovdqa XMMWORD[48+rsp],xmm9 |
| add edx,edi |
| and esi,eax |
| vpsrld xmm8,xmm4,31 |
| xor eax,ebx |
| add edx,ebp |
| shrd ebp,ebp,7 |
| xor esi,ebx |
| vpslldq xmm10,xmm4,12 |
| vpaddd xmm4,xmm4,xmm4 |
| mov edi,edx |
| add ecx,DWORD[8+rsp] |
| xor ebp,eax |
| shld edx,edx,5 |
| vpsrld xmm9,xmm10,30 |
| vpor xmm4,xmm4,xmm8 |
| add ecx,esi |
| and edi,ebp |
| xor ebp,eax |
| add ecx,edx |
| vpslld xmm10,xmm10,2 |
| vpxor xmm4,xmm4,xmm9 |
| shrd edx,edx,7 |
| xor edi,eax |
| mov esi,ecx |
| add ebx,DWORD[12+rsp] |
| vpxor xmm4,xmm4,xmm10 |
| xor edx,ebp |
| shld ecx,ecx,5 |
| add ebx,edi |
| and esi,edx |
| xor edx,ebp |
| add ebx,ecx |
| shrd ecx,ecx,7 |
| xor esi,ebp |
| vpalignr xmm5,xmm2,xmm1,8 |
| mov edi,ebx |
| add eax,DWORD[16+rsp] |
| vpaddd xmm9,xmm11,xmm4 |
| xor ecx,edx |
| shld ebx,ebx,5 |
| vpsrldq xmm8,xmm4,4 |
| add eax,esi |
| and edi,ecx |
| vpxor xmm5,xmm5,xmm1 |
| xor ecx,edx |
| add eax,ebx |
| vpxor xmm8,xmm8,xmm3 |
| shrd ebx,ebx,7 |
| xor edi,edx |
| mov esi,eax |
| add ebp,DWORD[20+rsp] |
| vpxor xmm5,xmm5,xmm8 |
| xor ebx,ecx |
| shld eax,eax,5 |
| vmovdqa XMMWORD[rsp],xmm9 |
| add ebp,edi |
| and esi,ebx |
| vpsrld xmm8,xmm5,31 |
| xor ebx,ecx |
| add ebp,eax |
| shrd eax,eax,7 |
| xor esi,ecx |
| vpslldq xmm10,xmm5,12 |
| vpaddd xmm5,xmm5,xmm5 |
| mov edi,ebp |
| add edx,DWORD[24+rsp] |
| xor eax,ebx |
| shld ebp,ebp,5 |
| vpsrld xmm9,xmm10,30 |
| vpor xmm5,xmm5,xmm8 |
| add edx,esi |
| and edi,eax |
| xor eax,ebx |
| add edx,ebp |
| vpslld xmm10,xmm10,2 |
| vpxor xmm5,xmm5,xmm9 |
| shrd ebp,ebp,7 |
| xor edi,ebx |
| mov esi,edx |
| add ecx,DWORD[28+rsp] |
| vpxor xmm5,xmm5,xmm10 |
| xor ebp,eax |
| shld edx,edx,5 |
| vmovdqa xmm11,XMMWORD[((-32))+r14] |
| add ecx,edi |
| and esi,ebp |
| xor ebp,eax |
| add ecx,edx |
| shrd edx,edx,7 |
| xor esi,eax |
| vpalignr xmm6,xmm3,xmm2,8 |
| mov edi,ecx |
| add ebx,DWORD[32+rsp] |
| vpaddd xmm9,xmm11,xmm5 |
| xor edx,ebp |
| shld ecx,ecx,5 |
| vpsrldq xmm8,xmm5,4 |
| add ebx,esi |
| and edi,edx |
| vpxor xmm6,xmm6,xmm2 |
| xor edx,ebp |
| add ebx,ecx |
| vpxor xmm8,xmm8,xmm4 |
| shrd ecx,ecx,7 |
| xor edi,ebp |
| mov esi,ebx |
| add eax,DWORD[36+rsp] |
| vpxor xmm6,xmm6,xmm8 |
| xor ecx,edx |
| shld ebx,ebx,5 |
| vmovdqa XMMWORD[16+rsp],xmm9 |
| add eax,edi |
| and esi,ecx |
| vpsrld xmm8,xmm6,31 |
| xor ecx,edx |
| add eax,ebx |
| shrd ebx,ebx,7 |
| xor esi,edx |
| vpslldq xmm10,xmm6,12 |
| vpaddd xmm6,xmm6,xmm6 |
| mov edi,eax |
| add ebp,DWORD[40+rsp] |
| xor ebx,ecx |
| shld eax,eax,5 |
| vpsrld xmm9,xmm10,30 |
| vpor xmm6,xmm6,xmm8 |
| add ebp,esi |
| and edi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| vpslld xmm10,xmm10,2 |
| vpxor xmm6,xmm6,xmm9 |
| shrd eax,eax,7 |
| xor edi,ecx |
| mov esi,ebp |
| add edx,DWORD[44+rsp] |
| vpxor xmm6,xmm6,xmm10 |
| xor eax,ebx |
| shld ebp,ebp,5 |
| add edx,edi |
| and esi,eax |
| xor eax,ebx |
| add edx,ebp |
| shrd ebp,ebp,7 |
| xor esi,ebx |
| vpalignr xmm7,xmm4,xmm3,8 |
| mov edi,edx |
| add ecx,DWORD[48+rsp] |
| vpaddd xmm9,xmm11,xmm6 |
| xor ebp,eax |
| shld edx,edx,5 |
| vpsrldq xmm8,xmm6,4 |
| add ecx,esi |
| and edi,ebp |
| vpxor xmm7,xmm7,xmm3 |
| xor ebp,eax |
| add ecx,edx |
| vpxor xmm8,xmm8,xmm5 |
| shrd edx,edx,7 |
| xor edi,eax |
| mov esi,ecx |
| add ebx,DWORD[52+rsp] |
| vpxor xmm7,xmm7,xmm8 |
| xor edx,ebp |
| shld ecx,ecx,5 |
| vmovdqa XMMWORD[32+rsp],xmm9 |
| add ebx,edi |
| and esi,edx |
| vpsrld xmm8,xmm7,31 |
| xor edx,ebp |
| add ebx,ecx |
| shrd ecx,ecx,7 |
| xor esi,ebp |
| vpslldq xmm10,xmm7,12 |
| vpaddd xmm7,xmm7,xmm7 |
| mov edi,ebx |
| add eax,DWORD[56+rsp] |
| xor ecx,edx |
| shld ebx,ebx,5 |
| vpsrld xmm9,xmm10,30 |
| vpor xmm7,xmm7,xmm8 |
| add eax,esi |
| and edi,ecx |
| xor ecx,edx |
| add eax,ebx |
| vpslld xmm10,xmm10,2 |
| vpxor xmm7,xmm7,xmm9 |
| shrd ebx,ebx,7 |
| xor edi,edx |
| mov esi,eax |
| add ebp,DWORD[60+rsp] |
| vpxor xmm7,xmm7,xmm10 |
| xor ebx,ecx |
| shld eax,eax,5 |
| add ebp,edi |
| and esi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| vpalignr xmm8,xmm7,xmm6,8 |
| vpxor xmm0,xmm0,xmm4 |
| shrd eax,eax,7 |
| xor esi,ecx |
| mov edi,ebp |
| add edx,DWORD[rsp] |
| vpxor xmm0,xmm0,xmm1 |
| xor eax,ebx |
| shld ebp,ebp,5 |
| vpaddd xmm9,xmm11,xmm7 |
| add edx,esi |
| and edi,eax |
| vpxor xmm0,xmm0,xmm8 |
| xor eax,ebx |
| add edx,ebp |
| shrd ebp,ebp,7 |
| xor edi,ebx |
| vpsrld xmm8,xmm0,30 |
| vmovdqa XMMWORD[48+rsp],xmm9 |
| mov esi,edx |
| add ecx,DWORD[4+rsp] |
| xor ebp,eax |
| shld edx,edx,5 |
| vpslld xmm0,xmm0,2 |
| add ecx,edi |
| and esi,ebp |
| xor ebp,eax |
| add ecx,edx |
| shrd edx,edx,7 |
| xor esi,eax |
| mov edi,ecx |
| add ebx,DWORD[8+rsp] |
| vpor xmm0,xmm0,xmm8 |
| xor edx,ebp |
| shld ecx,ecx,5 |
| add ebx,esi |
| and edi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[12+rsp] |
| xor edi,ebp |
| mov esi,ebx |
| shld ebx,ebx,5 |
| add eax,edi |
| xor esi,edx |
| shrd ecx,ecx,7 |
| add eax,ebx |
| vpalignr xmm8,xmm0,xmm7,8 |
| vpxor xmm1,xmm1,xmm5 |
| add ebp,DWORD[16+rsp] |
| xor esi,ecx |
| mov edi,eax |
| shld eax,eax,5 |
| vpxor xmm1,xmm1,xmm2 |
| add ebp,esi |
| xor edi,ecx |
| vpaddd xmm9,xmm11,xmm0 |
| shrd ebx,ebx,7 |
| add ebp,eax |
| vpxor xmm1,xmm1,xmm8 |
| add edx,DWORD[20+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| shld ebp,ebp,5 |
| vpsrld xmm8,xmm1,30 |
| vmovdqa XMMWORD[rsp],xmm9 |
| add edx,edi |
| xor esi,ebx |
| shrd eax,eax,7 |
| add edx,ebp |
| vpslld xmm1,xmm1,2 |
| add ecx,DWORD[24+rsp] |
| xor esi,eax |
| mov edi,edx |
| shld edx,edx,5 |
| add ecx,esi |
| xor edi,eax |
| shrd ebp,ebp,7 |
| add ecx,edx |
| vpor xmm1,xmm1,xmm8 |
| add ebx,DWORD[28+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| shld ecx,ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| shrd edx,edx,7 |
| add ebx,ecx |
| vpalignr xmm8,xmm1,xmm0,8 |
| vpxor xmm2,xmm2,xmm6 |
| add eax,DWORD[32+rsp] |
| xor esi,edx |
| mov edi,ebx |
| shld ebx,ebx,5 |
| vpxor xmm2,xmm2,xmm3 |
| add eax,esi |
| xor edi,edx |
| vpaddd xmm9,xmm11,xmm1 |
| vmovdqa xmm11,XMMWORD[r14] |
| shrd ecx,ecx,7 |
| add eax,ebx |
| vpxor xmm2,xmm2,xmm8 |
| add ebp,DWORD[36+rsp] |
| xor edi,ecx |
| mov esi,eax |
| shld eax,eax,5 |
| vpsrld xmm8,xmm2,30 |
| vmovdqa XMMWORD[16+rsp],xmm9 |
| add ebp,edi |
| xor esi,ecx |
| shrd ebx,ebx,7 |
| add ebp,eax |
| vpslld xmm2,xmm2,2 |
| add edx,DWORD[40+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| shld ebp,ebp,5 |
| add edx,esi |
| xor edi,ebx |
| shrd eax,eax,7 |
| add edx,ebp |
| vpor xmm2,xmm2,xmm8 |
| add ecx,DWORD[44+rsp] |
| xor edi,eax |
| mov esi,edx |
| shld edx,edx,5 |
| add ecx,edi |
| xor esi,eax |
| shrd ebp,ebp,7 |
| add ecx,edx |
| vpalignr xmm8,xmm2,xmm1,8 |
| vpxor xmm3,xmm3,xmm7 |
| add ebx,DWORD[48+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| shld ecx,ecx,5 |
| vpxor xmm3,xmm3,xmm4 |
| add ebx,esi |
| xor edi,ebp |
| vpaddd xmm9,xmm11,xmm2 |
| shrd edx,edx,7 |
| add ebx,ecx |
| vpxor xmm3,xmm3,xmm8 |
| add eax,DWORD[52+rsp] |
| xor edi,edx |
| mov esi,ebx |
| shld ebx,ebx,5 |
| vpsrld xmm8,xmm3,30 |
| vmovdqa XMMWORD[32+rsp],xmm9 |
| add eax,edi |
| xor esi,edx |
| shrd ecx,ecx,7 |
| add eax,ebx |
| vpslld xmm3,xmm3,2 |
| add ebp,DWORD[56+rsp] |
| xor esi,ecx |
| mov edi,eax |
| shld eax,eax,5 |
| add ebp,esi |
| xor edi,ecx |
| shrd ebx,ebx,7 |
| add ebp,eax |
| vpor xmm3,xmm3,xmm8 |
| add edx,DWORD[60+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| shld ebp,ebp,5 |
| add edx,edi |
| xor esi,ebx |
| shrd eax,eax,7 |
| add edx,ebp |
| vpalignr xmm8,xmm3,xmm2,8 |
| vpxor xmm4,xmm4,xmm0 |
| add ecx,DWORD[rsp] |
| xor esi,eax |
| mov edi,edx |
| shld edx,edx,5 |
| vpxor xmm4,xmm4,xmm5 |
| add ecx,esi |
| xor edi,eax |
| vpaddd xmm9,xmm11,xmm3 |
| shrd ebp,ebp,7 |
| add ecx,edx |
| vpxor xmm4,xmm4,xmm8 |
| add ebx,DWORD[4+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| shld ecx,ecx,5 |
| vpsrld xmm8,xmm4,30 |
| vmovdqa XMMWORD[48+rsp],xmm9 |
| add ebx,edi |
| xor esi,ebp |
| shrd edx,edx,7 |
| add ebx,ecx |
| vpslld xmm4,xmm4,2 |
| add eax,DWORD[8+rsp] |
| xor esi,edx |
| mov edi,ebx |
| shld ebx,ebx,5 |
| add eax,esi |
| xor edi,edx |
| shrd ecx,ecx,7 |
| add eax,ebx |
| vpor xmm4,xmm4,xmm8 |
| add ebp,DWORD[12+rsp] |
| xor edi,ecx |
| mov esi,eax |
| shld eax,eax,5 |
| add ebp,edi |
| xor esi,ecx |
| shrd ebx,ebx,7 |
| add ebp,eax |
| vpalignr xmm8,xmm4,xmm3,8 |
| vpxor xmm5,xmm5,xmm1 |
| add edx,DWORD[16+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| shld ebp,ebp,5 |
| vpxor xmm5,xmm5,xmm6 |
| add edx,esi |
| xor edi,ebx |
| vpaddd xmm9,xmm11,xmm4 |
| shrd eax,eax,7 |
| add edx,ebp |
| vpxor xmm5,xmm5,xmm8 |
| add ecx,DWORD[20+rsp] |
| xor edi,eax |
| mov esi,edx |
| shld edx,edx,5 |
| vpsrld xmm8,xmm5,30 |
| vmovdqa XMMWORD[rsp],xmm9 |
| add ecx,edi |
| xor esi,eax |
| shrd ebp,ebp,7 |
| add ecx,edx |
| vpslld xmm5,xmm5,2 |
| add ebx,DWORD[24+rsp] |
| xor esi,ebp |
| mov edi,ecx |
| shld ecx,ecx,5 |
| add ebx,esi |
| xor edi,ebp |
| shrd edx,edx,7 |
| add ebx,ecx |
| vpor xmm5,xmm5,xmm8 |
| add eax,DWORD[28+rsp] |
| shrd ecx,ecx,7 |
| mov esi,ebx |
| xor edi,edx |
| shld ebx,ebx,5 |
| add eax,edi |
| xor esi,ecx |
| xor ecx,edx |
| add eax,ebx |
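| ; The round body switches to the SHA-1 Maj function (rounds 40-59): the |
| ; paired and/xor sequences below compute Maj(b,c,d) for each round. |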
| vpalignr xmm8,xmm5,xmm4,8 |
| vpxor xmm6,xmm6,xmm2 |
| add ebp,DWORD[32+rsp] |
| and esi,ecx |
| xor ecx,edx |
| shrd ebx,ebx,7 |
| vpxor xmm6,xmm6,xmm7 |
| mov edi,eax |
| xor esi,ecx |
| vpaddd xmm9,xmm11,xmm5 |
| shld eax,eax,5 |
| add ebp,esi |
| vpxor xmm6,xmm6,xmm8 |
| xor edi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| add edx,DWORD[36+rsp] |
| vpsrld xmm8,xmm6,30 |
| vmovdqa XMMWORD[16+rsp],xmm9 |
| and edi,ebx |
| xor ebx,ecx |
| shrd eax,eax,7 |
| mov esi,ebp |
| vpslld xmm6,xmm6,2 |
| xor edi,ebx |
| shld ebp,ebp,5 |
| add edx,edi |
| xor esi,eax |
| xor eax,ebx |
| add edx,ebp |
| add ecx,DWORD[40+rsp] |
| and esi,eax |
| vpor xmm6,xmm6,xmm8 |
| xor eax,ebx |
| shrd ebp,ebp,7 |
| mov edi,edx |
| xor esi,eax |
| shld edx,edx,5 |
| add ecx,esi |
| xor edi,ebp |
| xor ebp,eax |
| add ecx,edx |
| add ebx,DWORD[44+rsp] |
| and edi,ebp |
| xor ebp,eax |
| shrd edx,edx,7 |
| mov esi,ecx |
| xor edi,ebp |
| shld ecx,ecx,5 |
| add ebx,edi |
| xor esi,edx |
| xor edx,ebp |
| add ebx,ecx |
| vpalignr xmm8,xmm6,xmm5,8 |
| vpxor xmm7,xmm7,xmm3 |
| add eax,DWORD[48+rsp] |
| and esi,edx |
| xor edx,ebp |
| shrd ecx,ecx,7 |
| vpxor xmm7,xmm7,xmm0 |
| mov edi,ebx |
| xor esi,edx |
| vpaddd xmm9,xmm11,xmm6 |
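| ; Step xmm11 forward to the next entry of the constant table at r14. |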
| vmovdqa xmm11,XMMWORD[32+r14] |
| shld ebx,ebx,5 |
| add eax,esi |
| vpxor xmm7,xmm7,xmm8 |
| xor edi,ecx |
| xor ecx,edx |
| add eax,ebx |
| add ebp,DWORD[52+rsp] |
| vpsrld xmm8,xmm7,30 |
| vmovdqa XMMWORD[32+rsp],xmm9 |
| and edi,ecx |
| xor ecx,edx |
| shrd ebx,ebx,7 |
| mov esi,eax |
| vpslld xmm7,xmm7,2 |
| xor edi,ecx |
| shld eax,eax,5 |
| add ebp,edi |
| xor esi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| add edx,DWORD[56+rsp] |
| and esi,ebx |
| vpor xmm7,xmm7,xmm8 |
| xor ebx,ecx |
| shrd eax,eax,7 |
| mov edi,ebp |
| xor esi,ebx |
| shld ebp,ebp,5 |
| add edx,esi |
| xor edi,eax |
| xor eax,ebx |
| add edx,ebp |
| add ecx,DWORD[60+rsp] |
| and edi,eax |
| xor eax,ebx |
| shrd ebp,ebp,7 |
| mov esi,edx |
| xor edi,eax |
| shld edx,edx,5 |
| add ecx,edi |
| xor esi,ebp |
| xor ebp,eax |
| add ecx,edx |
| vpalignr xmm8,xmm7,xmm6,8 |
| vpxor xmm0,xmm0,xmm4 |
| add ebx,DWORD[rsp] |
| and esi,ebp |
| xor ebp,eax |
| shrd edx,edx,7 |
| vpxor xmm0,xmm0,xmm1 |
| mov edi,ecx |
| xor esi,ebp |
| vpaddd xmm9,xmm11,xmm7 |
| shld ecx,ecx,5 |
| add ebx,esi |
| vpxor xmm0,xmm0,xmm8 |
| xor edi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[4+rsp] |
| vpsrld xmm8,xmm0,30 |
| vmovdqa XMMWORD[48+rsp],xmm9 |
| and edi,edx |
| xor edx,ebp |
| shrd ecx,ecx,7 |
| mov esi,ebx |
| vpslld xmm0,xmm0,2 |
| xor edi,edx |
| shld ebx,ebx,5 |
| add eax,edi |
| xor esi,ecx |
| xor ecx,edx |
| add eax,ebx |
| add ebp,DWORD[8+rsp] |
| and esi,ecx |
| vpor xmm0,xmm0,xmm8 |
| xor ecx,edx |
| shrd ebx,ebx,7 |
| mov edi,eax |
| xor esi,ecx |
| shld eax,eax,5 |
| add ebp,esi |
| xor edi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| add edx,DWORD[12+rsp] |
| and edi,ebx |
| xor ebx,ecx |
| shrd eax,eax,7 |
| mov esi,ebp |
| xor edi,ebx |
| shld ebp,ebp,5 |
| add edx,edi |
| xor esi,eax |
| xor eax,ebx |
| add edx,ebp |
| vpalignr xmm8,xmm0,xmm7,8 |
| vpxor xmm1,xmm1,xmm5 |
| add ecx,DWORD[16+rsp] |
| and esi,eax |
| xor eax,ebx |
| shrd ebp,ebp,7 |
| vpxor xmm1,xmm1,xmm2 |
| mov edi,edx |
| xor esi,eax |
| vpaddd xmm9,xmm11,xmm0 |
| shld edx,edx,5 |
| add ecx,esi |
| vpxor xmm1,xmm1,xmm8 |
| xor edi,ebp |
| xor ebp,eax |
| add ecx,edx |
| add ebx,DWORD[20+rsp] |
| vpsrld xmm8,xmm1,30 |
| vmovdqa XMMWORD[rsp],xmm9 |
| and edi,ebp |
| xor ebp,eax |
| shrd edx,edx,7 |
| mov esi,ecx |
| vpslld xmm1,xmm1,2 |
| xor edi,ebp |
| shld ecx,ecx,5 |
| add ebx,edi |
| xor esi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[24+rsp] |
| and esi,edx |
| vpor xmm1,xmm1,xmm8 |
| xor edx,ebp |
| shrd ecx,ecx,7 |
| mov edi,ebx |
| xor esi,edx |
| shld ebx,ebx,5 |
| add eax,esi |
| xor edi,ecx |
| xor ecx,edx |
| add eax,ebx |
| add ebp,DWORD[28+rsp] |
| and edi,ecx |
| xor ecx,edx |
| shrd ebx,ebx,7 |
| mov esi,eax |
| xor edi,ecx |
| shld eax,eax,5 |
| add ebp,edi |
| xor esi,ebx |
| xor ebx,ecx |
| add ebp,eax |
| vpalignr xmm8,xmm1,xmm0,8 |
| vpxor xmm2,xmm2,xmm6 |
| add edx,DWORD[32+rsp] |
| and esi,ebx |
| xor ebx,ecx |
| shrd eax,eax,7 |
| vpxor xmm2,xmm2,xmm3 |
| mov edi,ebp |
| xor esi,ebx |
| vpaddd xmm9,xmm11,xmm1 |
| shld ebp,ebp,5 |
| add edx,esi |
| vpxor xmm2,xmm2,xmm8 |
| xor edi,eax |
| xor eax,ebx |
| add edx,ebp |
| add ecx,DWORD[36+rsp] |
| vpsrld xmm8,xmm2,30 |
| vmovdqa XMMWORD[16+rsp],xmm9 |
| and edi,eax |
| xor eax,ebx |
| shrd ebp,ebp,7 |
| mov esi,edx |
| vpslld xmm2,xmm2,2 |
| xor edi,eax |
| shld edx,edx,5 |
| add ecx,edi |
| xor esi,ebp |
| xor ebp,eax |
| add ecx,edx |
| add ebx,DWORD[40+rsp] |
| and esi,ebp |
| vpor xmm2,xmm2,xmm8 |
| xor ebp,eax |
| shrd edx,edx,7 |
| mov edi,ecx |
| xor esi,ebp |
| shld ecx,ecx,5 |
| add ebx,esi |
| xor edi,edx |
| xor edx,ebp |
| add ebx,ecx |
| add eax,DWORD[44+rsp] |
| and edi,edx |
| xor edx,ebp |
| shrd ecx,ecx,7 |
| mov esi,ebx |
| xor edi,edx |
| shld ebx,ebx,5 |
| add eax,edi |
| xor esi,edx |
| add eax,ebx |
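| ; Final stretch of rounds: back to the xor-only (parity) round function |
| ; used for rounds 60-79. |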
| vpalignr xmm8,xmm2,xmm1,8 |
| vpxor xmm3,xmm3,xmm7 |
| add ebp,DWORD[48+rsp] |
| xor esi,ecx |
| mov edi,eax |
| shld eax,eax,5 |
| vpxor xmm3,xmm3,xmm4 |
| add ebp,esi |
| xor edi,ecx |
| vpaddd xmm9,xmm11,xmm2 |
| shrd ebx,ebx,7 |
| add ebp,eax |
| vpxor xmm3,xmm3,xmm8 |
| add edx,DWORD[52+rsp] |
| xor edi,ebx |
| mov esi,ebp |
| shld ebp,ebp,5 |
| vpsrld xmm8,xmm3,30 |
| vmovdqa XMMWORD[32+rsp],xmm9 |
| add edx,edi |
| xor esi,ebx |
| shrd eax,eax,7 |
| add edx,ebp |
| vpslld xmm3,xmm3,2 |
| add ecx,DWORD[56+rsp] |
| xor esi,eax |
| mov edi,edx |
| shld edx,edx,5 |
| add ecx,esi |
| xor edi,eax |
| shrd ebp,ebp,7 |
| add ecx,edx |
| vpor xmm3,xmm3,xmm8 |
| add ebx,DWORD[60+rsp] |
| xor edi,ebp |
| mov esi,ecx |
| shld ecx,ecx,5 |
| add ebx,edi |
| xor esi,ebp |
| shrd edx,edx,7 |
| add ebx,ecx |
| add eax,DWORD[rsp] |
| vpaddd xmm9,xmm11,xmm3 |
| xor esi,edx |
| mov edi,ebx |
| shld ebx,ebx,5 |
| add eax,esi |
| vmovdqa XMMWORD[48+rsp],xmm9 |
| xor edi,edx |
| shrd ecx,ecx,7 |
| add eax,ebx |
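| ; The vector schedule work for this block is finished; the rounds below are |
| ; pure scalar until the end-of-input check. |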
| add ebp,DWORD[4+rsp] |
| xor edi,ecx |
| mov esi,eax |
| shld eax,eax,5 |
| add ebp,edi |
| xor esi,ecx |
| shrd ebx,ebx,7 |
| add ebp,eax |
| add edx,DWORD[8+rsp] |
| xor esi,ebx |
| mov edi,ebp |
| shld ebp,ebp,5 |
| add edx,esi |
| xor edi,ebx |
| shrd eax,eax,7 |
| add edx,ebp |
| add ecx,DWORD[12+rsp] |
| xor edi,eax |
| mov esi,edx |
| shld edx,edx,5 |
| add ecx,edi |
| xor esi,eax |
| shrd ebp,ebp,7 |
| add ecx,edx |
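| ; End-of-input check: r9 is the data pointer, r10 the end of the input. If |
| ; more blocks remain, reload the byte-swap mask (xmm6) and the first round |
| ; constant (xmm11), then load and byte-swap the next 64-byte block so its |
| ; schedule setup overlaps the tail rounds of the current block. |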
| cmp r9,r10 |
| je NEAR $L$done_avx |
| vmovdqa xmm6,XMMWORD[64+r14] |
| vmovdqa xmm11,XMMWORD[((-64))+r14] |
| vmovdqu xmm0,XMMWORD[r9] |
| vmovdqu xmm1,XMMWORD[16+r9] |
| vmovdqu xmm2,XMMWORD[32+r9] |
| vmovdqu xmm3,XMMWORD[48+r9] |
| vpshufb xmm0,xmm0,xmm6 |
| add r9,64 |
| add ebx,DWORD[16+rsp] |
| xor esi,ebp |
| vpshufb xmm1,xmm1,xmm6 |
| mov edi,ecx |
| shld ecx,ecx,5 |
| vpaddd xmm4,xmm0,xmm11 |
|