Diffstat (limited to 'crypto/sha/asm')
-rw-r--r--  crypto/sha/asm/README        |    1
-rw-r--r--  crypto/sha/asm/s1-win32.asm  | 1664
-rw-r--r--  crypto/sha/asm/sha1-586.pl   |  491
-rw-r--r--  crypto/sha/asm/sx86unix.cpp  | 1948
4 files changed, 4104 insertions, 0 deletions
diff --git a/crypto/sha/asm/README b/crypto/sha/asm/README
new file mode 100644
index 0000000000..b7e755765f
--- /dev/null
+++ b/crypto/sha/asm/README
@@ -0,0 +1 @@
+C2.pl works
diff --git a/crypto/sha/asm/s1-win32.asm b/crypto/sha/asm/s1-win32.asm
new file mode 100644
index 0000000000..61335666b9
--- /dev/null
+++ b/crypto/sha/asm/s1-win32.asm
@@ -0,0 +1,1664 @@
+ ; Don't even think of reading this code
+ ; It was automatically generated by sha1-586.pl
+ ; Which is a perl program used to generate the x86 assembler for
+ ; any of elf, a.out, BSDI, Win32, or Solaris
+ ; eric <eay@cryptsoft.com>
+ ;
+ TITLE sha1-586.asm
+ .486
+.model FLAT
+_TEXT SEGMENT
+PUBLIC _sha1_block_x86
+
+_sha1_block_x86 PROC NEAR
+ push esi
+ push ebp
+ mov eax, DWORD PTR 20[esp]
+ mov esi, DWORD PTR 16[esp]
+ add eax, esi
+ mov ebp, DWORD PTR 12[esp]
+ push ebx
+ sub eax, 64
+ push edi
+ mov ebx, DWORD PTR 4[ebp]
+ sub esp, 72
+ mov edx, DWORD PTR 12[ebp]
+ mov edi, DWORD PTR 16[ebp]
+ mov ecx, DWORD PTR 8[ebp]
+ mov DWORD PTR 68[esp],eax
+ ; First we need to set up the X array
+ mov eax, DWORD PTR [esi]
+L000start:
+ ; First, load the words onto the stack in network byte order
+ bswap eax
+ mov DWORD PTR [esp],eax
+ mov eax, DWORD PTR 4[esi]
+ bswap eax
+ mov DWORD PTR 4[esp],eax
+ mov eax, DWORD PTR 8[esi]
+ bswap eax
+ mov DWORD PTR 8[esp],eax
+ mov eax, DWORD PTR 12[esi]
+ bswap eax
+ mov DWORD PTR 12[esp],eax
+ mov eax, DWORD PTR 16[esi]
+ bswap eax
+ mov DWORD PTR 16[esp],eax
+ mov eax, DWORD PTR 20[esi]
+ bswap eax
+ mov DWORD PTR 20[esp],eax
+ mov eax, DWORD PTR 24[esi]
+ bswap eax
+ mov DWORD PTR 24[esp],eax
+ mov eax, DWORD PTR 28[esi]
+ bswap eax
+ mov DWORD PTR 28[esp],eax
+ mov eax, DWORD PTR 32[esi]
+ bswap eax
+ mov DWORD PTR 32[esp],eax
+ mov eax, DWORD PTR 36[esi]
+ bswap eax
+ mov DWORD PTR 36[esp],eax
+ mov eax, DWORD PTR 40[esi]
+ bswap eax
+ mov DWORD PTR 40[esp],eax
+ mov eax, DWORD PTR 44[esi]
+ bswap eax
+ mov DWORD PTR 44[esp],eax
+ mov eax, DWORD PTR 48[esi]
+ bswap eax
+ mov DWORD PTR 48[esp],eax
+ mov eax, DWORD PTR 52[esi]
+ bswap eax
+ mov DWORD PTR 52[esp],eax
+ mov eax, DWORD PTR 56[esi]
+ bswap eax
+ mov DWORD PTR 56[esp],eax
+ mov eax, DWORD PTR 60[esi]
+ bswap eax
+ mov DWORD PTR 60[esp],eax
+ ; We now have the X array on the stack
+ ; starting at sp-4
+ mov DWORD PTR 64[esp],esi
+ ;
+ ; Start processing
+ mov eax, DWORD PTR [ebp]
+ ; 00_15 0
+ mov esi, ecx
+ mov ebp, eax
+ xor esi, edx
+ rol ebp, 5
+ and esi, ebx
+ add ebp, edi
+ ror ebx, 1
+ mov edi, DWORD PTR [esp]
+ ror ebx, 1
+ xor esi, edx
+ lea ebp, DWORD PTR 1518500249[edi*1+ebp]
+ mov edi, ebx
+ add esi, ebp
+ xor edi, ecx
+ mov ebp, esi
+ and edi, eax
+ rol ebp, 5
+ add ebp, edx
+ mov edx, DWORD PTR 4[esp]
+ ror eax, 1
+ xor edi, ecx
+ ror eax, 1
+ lea ebp, DWORD PTR 1518500249[edx*1+ebp]
+ add edi, ebp
+ ; 00_15 2
+ mov edx, eax
+ mov ebp, edi
+ xor edx, ebx
+ rol ebp, 5
+ and edx, esi
+ add ebp, ecx
+ ror esi, 1
+ mov ecx, DWORD PTR 8[esp]
+ ror esi, 1
+ xor edx, ebx
+ lea ebp, DWORD PTR 1518500249[ecx*1+ebp]
+ mov ecx, esi
+ add edx, ebp
+ xor ecx, eax
+ mov ebp, edx
+ and ecx, edi
+ rol ebp, 5
+ add ebp, ebx
+ mov ebx, DWORD PTR 12[esp]
+ ror edi, 1
+ xor ecx, eax
+ ror edi, 1
+ lea ebp, DWORD PTR 1518500249[ebx*1+ebp]
+ add ecx, ebp
+ ; 00_15 4
+ mov ebx, edi
+ mov ebp, ecx
+ xor ebx, esi
+ rol ebp, 5
+ and ebx, edx
+ add ebp, eax
+ ror edx, 1
+ mov eax, DWORD PTR 16[esp]
+ ror edx, 1
+ xor ebx, esi
+ lea ebp, DWORD PTR 1518500249[eax*1+ebp]
+ mov eax, edx
+ add ebx, ebp
+ xor eax, edi
+ mov ebp, ebx
+ and eax, ecx
+ rol ebp, 5
+ add ebp, esi
+ mov esi, DWORD PTR 20[esp]
+ ror ecx, 1
+ xor eax, edi
+ ror ecx, 1
+ lea ebp, DWORD PTR 1518500249[esi*1+ebp]
+ add eax, ebp
+ ; 00_15 6
+ mov esi, ecx
+ mov ebp, eax
+ xor esi, edx
+ rol ebp, 5
+ and esi, ebx
+ add ebp, edi
+ ror ebx, 1
+ mov edi, DWORD PTR 24[esp]
+ ror ebx, 1
+ xor esi, edx
+ lea ebp, DWORD PTR 1518500249[edi*1+ebp]
+ mov edi, ebx
+ add esi, ebp
+ xor edi, ecx
+ mov ebp, esi
+ and edi, eax
+ rol ebp, 5
+ add ebp, edx
+ mov edx, DWORD PTR 28[esp]
+ ror eax, 1
+ xor edi, ecx
+ ror eax, 1
+ lea ebp, DWORD PTR 1518500249[edx*1+ebp]
+ add edi, ebp
+ ; 00_15 8
+ mov edx, eax
+ mov ebp, edi
+ xor edx, ebx
+ rol ebp, 5
+ and edx, esi
+ add ebp, ecx
+ ror esi, 1
+ mov ecx, DWORD PTR 32[esp]
+ ror esi, 1
+ xor edx, ebx
+ lea ebp, DWORD PTR 1518500249[ecx*1+ebp]
+ mov ecx, esi
+ add edx, ebp
+ xor ecx, eax
+ mov ebp, edx
+ and ecx, edi
+ rol ebp, 5
+ add ebp, ebx
+ mov ebx, DWORD PTR 36[esp]
+ ror edi, 1
+ xor ecx, eax
+ ror edi, 1
+ lea ebp, DWORD PTR 1518500249[ebx*1+ebp]
+ add ecx, ebp
+ ; 00_15 10
+ mov ebx, edi
+ mov ebp, ecx
+ xor ebx, esi
+ rol ebp, 5
+ and ebx, edx
+ add ebp, eax
+ ror edx, 1
+ mov eax, DWORD PTR 40[esp]
+ ror edx, 1
+ xor ebx, esi
+ lea ebp, DWORD PTR 1518500249[eax*1+ebp]
+ mov eax, edx
+ add ebx, ebp
+ xor eax, edi
+ mov ebp, ebx
+ and eax, ecx
+ rol ebp, 5
+ add ebp, esi
+ mov esi, DWORD PTR 44[esp]
+ ror ecx, 1
+ xor eax, edi
+ ror ecx, 1
+ lea ebp, DWORD PTR 1518500249[esi*1+ebp]
+ add eax, ebp
+ ; 00_15 12
+ mov esi, ecx
+ mov ebp, eax
+ xor esi, edx
+ rol ebp, 5
+ and esi, ebx
+ add ebp, edi
+ ror ebx, 1
+ mov edi, DWORD PTR 48[esp]
+ ror ebx, 1
+ xor esi, edx
+ lea ebp, DWORD PTR 1518500249[edi*1+ebp]
+ mov edi, ebx
+ add esi, ebp
+ xor edi, ecx
+ mov ebp, esi
+ and edi, eax
+ rol ebp, 5
+ add ebp, edx
+ mov edx, DWORD PTR 52[esp]
+ ror eax, 1
+ xor edi, ecx
+ ror eax, 1
+ lea ebp, DWORD PTR 1518500249[edx*1+ebp]
+ add edi, ebp
+ ; 00_15 14
+ mov edx, eax
+ mov ebp, edi
+ xor edx, ebx
+ rol ebp, 5
+ and edx, esi
+ add ebp, ecx
+ ror esi, 1
+ mov ecx, DWORD PTR 56[esp]
+ ror esi, 1
+ xor edx, ebx
+ lea ebp, DWORD PTR 1518500249[ecx*1+ebp]
+ mov ecx, esi
+ add edx, ebp
+ xor ecx, eax
+ mov ebp, edx
+ and ecx, edi
+ rol ebp, 5
+ add ebp, ebx
+ mov ebx, DWORD PTR 60[esp]
+ ror edi, 1
+ xor ecx, eax
+ ror edi, 1
+ lea ebp, DWORD PTR 1518500249[ebx*1+ebp]
+ add ecx, ebp
+ ; 16_19 16
+ nop
+ mov ebp, DWORD PTR [esp]
+ mov ebx, DWORD PTR 8[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor ebx, ebp
+ mov ebp, edi
+ rol ebx, 1
+ xor ebp, esi
+ mov DWORD PTR [esp],ebx
+ and ebp, edx
+ lea ebx, DWORD PTR 1518500249[eax*1+ebx]
+ xor ebp, esi
+ mov eax, ecx
+ add ebx, ebp
+ rol eax, 5
+ ror edx, 1
+ add ebx, eax
+ mov eax, DWORD PTR 4[esp]
+ mov ebp, DWORD PTR 12[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 56[esp]
+ ror edx, 1
+ xor eax, ebp
+ rol eax, 1
+ mov ebp, edx
+ xor ebp, edi
+ mov DWORD PTR 4[esp],eax
+ and ebp, ecx
+ lea eax, DWORD PTR 1518500249[esi*1+eax]
+ xor ebp, edi
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add eax, esi
+ ror ecx, 1
+ add eax, ebp
+ ; 16_19 18
+ mov ebp, DWORD PTR 8[esp]
+ mov esi, DWORD PTR 16[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor esi, ebp
+ mov ebp, ecx
+ rol esi, 1
+ xor ebp, edx
+ mov DWORD PTR 8[esp],esi
+ and ebp, ebx
+ lea esi, DWORD PTR 1518500249[edi*1+esi]
+ xor ebp, edx
+ mov edi, eax
+ add esi, ebp
+ rol edi, 5
+ ror ebx, 1
+ add esi, edi
+ mov edi, DWORD PTR 12[esp]
+ mov ebp, DWORD PTR 20[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR [esp]
+ ror ebx, 1
+ xor edi, ebp
+ rol edi, 1
+ mov ebp, ebx
+ xor ebp, ecx
+ mov DWORD PTR 12[esp],edi
+ and ebp, eax
+ lea edi, DWORD PTR 1518500249[edx*1+edi]
+ xor ebp, ecx
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edi, edx
+ ror eax, 1
+ add edi, ebp
+ ; 20_39 20
+ mov edx, DWORD PTR 16[esp]
+ mov ebp, DWORD PTR 24[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 16[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 1859775393[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 21
+ mov ecx, DWORD PTR 20[esp]
+ mov ebp, DWORD PTR 28[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 20[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 1859775393[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 22
+ mov ebx, DWORD PTR 24[esp]
+ mov ebp, DWORD PTR 32[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR 24[esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 1859775393[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 23
+ mov eax, DWORD PTR 28[esp]
+ mov ebp, DWORD PTR 36[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 28[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 1859775393[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 24
+ mov esi, DWORD PTR 32[esp]
+ mov ebp, DWORD PTR 40[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR [esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 32[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 1859775393[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 25
+ mov edi, DWORD PTR 36[esp]
+ mov ebp, DWORD PTR 44[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 36[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 1859775393[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 26
+ mov edx, DWORD PTR 40[esp]
+ mov ebp, DWORD PTR 48[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 40[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 1859775393[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 27
+ mov ecx, DWORD PTR 44[esp]
+ mov ebp, DWORD PTR 52[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 44[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 1859775393[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 28
+ mov ebx, DWORD PTR 48[esp]
+ mov ebp, DWORD PTR 56[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR 48[esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 1859775393[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 29
+ mov eax, DWORD PTR 52[esp]
+ mov ebp, DWORD PTR 60[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 52[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 1859775393[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 30
+ mov esi, DWORD PTR 56[esp]
+ mov ebp, DWORD PTR [esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 56[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 1859775393[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 31
+ mov edi, DWORD PTR 60[esp]
+ mov ebp, DWORD PTR 4[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 60[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 1859775393[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 32
+ mov edx, DWORD PTR [esp]
+ mov ebp, DWORD PTR 8[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR [esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 1859775393[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 33
+ mov ecx, DWORD PTR 4[esp]
+ mov ebp, DWORD PTR 12[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 4[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 1859775393[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 34
+ mov ebx, DWORD PTR 8[esp]
+ mov ebp, DWORD PTR 16[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR 8[esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 1859775393[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 35
+ mov eax, DWORD PTR 12[esp]
+ mov ebp, DWORD PTR 20[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR [esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 12[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 1859775393[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 36
+ mov esi, DWORD PTR 16[esp]
+ mov ebp, DWORD PTR 24[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 16[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 1859775393[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 37
+ mov edi, DWORD PTR 20[esp]
+ mov ebp, DWORD PTR 28[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 20[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 1859775393[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 38
+ mov edx, DWORD PTR 24[esp]
+ mov ebp, DWORD PTR 32[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 24[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 1859775393[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 39
+ mov ecx, DWORD PTR 28[esp]
+ mov ebp, DWORD PTR 36[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 28[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 1859775393[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 40_59 40
+ mov ebx, DWORD PTR 32[esp]
+ mov ebp, DWORD PTR 40[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR [esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ or ebp, edi
+ mov DWORD PTR 32[esp],ebx
+ and ebp, esi
+ lea ebx, DWORD PTR 2400959708[eax*1+ebx]
+ mov eax, edx
+ ror edx, 1
+ and eax, edi
+ or ebp, eax
+ mov eax, ecx
+ rol eax, 5
+ add ebp, eax
+ mov eax, DWORD PTR 36[esp]
+ add ebx, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 24[esp]
+ ror edx, 1
+ xor eax, ebp
+ rol eax, 1
+ mov ebp, ecx
+ mov DWORD PTR 36[esp],eax
+ or ebp, edx
+ lea eax, DWORD PTR 2400959708[esi*1+eax]
+ mov esi, ecx
+ and ebp, edi
+ and esi, edx
+ or ebp, esi
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add ebp, esi
+ ror ecx, 1
+ add eax, ebp
+ ; 40_59 41
+ ; 40_59 42
+ mov esi, DWORD PTR 40[esp]
+ mov ebp, DWORD PTR 48[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ or ebp, ecx
+ mov DWORD PTR 40[esp],esi
+ and ebp, edx
+ lea esi, DWORD PTR 2400959708[edi*1+esi]
+ mov edi, ebx
+ ror ebx, 1
+ and edi, ecx
+ or ebp, edi
+ mov edi, eax
+ rol edi, 5
+ add ebp, edi
+ mov edi, DWORD PTR 44[esp]
+ add esi, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 32[esp]
+ ror ebx, 1
+ xor edi, ebp
+ rol edi, 1
+ mov ebp, eax
+ mov DWORD PTR 44[esp],edi
+ or ebp, ebx
+ lea edi, DWORD PTR 2400959708[edx*1+edi]
+ mov edx, eax
+ and ebp, ecx
+ and edx, ebx
+ or ebp, edx
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add ebp, edx
+ ror eax, 1
+ add edi, ebp
+ ; 40_59 43
+ ; 40_59 44
+ mov edx, DWORD PTR 48[esp]
+ mov ebp, DWORD PTR 56[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ or ebp, eax
+ mov DWORD PTR 48[esp],edx
+ and ebp, ebx
+ lea edx, DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx, esi
+ ror esi, 1
+ and ecx, eax
+ or ebp, ecx
+ mov ecx, edi
+ rol ecx, 5
+ add ebp, ecx
+ mov ecx, DWORD PTR 52[esp]
+ add edx, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 40[esp]
+ ror esi, 1
+ xor ecx, ebp
+ rol ecx, 1
+ mov ebp, edi
+ mov DWORD PTR 52[esp],ecx
+ or ebp, esi
+ lea ecx, DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx, edi
+ and ebp, eax
+ and ebx, esi
+ or ebp, ebx
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebp, ebx
+ ror edi, 1
+ add ecx, ebp
+ ; 40_59 45
+ ; 40_59 46
+ mov ebx, DWORD PTR 56[esp]
+ mov ebp, DWORD PTR [esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ or ebp, edi
+ mov DWORD PTR 56[esp],ebx
+ and ebp, esi
+ lea ebx, DWORD PTR 2400959708[eax*1+ebx]
+ mov eax, edx
+ ror edx, 1
+ and eax, edi
+ or ebp, eax
+ mov eax, ecx
+ rol eax, 5
+ add ebp, eax
+ mov eax, DWORD PTR 60[esp]
+ add ebx, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 48[esp]
+ ror edx, 1
+ xor eax, ebp
+ rol eax, 1
+ mov ebp, ecx
+ mov DWORD PTR 60[esp],eax
+ or ebp, edx
+ lea eax, DWORD PTR 2400959708[esi*1+eax]
+ mov esi, ecx
+ and ebp, edi
+ and esi, edx
+ or ebp, esi
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add ebp, esi
+ ror ecx, 1
+ add eax, ebp
+ ; 40_59 47
+ ; 40_59 48
+ mov esi, DWORD PTR [esp]
+ mov ebp, DWORD PTR 8[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ or ebp, ecx
+ mov DWORD PTR [esp],esi
+ and ebp, edx
+ lea esi, DWORD PTR 2400959708[edi*1+esi]
+ mov edi, ebx
+ ror ebx, 1
+ and edi, ecx
+ or ebp, edi
+ mov edi, eax
+ rol edi, 5
+ add ebp, edi
+ mov edi, DWORD PTR 4[esp]
+ add esi, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 56[esp]
+ ror ebx, 1
+ xor edi, ebp
+ rol edi, 1
+ mov ebp, eax
+ mov DWORD PTR 4[esp],edi
+ or ebp, ebx
+ lea edi, DWORD PTR 2400959708[edx*1+edi]
+ mov edx, eax
+ and ebp, ecx
+ and edx, ebx
+ or ebp, edx
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add ebp, edx
+ ror eax, 1
+ add edi, ebp
+ ; 40_59 49
+ ; 40_59 50
+ mov edx, DWORD PTR 8[esp]
+ mov ebp, DWORD PTR 16[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ or ebp, eax
+ mov DWORD PTR 8[esp],edx
+ and ebp, ebx
+ lea edx, DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx, esi
+ ror esi, 1
+ and ecx, eax
+ or ebp, ecx
+ mov ecx, edi
+ rol ecx, 5
+ add ebp, ecx
+ mov ecx, DWORD PTR 12[esp]
+ add edx, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR [esp]
+ ror esi, 1
+ xor ecx, ebp
+ rol ecx, 1
+ mov ebp, edi
+ mov DWORD PTR 12[esp],ecx
+ or ebp, esi
+ lea ecx, DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx, edi
+ and ebp, eax
+ and ebx, esi
+ or ebp, ebx
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebp, ebx
+ ror edi, 1
+ add ecx, ebp
+ ; 40_59 51
+ ; 40_59 52
+ mov ebx, DWORD PTR 16[esp]
+ mov ebp, DWORD PTR 24[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ or ebp, edi
+ mov DWORD PTR 16[esp],ebx
+ and ebp, esi
+ lea ebx, DWORD PTR 2400959708[eax*1+ebx]
+ mov eax, edx
+ ror edx, 1
+ and eax, edi
+ or ebp, eax
+ mov eax, ecx
+ rol eax, 5
+ add ebp, eax
+ mov eax, DWORD PTR 20[esp]
+ add ebx, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 8[esp]
+ ror edx, 1
+ xor eax, ebp
+ rol eax, 1
+ mov ebp, ecx
+ mov DWORD PTR 20[esp],eax
+ or ebp, edx
+ lea eax, DWORD PTR 2400959708[esi*1+eax]
+ mov esi, ecx
+ and ebp, edi
+ and esi, edx
+ or ebp, esi
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add ebp, esi
+ ror ecx, 1
+ add eax, ebp
+ ; 40_59 53
+ ; 40_59 54
+ mov esi, DWORD PTR 24[esp]
+ mov ebp, DWORD PTR 32[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ or ebp, ecx
+ mov DWORD PTR 24[esp],esi
+ and ebp, edx
+ lea esi, DWORD PTR 2400959708[edi*1+esi]
+ mov edi, ebx
+ ror ebx, 1
+ and edi, ecx
+ or ebp, edi
+ mov edi, eax
+ rol edi, 5
+ add ebp, edi
+ mov edi, DWORD PTR 28[esp]
+ add esi, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 16[esp]
+ ror ebx, 1
+ xor edi, ebp
+ rol edi, 1
+ mov ebp, eax
+ mov DWORD PTR 28[esp],edi
+ or ebp, ebx
+ lea edi, DWORD PTR 2400959708[edx*1+edi]
+ mov edx, eax
+ and ebp, ecx
+ and edx, ebx
+ or ebp, edx
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add ebp, edx
+ ror eax, 1
+ add edi, ebp
+ ; 40_59 55
+ ; 40_59 56
+ mov edx, DWORD PTR 32[esp]
+ mov ebp, DWORD PTR 40[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR [esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ or ebp, eax
+ mov DWORD PTR 32[esp],edx
+ and ebp, ebx
+ lea edx, DWORD PTR 2400959708[ecx*1+edx]
+ mov ecx, esi
+ ror esi, 1
+ and ecx, eax
+ or ebp, ecx
+ mov ecx, edi
+ rol ecx, 5
+ add ebp, ecx
+ mov ecx, DWORD PTR 36[esp]
+ add edx, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 24[esp]
+ ror esi, 1
+ xor ecx, ebp
+ rol ecx, 1
+ mov ebp, edi
+ mov DWORD PTR 36[esp],ecx
+ or ebp, esi
+ lea ecx, DWORD PTR 2400959708[ebx*1+ecx]
+ mov ebx, edi
+ and ebp, eax
+ and ebx, esi
+ or ebp, ebx
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebp, ebx
+ ror edi, 1
+ add ecx, ebp
+ ; 40_59 57
+ ; 40_59 58
+ mov ebx, DWORD PTR 40[esp]
+ mov ebp, DWORD PTR 48[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ or ebp, edi
+ mov DWORD PTR 40[esp],ebx
+ and ebp, esi
+ lea ebx, DWORD PTR 2400959708[eax*1+ebx]
+ mov eax, edx
+ ror edx, 1
+ and eax, edi
+ or ebp, eax
+ mov eax, ecx
+ rol eax, 5
+ add ebp, eax
+ mov eax, DWORD PTR 44[esp]
+ add ebx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 32[esp]
+ ror edx, 1
+ xor eax, ebp
+ rol eax, 1
+ mov ebp, ecx
+ mov DWORD PTR 44[esp],eax
+ or ebp, edx
+ lea eax, DWORD PTR 2400959708[esi*1+eax]
+ mov esi, ecx
+ and ebp, edi
+ and esi, edx
+ or ebp, esi
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add ebp, esi
+ ror ecx, 1
+ add eax, ebp
+ ; 40_59 59
+ ; 20_39 60
+ mov esi, DWORD PTR 48[esp]
+ mov ebp, DWORD PTR 56[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 48[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 3395469782[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 61
+ mov edi, DWORD PTR 52[esp]
+ mov ebp, DWORD PTR 60[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 52[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 3395469782[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 62
+ mov edx, DWORD PTR 56[esp]
+ mov ebp, DWORD PTR [esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 56[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 3395469782[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 63
+ mov ecx, DWORD PTR 60[esp]
+ mov ebp, DWORD PTR 4[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 60[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 3395469782[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 64
+ mov ebx, DWORD PTR [esp]
+ mov ebp, DWORD PTR 8[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR [esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 3395469782[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 65
+ mov eax, DWORD PTR 4[esp]
+ mov ebp, DWORD PTR 12[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 4[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 3395469782[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 66
+ mov esi, DWORD PTR 8[esp]
+ mov ebp, DWORD PTR 16[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 8[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 3395469782[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 67
+ mov edi, DWORD PTR 12[esp]
+ mov ebp, DWORD PTR 20[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR [esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 12[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 3395469782[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 68
+ mov edx, DWORD PTR 16[esp]
+ mov ebp, DWORD PTR 24[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 16[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 3395469782[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 69
+ mov ecx, DWORD PTR 20[esp]
+ mov ebp, DWORD PTR 28[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 52[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 20[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 3395469782[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 70
+ mov ebx, DWORD PTR 24[esp]
+ mov ebp, DWORD PTR 32[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 56[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR 24[esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 3395469782[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 71
+ mov eax, DWORD PTR 28[esp]
+ mov ebp, DWORD PTR 36[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 60[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 28[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 3395469782[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 72
+ mov esi, DWORD PTR 32[esp]
+ mov ebp, DWORD PTR 40[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR [esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 32[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 3395469782[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 73
+ mov edi, DWORD PTR 36[esp]
+ mov ebp, DWORD PTR 44[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 4[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 36[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 3395469782[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ ror eax, 1
+ add edx, ebp
+ ror eax, 1
+ add edi, edx
+ ; 20_39 74
+ mov edx, DWORD PTR 40[esp]
+ mov ebp, DWORD PTR 48[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 8[esp]
+ xor edx, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor edx, ebp
+ mov ebp, esi
+ rol edx, 1
+ xor ebp, eax
+ mov DWORD PTR 40[esp],edx
+ xor ebp, ebx
+ lea edx, DWORD PTR 3395469782[ecx*1+edx]
+ mov ecx, edi
+ rol ecx, 5
+ ror esi, 1
+ add ecx, ebp
+ ror esi, 1
+ add edx, ecx
+ ; 20_39 75
+ mov ecx, DWORD PTR 44[esp]
+ mov ebp, DWORD PTR 52[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 12[esp]
+ xor ecx, ebp
+ mov ebp, DWORD PTR 32[esp]
+ xor ecx, ebp
+ mov ebp, edi
+ rol ecx, 1
+ xor ebp, esi
+ mov DWORD PTR 44[esp],ecx
+ xor ebp, eax
+ lea ecx, DWORD PTR 3395469782[ebx*1+ecx]
+ mov ebx, edx
+ rol ebx, 5
+ ror edi, 1
+ add ebx, ebp
+ ror edi, 1
+ add ecx, ebx
+ ; 20_39 76
+ mov ebx, DWORD PTR 48[esp]
+ mov ebp, DWORD PTR 56[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 16[esp]
+ xor ebx, ebp
+ mov ebp, DWORD PTR 36[esp]
+ xor ebx, ebp
+ mov ebp, edx
+ rol ebx, 1
+ xor ebp, edi
+ mov DWORD PTR 48[esp],ebx
+ xor ebp, esi
+ lea ebx, DWORD PTR 3395469782[eax*1+ebx]
+ mov eax, ecx
+ rol eax, 5
+ ror edx, 1
+ add eax, ebp
+ ror edx, 1
+ add ebx, eax
+ ; 20_39 77
+ mov eax, DWORD PTR 52[esp]
+ mov ebp, DWORD PTR 60[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 20[esp]
+ xor eax, ebp
+ mov ebp, DWORD PTR 40[esp]
+ xor eax, ebp
+ mov ebp, ecx
+ rol eax, 1
+ xor ebp, edx
+ mov DWORD PTR 52[esp],eax
+ xor ebp, edi
+ lea eax, DWORD PTR 3395469782[esi*1+eax]
+ mov esi, ebx
+ rol esi, 5
+ ror ecx, 1
+ add esi, ebp
+ ror ecx, 1
+ add eax, esi
+ ; 20_39 78
+ mov esi, DWORD PTR 56[esp]
+ mov ebp, DWORD PTR [esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 24[esp]
+ xor esi, ebp
+ mov ebp, DWORD PTR 44[esp]
+ xor esi, ebp
+ mov ebp, ebx
+ rol esi, 1
+ xor ebp, ecx
+ mov DWORD PTR 56[esp],esi
+ xor ebp, edx
+ lea esi, DWORD PTR 3395469782[edi*1+esi]
+ mov edi, eax
+ rol edi, 5
+ ror ebx, 1
+ add edi, ebp
+ ror ebx, 1
+ add esi, edi
+ ; 20_39 79
+ mov edi, DWORD PTR 60[esp]
+ mov ebp, DWORD PTR 4[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 28[esp]
+ xor edi, ebp
+ mov ebp, DWORD PTR 48[esp]
+ xor edi, ebp
+ mov ebp, eax
+ rol edi, 1
+ xor ebp, ebx
+ mov DWORD PTR 60[esp],edi
+ xor ebp, ecx
+ lea edi, DWORD PTR 3395469782[edx*1+edi]
+ mov edx, esi
+ rol edx, 5
+ add edx, ebp
+ mov ebp, DWORD PTR 92[esp]
+ ror eax, 1
+ add edi, edx
+ ror eax, 1
+ ; End processing
+ ;
+ mov edx, DWORD PTR 12[ebp]
+ add edx, ebx
+ mov ebx, DWORD PTR 4[ebp]
+ add ebx, esi
+ mov esi, eax
+ mov eax, DWORD PTR [ebp]
+ mov DWORD PTR 12[ebp],edx
+ add eax, edi
+ mov edi, DWORD PTR 16[ebp]
+ add edi, ecx
+ mov ecx, DWORD PTR 8[ebp]
+ add ecx, esi
+ mov DWORD PTR [ebp],eax
+ mov esi, DWORD PTR 64[esp]
+ mov DWORD PTR 8[ebp],ecx
+ add esi, 64
+ mov eax, DWORD PTR 68[esp]
+ mov DWORD PTR 16[ebp],edi
+ cmp eax, esi
+ mov DWORD PTR 4[ebp],ebx
+ jl $L001end
+ mov eax, DWORD PTR [esi]
+ jmp L000start
+$L001end:
+ add esp, 72
+ pop edi
+ pop ebx
+ pop ebp
+ pop esi
+ ret
+_sha1_block_x86 ENDP
+_TEXT ENDS
+END
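The prologue computes in + num - 64 once (kept at 68[esp]) and the epilogue advances the block pointer and compares against it, so the routine processes every whole 64-byte block. As a C sketch (sha1_compress is a hypothetical helper; a full sketch of it follows the generator script below):

    void sha1_block_x86(unsigned int *state, const unsigned char *in,
                        unsigned int num)    /* num: bytes, a multiple of 64 */
    {
        const unsigned char *last = in + num - 64;   /* eax / 68[esp] */

        do {
            sha1_compress(state, in);    /* one pass from L000start */
            in += 64;                    /* add esi, 64             */
        } while (in <= last);            /* cmp eax, esi ; jl       */
    }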
diff --git a/crypto/sha/asm/sha1-586.pl b/crypto/sha/asm/sha1-586.pl
new file mode 100644
index 0000000000..38bb27532d
--- /dev/null
+++ b/crypto/sha/asm/sha1-586.pl
@@ -0,0 +1,491 @@
+#!/usr/local/bin/perl
+
+$normal=0;
+
+push(@INC,"perlasm","../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],"sha1-586.pl");
+
+$A="eax";
+$B="ebx";
+$C="ecx";
+$D="edx";
+$E="edi";
+$T="esi";
+$tmp1="ebp";
+
+$off=9*4;
+
+@K=(0x5a827999,0x6ed9eba1,0x8f1bbcdc,0xca62c1d6);
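These are the standard SHA-1 round constants; the decimal literals in the generated files (1518500249, 1859775393, 2400959708, 3395469782) are the same four values:

    K[ 0..19] = floor(2^30 * sqrt(2))  = 0x5a827999 = 1518500249
    K[20..39] = floor(2^30 * sqrt(3))  = 0x6ed9eba1 = 1859775393
    K[40..59] = floor(2^30 * sqrt(5))  = 0x8f1bbcdc = 2400959708
    K[60..79] = floor(2^30 * sqrt(10)) = 0xca62c1d6 = 3395469782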
+
+&sha1_block("sha1_block_x86");
+
+&asm_finish();
+
+sub Nn
+ {
+ local($p)=@_;
+ local(%n)=($A,$T,$B,$A,$C,$B,$D,$C,$E,$D,$T,$E);
+ return($n{$p});
+ }
+
+sub Np
+ {
+ local($p)=@_;
+ local(%n)=($A,$B,$B,$C,$C,$D,$D,$E,$E,$T,$T,$A);
+ return($n{$p});
+ }
+
+sub Na
+ {
+ local($n)=@_;
+ return( (($n )&0x0f),
+ (($n+ 2)&0x0f),
+ (($n+ 8)&0x0f),
+ (($n+13)&0x0f),
+ (($n+ 1)&0x0f));
+ }
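Na returns the five slots of the 16-word circular X buffer that one round touches. With every index reduced mod 16, the standard expansion W[t] = ROTL1(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16]) becomes, as a C sketch (W being the stack buffer):

    /* mod 16: t-3 == t+13, t-8 == t+8, t-14 == t+2, t-16 == t;
     * (t+1) & 15 is simply the slot the following round updates. */
    unsigned int w = W[t & 15] ^ W[(t + 2) & 15]
                   ^ W[(t + 8) & 15] ^ W[(t + 13) & 15];
    W[t & 15] = (w << 1) | (w >> 31);    /* rotl(w, 1) */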
+
+sub X_expand
+ {
+ local($in)=@_;
+
+ &comment("First, load the words onto the stack in network byte order");
+ for ($i=0; $i<16; $i++)
+ {
+ &mov("eax",&DWP(($i+0)*4,$in,"",0)) unless $i == 0;
+ &bswap("eax");
+ &mov(&swtmp($i+0),"eax");
+ }
+
+ &comment("We now have the X array on the stack");
+ &comment("starting at sp-4");
+ }
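SHA-1 is defined on big-endian words, so X_expand byte-swaps each input word as it is loaded; on the 486 and later that is a single bswap. A C sketch of the same loop (in, W and the use of memcpy are illustrative):

    for (i = 0; i < 16; i++) {
        unsigned int w;
        memcpy(&w, in + 4 * i, 4);                /* movl 4*i(%esi), %eax */
        W[i] = (w << 24) | ((w & 0xff00) << 8)    /* bswapl %eax          */
             | ((w >> 8) & 0xff00) | (w >> 24);
    }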
+
+# Rules of engagement
+# F is always trashable at the start; it holds the running total.
+# E becomes the next F, so it can be trashed after it has been 'accumulated'.
+# F becomes A in the next round; we don't need to access it much.
+# During the X update part, the result ends up in $X[$n0].
+
+sub BODY_00_15
+ {
+ local($pos,$K,$X,$n,$a,$b,$c,$d,$e,$f)=@_;
+
+return if $n & 1;
+ &comment("00_15 $n");
+
+ &mov($f,$c);
+
+ &mov($tmp1,$a);
+ &xor($f,$d); # F2
+
+ &rotl($tmp1,5); # A2
+
+ &and($f,$b); # F3
+ &add($tmp1,$e);
+
+ &rotr($b,1); # B1 <- F
+ &mov($e,&swtmp($n)); # G1
+
+ &rotr($b,1); # B1 <- F
+ &xor($f,$d); # F4
+
+ &lea($tmp1,&DWP($K,$tmp1,$e,1));
+
+############################
+# &BODY_40_59( 0,$K[2],$X,42,$A,$B,$C,$D,$E,$T);
+# &BODY_40_59( 0,$K[2],$X,43,$T,$A,$B,$C,$D,$E);
+$n++;
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+ ($b,$c,$d,$e,$f,$a)=($a,$b,$c,$d,$e,$f);
+
+ &mov($f,$c);
+
+ &add($a,$tmp1); # MOVED DOWN
+ &xor($f,$d); # F2
+
+ &mov($tmp1,$a);
+ &and($f,$b); # F3
+
+ &rotl($tmp1,5); # A2
+
+ &add($tmp1,$e);
+ &mov($e,&swtmp($n)); # G1
+
+ &rotr($b,1); # B1 <- F
+ &xor($f,$d); # F4
+
+ &rotr($b,1); # B1 <- F
+ &lea($tmp1,&DWP($K,$tmp1,$e,1));
+
+ &add($f,$tmp1);
+ }
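The mov/xor/and/xor sequence computes the rounds 0-15 selection function without a NOT or a spare register. One round of this group in C (a sketch; rotl/rotr are assumed rotate helpers):

    f  = d ^ (b & (c ^ d));                   /* == (b & c) | (~b & d)   */
    e += rotl(a, 5) + f + W[n] + 0x5a827999;  /* the lea folds X + K in  */
    b  = rotr(b, 2);                          /* done as two 1-bit rotates */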
+
+sub BODY_16_19
+ {
+ local($pos,$K,$X,$n,$a,$b,$c,$d,$e,$f)=@_;
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+
+return if $n & 1;
+ &comment("16_19 $n");
+
+ &nop() if ($pos < 0);
+&mov($tmp1,&swtmp($n0)); # X1
+ &mov($f,&swtmp($n1)); # X2
+&xor($f,$tmp1); # X3
+ &mov($tmp1,&swtmp($n2)); # X4
+&xor($f,$tmp1); # X5
+ &mov($tmp1,&swtmp($n3)); # X6
+&xor($f,$tmp1); # X7 - slot
+ &mov($tmp1,$c); # F1
+&rotl($f,1); # X8 - slot
+ &xor($tmp1,$d); # F2
+&mov(&swtmp($n0),$f); # X9 - anytime
+ &and($tmp1,$b); # F3
+&lea($f,&DWP($K,$f,$e,1)); # tot=X+K+e
+ &xor($tmp1,$d); # F4
+&mov($e,$a); # A1
+ &add($f,$tmp1); # tot+=F();
+
+&rotl($e,5); # A2
+
+&rotr($b,1); # B1 <- F
+ &add($f,$e); # tot+=a
+
+############################
+# &BODY_40_59( 0,$K[2],$X,42,$A,$B,$C,$D,$E,$T);
+# &BODY_40_59( 0,$K[2],$X,43,$T,$A,$B,$C,$D,$E);
+$n++;
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+ ($b,$c,$d,$e,$f,$a)=($a,$b,$c,$d,$e,$f);
+
+
+&mov($f,&swtmp($n0)); # X1
+ &mov($tmp1,&swtmp($n1)); # X2
+&xor($f,$tmp1); # X3
+ &mov($tmp1,&swtmp($n2)); # X4
+&xor($f,$tmp1); # X5
+ &mov($tmp1,&swtmp($n3)); # X6
+&rotr($c,1); #&rotr($b,1); # B1 <- F # MOVED DOWN
+ &xor($f,$tmp1); # X7 - slot
+&rotl($f,1); # X8 - slot
+ &mov($tmp1,$c); # F1
+&xor($tmp1,$d); # F2
+ &mov(&swtmp($n0),$f); # X9 - anytime
+&and($tmp1,$b); # F3
+ &lea($f,&DWP($K,$f,$e,1)); # tot=X+K+e
+
+&xor($tmp1,$d); # F4
+ &mov($e,$a); # A1
+
+&rotl($e,5); # A2
+
+&rotr($b,1); # B1 <- F
+ &add($f,$e); # tot+=a
+
+&rotr($b,1); # B1 <- F
+ &add($f,$tmp1); # tot+=F();
+
+ }
+
+sub BODY_20_39
+ {
+ local($pos,$K,$X,$n,$a,$b,$c,$d,$e,$f)=@_;
+
+ &comment("20_39 $n");
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+
+&mov($f,&swtmp($n0)); # X1
+ &mov($tmp1,&swtmp($n1)); # X2
+&xor($f,$tmp1); # X3
+ &mov($tmp1,&swtmp($n2)); # X4
+&xor($f,$tmp1); # X5
+ &mov($tmp1,&swtmp($n3)); # X6
+&xor($f,$tmp1); # X7 - slot
+ &mov($tmp1,$b); # F1
+&rotl($f,1); # X8 - slot
+ &xor($tmp1,$c); # F2
+&mov(&swtmp($n0),$f); # X9 - anytime
+ &xor($tmp1,$d); # F3
+
+&lea($f,&DWP($K,$f,$e,1)); # tot=X+K+e
+ &mov($e,$a); # A1
+
+&rotl($e,5); # A2
+
+if ($n != 79) # last loop
+ {
+ &rotr($b,1); # B1 <- F
+ &add($e,$tmp1); # tmp1=F()+a
+
+ &rotr($b,1); # B2 <- F
+ &add($f,$e); # tot+=tmp1;
+ }
+else
+ {
+ &add($e,$tmp1); # tmp1=F()+a
+ &mov($tmp1,&wparam(0));
+
+ &rotr($b,1); # B1 <- F
+ &add($f,$e); # tot+=tmp1;
+
+ &rotr($b,1); # B2 <- F
+ }
+ }
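Rounds 20-39 (and, with a different constant, 60-79) use the parity function; one round of this group in C (a sketch):

    f  = b ^ c ^ d;                       /* the two xors on $tmp1       */
    e += rotl(a, 5) + f + W[n & 15] + K;  /* K = 0x6ed9eba1 for 20..39,  */
    b  = rotr(b, 2);                      /*     0xca62c1d6 for 60..79   */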
+
+sub BODY_40_59
+ {
+ local($pos,$K,$X,$n,$a,$b,$c,$d,$e,$f)=@_;
+
+ &comment("40_59 $n");
+ return if $n & 1;
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+
+&mov($f,&swtmp($n0)); # X1
+ &mov($tmp1,&swtmp($n1)); # X2
+&xor($f,$tmp1); # X3
+ &mov($tmp1,&swtmp($n2)); # X4
+&xor($f,$tmp1); # X5
+ &mov($tmp1,&swtmp($n3)); # X6
+&xor($f,$tmp1); # X7 - slot
+ &mov($tmp1,$b); # F1
+&rotl($f,1); # X8 - slot
+ &or($tmp1,$c); # F2
+&mov(&swtmp($n0),$f); # X9 - anytime
+ &and($tmp1,$d); # F3
+
+&lea($f,&DWP($K,$f,$e,1)); # tot=X+K+e
+ &mov($e,$b); # F4
+
+&rotr($b,1); # B1 <- F
+ &and($e,$c); # F5
+
+&or($tmp1,$e); # F6
+ &mov($e,$a); # A1
+
+&rotl($e,5); # A2
+
+&add($tmp1,$e); # tmp1=F()+a
+
+############################
+# &BODY_40_59( 0,$K[2],$X,42,$A,$B,$C,$D,$E,$T);
+# &BODY_40_59( 0,$K[2],$X,43,$T,$A,$B,$C,$D,$E);
+$n++;
+ local($n0,$n1,$n2,$n3,$np)=&Na($n);
+ ($b,$c,$d,$e,$f,$a)=($a,$b,$c,$d,$e,$f);
+
+ &mov($f,&swtmp($n0)); # X1
+&add($a,$tmp1); # tot+=tmp1; # moved was add f,tmp1
+ &mov($tmp1,&swtmp($n1)); # X2
+&xor($f,$tmp1); # X3
+ &mov($tmp1,&swtmp($n2)); # X4
+&xor($f,$tmp1); # X5
+ &mov($tmp1,&swtmp($n3)); # X6
+&rotr($c,1); # B2 <- F # moved was rotr b,1
+ &xor($f,$tmp1); # X7 - slot
+&rotl($f,1); # X8 - slot
+ &mov($tmp1,$b); # F1
+&mov(&swtmp($n0),$f); # X9 - anytime
+ &or($tmp1,$c); # F2
+&lea($f,&DWP($K,$f,$e,1)); # tot=X+K+e
+ &mov($e,$b); # F4
+&and($tmp1,$d); # F3
+ &and($e,$c); # F5
+
+&or($tmp1,$e); # F6
+ &mov($e,$a); # A1
+
+&rotl($e,5); # A2
+
+&rotr($b,1); # B1 <- F
+ &add($tmp1,$e); # tmp1=F()+a
+
+&rotr($b,1); # B2 <- F
+ &add($f,$tmp1); # tot+=tmp1;
+ }
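Rounds 40-59 use the majority function. The or/and/and/or sequence relies on the identity Maj(b,c,d) = (b&c) | (b&d) | (c&d) = ((b|c) & d) | (b&c), which is one boolean operation shorter than the three-AND form; in C (a sketch):

    f  = ((b | c) & d) | (b & c);     /* or,and on $tmp1; and on $e; or */
    e += rotl(a, 5) + f + W[n & 15] + 0x8f1bbcdc;
    b  = rotr(b, 2);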
+
+sub BODY_60_79
+ {
+ &BODY_20_39(@_);
+ }
+
+sub sha1_block
+ {
+ local($name)=@_;
+
+ &function_begin_B($name,"");
+
+ # parameter 1 is the SHA_CTX structure.
+ # A 0
+ # B 4
+ # C 8
+ # D 12
+ # E 16
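The offsets describe the start of the context argument, i.e. a layout like this C sketch (field names illustrative; the asm reads them at 0, 4, 8, 12, 16 off ebp):

    struct sha_state {
        unsigned int h0;    /* offset  0 -> A */
        unsigned int h1;    /* offset  4 -> B */
        unsigned int h2;    /* offset  8 -> C */
        unsigned int h3;    /* offset 12 -> D */
        unsigned int h4;    /* offset 16 -> E */
    };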
+
+ &push("esi");
+ &push("ebp");
+ &mov("eax", &wparam(2));
+ &mov("esi", &wparam(1));
+ &add("eax", "esi"); # offset to leave on
+ &mov("ebp", &wparam(0));
+ &push("ebx");
+ &sub("eax", 64);
+ &push("edi");
+ &mov($B, &DWP( 4,"ebp","",0));
+ &stack_push(18);
+ &mov($D, &DWP(12,"ebp","",0));
+ &mov($E, &DWP(16,"ebp","",0));
+ &mov($C, &DWP( 8,"ebp","",0));
+ &mov(&swtmp(17),"eax");
+
+ &comment("First we need to setup the X array");
+ &mov("eax",&DWP(0,"esi","",0)); # pulled out of X_expand
+
+ &set_label("start") unless $normal;
+
+ &X_expand("esi");
+ &mov(&swtmp(16),"esi");
+
+ &comment("");
+ &comment("Start processing");
+
+ # odd start
+ &mov($A, &DWP( 0,"ebp","",0));
+ $X="esp";
+ &BODY_00_15(-2,$K[0],$X, 0,$A,$B,$C,$D,$E,$T);
+ &BODY_00_15( 0,$K[0],$X, 1,$T,$A,$B,$C,$D,$E);
+ &BODY_00_15( 0,$K[0],$X, 2,$E,$T,$A,$B,$C,$D);
+ &BODY_00_15( 0,$K[0],$X, 3,$D,$E,$T,$A,$B,$C);
+ &BODY_00_15( 0,$K[0],$X, 4,$C,$D,$E,$T,$A,$B);
+ &BODY_00_15( 0,$K[0],$X, 5,$B,$C,$D,$E,$T,$A);
+ &BODY_00_15( 0,$K[0],$X, 6,$A,$B,$C,$D,$E,$T);
+ &BODY_00_15( 0,$K[0],$X, 7,$T,$A,$B,$C,$D,$E);
+ &BODY_00_15( 0,$K[0],$X, 8,$E,$T,$A,$B,$C,$D);
+ &BODY_00_15( 0,$K[0],$X, 9,$D,$E,$T,$A,$B,$C);
+ &BODY_00_15( 0,$K[0],$X,10,$C,$D,$E,$T,$A,$B);
+ &BODY_00_15( 0,$K[0],$X,11,$B,$C,$D,$E,$T,$A);
+ &BODY_00_15( 0,$K[0],$X,12,$A,$B,$C,$D,$E,$T);
+ &BODY_00_15( 0,$K[0],$X,13,$T,$A,$B,$C,$D,$E);
+ &BODY_00_15( 0,$K[0],$X,14,$E,$T,$A,$B,$C,$D);
+ &BODY_00_15( 1,$K[0],$X,15,$D,$E,$T,$A,$B,$C);
+ &BODY_16_19(-1,$K[0],$X,16,$C,$D,$E,$T,$A,$B);
+ &BODY_16_19( 0,$K[0],$X,17,$B,$C,$D,$E,$T,$A);
+ &BODY_16_19( 0,$K[0],$X,18,$A,$B,$C,$D,$E,$T);
+ &BODY_16_19( 1,$K[0],$X,19,$T,$A,$B,$C,$D,$E);
+
+ &BODY_20_39(-1,$K[1],$X,20,$E,$T,$A,$B,$C,$D);
+ &BODY_20_39( 0,$K[1],$X,21,$D,$E,$T,$A,$B,$C);
+ &BODY_20_39( 0,$K[1],$X,22,$C,$D,$E,$T,$A,$B);
+ &BODY_20_39( 0,$K[1],$X,23,$B,$C,$D,$E,$T,$A);
+ &BODY_20_39( 0,$K[1],$X,24,$A,$B,$C,$D,$E,$T);
+ &BODY_20_39( 0,$K[1],$X,25,$T,$A,$B,$C,$D,$E);
+ &BODY_20_39( 0,$K[1],$X,26,$E,$T,$A,$B,$C,$D);
+ &BODY_20_39( 0,$K[1],$X,27,$D,$E,$T,$A,$B,$C);
+ &BODY_20_39( 0,$K[1],$X,28,$C,$D,$E,$T,$A,$B);
+ &BODY_20_39( 0,$K[1],$X,29,$B,$C,$D,$E,$T,$A);
+ &BODY_20_39( 0,$K[1],$X,30,$A,$B,$C,$D,$E,$T);
+ &BODY_20_39( 0,$K[1],$X,31,$T,$A,$B,$C,$D,$E);
+ &BODY_20_39( 0,$K[1],$X,32,$E,$T,$A,$B,$C,$D);
+ &BODY_20_39( 0,$K[1],$X,33,$D,$E,$T,$A,$B,$C);
+ &BODY_20_39( 0,$K[1],$X,34,$C,$D,$E,$T,$A,$B);
+ &BODY_20_39( 0,$K[1],$X,35,$B,$C,$D,$E,$T,$A);
+ &BODY_20_39( 0,$K[1],$X,36,$A,$B,$C,$D,$E,$T);
+ &BODY_20_39( 0,$K[1],$X,37,$T,$A,$B,$C,$D,$E);
+ &BODY_20_39( 0,$K[1],$X,38,$E,$T,$A,$B,$C,$D);
+ &BODY_20_39( 1,$K[1],$X,39,$D,$E,$T,$A,$B,$C);
+
+ &BODY_40_59(-1,$K[2],$X,40,$C,$D,$E,$T,$A,$B);
+ &BODY_40_59( 0,$K[2],$X,41,$B,$C,$D,$E,$T,$A);
+ &BODY_40_59( 0,$K[2],$X,42,$A,$B,$C,$D,$E,$T);
+ &BODY_40_59( 0,$K[2],$X,43,$T,$A,$B,$C,$D,$E);
+ &BODY_40_59( 0,$K[2],$X,44,$E,$T,$A,$B,$C,$D);
+ &BODY_40_59( 0,$K[2],$X,45,$D,$E,$T,$A,$B,$C);
+ &BODY_40_59( 0,$K[2],$X,46,$C,$D,$E,$T,$A,$B);
+ &BODY_40_59( 0,$K[2],$X,47,$B,$C,$D,$E,$T,$A);
+ &BODY_40_59( 0,$K[2],$X,48,$A,$B,$C,$D,$E,$T);
+ &BODY_40_59( 0,$K[2],$X,49,$T,$A,$B,$C,$D,$E);
+ &BODY_40_59( 0,$K[2],$X,50,$E,$T,$A,$B,$C,$D);
+ &BODY_40_59( 0,$K[2],$X,51,$D,$E,$T,$A,$B,$C);
+ &BODY_40_59( 0,$K[2],$X,52,$C,$D,$E,$T,$A,$B);
+ &BODY_40_59( 0,$K[2],$X,53,$B,$C,$D,$E,$T,$A);
+ &BODY_40_59( 0,$K[2],$X,54,$A,$B,$C,$D,$E,$T);
+ &BODY_40_59( 0,$K[2],$X,55,$T,$A,$B,$C,$D,$E);
+ &BODY_40_59( 0,$K[2],$X,56,$E,$T,$A,$B,$C,$D);
+ &BODY_40_59( 0,$K[2],$X,57,$D,$E,$T,$A,$B,$C);
+ &BODY_40_59( 0,$K[2],$X,58,$C,$D,$E,$T,$A,$B);
+ &BODY_40_59( 1,$K[2],$X,59,$B,$C,$D,$E,$T,$A);
+
+ &BODY_60_79(-1,$K[3],$X,60,$A,$B,$C,$D,$E,$T);
+ &BODY_60_79( 0,$K[3],$X,61,$T,$A,$B,$C,$D,$E);
+ &BODY_60_79( 0,$K[3],$X,62,$E,$T,$A,$B,$C,$D);
+ &BODY_60_79( 0,$K[3],$X,63,$D,$E,$T,$A,$B,$C);
+ &BODY_60_79( 0,$K[3],$X,64,$C,$D,$E,$T,$A,$B);
+ &BODY_60_79( 0,$K[3],$X,65,$B,$C,$D,$E,$T,$A);
+ &BODY_60_79( 0,$K[3],$X,66,$A,$B,$C,$D,$E,$T);
+ &BODY_60_79( 0,$K[3],$X,67,$T,$A,$B,$C,$D,$E);
+ &BODY_60_79( 0,$K[3],$X,68,$E,$T,$A,$B,$C,$D);
+ &BODY_60_79( 0,$K[3],$X,69,$D,$E,$T,$A,$B,$C);
+ &BODY_60_79( 0,$K[3],$X,70,$C,$D,$E,$T,$A,$B);
+ &BODY_60_79( 0,$K[3],$X,71,$B,$C,$D,$E,$T,$A);
+ &BODY_60_79( 0,$K[3],$X,72,$A,$B,$C,$D,$E,$T);
+ &BODY_60_79( 0,$K[3],$X,73,$T,$A,$B,$C,$D,$E);
+ &BODY_60_79( 0,$K[3],$X,74,$E,$T,$A,$B,$C,$D);
+ &BODY_60_79( 0,$K[3],$X,75,$D,$E,$T,$A,$B,$C);
+ &BODY_60_79( 0,$K[3],$X,76,$C,$D,$E,$T,$A,$B);
+ &BODY_60_79( 0,$K[3],$X,77,$B,$C,$D,$E,$T,$A);
+ &BODY_60_79( 0,$K[3],$X,78,$A,$B,$C,$D,$E,$T);
+ &BODY_60_79( 2,$K[3],$X,79,$T,$A,$B,$C,$D,$E);
+
+ &comment("End processing");
+ &comment("");
+ # D is the tmp value
+
+ # E -> A
+ # T -> B
+ # A -> C
+ # B -> D
+ # C -> E
+ # D -> T
+
+ # The last 2 have been moved into the last loop
+ # &mov($tmp1,&wparam(0));
+
+ &mov($D, &DWP(12,$tmp1,"",0));
+ &add($D,$B);
+ &mov($B, &DWP( 4,$tmp1,"",0));
+ &add($B,$T);
+ &mov($T, $A);
+ &mov($A, &DWP( 0,$tmp1,"",0));
+ &mov(&DWP(12,$tmp1,"",0),$D);
+
+ &add($A,$E);
+ &mov($E, &DWP(16,$tmp1,"",0));
+ &add($E,$C);
+ &mov($C, &DWP( 8,$tmp1,"",0));
+ &add($C,$T);
+
+ &mov(&DWP( 0,$tmp1,"",0),$A);
+ &mov("esi",&swtmp(16));
+ &mov(&DWP( 8,$tmp1,"",0),$C); # This is for looping
+ &add("esi",64);
+ &mov("eax",&swtmp(17));
+ &mov(&DWP(16,$tmp1,"",0),$E);
+ &cmp("eax","esi");
+ &mov(&DWP( 4,$tmp1,"",0),$B); # This is for looping
+ &jl(&label("end"));
+ &mov("eax",&DWP(0,"esi","",0)); # Pulled down from
+ &jmp(&label("start"));
+
+ &set_label("end");
+ &stack_pop(18);
+ &pop("edi");
+ &pop("ebx");
+ &pop("ebp");
+ &pop("esi");
+ &ret();
+ &function_end_B($name);
+ }
+
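Taken together, the code this script emits is equivalent to the following single-block C sketch (the ROTL macro and all names are illustrative; the generated code instead keeps a..e in the six x86 registers, renaming them every round, and keeps W in the 16-word stack buffer):

    #define ROTL(x, n) (((x) << (n)) | ((x) >> (32 - (n))))

    static void sha1_compress(unsigned int h[5], const unsigned char *in)
    {
        unsigned int W[16], a, b, c, d, e, f, K, t, tmp;

        for (t = 0; t < 16; t++)                      /* X_expand         */
            W[t] = (unsigned int)in[4*t]   << 24 | (unsigned int)in[4*t+1] << 16
                 | (unsigned int)in[4*t+2] <<  8 |  in[4*t+3];

        a = h[0]; b = h[1]; c = h[2]; d = h[3]; e = h[4];

        for (t = 0; t < 80; t++) {
            if (t >= 16) {                            /* sub Na's indices */
                tmp = W[t & 15] ^ W[(t + 2) & 15]
                    ^ W[(t + 8) & 15] ^ W[(t + 13) & 15];
                W[t & 15] = ROTL(tmp, 1);
            }
            if      (t < 20) { f = d ^ (b & (c ^ d));       K = 0x5a827999; }
            else if (t < 40) { f = b ^ c ^ d;               K = 0x6ed9eba1; }
            else if (t < 60) { f = ((b | c) & d) | (b & c); K = 0x8f1bbcdc; }
            else             { f = b ^ c ^ d;               K = 0xca62c1d6; }

            tmp = ROTL(a, 5) + f + e + W[t & 15] + K;
            e = d; d = c; c = ROTL(b, 30); b = a; a = tmp;
        }

        h[0] += a; h[1] += b; h[2] += c; h[3] += d; h[4] += e;
    }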
diff --git a/crypto/sha/asm/sx86unix.cpp b/crypto/sha/asm/sx86unix.cpp
new file mode 100644
index 0000000000..8366664a39
--- /dev/null
+++ b/crypto/sha/asm/sx86unix.cpp
@@ -0,0 +1,1948 @@
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
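The build step itself is not shown in this diff; presumably it is along the lines of `cc -E -DELF sx86unix.cpp > sha1-586.s` (or with OUT, BSDI or SOL defined) followed by assembling the output.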
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define sha1_block_x86 _sha1_block_x86
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 16
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+ /* Don't even think of reading this code */
+ /* It was automatically generated by sha1-586.pl */
+ /* Which is a perl program used to generate the x86 assembler for */
+ /* any of elf, a.out, BSDI, Win32, or Solaris */
+ /* eric <eay@cryptsoft.com> */
+
+ .file "sha1-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align ALIGN
+.globl sha1_block_x86
+ TYPE(sha1_block_x86,@function)
+sha1_block_x86:
+ pushl %esi
+ pushl %ebp
+ movl 20(%esp), %eax
+ movl 16(%esp), %esi
+ addl %esi, %eax
+ movl 12(%esp), %ebp
+ pushl %ebx
+ subl $64, %eax
+ pushl %edi
+ movl 4(%ebp), %ebx
+ subl $72, %esp
+ movl 12(%ebp), %edx
+ movl 16(%ebp), %edi
+ movl 8(%ebp), %ecx
+ movl %eax, 68(%esp)
+ /* First we need to set up the X array */
+ movl (%esi), %eax
+.L000start:
+ /* First, load the words onto the stack in network byte order */
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, (%esp)
+ movl 4(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 4(%esp)
+ movl 8(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 8(%esp)
+ movl 12(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 12(%esp)
+ movl 16(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 16(%esp)
+ movl 20(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 20(%esp)
+ movl 24(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 24(%esp)
+ movl 28(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 28(%esp)
+ movl 32(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 32(%esp)
+ movl 36(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 36(%esp)
+ movl 40(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 40(%esp)
+ movl 44(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 44(%esp)
+ movl 48(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 48(%esp)
+ movl 52(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 52(%esp)
+ movl 56(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 56(%esp)
+ movl 60(%esi), %eax
+.byte 15
+.byte 200 /* bswapl %eax */
+ movl %eax, 60(%esp)
+ /* We now have the X array on the stack */
+ /* starting at sp-4 */
+ movl %esi, 64(%esp)
+
+ /* Start processing */
+ movl (%ebp), %eax
+ /* 00_15 0 */
+ movl %ecx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ebx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ movl (%esp), %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ebx, %edi
+ addl %ebp, %esi
+ xorl %ecx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 4(%esp), %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ xorl %ecx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ /* 00_15 2 */
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ebx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ecx, %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ movl 8(%esp), %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebx, %edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ movl %esi, %ecx
+ addl %ebp, %edx
+ xorl %eax, %ecx
+ movl %edx, %ebp
+ andl %edi, %ecx
+ roll $5, %ebp
+ addl %ebx, %ebp
+ movl 12(%esp), %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ xorl %eax, %ecx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ebp, %ecx
+ /* 00_15 4 */
+ movl %edi, %ebx
+ movl %ecx, %ebp
+ xorl %esi, %ebx
+ roll $5, %ebp
+ andl %edx, %ebx
+ addl %eax, %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ movl 16(%esp), %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %esi, %ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl %edx, %eax
+ addl %ebp, %ebx
+ xorl %edi, %eax
+ movl %ebx, %ebp
+ andl %ecx, %eax
+ roll $5, %ebp
+ addl %esi, %ebp
+ movl 20(%esp), %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ xorl %edi, %eax
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %ebp, %eax
+ /* 00_15 6 */
+ movl %ecx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ebx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ movl 24(%esp), %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ebx, %edi
+ addl %ebp, %esi
+ xorl %ecx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 28(%esp), %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ xorl %ecx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ /* 00_15 8 */
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ebx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ecx, %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ movl 32(%esp), %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebx, %edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ movl %esi, %ecx
+ addl %ebp, %edx
+ xorl %eax, %ecx
+ movl %edx, %ebp
+ andl %edi, %ecx
+ roll $5, %ebp
+ addl %ebx, %ebp
+ movl 36(%esp), %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ xorl %eax, %ecx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ebp, %ecx
+ /* 00_15 10 */
+ movl %edi, %ebx
+ movl %ecx, %ebp
+ xorl %esi, %ebx
+ roll $5, %ebp
+ andl %edx, %ebx
+ addl %eax, %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ movl 40(%esp), %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %esi, %ebx
+ leal 1518500249(%ebp,%eax,1),%ebp
+ movl %edx, %eax
+ addl %ebp, %ebx
+ xorl %edi, %eax
+ movl %ebx, %ebp
+ andl %ecx, %eax
+ roll $5, %ebp
+ addl %esi, %ebp
+ movl 44(%esp), %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ xorl %edi, %eax
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ leal 1518500249(%ebp,%esi,1),%ebp
+ addl %ebp, %eax
+ /* 00_15 12 */
+ movl %ecx, %esi
+ movl %eax, %ebp
+ xorl %edx, %esi
+ roll $5, %ebp
+ andl %ebx, %esi
+ addl %edi, %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ movl 48(%esp), %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %edx, %esi
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl %ebx, %edi
+ addl %ebp, %esi
+ xorl %ecx, %edi
+ movl %esi, %ebp
+ andl %eax, %edi
+ roll $5, %ebp
+ addl %edx, %ebp
+ movl 52(%esp), %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ xorl %ecx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ leal 1518500249(%ebp,%edx,1),%ebp
+ addl %ebp, %edi
+ /* 00_15 14 */
+ movl %eax, %edx
+ movl %edi, %ebp
+ xorl %ebx, %edx
+ roll $5, %ebp
+ andl %esi, %edx
+ addl %ecx, %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ movl 56(%esp), %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebx, %edx
+ leal 1518500249(%ebp,%ecx,1),%ebp
+ movl %esi, %ecx
+ addl %ebp, %edx
+ xorl %eax, %ecx
+ movl %edx, %ebp
+ andl %edi, %ecx
+ roll $5, %ebp
+ addl %ebx, %ebp
+ movl 60(%esp), %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ xorl %eax, %ecx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ leal 1518500249(%ebp,%ebx,1),%ebp
+ addl %ebp, %ecx
+ /* 16_19 16 */
+ nop
+ movl (%esp), %ebp
+ movl 8(%esp), %ebx
+ xorl %ebp, %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edi, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %esi, %ebp
+ movl %ebx, (%esp)
+ andl %edx, %ebp
+ leal 1518500249(%ebx,%eax,1),%ebx
+ xorl %esi, %ebp
+ movl %ecx, %eax
+ addl %ebp, %ebx
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ movl 4(%esp), %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 56(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %edx, %ebp
+ xorl %edi, %ebp
+ movl %eax, 4(%esp)
+ andl %ecx, %ebp
+ leal 1518500249(%eax,%esi,1),%eax
+ xorl %edi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 16_19 18 */
+ movl 8(%esp), %ebp
+ movl 16(%esp), %esi
+ xorl %ebp, %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl 60(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ecx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %edx, %ebp
+ movl %esi, 8(%esp)
+ andl %ebx, %ebp
+ leal 1518500249(%esi,%edi,1),%esi
+ xorl %edx, %ebp
+ movl %eax, %edi
+ addl %ebp, %esi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ movl 12(%esp), %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl (%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %ebx, %ebp
+ xorl %ecx, %ebp
+ movl %edi, 12(%esp)
+ andl %eax, %ebp
+ leal 1518500249(%edi,%edx,1),%edi
+ xorl %ecx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 20_39 20 */
+ movl 16(%esp), %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 4(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 16(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 21 */
+ movl 20(%esp), %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 20(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 22 */
+ movl 24(%esp), %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 24(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 23 */
+ movl 28(%esp), %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 16(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 28(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 24 */
+ movl 32(%esp), %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 20(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 32(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 25 */
+ movl 36(%esp), %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 36(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 26 */
+ movl 40(%esp), %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 28(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 40(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 27 */
+ movl 44(%esp), %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 44(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 28 */
+ movl 48(%esp), %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 48(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 29 */
+ movl 52(%esp), %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 40(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 52(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 30 */
+ movl 56(%esp), %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 44(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 56(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 31 */
+ movl 60(%esp), %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 48(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 60(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 32 */
+ movl (%esp), %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 32(%esp), %ebp
+ xorl %ebp, %edx
+ movl 52(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, (%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 33 */
+ movl 4(%esp), %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 4(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 34 */
+ movl 8(%esp), %ebx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 40(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 8(%esp)
+ xorl %esi, %ebp
+ leal 1859775393(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 35 */
+ movl 12(%esp), %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 44(%esp), %ebp
+ xorl %ebp, %eax
+ movl (%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 12(%esp)
+ xorl %edi, %ebp
+ leal 1859775393(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 36 */
+ movl 16(%esp), %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 48(%esp), %ebp
+ xorl %ebp, %esi
+ movl 4(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 16(%esp)
+ xorl %edx, %ebp
+ leal 1859775393(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 37 */
+ movl 20(%esp), %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 52(%esp), %ebp
+ xorl %ebp, %edi
+ movl 8(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 20(%esp)
+ xorl %ecx, %ebp
+ leal 1859775393(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 38 */
+ movl 24(%esp), %edx
+ movl 32(%esp), %ebp
+ xorl %ebp, %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 12(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 24(%esp)
+ xorl %ebx, %ebp
+ leal 1859775393(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 39 */
+ movl 28(%esp), %ecx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 28(%esp)
+ xorl %eax, %ebp
+ leal 1859775393(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
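+ /*
+  * Rounds 40..59 (a sketch, not generated code): the round function
+  * is the majority MAJ(b,c,d) = (b & c) | (b & d) | (c & d), computed
+  * below as ((b | c) & d) | (b & c), with K = 0x8F1BBCDC (decimal
+  * 2400959708).  These rounds are unrolled two at a time, so each
+  * marker from here to round 59 heads a block of two rounds.
+  */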
+ /* 40_59 40 & 41 */
+ movl 32(%esp), %ebx
+ movl 40(%esp), %ebp
+ xorl %ebp, %ebx
+ movl (%esp), %ebp
+ xorl %ebp, %ebx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 32(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 36(%esp), %eax
+ addl %ebp, %ebx
+ movl 44(%esp), %ebp
+ xorl %ebp, %eax
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 24(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 36(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 42 & 43 */
+ movl 40(%esp), %esi
+ movl 48(%esp), %ebp
+ xorl %ebp, %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 28(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, 40(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 44(%esp), %edi
+ addl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %edi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 32(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 44(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 44 & 45 */
+ movl 48(%esp), %edx
+ movl 56(%esp), %ebp
+ xorl %ebp, %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 36(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 48(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 52(%esp), %ecx
+ addl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 40(%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 52(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 46 & 47 */
+ movl 56(%esp), %ebx
+ movl (%esp), %ebp
+ xorl %ebp, %ebx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 56(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 60(%esp), %eax
+ addl %ebp, %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %eax
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 48(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 60(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 48 & 49 */
+ movl (%esp), %esi
+ movl 8(%esp), %ebp
+ xorl %ebp, %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 52(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, (%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 4(%esp), %edi
+ addl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %edi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 56(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 4(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 50 & 51 */
+ movl 8(%esp), %edx
+ movl 16(%esp), %ebp
+ xorl %ebp, %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl 60(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 8(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 12(%esp), %ecx
+ addl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ecx
+ movl (%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 12(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 52 & 53 */
+ movl 16(%esp), %ebx
+ movl 24(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 16(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 20(%esp), %eax
+ addl %ebp, %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %eax
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 8(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 20(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
+ /* 40_59 54 & 55 */
+ movl 24(%esp), %esi
+ movl 32(%esp), %ebp
+ xorl %ebp, %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 12(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ orl %ecx, %ebp
+ movl %esi, 24(%esp)
+ andl %edx, %ebp
+ leal 2400959708(%esi,%edi,1),%esi
+ movl %ebx, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ andl %ecx, %edi
+ orl %edi, %ebp
+ movl %eax, %edi
+ roll $5, %edi
+ addl %edi, %ebp
+ movl 28(%esp), %edi
+ addl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 16(%esp), %ebp
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ xorl %ebp, %edi
+.byte 209
+.byte 199 /* roll $1 %edi */
+ movl %eax, %ebp
+ movl %edi, 28(%esp)
+ orl %ebx, %ebp
+ leal 2400959708(%edi,%edx,1),%edi
+ movl %eax, %edx
+ andl %ecx, %ebp
+ andl %ebx, %edx
+ orl %edx, %ebp
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edi
+ /* 40_59 56 & 57 */
+ movl 32(%esp), %edx
+ movl 40(%esp), %ebp
+ xorl %ebp, %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 20(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ orl %eax, %ebp
+ movl %edx, 32(%esp)
+ andl %ebx, %ebp
+ leal 2400959708(%edx,%ecx,1),%edx
+ movl %esi, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ andl %eax, %ecx
+ orl %ecx, %ebp
+ movl %edi, %ecx
+ roll $5, %ecx
+ addl %ecx, %ebp
+ movl 36(%esp), %ecx
+ addl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 24(%esp), %ebp
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ xorl %ebp, %ecx
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ movl %edi, %ebp
+ movl %ecx, 36(%esp)
+ orl %esi, %ebp
+ leal 2400959708(%ecx,%ebx,1),%ecx
+ movl %edi, %ebx
+ andl %eax, %ebp
+ andl %esi, %ebx
+ orl %ebx, %ebp
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ebp
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ecx
+ /* 40_59 58 & 59 */
+ movl 40(%esp), %ebx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ orl %edi, %ebp
+ movl %ebx, 40(%esp)
+ andl %esi, %ebp
+ leal 2400959708(%ebx,%eax,1),%ebx
+ movl %edx, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ andl %edi, %eax
+ orl %eax, %ebp
+ movl %ecx, %eax
+ roll $5, %eax
+ addl %eax, %ebp
+ movl 44(%esp), %eax
+ addl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 32(%esp), %ebp
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ xorl %ebp, %eax
+.byte 209
+.byte 192 /* roll $1 %eax */
+ movl %ecx, %ebp
+ movl %eax, 44(%esp)
+ orl %edx, %ebp
+ leal 2400959708(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ andl %edi, %ebp
+ andl %edx, %esi
+ orl %esi, %ebp
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %ebp
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %eax
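+ /*
+  * Rounds 60..79 (a sketch, not generated code): back to the parity
+  * function of rounds 20..39 (which is why the generator reuses its
+  * "20_39" label), but with K = 0xCA62C1D6 (decimal 3395469782).
+  */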
+ /* 20_39 60 */
+ movl 48(%esp), %esi
+ movl 56(%esp), %ebp
+ xorl %ebp, %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 36(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 48(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 61 */
+ movl 52(%esp), %edi
+ movl 60(%esp), %ebp
+ xorl %ebp, %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 40(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 52(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 62 */
+ movl 56(%esp), %edx
+ movl (%esp), %ebp
+ xorl %ebp, %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 44(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 56(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 63 */
+ movl 60(%esp), %ecx
+ movl 4(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 48(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 60(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 64 */
+ movl (%esp), %ebx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, (%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 65 */
+ movl 4(%esp), %eax
+ movl 12(%esp), %ebp
+ xorl %ebp, %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 56(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 4(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 66 */
+ movl 8(%esp), %esi
+ movl 16(%esp), %ebp
+ xorl %ebp, %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl 60(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 8(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 67 */
+ movl 12(%esp), %edi
+ movl 20(%esp), %ebp
+ xorl %ebp, %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl (%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 12(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 68 */
+ movl 16(%esp), %edx
+ movl 24(%esp), %ebp
+ xorl %ebp, %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 4(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 16(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 69 */
+ movl 20(%esp), %ecx
+ movl 28(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 8(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 20(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 70 */
+ movl 24(%esp), %ebx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 24(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 71 */
+ movl 28(%esp), %eax
+ movl 36(%esp), %ebp
+ xorl %ebp, %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 16(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 28(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 72 */
+ movl 32(%esp), %esi
+ movl 40(%esp), %ebp
+ xorl %ebp, %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 20(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 32(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 73 */
+ movl 36(%esp), %edi
+ movl 44(%esp), %ebp
+ xorl %ebp, %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 24(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 36(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %ebp, %edx
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+ /* 20_39 74 */
+ movl 40(%esp), %edx
+ movl 48(%esp), %ebp
+ xorl %ebp, %edx
+ movl 8(%esp), %ebp
+ xorl %ebp, %edx
+ movl 28(%esp), %ebp
+ xorl %ebp, %edx
+ movl %esi, %ebp
+.byte 209
+.byte 194 /* roll $1 %edx */
+ xorl %eax, %ebp
+ movl %edx, 40(%esp)
+ xorl %ebx, %ebp
+ leal 3395469782(%edx,%ecx,1),%edx
+ movl %edi, %ecx
+ roll $5, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ebp, %ecx
+.byte 209
+.byte 206 /* rorl $1 %esi */
+ addl %ecx, %edx
+ /* 20_39 75 */
+ movl 44(%esp), %ecx
+ movl 52(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 12(%esp), %ebp
+ xorl %ebp, %ecx
+ movl 32(%esp), %ebp
+ xorl %ebp, %ecx
+ movl %edi, %ebp
+.byte 209
+.byte 193 /* roll $1 %ecx */
+ xorl %esi, %ebp
+ movl %ecx, 44(%esp)
+ xorl %eax, %ebp
+ leal 3395469782(%ecx,%ebx,1),%ecx
+ movl %edx, %ebx
+ roll $5, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebp, %ebx
+.byte 209
+.byte 207 /* rorl $1 %edi */
+ addl %ebx, %ecx
+ /* 20_39 76 */
+ movl 48(%esp), %ebx
+ movl 56(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 16(%esp), %ebp
+ xorl %ebp, %ebx
+ movl 36(%esp), %ebp
+ xorl %ebp, %ebx
+ movl %edx, %ebp
+.byte 209
+.byte 195 /* roll $1 %ebx */
+ xorl %edi, %ebp
+ movl %ebx, 48(%esp)
+ xorl %esi, %ebp
+ leal 3395469782(%ebx,%eax,1),%ebx
+ movl %ecx, %eax
+ roll $5, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %ebp, %eax
+.byte 209
+.byte 202 /* rorl $1 %edx */
+ addl %eax, %ebx
+ /* 20_39 77 */
+ movl 52(%esp), %eax
+ movl 60(%esp), %ebp
+ xorl %ebp, %eax
+ movl 20(%esp), %ebp
+ xorl %ebp, %eax
+ movl 40(%esp), %ebp
+ xorl %ebp, %eax
+ movl %ecx, %ebp
+.byte 209
+.byte 192 /* roll $1 %eax */
+ xorl %edx, %ebp
+ movl %eax, 52(%esp)
+ xorl %edi, %ebp
+ leal 3395469782(%eax,%esi,1),%eax
+ movl %ebx, %esi
+ roll $5, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %ebp, %esi
+.byte 209
+.byte 201 /* rorl $1 %ecx */
+ addl %esi, %eax
+ /* 20_39 78 */
+ movl 56(%esp), %esi
+ movl (%esp), %ebp
+ xorl %ebp, %esi
+ movl 24(%esp), %ebp
+ xorl %ebp, %esi
+ movl 44(%esp), %ebp
+ xorl %ebp, %esi
+ movl %ebx, %ebp
+.byte 209
+.byte 198 /* roll $1 %esi */
+ xorl %ecx, %ebp
+ movl %esi, 56(%esp)
+ xorl %edx, %ebp
+ leal 3395469782(%esi,%edi,1),%esi
+ movl %eax, %edi
+ roll $5, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %ebp, %edi
+.byte 209
+.byte 203 /* rorl $1 %ebx */
+ addl %edi, %esi
+ /* 20_39 79 */
+ movl 60(%esp), %edi
+ movl 4(%esp), %ebp
+ xorl %ebp, %edi
+ movl 28(%esp), %ebp
+ xorl %ebp, %edi
+ movl 48(%esp), %ebp
+ xorl %ebp, %edi
+ movl %eax, %ebp
+.byte 209
+.byte 199 /* roll $1 %edi */
+ xorl %ebx, %ebp
+ movl %edi, 60(%esp)
+ xorl %ecx, %ebp
+ leal 3395469782(%edi,%edx,1),%edi
+ movl %esi, %edx
+ roll $5, %edx
+ addl %ebp, %edx
+ movl 92(%esp), %ebp
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ addl %edx, %edi
+.byte 209
+.byte 200 /* rorl $1 %eax */
+ /* End processing */
+
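+ /*
+  * Block epilogue (a sketch, not generated code): the five working
+  * values are folded into the chaining words of the context that
+  * 92(%esp) reloaded above, roughly
+  *     h0 += a; h1 += b; h2 += c; h3 += d; h4 += e;
+  * then the input pointer saved at 64(%esp) advances by one 64-byte
+  * block and is compared against the end pointer precomputed at
+  * 68(%esp): when the input is exhausted control branches to
+  * .L001end to unwind the stack, otherwise it loops to .L000start.
+  */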
+ movl 12(%ebp), %edx
+ addl %ebx, %edx
+ movl 4(%ebp), %ebx
+ addl %esi, %ebx
+ movl %eax, %esi
+ movl (%ebp), %eax
+ movl %edx, 12(%ebp)
+ addl %edi, %eax
+ movl 16(%ebp), %edi
+ addl %ecx, %edi
+ movl 8(%ebp), %ecx
+ addl %esi, %ecx
+ movl %eax, (%ebp)
+ movl 64(%esp), %esi
+ movl %ecx, 8(%ebp)
+ addl $64, %esi
+ movl 68(%esp), %eax
+ movl %edi, 16(%ebp)
+ cmpl %esi, %eax
+ movl %ebx, 4(%ebp)
+ jl .L001end
+ movl (%esi), %eax
+ jmp .L000start
+.L001end:
+ addl $72, %esp
+ popl %edi
+ popl %ebx
+ popl %ebp
+ popl %esi
+ ret
+.sha1_block_x86_end:
+ SIZE(sha1_block_x86,.sha1_block_x86_end-sha1_block_x86)
+.ident "desasm.pl"