author    Ralf S. Engelschall <rse@openssl.org>    1998-12-21 10:56:39 +0000
committer Ralf S. Engelschall <rse@openssl.org>    1998-12-21 10:56:39 +0000
commit    58964a492275ca9a59a0cd9c8155cb2491b4b909 (patch)
tree      c7b16876a5789463bbbb468ef4829c8129b3d718 /crypto/ripemd
parent    d02b48c63a58ea4367a0e905979f140b7d090f86 (diff)
Import of old SSLeay release: SSLeay 0.9.0b
Diffstat (limited to 'crypto/ripemd')
-rw-r--r--  crypto/ripemd/Makefile.ssl       104
-rw-r--r--  crypto/ripemd/Makefile.uni       109
-rw-r--r--  crypto/ripemd/README              15
-rw-r--r--  crypto/ripemd/asm/rips.cpp        78
-rw-r--r--  crypto/ripemd/asm/rm-win32.asm  1972
-rw-r--r--  crypto/ripemd/asm/rm86unix.cpp  2016
-rw-r--r--  crypto/ripemd/asm/rmd-586.pl     582
-rw-r--r--  crypto/ripemd/ripemd.h            99
-rw-r--r--  crypto/ripemd/rmd160.c           135
-rw-r--r--  crypto/ripemd/rmd_dgst.c         535
-rw-r--r--  crypto/ripemd/rmd_locl.h         226
-rw-r--r--  crypto/ripemd/rmd_one.c           77
-rw-r--r--  crypto/ripemd/rmdconst.h         399
-rw-r--r--  crypto/ripemd/rmdtest.c          133
14 files changed, 6480 insertions, 0 deletions
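
Of the files above, ripemd.h is the public face: it declares the RIPEMD160_CTX
Init/Update/Final interface (backed by rmd_dgst.c) plus the one-shot RIPEMD160()
wrapper from rmd_one.c. A minimal usage sketch, assuming the SSLeay-era
prototypes (unsigned long lengths, no const qualifiers); the "abc" digest is the
published RIPEMD-160 test vector:

#include <stdio.h>
#include <string.h>
#include "ripemd.h"

int main(void)
	{
	RIPEMD160_CTX ctx;
	unsigned char md[RIPEMD160_DIGEST_LENGTH];	/* 20 bytes */
	unsigned char *msg=(unsigned char *)"abc";
	int i;

	RIPEMD160_Init(&ctx);
	RIPEMD160_Update(&ctx,msg,(unsigned long)strlen((char *)msg));
	RIPEMD160_Final(md,&ctx);
	/* rmd_one.c folds the three calls above into one: RIPEMD160(msg,3,md); */

	for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
		printf("%02x",md[i]);
	printf("\n");	/* abc -> 8eb208f7e05d987a9b044a8e98c6b087f15a0bfc */
	return(0);
	}
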
diff --git a/crypto/ripemd/Makefile.ssl b/crypto/ripemd/Makefile.ssl
new file mode 100644
index 0000000000..67d47ceb2c
--- /dev/null
+++ b/crypto/ripemd/Makefile.ssl
@@ -0,0 +1,104 @@
+#
+# SSLeay/crypto/ripemd/Makefile
+#
+
+DIR= ripemd
+TOP= ../..
+CC= cc
+CPP= $(CC) -E
+INCLUDES=
+CFLAG=-g
+INSTALLTOP=/usr/local/ssl
+MAKE= make -f Makefile.ssl
+MAKEDEPEND= makedepend -f Makefile.ssl
+MAKEFILE= Makefile.ssl
+AR= ar r
+
+RMD160_ASM_OBJ=
+
+CFLAGS= $(INCLUDES) $(CFLAG)
+
+GENERAL=Makefile
+TEST=rmdtest.c
+APPS=rmd160.c
+
+LIB=$(TOP)/libcrypto.a
+LIBSRC=rmd_dgst.c rmd_one.c
+LIBOBJ=rmd_dgst.o rmd_one.o $(RMD160_ASM_OBJ)
+
+SRC= $(LIBSRC)
+
+EXHEADER= ripemd.h
+HEADER= rmd_locl.h rmdconst.h $(EXHEADER)
+
+ALL= $(GENERAL) $(SRC) $(HEADER)
+
+top:
+ (cd ../..; $(MAKE) DIRS=crypto SDIRS=$(DIR) sub_all)
+
+all: lib
+
+lib: $(LIBOBJ)
+ $(AR) $(LIB) $(LIBOBJ)
+ sh $(TOP)/util/ranlib.sh $(LIB)
+ @touch lib
+
+# elf
+asm/rm86-elf.o: asm/rm86unix.cpp
+ $(CPP) -DELF asm/rm86unix.cpp | as -o asm/rm86-elf.o
+
+# solaris
+asm/rm86-sol.o: asm/rm86unix.cpp
+ $(CC) -E -DSOL asm/rm86unix.cpp | sed 's/^#.*//' > asm/rm86-sol.s
+ as -o asm/rm86-sol.o asm/rm86-sol.s
+ rm -f asm/rm86-sol.s
+
+# a.out
+asm/rm86-out.o: asm/rm86unix.cpp
+ $(CPP) -DOUT asm/rm86unix.cpp | as -o asm/rm86-out.o
+
+# bsdi
+asm/rm86bsdi.o: asm/rm86unix.cpp
+ $(CPP) -DBSDI asm/rm86unix.cpp | as -o asm/rm86bsdi.o
+
+asm/rm86unix.cpp:
+ (cd asm; perl rmd-586.pl cpp >rm86unix.cpp)
+
+files:
+ perl $(TOP)/util/files.pl Makefile.ssl >> $(TOP)/MINFO
+
+links:
+ /bin/rm -f Makefile
+ $(TOP)/util/point.sh Makefile.ssl Makefile ;
+ $(TOP)/util/mklink.sh ../../include $(EXHEADER)
+ $(TOP)/util/mklink.sh ../../test $(TEST)
+ $(TOP)/util/mklink.sh ../../apps $(APPS)
+
+install:
+ @for i in $(EXHEADER) ; \
+ do \
+ (cp $$i $(INSTALLTOP)/include/$$i; \
+ chmod 644 $(INSTALLTOP)/include/$$i ); \
+ done;
+
+tags:
+ ctags $(SRC)
+
+tests:
+
+lint:
+ lint -DLINT $(INCLUDES) $(SRC)>fluff
+
+depend:
+ $(MAKEDEPEND) $(INCLUDES) $(PROGS) $(LIBSRC)
+
+dclean:
+ perl -pe 'if (/^# DO NOT DELETE THIS LINE/) {print; exit(0);}' $(MAKEFILE) >Makefile.new
+ mv -f Makefile.new $(MAKEFILE)
+
+clean:
+ /bin/rm -f *.o asm/*.o *.obj lib tags core .pure .nfs* *.old *.bak fluff
+
+errors:
+
+# DO NOT DELETE THIS LINE -- make depend depends on it.
diff --git a/crypto/ripemd/Makefile.uni b/crypto/ripemd/Makefile.uni
new file mode 100644
index 0000000000..54685712db
--- /dev/null
+++ b/crypto/ripemd/Makefile.uni
@@ -0,0 +1,109 @@
+# Targets
+# make - twiddle the options yourself :-)
+# make cc - standard cc options
+# make gcc - standard gcc options
+# make x86-elf - linux-elf etc
+# make x86-out - linux-a.out, FreeBSD etc
+# make x86-solaris
+# make x86-bsdi
+
+DIR= md5
+TOP= .
+CC= gcc
+CFLAG= -O3 -fomit-frame-pointer
+
+CPP= $(CC) -E
+INCLUDES=
+INSTALLTOP=/usr/local/lib
+MAKE= make
+MAKEDEPEND= makedepend
+MAKEFILE= Makefile.uni
+AR= ar r
+
+MD5_ASM_OBJ=
+
+CFLAGS= $(INCLUDES) $(CFLAG)
+
+GENERAL=Makefile
+TEST=md5test
+APPS=md5
+
+LIB=libmd5.a
+LIBSRC=md5_dgst.c md5_one.c
+LIBOBJ=md5_dgst.o md5_one.o $(MD5_ASM_OBJ)
+
+SRC= $(LIBSRC)
+
+EXHEADER= md5.h
+HEADER= md5_locl.h $(EXHEADER)
+
+ALL= $(GENERAL) $(SRC) $(HEADER)
+
+all: $(LIB) $(TEST) $(APPS)
+
+$(LIB): $(LIBOBJ)
+ $(AR) $(LIB) $(LIBOBJ)
+ sh $(TOP)/ranlib.sh $(LIB)
+
+# elf
+asm/mx86-elf.o: asm/mx86unix.cpp
+ $(CPP) -DELF asm/mx86unix.cpp | as -o asm/mx86-elf.o
+
+# solaris
+asm/mx86-sol.o: asm/mx86unix.cpp
+ $(CC) -E -DSOL asm/mx86unix.cpp | sed 's/^#.*//' > asm/mx86-sol.s
+ as -o asm/mx86-sol.o asm/mx86-sol.s
+ rm -f asm/mx86-sol.s
+
+# a.out
+asm/mx86-out.o: asm/mx86unix.cpp
+ $(CPP) -DOUT asm/mx86unix.cpp | as -o asm/mx86-out.o
+
+# bsdi
+asm/mx86bsdi.o: asm/mx86unix.cpp
+ $(CPP) -DBSDI asm/mx86unix.cpp | as -o asm/mx86bsdi.o
+
+asm/mx86unix.cpp:
+ (cd asm; perl md5-586.pl cpp >mx86unix.cpp)
+
+test: $(TEST)
+ ./$(TEST)
+
+$(TEST): $(TEST).c $(LIB)
+ $(CC) -o $(TEST) $(CFLAGS) $(TEST).c $(LIB)
+
+$(APPS): $(APPS).c $(LIB)
+ $(CC) -o $(APPS) $(CFLAGS) $(APPS).c $(LIB)
+
+lint:
+ lint -DLINT $(INCLUDES) $(SRC)>fluff
+
+depend:
+ $(MAKEDEPEND) $(INCLUDES) $(PROGS) $(LIBSRC)
+
+dclean:
+ perl -pe 'if (/^# DO NOT DELETE THIS LINE/) {print; exit(0);}' $(MAKEFILE) >Makefile.new
+ mv -f Makefile.new $(MAKEFILE)
+
+clean:
+ /bin/rm -f $(LIB) $(TEST) $(APPS) *.o asm/*.o *.obj lib tags core .pure .nfs* *.old *.bak fluff
+
+cc:
+ $(MAKE) MD5_ASM_OBJ="" CC="cc" CFLAG="-O" all
+
+gcc:
+ $(MAKE) MD5_ASM_OBJ="" CC="gcc" CFLAGS="-O3 -fomit-frame-pointer" all
+
+x86-elf:
+ $(MAKE) MD5_ASM_OBJ="asm/mx86-elf.o" CFLAG="-DELF -DMD5_ASM -DL_ENDIAN $(CFLAGS)" all
+
+x86-out:
+ $(MAKE) MD5_ASM_OBJ="asm/mx86-out.o" CFLAG="-DOUT -DMD5_ASM -DL_ENDIAN $(CFLAGS)" all
+
+x86-solaris:
+ $(MAKE) MD5_ASM_OBJ="asm/mx86-sol.o" CFLAG="-DSOL -DMD5_ASM -DL_ENDIAN $(CFLAGS)" all
+
+x86-bsdi:
+	$(MAKE) MD5_ASM_OBJ="asm/mx86bsdi.o" CFLAG="-DBSDI -DMD5_ASM -DL_ENDIAN $(CFLAGS)" all
+
+# DO NOT DELETE THIS LINE -- make depend depends on it.
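
The x86-* convenience targets all work the same way: build the matching
assembler object and define an *_ASM macro so the C code calls the hand-written
block routine instead of the portable one. A hypothetical sketch of that
dispatch, using the ripemd160_block_x86 name this commit really exports; the
exact #ifdef spelling is illustrative, not copied from the locl header:

#ifdef RMD160_ASM
/* hand-written routine generated by rmd-586.pl (rm86unix.cpp / rm-win32.asm) */
void ripemd160_block_x86(RIPEMD160_CTX *ctx, unsigned char *buffer, int num);
#define ripemd160_block ripemd160_block_x86
#else
/* portable C fallback compiled from rmd_dgst.c */
static void ripemd160_block(RIPEMD160_CTX *ctx, unsigned char *buffer, int num);
#endif
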
diff --git a/crypto/ripemd/README b/crypto/ripemd/README
new file mode 100644
index 0000000000..7097707264
--- /dev/null
+++ b/crypto/ripemd/README
@@ -0,0 +1,15 @@
+RIPEMD-160
+http://www.esat.kuleuven.ac.be/~bosselae/ripemd160.html
+
+This is my implementation of RIPEMD-160. The Pentium assembler is a little
+off the pace since I only get 1050 cycles, while the best is 1013.
+I have a few ideas for how to get another 20 or so cycles, but I will
+not bother with them right now. I believe the trick will be to defer my
+'copy X array onto stack' until inside the RIP1() functions the first
+time round. To do this I need another register and will only have one
+temporary one. A bit tricky.... I can also clean up the saving of the 5
+words after the first half of the calculation. I should read the original
+value, add, then write. Currently I just save the new value and read the
+original; I then read both at the end. Bad.
+
+eric (20-Jan-1998)
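
For readers who want to follow the assembler below, the step the README's
RIP1() macros perform is the published RIPEMD-160 round-1 operation, where the
boolean function is a plain XOR and the round constant is zero. The macro name
and argument order here are assumptions, but the arithmetic is exactly what the
generated xor/xor/add/rol/add sequences compute:

#define ROTATE(x,n)	(((x)<<(n))|((x)>>(32-(n))))

/* a = ROL32(a + (b^c^d) + X[i], s) + e;  then c = ROL32(c,10) */
#define RIP1(a,b,c,d,e,x,s) do { \
	(a)+=((b)^(c)^(d))+(x); \
	(a)=ROTATE((a),(s))+(e); \
	(c)=ROTATE((c),10); \
	} while (0)
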
diff --git a/crypto/ripemd/asm/rips.cpp b/crypto/ripemd/asm/rips.cpp
new file mode 100644
index 0000000000..78a933c448
--- /dev/null
+++ b/crypto/ripemd/asm/rips.cpp
@@ -0,0 +1,78 @@
+//
+// gettsc.inl
+//
+// gives access to the Pentium's (secret) cycle counter
+//
+// This software was written by Leonard Janke (janke@unixg.ubc.ca)
+// in 1996-7 and is entered, by him, into the public domain.
+
+#if defined(__WATCOMC__)
+void GetTSC(unsigned long&);
+#pragma aux GetTSC = 0x0f 0x31 "mov [edi], eax" parm [edi] modify [edx eax];
+#elif defined(__GNUC__)
+inline
+void GetTSC(unsigned long& tsc)
+{
+	asm volatile(".byte 15, 49\n\t"	/* the raw RDTSC opcode (0x0f 0x31) */
+		: "=a" (tsc)		/* "=a": the low 32 bits land in eax */
+		:
+		: "%edx");		/* rdtsc clobbers edx; eax is the output */
+}
+#elif defined(_MSC_VER)
+inline
+void GetTSC(unsigned long& tsc)
+{
+ unsigned long a;
+ __asm _emit 0fh
+ __asm _emit 31h
+ __asm mov a, eax;
+ tsc=a;
+}
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include "ripemd.h"
+
+extern "C" {
+void ripemd160_block_x86(RIPEMD160_CTX *ctx, unsigned char *buffer,int num);
+}
+
+int main(int argc,char *argv[])
+ {
+ unsigned char buffer[64*256];
+ RIPEMD160_CTX ctx;
+ unsigned long s1,s2,e1,e2;
+ unsigned char k[16];
+ unsigned long data[2];
+ unsigned char iv[8];
+ int i,num=0,numm;
+ int j=0;
+
+ if (argc >= 2)
+ num=atoi(argv[1]);
+
+ if (num == 0) num=16;
+ if (num > 250) num=16;
+ numm=num+2;
+ num*=64;
+ numm*=64;
+
+ for (j=0; j<6; j++)
+ {
+ for (i=0; i<10; i++) /**/
+ {
+ ripemd160_block_x86(&ctx,buffer,numm);
+ GetTSC(s1);
+ ripemd160_block_x86(&ctx,buffer,numm);
+ GetTSC(e1);
+ GetTSC(s2);
+ ripemd160_block_x86(&ctx,buffer,num);
+ GetTSC(e2);
+ ripemd160_block_x86(&ctx,buffer,num);
+ }
+ printf("ripemd160 (%d bytes) %d %d (%.2f)\n",num,
+ e1-s1,e2-s2,(double)((e1-s1)-(e2-s2))/2);
+ }
+ }
+
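
A note on the timing arithmetic above: each pass times num+2 blocks (numm) and
num blocks back to back, so subtracting the two TSC deltas cancels the fixed
call/RDTSC overhead and leaves exactly two blocks' worth of cycles, which is why
the printf divides by 2. As straight C, under the assumption that the overhead
is identical in both measurements (big/small are just illustrative names):

unsigned long big = e1-s1;	/* num+2 blocks, plus fixed overhead */
unsigned long small = e2-s2;	/* num blocks, plus the same overhead */
double cycles_per_block = (double)(big-small)/2.0;
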
diff --git a/crypto/ripemd/asm/rm-win32.asm b/crypto/ripemd/asm/rm-win32.asm
new file mode 100644
index 0000000000..bd38791c13
--- /dev/null
+++ b/crypto/ripemd/asm/rm-win32.asm
@@ -0,0 +1,1972 @@
+ ; Don't even think of reading this code
+ ; It was automatically generated by rmd-586.pl
+	; Which is a perl program used to generate the x86 assembler for
+	; any of elf, a.out, BSDI, Win32, or Solaris
+ ; eric <eay@cryptsoft.com>
+ ;
+ TITLE rmd-586.asm
+ .386
+.model FLAT
+_TEXT SEGMENT
+PUBLIC _ripemd160_block_x86
+
+_ripemd160_block_x86 PROC NEAR
+ push esi
+ mov ecx, DWORD PTR 16[esp]
+ push edi
+ mov esi, DWORD PTR 16[esp]
+ push ebp
+ add ecx, esi
+ push ebx
+ sub ecx, 64
+ sub esp, 88
+ mov DWORD PTR [esp],ecx
+ mov edi, DWORD PTR 108[esp]
+L000start:
+ ;
+ mov eax, DWORD PTR [esi]
+ mov ebx, DWORD PTR 4[esi]
+ mov DWORD PTR 4[esp],eax
+ mov DWORD PTR 8[esp],ebx
+ mov eax, DWORD PTR 8[esi]
+ mov ebx, DWORD PTR 12[esi]
+ mov DWORD PTR 12[esp],eax
+ mov DWORD PTR 16[esp],ebx
+ mov eax, DWORD PTR 16[esi]
+ mov ebx, DWORD PTR 20[esi]
+ mov DWORD PTR 20[esp],eax
+ mov DWORD PTR 24[esp],ebx
+ mov eax, DWORD PTR 24[esi]
+ mov ebx, DWORD PTR 28[esi]
+ mov DWORD PTR 28[esp],eax
+ mov DWORD PTR 32[esp],ebx
+ mov eax, DWORD PTR 32[esi]
+ mov ebx, DWORD PTR 36[esi]
+ mov DWORD PTR 36[esp],eax
+ mov DWORD PTR 40[esp],ebx
+ mov eax, DWORD PTR 40[esi]
+ mov ebx, DWORD PTR 44[esi]
+ mov DWORD PTR 44[esp],eax
+ mov DWORD PTR 48[esp],ebx
+ mov eax, DWORD PTR 48[esi]
+ mov ebx, DWORD PTR 52[esi]
+ mov DWORD PTR 52[esp],eax
+ mov DWORD PTR 56[esp],ebx
+ mov eax, DWORD PTR 56[esi]
+ mov ebx, DWORD PTR 60[esi]
+ mov DWORD PTR 60[esp],eax
+ mov DWORD PTR 64[esp],ebx
+ add esi, 64
+ mov eax, DWORD PTR [edi]
+ mov DWORD PTR 112[esp],esi
+ mov ebx, DWORD PTR 4[edi]
+ mov ecx, DWORD PTR 8[edi]
+ mov edx, DWORD PTR 12[edi]
+ mov ebp, DWORD PTR 16[edi]
+ ; 0
+ mov esi, ecx
+ xor esi, edx
+ mov edi, DWORD PTR 4[esp]
+ xor esi, ebx
+ add eax, edi
+ rol ecx, 10
+ add eax, esi
+ mov esi, ebx
+ rol eax, 11
+ add eax, ebp
+ ; 1
+ xor esi, ecx
+ mov edi, DWORD PTR 8[esp]
+ xor esi, eax
+ add ebp, esi
+ mov esi, eax
+ rol ebx, 10
+ add ebp, edi
+ xor esi, ebx
+ rol ebp, 14
+ add ebp, edx
+ ; 2
+ mov edi, DWORD PTR 12[esp]
+ xor esi, ebp
+ add edx, edi
+ rol eax, 10
+ add edx, esi
+ mov esi, ebp
+ rol edx, 15
+ add edx, ecx
+ ; 3
+ xor esi, eax
+ mov edi, DWORD PTR 16[esp]
+ xor esi, edx
+ add ecx, esi
+ mov esi, edx
+ rol ebp, 10
+ add ecx, edi
+ xor esi, ebp
+ rol ecx, 12
+ add ecx, ebx
+ ; 4
+ mov edi, DWORD PTR 20[esp]
+ xor esi, ecx
+ add ebx, edi
+ rol edx, 10
+ add ebx, esi
+ mov esi, ecx
+ rol ebx, 5
+ add ebx, eax
+ ; 5
+ xor esi, edx
+ mov edi, DWORD PTR 24[esp]
+ xor esi, ebx
+ add eax, esi
+ mov esi, ebx
+ rol ecx, 10
+ add eax, edi
+ xor esi, ecx
+ rol eax, 8
+ add eax, ebp
+ ; 6
+ mov edi, DWORD PTR 28[esp]
+ xor esi, eax
+ add ebp, edi
+ rol ebx, 10
+ add ebp, esi
+ mov esi, eax
+ rol ebp, 7
+ add ebp, edx
+ ; 7
+ xor esi, ebx
+ mov edi, DWORD PTR 32[esp]
+ xor esi, ebp
+ add edx, esi
+ mov esi, ebp
+ rol eax, 10
+ add edx, edi
+ xor esi, eax
+ rol edx, 9
+ add edx, ecx
+ ; 8
+ mov edi, DWORD PTR 36[esp]
+ xor esi, edx
+ add ecx, edi
+ rol ebp, 10
+ add ecx, esi
+ mov esi, edx
+ rol ecx, 11
+ add ecx, ebx
+ ; 9
+ xor esi, ebp
+ mov edi, DWORD PTR 40[esp]
+ xor esi, ecx
+ add ebx, esi
+ mov esi, ecx
+ rol edx, 10
+ add ebx, edi
+ xor esi, edx
+ rol ebx, 13
+ add ebx, eax
+ ; 10
+ mov edi, DWORD PTR 44[esp]
+ xor esi, ebx
+ add eax, edi
+ rol ecx, 10
+ add eax, esi
+ mov esi, ebx
+ rol eax, 14
+ add eax, ebp
+ ; 11
+ xor esi, ecx
+ mov edi, DWORD PTR 48[esp]
+ xor esi, eax
+ add ebp, esi
+ mov esi, eax
+ rol ebx, 10
+ add ebp, edi
+ xor esi, ebx
+ rol ebp, 15
+ add ebp, edx
+ ; 12
+ mov edi, DWORD PTR 52[esp]
+ xor esi, ebp
+ add edx, edi
+ rol eax, 10
+ add edx, esi
+ mov esi, ebp
+ rol edx, 6
+ add edx, ecx
+ ; 13
+ xor esi, eax
+ mov edi, DWORD PTR 56[esp]
+ xor esi, edx
+ add ecx, esi
+ mov esi, edx
+ rol ebp, 10
+ add ecx, edi
+ xor esi, ebp
+ rol ecx, 7
+ add ecx, ebx
+ ; 14
+ mov edi, DWORD PTR 60[esp]
+ xor esi, ecx
+ add ebx, edi
+ rol edx, 10
+ add ebx, esi
+ mov esi, ecx
+ rol ebx, 9
+ add ebx, eax
+ ; 15
+ xor esi, edx
+ mov edi, DWORD PTR 64[esp]
+ xor esi, ebx
+ add eax, esi
+ mov esi, -1
+ rol ecx, 10
+ add eax, edi
+ mov edi, DWORD PTR 32[esp]
+ rol eax, 8
+ add eax, ebp
+ ; 16
+ add ebp, edi
+ mov edi, ebx
+ sub esi, eax
+ and edi, eax
+ and esi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 20[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1518500249[edi*1+ebp]
+ mov edi, -1
+ rol ebp, 7
+ add ebp, edx
+ ; 17
+ add edx, esi
+ mov esi, eax
+ sub edi, ebp
+ and esi, ebp
+ and edi, ebx
+ or esi, edi
+ mov edi, DWORD PTR 56[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1518500249[esi*1+edx]
+ mov esi, -1
+ rol edx, 6
+ add edx, ecx
+ ; 18
+ add ecx, edi
+ mov edi, ebp
+ sub esi, edx
+ and edi, edx
+ and esi, eax
+ or edi, esi
+ mov esi, DWORD PTR 8[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1518500249[edi*1+ecx]
+ mov edi, -1
+ rol ecx, 8
+ add ecx, ebx
+ ; 19
+ add ebx, esi
+ mov esi, edx
+ sub edi, ecx
+ and esi, ecx
+ and edi, ebp
+ or esi, edi
+ mov edi, DWORD PTR 44[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1518500249[esi*1+ebx]
+ mov esi, -1
+ rol ebx, 13
+ add ebx, eax
+ ; 20
+ add eax, edi
+ mov edi, ecx
+ sub esi, ebx
+ and edi, ebx
+ and esi, edx
+ or edi, esi
+ mov esi, DWORD PTR 28[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1518500249[edi*1+eax]
+ mov edi, -1
+ rol eax, 11
+ add eax, ebp
+ ; 21
+ add ebp, esi
+ mov esi, ebx
+ sub edi, eax
+ and esi, eax
+ and edi, ecx
+ or esi, edi
+ mov edi, DWORD PTR 64[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1518500249[esi*1+ebp]
+ mov esi, -1
+ rol ebp, 9
+ add ebp, edx
+ ; 22
+ add edx, edi
+ mov edi, eax
+ sub esi, ebp
+ and edi, ebp
+ and esi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 16[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1518500249[edi*1+edx]
+ mov edi, -1
+ rol edx, 7
+ add edx, ecx
+ ; 23
+ add ecx, esi
+ mov esi, ebp
+ sub edi, edx
+ and esi, edx
+ and edi, eax
+ or esi, edi
+ mov edi, DWORD PTR 52[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1518500249[esi*1+ecx]
+ mov esi, -1
+ rol ecx, 15
+ add ecx, ebx
+ ; 24
+ add ebx, edi
+ mov edi, edx
+ sub esi, ecx
+ and edi, ecx
+ and esi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 4[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1518500249[edi*1+ebx]
+ mov edi, -1
+ rol ebx, 7
+ add ebx, eax
+ ; 25
+ add eax, esi
+ mov esi, ecx
+ sub edi, ebx
+ and esi, ebx
+ and edi, edx
+ or esi, edi
+ mov edi, DWORD PTR 40[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1518500249[esi*1+eax]
+ mov esi, -1
+ rol eax, 12
+ add eax, ebp
+ ; 26
+ add ebp, edi
+ mov edi, ebx
+ sub esi, eax
+ and edi, eax
+ and esi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 24[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1518500249[edi*1+ebp]
+ mov edi, -1
+ rol ebp, 15
+ add ebp, edx
+ ; 27
+ add edx, esi
+ mov esi, eax
+ sub edi, ebp
+ and esi, ebp
+ and edi, ebx
+ or esi, edi
+ mov edi, DWORD PTR 12[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1518500249[esi*1+edx]
+ mov esi, -1
+ rol edx, 9
+ add edx, ecx
+ ; 28
+ add ecx, edi
+ mov edi, ebp
+ sub esi, edx
+ and edi, edx
+ and esi, eax
+ or edi, esi
+ mov esi, DWORD PTR 60[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1518500249[edi*1+ecx]
+ mov edi, -1
+ rol ecx, 11
+ add ecx, ebx
+ ; 29
+ add ebx, esi
+ mov esi, edx
+ sub edi, ecx
+ and esi, ecx
+ and edi, ebp
+ or esi, edi
+ mov edi, DWORD PTR 48[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1518500249[esi*1+ebx]
+ mov esi, -1
+ rol ebx, 7
+ add ebx, eax
+ ; 30
+ add eax, edi
+ mov edi, ecx
+ sub esi, ebx
+ and edi, ebx
+ and esi, edx
+ or edi, esi
+ mov esi, DWORD PTR 36[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1518500249[edi*1+eax]
+ mov edi, -1
+ rol eax, 13
+ add eax, ebp
+ ; 31
+ add ebp, esi
+ mov esi, ebx
+ sub edi, eax
+ and esi, eax
+ and edi, ecx
+ or esi, edi
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1518500249[esi*1+ebp]
+ sub edi, eax
+ rol ebp, 12
+ add ebp, edx
+ ; 32
+ mov esi, DWORD PTR 16[esp]
+ or edi, ebp
+ add edx, esi
+ xor edi, ebx
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1859775393[edi*1+edx]
+ sub esi, ebp
+ rol edx, 11
+ add edx, ecx
+ ; 33
+ mov edi, DWORD PTR 44[esp]
+ or esi, edx
+ add ecx, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1859775393[esi*1+ecx]
+ sub edi, edx
+ rol ecx, 13
+ add ecx, ebx
+ ; 34
+ mov esi, DWORD PTR 60[esp]
+ or edi, ecx
+ add ebx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1859775393[edi*1+ebx]
+ sub esi, ecx
+ rol ebx, 6
+ add ebx, eax
+ ; 35
+ mov edi, DWORD PTR 20[esp]
+ or esi, ebx
+ add eax, edi
+ xor esi, edx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1859775393[esi*1+eax]
+ sub edi, ebx
+ rol eax, 7
+ add eax, ebp
+ ; 36
+ mov esi, DWORD PTR 40[esp]
+ or edi, eax
+ add ebp, esi
+ xor edi, ecx
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1859775393[edi*1+ebp]
+ sub esi, eax
+ rol ebp, 14
+ add ebp, edx
+ ; 37
+ mov edi, DWORD PTR 64[esp]
+ or esi, ebp
+ add edx, edi
+ xor esi, ebx
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1859775393[esi*1+edx]
+ sub edi, ebp
+ rol edx, 9
+ add edx, ecx
+ ; 38
+ mov esi, DWORD PTR 36[esp]
+ or edi, edx
+ add ecx, esi
+ xor edi, eax
+ mov esi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1859775393[edi*1+ecx]
+ sub esi, edx
+ rol ecx, 13
+ add ecx, ebx
+ ; 39
+ mov edi, DWORD PTR 8[esp]
+ or esi, ecx
+ add ebx, edi
+ xor esi, ebp
+ mov edi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1859775393[esi*1+ebx]
+ sub edi, ecx
+ rol ebx, 15
+ add ebx, eax
+ ; 40
+ mov esi, DWORD PTR 12[esp]
+ or edi, ebx
+ add eax, esi
+ xor edi, edx
+ mov esi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1859775393[edi*1+eax]
+ sub esi, ebx
+ rol eax, 14
+ add eax, ebp
+ ; 41
+ mov edi, DWORD PTR 32[esp]
+ or esi, eax
+ add ebp, edi
+ xor esi, ecx
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1859775393[esi*1+ebp]
+ sub edi, eax
+ rol ebp, 8
+ add ebp, edx
+ ; 42
+ mov esi, DWORD PTR 4[esp]
+ or edi, ebp
+ add edx, esi
+ xor edi, ebx
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1859775393[edi*1+edx]
+ sub esi, ebp
+ rol edx, 13
+ add edx, ecx
+ ; 43
+ mov edi, DWORD PTR 28[esp]
+ or esi, edx
+ add ecx, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1859775393[esi*1+ecx]
+ sub edi, edx
+ rol ecx, 6
+ add ecx, ebx
+ ; 44
+ mov esi, DWORD PTR 56[esp]
+ or edi, ecx
+ add ebx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1859775393[edi*1+ebx]
+ sub esi, ecx
+ rol ebx, 5
+ add ebx, eax
+ ; 45
+ mov edi, DWORD PTR 48[esp]
+ or esi, ebx
+ add eax, edi
+ xor esi, edx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1859775393[esi*1+eax]
+ sub edi, ebx
+ rol eax, 12
+ add eax, ebp
+ ; 46
+ mov esi, DWORD PTR 24[esp]
+ or edi, eax
+ add ebp, esi
+ xor edi, ecx
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1859775393[edi*1+ebp]
+ sub esi, eax
+ rol ebp, 7
+ add ebp, edx
+ ; 47
+ mov edi, DWORD PTR 52[esp]
+ or esi, ebp
+ add edx, edi
+ xor esi, ebx
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1859775393[esi*1+edx]
+ mov esi, eax
+ rol edx, 5
+ add edx, ecx
+ ; 48
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 8[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2400959708[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 11
+ add ecx, ebx
+ ; 49
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 40[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2400959708[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 12
+ add ebx, eax
+ ; 50
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 48[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2400959708[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 14
+ add eax, ebp
+ ; 51
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 44[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2400959708[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 15
+ add ebp, edx
+ ; 52
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 4[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2400959708[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 14
+ add edx, ecx
+ ; 53
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 36[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2400959708[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 15
+ add ecx, ebx
+ ; 54
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 52[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2400959708[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 9
+ add ebx, eax
+ ; 55
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 20[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2400959708[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 8
+ add eax, ebp
+ ; 56
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 56[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2400959708[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 9
+ add ebp, edx
+ ; 57
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 16[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2400959708[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 14
+ add edx, ecx
+ ; 58
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 32[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2400959708[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 5
+ add ecx, ebx
+ ; 59
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 64[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2400959708[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 6
+ add ebx, eax
+ ; 60
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 60[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2400959708[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 8
+ add eax, ebp
+ ; 61
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 24[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2400959708[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 6
+ add ebp, edx
+ ; 62
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 28[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2400959708[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 5
+ add edx, ecx
+ ; 63
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 12[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2400959708[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ sub edi, ebp
+ rol ecx, 12
+ add ecx, ebx
+ ; 64
+ mov esi, DWORD PTR 20[esp]
+ or edi, edx
+ add ebx, esi
+ xor edi, ecx
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 2840853838[edi*1+ebx]
+ sub esi, edx
+ rol ebx, 9
+ add ebx, eax
+ ; 65
+ mov edi, DWORD PTR 4[esp]
+ or esi, ecx
+ add eax, edi
+ xor esi, ebx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 2840853838[esi*1+eax]
+ sub edi, ecx
+ rol eax, 15
+ add eax, ebp
+ ; 66
+ mov esi, DWORD PTR 24[esp]
+ or edi, ebx
+ add ebp, esi
+ xor edi, eax
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 2840853838[edi*1+ebp]
+ sub esi, ebx
+ rol ebp, 5
+ add ebp, edx
+ ; 67
+ mov edi, DWORD PTR 40[esp]
+ or esi, eax
+ add edx, edi
+ xor esi, ebp
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 2840853838[esi*1+edx]
+ sub edi, eax
+ rol edx, 11
+ add edx, ecx
+ ; 68
+ mov esi, DWORD PTR 32[esp]
+ or edi, ebp
+ add ecx, esi
+ xor edi, edx
+ mov esi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 2840853838[edi*1+ecx]
+ sub esi, ebp
+ rol ecx, 6
+ add ecx, ebx
+ ; 69
+ mov edi, DWORD PTR 52[esp]
+ or esi, edx
+ add ebx, edi
+ xor esi, ecx
+ mov edi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 2840853838[esi*1+ebx]
+ sub edi, edx
+ rol ebx, 8
+ add ebx, eax
+ ; 70
+ mov esi, DWORD PTR 12[esp]
+ or edi, ecx
+ add eax, esi
+ xor edi, ebx
+ mov esi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 2840853838[edi*1+eax]
+ sub esi, ecx
+ rol eax, 13
+ add eax, ebp
+ ; 71
+ mov edi, DWORD PTR 44[esp]
+ or esi, ebx
+ add ebp, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 2840853838[esi*1+ebp]
+ sub edi, ebx
+ rol ebp, 12
+ add ebp, edx
+ ; 72
+ mov esi, DWORD PTR 60[esp]
+ or edi, eax
+ add edx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 2840853838[edi*1+edx]
+ sub esi, eax
+ rol edx, 5
+ add edx, ecx
+ ; 73
+ mov edi, DWORD PTR 8[esp]
+ or esi, ebp
+ add ecx, edi
+ xor esi, edx
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 2840853838[esi*1+ecx]
+ sub edi, ebp
+ rol ecx, 12
+ add ecx, ebx
+ ; 74
+ mov esi, DWORD PTR 16[esp]
+ or edi, edx
+ add ebx, esi
+ xor edi, ecx
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 2840853838[edi*1+ebx]
+ sub esi, edx
+ rol ebx, 13
+ add ebx, eax
+ ; 75
+ mov edi, DWORD PTR 36[esp]
+ or esi, ecx
+ add eax, edi
+ xor esi, ebx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 2840853838[esi*1+eax]
+ sub edi, ecx
+ rol eax, 14
+ add eax, ebp
+ ; 76
+ mov esi, DWORD PTR 48[esp]
+ or edi, ebx
+ add ebp, esi
+ xor edi, eax
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 2840853838[edi*1+ebp]
+ sub esi, ebx
+ rol ebp, 11
+ add ebp, edx
+ ; 77
+ mov edi, DWORD PTR 28[esp]
+ or esi, eax
+ add edx, edi
+ xor esi, ebp
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 2840853838[esi*1+edx]
+ sub edi, eax
+ rol edx, 8
+ add edx, ecx
+ ; 78
+ mov esi, DWORD PTR 64[esp]
+ or edi, ebp
+ add ecx, esi
+ xor edi, edx
+ mov esi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 2840853838[edi*1+ecx]
+ sub esi, ebp
+ rol ecx, 5
+ add ecx, ebx
+ ; 79
+ mov edi, DWORD PTR 56[esp]
+ or esi, edx
+ add ebx, edi
+ xor esi, ecx
+ mov edi, DWORD PTR 108[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2840853838[esi*1+ebx]
+ mov DWORD PTR 68[esp],eax
+ rol ebx, 6
+ add ebx, eax
+ mov eax, DWORD PTR [edi]
+ mov DWORD PTR 72[esp],ebx
+ mov DWORD PTR 76[esp],ecx
+ mov ebx, DWORD PTR 4[edi]
+ mov DWORD PTR 80[esp],edx
+ mov ecx, DWORD PTR 8[edi]
+ mov DWORD PTR 84[esp],ebp
+ mov edx, DWORD PTR 12[edi]
+ mov ebp, DWORD PTR 16[edi]
+ ; 80
+ mov edi, -1
+ sub edi, edx
+ mov esi, DWORD PTR 24[esp]
+ or edi, ecx
+ add eax, esi
+ xor edi, ebx
+ mov esi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1352829926[edi*1+eax]
+ sub esi, ecx
+ rol eax, 8
+ add eax, ebp
+ ; 81
+ mov edi, DWORD PTR 60[esp]
+ or esi, ebx
+ add ebp, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1352829926[esi*1+ebp]
+ sub edi, ebx
+ rol ebp, 9
+ add ebp, edx
+ ; 82
+ mov esi, DWORD PTR 32[esp]
+ or edi, eax
+ add edx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1352829926[edi*1+edx]
+ sub esi, eax
+ rol edx, 9
+ add edx, ecx
+ ; 83
+ mov edi, DWORD PTR 4[esp]
+ or esi, ebp
+ add ecx, edi
+ xor esi, edx
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1352829926[esi*1+ecx]
+ sub edi, ebp
+ rol ecx, 11
+ add ecx, ebx
+ ; 84
+ mov esi, DWORD PTR 40[esp]
+ or edi, edx
+ add ebx, esi
+ xor edi, ecx
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1352829926[edi*1+ebx]
+ sub esi, edx
+ rol ebx, 13
+ add ebx, eax
+ ; 85
+ mov edi, DWORD PTR 12[esp]
+ or esi, ecx
+ add eax, edi
+ xor esi, ebx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1352829926[esi*1+eax]
+ sub edi, ecx
+ rol eax, 15
+ add eax, ebp
+ ; 86
+ mov esi, DWORD PTR 48[esp]
+ or edi, ebx
+ add ebp, esi
+ xor edi, eax
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1352829926[edi*1+ebp]
+ sub esi, ebx
+ rol ebp, 15
+ add ebp, edx
+ ; 87
+ mov edi, DWORD PTR 20[esp]
+ or esi, eax
+ add edx, edi
+ xor esi, ebp
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1352829926[esi*1+edx]
+ sub edi, eax
+ rol edx, 5
+ add edx, ecx
+ ; 88
+ mov esi, DWORD PTR 56[esp]
+ or edi, ebp
+ add ecx, esi
+ xor edi, edx
+ mov esi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1352829926[edi*1+ecx]
+ sub esi, ebp
+ rol ecx, 7
+ add ecx, ebx
+ ; 89
+ mov edi, DWORD PTR 28[esp]
+ or esi, edx
+ add ebx, edi
+ xor esi, ecx
+ mov edi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1352829926[esi*1+ebx]
+ sub edi, edx
+ rol ebx, 7
+ add ebx, eax
+ ; 90
+ mov esi, DWORD PTR 64[esp]
+ or edi, ecx
+ add eax, esi
+ xor edi, ebx
+ mov esi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1352829926[edi*1+eax]
+ sub esi, ecx
+ rol eax, 8
+ add eax, ebp
+ ; 91
+ mov edi, DWORD PTR 36[esp]
+ or esi, ebx
+ add ebp, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1352829926[esi*1+ebp]
+ sub edi, ebx
+ rol ebp, 11
+ add ebp, edx
+ ; 92
+ mov esi, DWORD PTR 8[esp]
+ or edi, eax
+ add edx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1352829926[edi*1+edx]
+ sub esi, eax
+ rol edx, 14
+ add edx, ecx
+ ; 93
+ mov edi, DWORD PTR 44[esp]
+ or esi, ebp
+ add ecx, edi
+ xor esi, edx
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1352829926[esi*1+ecx]
+ sub edi, ebp
+ rol ecx, 14
+ add ecx, ebx
+ ; 94
+ mov esi, DWORD PTR 16[esp]
+ or edi, edx
+ add ebx, esi
+ xor edi, ecx
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1352829926[edi*1+ebx]
+ sub esi, edx
+ rol ebx, 12
+ add ebx, eax
+ ; 95
+ mov edi, DWORD PTR 52[esp]
+ or esi, ecx
+ add eax, edi
+ xor esi, ebx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1352829926[esi*1+eax]
+ mov esi, ecx
+ rol eax, 6
+ add eax, ebp
+ ; 96
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 28[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1548603684[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 9
+ add ebp, edx
+ ; 97
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 48[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1548603684[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 13
+ add edx, ecx
+ ; 98
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 16[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1548603684[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 15
+ add ecx, ebx
+ ; 99
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 32[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1548603684[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 7
+ add ebx, eax
+ ; 100
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 4[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1548603684[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 12
+ add eax, ebp
+ ; 101
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 56[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1548603684[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 8
+ add ebp, edx
+ ; 102
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 24[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1548603684[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 9
+ add edx, ecx
+ ; 103
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 44[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1548603684[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 11
+ add ecx, ebx
+ ; 104
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 60[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1548603684[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 7
+ add ebx, eax
+ ; 105
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 64[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1548603684[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 7
+ add eax, ebp
+ ; 106
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 36[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1548603684[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ mov esi, ebx
+ rol ebp, 12
+ add ebp, edx
+ ; 107
+ sub edi, ebx
+ and esi, ebp
+ and edi, eax
+ or edi, esi
+ mov esi, DWORD PTR 52[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1548603684[edi+edx]
+ mov edi, -1
+ add edx, esi
+ mov esi, eax
+ rol edx, 7
+ add edx, ecx
+ ; 108
+ sub edi, eax
+ and esi, edx
+ and edi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 20[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 1548603684[edi+ecx]
+ mov edi, -1
+ add ecx, esi
+ mov esi, ebp
+ rol ecx, 6
+ add ecx, ebx
+ ; 109
+ sub edi, ebp
+ and esi, ecx
+ and edi, edx
+ or edi, esi
+ mov esi, DWORD PTR 40[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 1548603684[edi+ebx]
+ mov edi, -1
+ add ebx, esi
+ mov esi, edx
+ rol ebx, 15
+ add ebx, eax
+ ; 110
+ sub edi, edx
+ and esi, ebx
+ and edi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 8[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 1548603684[edi+eax]
+ mov edi, -1
+ add eax, esi
+ mov esi, ecx
+ rol eax, 13
+ add eax, ebp
+ ; 111
+ sub edi, ecx
+ and esi, eax
+ and edi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 12[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 1548603684[edi+ebp]
+ mov edi, -1
+ add ebp, esi
+ sub edi, eax
+ rol ebp, 11
+ add ebp, edx
+ ; 112
+ mov esi, DWORD PTR 64[esp]
+ or edi, ebp
+ add edx, esi
+ xor edi, ebx
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1836072691[edi*1+edx]
+ sub esi, ebp
+ rol edx, 9
+ add edx, ecx
+ ; 113
+ mov edi, DWORD PTR 24[esp]
+ or esi, edx
+ add ecx, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1836072691[esi*1+ecx]
+ sub edi, edx
+ rol ecx, 7
+ add ecx, ebx
+ ; 114
+ mov esi, DWORD PTR 8[esp]
+ or edi, ecx
+ add ebx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1836072691[edi*1+ebx]
+ sub esi, ecx
+ rol ebx, 15
+ add ebx, eax
+ ; 115
+ mov edi, DWORD PTR 16[esp]
+ or esi, ebx
+ add eax, edi
+ xor esi, edx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1836072691[esi*1+eax]
+ sub edi, ebx
+ rol eax, 11
+ add eax, ebp
+ ; 116
+ mov esi, DWORD PTR 32[esp]
+ or edi, eax
+ add ebp, esi
+ xor edi, ecx
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1836072691[edi*1+ebp]
+ sub esi, eax
+ rol ebp, 8
+ add ebp, edx
+ ; 117
+ mov edi, DWORD PTR 60[esp]
+ or esi, ebp
+ add edx, edi
+ xor esi, ebx
+ mov edi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1836072691[esi*1+edx]
+ sub edi, ebp
+ rol edx, 6
+ add edx, ecx
+ ; 118
+ mov esi, DWORD PTR 28[esp]
+ or edi, edx
+ add ecx, esi
+ xor edi, eax
+ mov esi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1836072691[edi*1+ecx]
+ sub esi, edx
+ rol ecx, 6
+ add ecx, ebx
+ ; 119
+ mov edi, DWORD PTR 40[esp]
+ or esi, ecx
+ add ebx, edi
+ xor esi, ebp
+ mov edi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1836072691[esi*1+ebx]
+ sub edi, ecx
+ rol ebx, 14
+ add ebx, eax
+ ; 120
+ mov esi, DWORD PTR 48[esp]
+ or edi, ebx
+ add eax, esi
+ xor edi, edx
+ mov esi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1836072691[edi*1+eax]
+ sub esi, ebx
+ rol eax, 12
+ add eax, ebp
+ ; 121
+ mov edi, DWORD PTR 36[esp]
+ or esi, eax
+ add ebp, edi
+ xor esi, ecx
+ mov edi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1836072691[esi*1+ebp]
+ sub edi, eax
+ rol ebp, 13
+ add ebp, edx
+ ; 122
+ mov esi, DWORD PTR 52[esp]
+ or edi, ebp
+ add edx, esi
+ xor edi, ebx
+ mov esi, -1
+ rol eax, 10
+ lea edx, DWORD PTR 1836072691[edi*1+edx]
+ sub esi, ebp
+ rol edx, 5
+ add edx, ecx
+ ; 123
+ mov edi, DWORD PTR 12[esp]
+ or esi, edx
+ add ecx, edi
+ xor esi, eax
+ mov edi, -1
+ rol ebp, 10
+ lea ecx, DWORD PTR 1836072691[esi*1+ecx]
+ sub edi, edx
+ rol ecx, 14
+ add ecx, ebx
+ ; 124
+ mov esi, DWORD PTR 44[esp]
+ or edi, ecx
+ add ebx, esi
+ xor edi, ebp
+ mov esi, -1
+ rol edx, 10
+ lea ebx, DWORD PTR 1836072691[edi*1+ebx]
+ sub esi, ecx
+ rol ebx, 13
+ add ebx, eax
+ ; 125
+ mov edi, DWORD PTR 4[esp]
+ or esi, ebx
+ add eax, edi
+ xor esi, edx
+ mov edi, -1
+ rol ecx, 10
+ lea eax, DWORD PTR 1836072691[esi*1+eax]
+ sub edi, ebx
+ rol eax, 13
+ add eax, ebp
+ ; 126
+ mov esi, DWORD PTR 20[esp]
+ or edi, eax
+ add ebp, esi
+ xor edi, ecx
+ mov esi, -1
+ rol ebx, 10
+ lea ebp, DWORD PTR 1836072691[edi*1+ebp]
+ sub esi, eax
+ rol ebp, 7
+ add ebp, edx
+ ; 127
+ mov edi, DWORD PTR 56[esp]
+ or esi, ebp
+ add edx, edi
+ xor esi, ebx
+ mov edi, DWORD PTR 36[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 1836072691[esi*1+edx]
+ mov esi, -1
+ rol edx, 5
+ add edx, ecx
+ ; 128
+ add ecx, edi
+ mov edi, ebp
+ sub esi, edx
+ and edi, edx
+ and esi, eax
+ or edi, esi
+ mov esi, DWORD PTR 28[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2053994217[edi*1+ecx]
+ mov edi, -1
+ rol ecx, 15
+ add ecx, ebx
+ ; 129
+ add ebx, esi
+ mov esi, edx
+ sub edi, ecx
+ and esi, ecx
+ and edi, ebp
+ or esi, edi
+ mov edi, DWORD PTR 20[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2053994217[esi*1+ebx]
+ mov esi, -1
+ rol ebx, 5
+ add ebx, eax
+ ; 130
+ add eax, edi
+ mov edi, ecx
+ sub esi, ebx
+ and edi, ebx
+ and esi, edx
+ or edi, esi
+ mov esi, DWORD PTR 8[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2053994217[edi*1+eax]
+ mov edi, -1
+ rol eax, 8
+ add eax, ebp
+ ; 131
+ add ebp, esi
+ mov esi, ebx
+ sub edi, eax
+ and esi, eax
+ and edi, ecx
+ or esi, edi
+ mov edi, DWORD PTR 16[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2053994217[esi*1+ebp]
+ mov esi, -1
+ rol ebp, 11
+ add ebp, edx
+ ; 132
+ add edx, edi
+ mov edi, eax
+ sub esi, ebp
+ and edi, ebp
+ and esi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 48[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2053994217[edi*1+edx]
+ mov edi, -1
+ rol edx, 14
+ add edx, ecx
+ ; 133
+ add ecx, esi
+ mov esi, ebp
+ sub edi, edx
+ and esi, edx
+ and edi, eax
+ or esi, edi
+ mov edi, DWORD PTR 64[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2053994217[esi*1+ecx]
+ mov esi, -1
+ rol ecx, 14
+ add ecx, ebx
+ ; 134
+ add ebx, edi
+ mov edi, edx
+ sub esi, ecx
+ and edi, ecx
+ and esi, ebp
+ or edi, esi
+ mov esi, DWORD PTR 4[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2053994217[edi*1+ebx]
+ mov edi, -1
+ rol ebx, 6
+ add ebx, eax
+ ; 135
+ add eax, esi
+ mov esi, ecx
+ sub edi, ebx
+ and esi, ebx
+ and edi, edx
+ or esi, edi
+ mov edi, DWORD PTR 24[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2053994217[esi*1+eax]
+ mov esi, -1
+ rol eax, 14
+ add eax, ebp
+ ; 136
+ add ebp, edi
+ mov edi, ebx
+ sub esi, eax
+ and edi, eax
+ and esi, ecx
+ or edi, esi
+ mov esi, DWORD PTR 52[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2053994217[edi*1+ebp]
+ mov edi, -1
+ rol ebp, 6
+ add ebp, edx
+ ; 137
+ add edx, esi
+ mov esi, eax
+ sub edi, ebp
+ and esi, ebp
+ and edi, ebx
+ or esi, edi
+ mov edi, DWORD PTR 12[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2053994217[esi*1+edx]
+ mov esi, -1
+ rol edx, 9
+ add edx, ecx
+ ; 138
+ add ecx, edi
+ mov edi, ebp
+ sub esi, edx
+ and edi, edx
+ and esi, eax
+ or edi, esi
+ mov esi, DWORD PTR 56[esp]
+ rol ebp, 10
+ lea ecx, DWORD PTR 2053994217[edi*1+ecx]
+ mov edi, -1
+ rol ecx, 12
+ add ecx, ebx
+ ; 139
+ add ebx, esi
+ mov esi, edx
+ sub edi, ecx
+ and esi, ecx
+ and edi, ebp
+ or esi, edi
+ mov edi, DWORD PTR 40[esp]
+ rol edx, 10
+ lea ebx, DWORD PTR 2053994217[esi*1+ebx]
+ mov esi, -1
+ rol ebx, 9
+ add ebx, eax
+ ; 140
+ add eax, edi
+ mov edi, ecx
+ sub esi, ebx
+ and edi, ebx
+ and esi, edx
+ or edi, esi
+ mov esi, DWORD PTR 32[esp]
+ rol ecx, 10
+ lea eax, DWORD PTR 2053994217[edi*1+eax]
+ mov edi, -1
+ rol eax, 12
+ add eax, ebp
+ ; 141
+ add ebp, esi
+ mov esi, ebx
+ sub edi, eax
+ and esi, eax
+ and edi, ecx
+ or esi, edi
+ mov edi, DWORD PTR 44[esp]
+ rol ebx, 10
+ lea ebp, DWORD PTR 2053994217[esi*1+ebp]
+ mov esi, -1
+ rol ebp, 5
+ add ebp, edx
+ ; 142
+ add edx, edi
+ mov edi, eax
+ sub esi, ebp
+ and edi, ebp
+ and esi, ebx
+ or edi, esi
+ mov esi, DWORD PTR 60[esp]
+ rol eax, 10
+ lea edx, DWORD PTR 2053994217[edi*1+edx]
+ mov edi, -1
+ rol edx, 15
+ add edx, ecx
+ ; 143
+ add ecx, esi
+ mov esi, ebp
+ sub edi, edx
+ and esi, edx
+ and edi, eax
+ or edi, esi
+ mov esi, edx
+ rol ebp, 10
+ lea ecx, DWORD PTR 2053994217[edi*1+ecx]
+ xor esi, ebp
+ rol ecx, 8
+ add ecx, ebx
+ ; 144
+ mov edi, DWORD PTR 52[esp]
+ xor esi, ecx
+ add ebx, edi
+ rol edx, 10
+ add ebx, esi
+ mov esi, ecx
+ rol ebx, 8
+ add ebx, eax
+ ; 145
+ xor esi, edx
+ mov edi, DWORD PTR 64[esp]
+ xor esi, ebx
+ add eax, esi
+ mov esi, ebx
+ rol ecx, 10
+ add eax, edi
+ xor esi, ecx
+ rol eax, 5
+ add eax, ebp
+ ; 146
+ mov edi, DWORD PTR 44[esp]
+ xor esi, eax
+ add ebp, edi
+ rol ebx, 10
+ add ebp, esi
+ mov esi, eax
+ rol ebp, 12
+ add ebp, edx
+ ; 147
+ xor esi, ebx
+ mov edi, DWORD PTR 20[esp]
+ xor esi, ebp
+ add edx, esi
+ mov esi, ebp
+ rol eax, 10
+ add edx, edi
+ xor esi, eax
+ rol edx, 9
+ add edx, ecx
+ ; 148
+ mov edi, DWORD PTR 8[esp]
+ xor esi, edx
+ add ecx, edi
+ rol ebp, 10
+ add ecx, esi
+ mov esi, edx
+ rol ecx, 12
+ add ecx, ebx
+ ; 149
+ xor esi, ebp
+ mov edi, DWORD PTR 24[esp]
+ xor esi, ecx
+ add ebx, esi
+ mov esi, ecx
+ rol edx, 10
+ add ebx, edi
+ xor esi, edx
+ rol ebx, 5
+ add ebx, eax
+ ; 150
+ mov edi, DWORD PTR 36[esp]
+ xor esi, ebx
+ add eax, edi
+ rol ecx, 10
+ add eax, esi
+ mov esi, ebx
+ rol eax, 14
+ add eax, ebp
+ ; 151
+ xor esi, ecx
+ mov edi, DWORD PTR 32[esp]
+ xor esi, eax
+ add ebp, esi
+ mov esi, eax
+ rol ebx, 10
+ add ebp, edi
+ xor esi, ebx
+ rol ebp, 6
+ add ebp, edx
+ ; 152
+ mov edi, DWORD PTR 28[esp]
+ xor esi, ebp
+ add edx, edi
+ rol eax, 10
+ add edx, esi
+ mov esi, ebp
+ rol edx, 8
+ add edx, ecx
+ ; 153
+ xor esi, eax
+ mov edi, DWORD PTR 12[esp]
+ xor esi, edx
+ add ecx, esi
+ mov esi, edx
+ rol ebp, 10
+ add ecx, edi
+ xor esi, ebp
+ rol ecx, 13
+ add ecx, ebx
+ ; 154
+ mov edi, DWORD PTR 56[esp]
+ xor esi, ecx
+ add ebx, edi
+ rol edx, 10
+ add ebx, esi
+ mov esi, ecx
+ rol ebx, 6
+ add ebx, eax
+ ; 155
+ xor esi, edx
+ mov edi, DWORD PTR 60[esp]
+ xor esi, ebx
+ add eax, esi
+ mov esi, ebx
+ rol ecx, 10
+ add eax, edi
+ xor esi, ecx
+ rol eax, 5
+ add eax, ebp
+ ; 156
+ mov edi, DWORD PTR 4[esp]
+ xor esi, eax
+ add ebp, edi
+ rol ebx, 10
+ add ebp, esi
+ mov esi, eax
+ rol ebp, 15
+ add ebp, edx
+ ; 157
+ xor esi, ebx
+ mov edi, DWORD PTR 16[esp]
+ xor esi, ebp
+ add edx, esi
+ mov esi, ebp
+ rol eax, 10
+ add edx, edi
+ xor esi, eax
+ rol edx, 13
+ add edx, ecx
+ ; 158
+ mov edi, DWORD PTR 40[esp]
+ xor esi, edx
+ add ecx, edi
+ rol ebp, 10
+ add ecx, esi
+ mov esi, edx
+ rol ecx, 11
+ add ecx, ebx
+ ; 159
+ xor esi, ebp
+ mov edi, DWORD PTR 48[esp]
+ xor esi, ecx
+ add ebx, esi
+ rol edx, 10
+ add ebx, edi
+ mov edi, DWORD PTR 108[esp]
+ rol ebx, 11
+ add ebx, eax
+ mov esi, DWORD PTR 4[edi]
+ add edx, esi
+ mov esi, DWORD PTR 76[esp]
+ add edx, esi
+ mov esi, DWORD PTR 8[edi]
+ add ebp, esi
+ mov esi, DWORD PTR 80[esp]
+ add ebp, esi
+ mov esi, DWORD PTR 12[edi]
+ add eax, esi
+ mov esi, DWORD PTR 84[esp]
+ add eax, esi
+ mov esi, DWORD PTR 16[edi]
+ add ebx, esi
+ mov esi, DWORD PTR 68[esp]
+ add ebx, esi
+ mov esi, DWORD PTR [edi]
+ add ecx, esi
+ mov esi, DWORD PTR 72[esp]
+ add ecx, esi
+ mov DWORD PTR [edi],edx
+ mov DWORD PTR 4[edi],ebp
+ mov DWORD PTR 8[edi],eax
+ mov DWORD PTR 12[edi],ebx
+ mov DWORD PTR 16[edi],ecx
+ mov edi, DWORD PTR [esp]
+ mov esi, DWORD PTR 112[esp]
+ cmp edi, esi
+ mov edi, DWORD PTR 108[esp]
+ jge L000start
+ add esp, 88
+ pop ebx
+ pop ebp
+ pop edi
+ pop esi
+ ret
+_ripemd160_block_x86 ENDP
+_TEXT ENDS
+END
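
One idiom worth decoding in the generated rounds above: i386 has no
"not into another register", so wherever a round function needs ~y the code
loads -1 and subtracts (mov edi,-1 / sub edi,y yields ~y). The mov/sub/or/xor
runs therefore compute the round-3 function f(x,y,z) = (x | ~y) ^ z (the rounds
with constant 1859775393 = 0x6ED9EBA1), and the sub/and/and/or runs compute the
round-2 multiplexer (x & y) | (~x & z) (constant 1518500249 = 0x5A827999). As a
C sketch:

#include <stdint.h>

/* f3(x,y,z) = (x | ~y) ^ z, with ~y built as 0xffffffff - y,
 * mirroring the mov -1 / sub / or / xor sequences above */
static uint32_t f3(uint32_t x, uint32_t y, uint32_t z)
	{
	uint32_t t = 0xffffffffU - y;	/* mov t,-1 ; sub t,y  ==  ~y */
	return (t | x) ^ z;		/* or t,x ; xor t,z */
	}

/* f2(x,y,z) = (x & y) | (~x & z), the sub/and/and/or pattern */
static uint32_t f2(uint32_t x, uint32_t y, uint32_t z)
	{
	return (x & y) | (~x & z);
	}
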
diff --git a/crypto/ripemd/asm/rm86unix.cpp b/crypto/ripemd/asm/rm86unix.cpp
new file mode 100644
index 0000000000..f90f6f2fd6
--- /dev/null
+++ b/crypto/ripemd/asm/rm86unix.cpp
@@ -0,0 +1,2016 @@
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define ripemd160_block_x86 _ripemd160_block_x86
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 16
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+ /* Don't even think of reading this code */
+ /* It was automatically generated by rmd-586.pl */
+	/* Which is a perl program used to generate the x86 assembler for */
+	/* any of elf, a.out, BSDI, Win32, or Solaris */
+ /* eric <eay@cryptsoft.com> */
+
+ .file "rmd-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align ALIGN
+.globl ripemd160_block_x86
+ TYPE(ripemd160_block_x86,@function)
+ripemd160_block_x86:
+ pushl %esi
+ movl 16(%esp), %ecx
+ pushl %edi
+ movl 16(%esp), %esi
+ pushl %ebp
+ addl %esi, %ecx
+ pushl %ebx
+ subl $64, %ecx
+ subl $88, %esp
+ movl %ecx, (%esp)
+ movl 108(%esp), %edi
+.L000start:
+
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %eax, 4(%esp)
+ movl %ebx, 8(%esp)
+ movl 8(%esi), %eax
+ movl 12(%esi), %ebx
+ movl %eax, 12(%esp)
+ movl %ebx, 16(%esp)
+ movl 16(%esi), %eax
+ movl 20(%esi), %ebx
+ movl %eax, 20(%esp)
+ movl %ebx, 24(%esp)
+ movl 24(%esi), %eax
+ movl 28(%esi), %ebx
+ movl %eax, 28(%esp)
+ movl %ebx, 32(%esp)
+ movl 32(%esi), %eax
+ movl 36(%esi), %ebx
+ movl %eax, 36(%esp)
+ movl %ebx, 40(%esp)
+ movl 40(%esi), %eax
+ movl 44(%esi), %ebx
+ movl %eax, 44(%esp)
+ movl %ebx, 48(%esp)
+ movl 48(%esi), %eax
+ movl 52(%esi), %ebx
+ movl %eax, 52(%esp)
+ movl %ebx, 56(%esp)
+ movl 56(%esi), %eax
+ movl 60(%esi), %ebx
+ movl %eax, 60(%esp)
+ movl %ebx, 64(%esp)
+ addl $64, %esi
+ movl (%edi), %eax
+ movl %esi, 112(%esp)
+ movl 4(%edi), %ebx
+ movl 8(%edi), %ecx
+ movl 12(%edi), %edx
+ movl 16(%edi), %ebp
+ /* 0 */
+ movl %ecx, %esi
+ xorl %edx, %esi
+ movl 4(%esp), %edi
+ xorl %ebx, %esi
+ addl %edi, %eax
+ roll $10, %ecx
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $11, %eax
+ addl %ebp, %eax
+ /* 1 */
+ xorl %ecx, %esi
+ movl 8(%esp), %edi
+ xorl %eax, %esi
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $10, %ebx
+ addl %edi, %ebp
+ xorl %ebx, %esi
+ roll $14, %ebp
+ addl %edx, %ebp
+ /* 2 */
+ movl 12(%esp), %edi
+ xorl %ebp, %esi
+ addl %edi, %edx
+ roll $10, %eax
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $15, %edx
+ addl %ecx, %edx
+ /* 3 */
+ xorl %eax, %esi
+ movl 16(%esp), %edi
+ xorl %edx, %esi
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $10, %ebp
+ addl %edi, %ecx
+ xorl %ebp, %esi
+ roll $12, %ecx
+ addl %ebx, %ecx
+ /* 4 */
+ movl 20(%esp), %edi
+ xorl %ecx, %esi
+ addl %edi, %ebx
+ roll $10, %edx
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $5, %ebx
+ addl %eax, %ebx
+ /* 5 */
+ xorl %edx, %esi
+ movl 24(%esp), %edi
+ xorl %ebx, %esi
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $10, %ecx
+ addl %edi, %eax
+ xorl %ecx, %esi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 6 */
+ movl 28(%esp), %edi
+ xorl %eax, %esi
+ addl %edi, %ebp
+ roll $10, %ebx
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $7, %ebp
+ addl %edx, %ebp
+ /* 7 */
+ xorl %ebx, %esi
+ movl 32(%esp), %edi
+ xorl %ebp, %esi
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $10, %eax
+ addl %edi, %edx
+ xorl %eax, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 8 */
+ movl 36(%esp), %edi
+ xorl %edx, %esi
+ addl %edi, %ecx
+ roll $10, %ebp
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 9 */
+ xorl %ebp, %esi
+ movl 40(%esp), %edi
+ xorl %ecx, %esi
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $10, %edx
+ addl %edi, %ebx
+ xorl %edx, %esi
+ roll $13, %ebx
+ addl %eax, %ebx
+ /* 10 */
+ movl 44(%esp), %edi
+ xorl %ebx, %esi
+ addl %edi, %eax
+ roll $10, %ecx
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 11 */
+ xorl %ecx, %esi
+ movl 48(%esp), %edi
+ xorl %eax, %esi
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $10, %ebx
+ addl %edi, %ebp
+ xorl %ebx, %esi
+ roll $15, %ebp
+ addl %edx, %ebp
+ /* 12 */
+ movl 52(%esp), %edi
+ xorl %ebp, %esi
+ addl %edi, %edx
+ roll $10, %eax
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $6, %edx
+ addl %ecx, %edx
+ /* 13 */
+ xorl %eax, %esi
+ movl 56(%esp), %edi
+ xorl %edx, %esi
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $10, %ebp
+ addl %edi, %ecx
+ xorl %ebp, %esi
+ roll $7, %ecx
+ addl %ebx, %ecx
+ /* 14 */
+ movl 60(%esp), %edi
+ xorl %ecx, %esi
+ addl %edi, %ebx
+ roll $10, %edx
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $9, %ebx
+ addl %eax, %ebx
+ /* 15 */
+ xorl %edx, %esi
+ movl 64(%esp), %edi
+ xorl %ebx, %esi
+ addl %esi, %eax
+ movl $-1, %esi
+ roll $10, %ecx
+ addl %edi, %eax
+ movl 32(%esp), %edi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 16 */
+ addl %edi, %ebp
+ movl %ebx, %edi
+ subl %eax, %esi
+ andl %eax, %edi
+ andl %ecx, %esi
+ orl %esi, %edi
+ movl 20(%esp), %esi
+ roll $10, %ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl $-1, %edi
+ roll $7, %ebp
+ addl %edx, %ebp
+ /* 17 */
+ addl %esi, %edx
+ movl %eax, %esi
+ subl %ebp, %edi
+ andl %ebp, %esi
+ andl %ebx, %edi
+ orl %edi, %esi
+ movl 56(%esp), %edi
+ roll $10, %eax
+ leal 1518500249(%edx,%esi,1),%edx
+ movl $-1, %esi
+ roll $6, %edx
+ addl %ecx, %edx
+ /* 18 */
+ addl %edi, %ecx
+ movl %ebp, %edi
+ subl %edx, %esi
+ andl %edx, %edi
+ andl %eax, %esi
+ orl %esi, %edi
+ movl 8(%esp), %esi
+ roll $10, %ebp
+ leal 1518500249(%ecx,%edi,1),%ecx
+ movl $-1, %edi
+ roll $8, %ecx
+ addl %ebx, %ecx
+ /* 19 */
+ addl %esi, %ebx
+ movl %edx, %esi
+ subl %ecx, %edi
+ andl %ecx, %esi
+ andl %ebp, %edi
+ orl %edi, %esi
+ movl 44(%esp), %edi
+ roll $10, %edx
+ leal 1518500249(%ebx,%esi,1),%ebx
+ movl $-1, %esi
+ roll $13, %ebx
+ addl %eax, %ebx
+ /* 20 */
+ addl %edi, %eax
+ movl %ecx, %edi
+ subl %ebx, %esi
+ andl %ebx, %edi
+ andl %edx, %esi
+ orl %esi, %edi
+ movl 28(%esp), %esi
+ roll $10, %ecx
+ leal 1518500249(%eax,%edi,1),%eax
+ movl $-1, %edi
+ roll $11, %eax
+ addl %ebp, %eax
+ /* 21 */
+ addl %esi, %ebp
+ movl %ebx, %esi
+ subl %eax, %edi
+ andl %eax, %esi
+ andl %ecx, %edi
+ orl %edi, %esi
+ movl 64(%esp), %edi
+ roll $10, %ebx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ movl $-1, %esi
+ roll $9, %ebp
+ addl %edx, %ebp
+ /* 22 */
+ addl %edi, %edx
+ movl %eax, %edi
+ subl %ebp, %esi
+ andl %ebp, %edi
+ andl %ebx, %esi
+ orl %esi, %edi
+ movl 16(%esp), %esi
+ roll $10, %eax
+ leal 1518500249(%edx,%edi,1),%edx
+ movl $-1, %edi
+ roll $7, %edx
+ addl %ecx, %edx
+ /* 23 */
+ addl %esi, %ecx
+ movl %ebp, %esi
+ subl %edx, %edi
+ andl %edx, %esi
+ andl %eax, %edi
+ orl %edi, %esi
+ movl 52(%esp), %edi
+ roll $10, %ebp
+ leal 1518500249(%ecx,%esi,1),%ecx
+ movl $-1, %esi
+ roll $15, %ecx
+ addl %ebx, %ecx
+ /* 24 */
+ addl %edi, %ebx
+ movl %edx, %edi
+ subl %ecx, %esi
+ andl %ecx, %edi
+ andl %ebp, %esi
+ orl %esi, %edi
+ movl 4(%esp), %esi
+ roll $10, %edx
+ leal 1518500249(%ebx,%edi,1),%ebx
+ movl $-1, %edi
+ roll $7, %ebx
+ addl %eax, %ebx
+ /* 25 */
+ addl %esi, %eax
+ movl %ecx, %esi
+ subl %ebx, %edi
+ andl %ebx, %esi
+ andl %edx, %edi
+ orl %edi, %esi
+ movl 40(%esp), %edi
+ roll $10, %ecx
+ leal 1518500249(%eax,%esi,1),%eax
+ movl $-1, %esi
+ roll $12, %eax
+ addl %ebp, %eax
+ /* 26 */
+ addl %edi, %ebp
+ movl %ebx, %edi
+ subl %eax, %esi
+ andl %eax, %edi
+ andl %ecx, %esi
+ orl %esi, %edi
+ movl 24(%esp), %esi
+ roll $10, %ebx
+ leal 1518500249(%ebp,%edi,1),%ebp
+ movl $-1, %edi
+ roll $15, %ebp
+ addl %edx, %ebp
+ /* 27 */
+ addl %esi, %edx
+ movl %eax, %esi
+ subl %ebp, %edi
+ andl %ebp, %esi
+ andl %ebx, %edi
+ orl %edi, %esi
+ movl 12(%esp), %edi
+ roll $10, %eax
+ leal 1518500249(%edx,%esi,1),%edx
+ movl $-1, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 28 */
+ addl %edi, %ecx
+ movl %ebp, %edi
+ subl %edx, %esi
+ andl %edx, %edi
+ andl %eax, %esi
+ orl %esi, %edi
+ movl 60(%esp), %esi
+ roll $10, %ebp
+ leal 1518500249(%ecx,%edi,1),%ecx
+ movl $-1, %edi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 29 */
+ addl %esi, %ebx
+ movl %edx, %esi
+ subl %ecx, %edi
+ andl %ecx, %esi
+ andl %ebp, %edi
+ orl %edi, %esi
+ movl 48(%esp), %edi
+ roll $10, %edx
+ leal 1518500249(%ebx,%esi,1),%ebx
+ movl $-1, %esi
+ roll $7, %ebx
+ addl %eax, %ebx
+ /* 30 */
+ addl %edi, %eax
+ movl %ecx, %edi
+ subl %ebx, %esi
+ andl %ebx, %edi
+ andl %edx, %esi
+ orl %esi, %edi
+ movl 36(%esp), %esi
+ roll $10, %ecx
+ leal 1518500249(%eax,%edi,1),%eax
+ movl $-1, %edi
+ roll $13, %eax
+ addl %ebp, %eax
+ /* 31 */
+ addl %esi, %ebp
+ movl %ebx, %esi
+ subl %eax, %edi
+ andl %eax, %esi
+ andl %ecx, %edi
+ orl %edi, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 1518500249(%ebp,%esi,1),%ebp
+ subl %eax, %edi
+ roll $12, %ebp
+ addl %edx, %ebp
+ /* 32 */
+ movl 16(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %edx
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1859775393(%edx,%edi,1),%edx
+ subl %ebp, %esi
+ roll $11, %edx
+ addl %ecx, %edx
+ /* 33 */
+ movl 44(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ecx
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1859775393(%ecx,%esi,1),%ecx
+ subl %edx, %edi
+ roll $13, %ecx
+ addl %ebx, %ecx
+ /* 34 */
+ movl 60(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %ebx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1859775393(%ebx,%edi,1),%ebx
+ subl %ecx, %esi
+ roll $6, %ebx
+ addl %eax, %ebx
+ /* 35 */
+ movl 20(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %eax
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1859775393(%eax,%esi,1),%eax
+ subl %ebx, %edi
+ roll $7, %eax
+ addl %ebp, %eax
+ /* 36 */
+ movl 40(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %ebp
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 1859775393(%ebp,%edi,1),%ebp
+ subl %eax, %esi
+ roll $14, %ebp
+ addl %edx, %ebp
+ /* 37 */
+ movl 64(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %edx
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 1859775393(%edx,%esi,1),%edx
+ subl %ebp, %edi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 38 */
+ movl 36(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ecx
+ xorl %eax, %edi
+ movl $-1, %esi
+ roll $10, %ebp
+ leal 1859775393(%ecx,%edi,1),%ecx
+ subl %edx, %esi
+ roll $13, %ecx
+ addl %ebx, %ecx
+ /* 39 */
+ movl 8(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %ebx
+ xorl %ebp, %esi
+ movl $-1, %edi
+ roll $10, %edx
+ leal 1859775393(%ebx,%esi,1),%ebx
+ subl %ecx, %edi
+ roll $15, %ebx
+ addl %eax, %ebx
+ /* 40 */
+ movl 12(%esp), %esi
+ orl %ebx, %edi
+ addl %esi, %eax
+ xorl %edx, %edi
+ movl $-1, %esi
+ roll $10, %ecx
+ leal 1859775393(%eax,%edi,1),%eax
+ subl %ebx, %esi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 41 */
+ movl 32(%esp), %edi
+ orl %eax, %esi
+ addl %edi, %ebp
+ xorl %ecx, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 1859775393(%ebp,%esi,1),%ebp
+ subl %eax, %edi
+ roll $8, %ebp
+ addl %edx, %ebp
+ /* 42 */
+ movl 4(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %edx
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1859775393(%edx,%edi,1),%edx
+ subl %ebp, %esi
+ roll $13, %edx
+ addl %ecx, %edx
+ /* 43 */
+ movl 28(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ecx
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1859775393(%ecx,%esi,1),%ecx
+ subl %edx, %edi
+ roll $6, %ecx
+ addl %ebx, %ecx
+ /* 44 */
+ movl 56(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %ebx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1859775393(%ebx,%edi,1),%ebx
+ subl %ecx, %esi
+ roll $5, %ebx
+ addl %eax, %ebx
+ /* 45 */
+ movl 48(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %eax
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1859775393(%eax,%esi,1),%eax
+ subl %ebx, %edi
+ roll $12, %eax
+ addl %ebp, %eax
+ /* 46 */
+ movl 24(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %ebp
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 1859775393(%ebp,%edi,1),%ebp
+ subl %eax, %esi
+ roll $7, %ebp
+ addl %edx, %ebp
+ /* 47 */
+ movl 52(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %edx
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 1859775393(%edx,%esi,1),%edx
+ movl %eax, %esi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 48 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 8(%esp), %esi
+ roll $10, %ebp
+ leal 2400959708(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 49 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 40(%esp), %esi
+ roll $10, %edx
+ leal 2400959708(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $12, %ebx
+ addl %eax, %ebx
+ /* 50 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 48(%esp), %esi
+ roll $10, %ecx
+ leal 2400959708(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 51 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 44(%esp), %esi
+ roll $10, %ebx
+ leal 2400959708(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $15, %ebp
+ addl %edx, %ebp
+ /* 52 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 4(%esp), %esi
+ roll $10, %eax
+ leal 2400959708(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $14, %edx
+ addl %ecx, %edx
+ /* 53 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 36(%esp), %esi
+ roll $10, %ebp
+ leal 2400959708(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $15, %ecx
+ addl %ebx, %ecx
+ /* 54 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 52(%esp), %esi
+ roll $10, %edx
+ leal 2400959708(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $9, %ebx
+ addl %eax, %ebx
+ /* 55 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 20(%esp), %esi
+ roll $10, %ecx
+ leal 2400959708(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 56 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 56(%esp), %esi
+ roll $10, %ebx
+ leal 2400959708(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $9, %ebp
+ addl %edx, %ebp
+ /* 57 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 16(%esp), %esi
+ roll $10, %eax
+ leal 2400959708(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $14, %edx
+ addl %ecx, %edx
+ /* 58 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 32(%esp), %esi
+ roll $10, %ebp
+ leal 2400959708(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $5, %ecx
+ addl %ebx, %ecx
+ /* 59 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 64(%esp), %esi
+ roll $10, %edx
+ leal 2400959708(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $6, %ebx
+ addl %eax, %ebx
+ /* 60 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 60(%esp), %esi
+ roll $10, %ecx
+ leal 2400959708(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 61 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 24(%esp), %esi
+ roll $10, %ebx
+ leal 2400959708(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $6, %ebp
+ addl %edx, %ebp
+ /* 62 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 28(%esp), %esi
+ roll $10, %eax
+ leal 2400959708(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 63 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 12(%esp), %esi
+ roll $10, %ebp
+ leal 2400959708(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ subl %ebp, %edi
+ roll $12, %ecx
+ addl %ebx, %ecx
+ /* 64 */
+ movl 20(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ebx
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 2840853838(%ebx,%edi,1),%ebx
+ subl %edx, %esi
+ roll $9, %ebx
+ addl %eax, %ebx
+ /* 65 */
+ movl 4(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %eax
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 2840853838(%eax,%esi,1),%eax
+ subl %ecx, %edi
+ roll $15, %eax
+ addl %ebp, %eax
+ /* 66 */
+ movl 24(%esp), %esi
+ orl %ebx, %edi
+ addl %esi, %ebp
+ xorl %eax, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 2840853838(%ebp,%edi,1),%ebp
+ subl %ebx, %esi
+ roll $5, %ebp
+ addl %edx, %ebp
+ /* 67 */
+ movl 40(%esp), %edi
+ orl %eax, %esi
+ addl %edi, %edx
+ xorl %ebp, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 2840853838(%edx,%esi,1),%edx
+ subl %eax, %edi
+ roll $11, %edx
+ addl %ecx, %edx
+ /* 68 */
+ movl 32(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %ecx
+ xorl %edx, %edi
+ movl $-1, %esi
+ roll $10, %ebp
+ leal 2840853838(%ecx,%edi,1),%ecx
+ subl %ebp, %esi
+ roll $6, %ecx
+ addl %ebx, %ecx
+ /* 69 */
+ movl 52(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ebx
+ xorl %ecx, %esi
+ movl $-1, %edi
+ roll $10, %edx
+ leal 2840853838(%ebx,%esi,1),%ebx
+ subl %edx, %edi
+ roll $8, %ebx
+ addl %eax, %ebx
+ /* 70 */
+ movl 12(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %eax
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %ecx
+ leal 2840853838(%eax,%edi,1),%eax
+ subl %ecx, %esi
+ roll $13, %eax
+ addl %ebp, %eax
+ /* 71 */
+ movl 44(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %ebp
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 2840853838(%ebp,%esi,1),%ebp
+ subl %ebx, %edi
+ roll $12, %ebp
+ addl %edx, %ebp
+ /* 72 */
+ movl 60(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %edx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 2840853838(%edx,%edi,1),%edx
+ subl %eax, %esi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 73 */
+ movl 8(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %ecx
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 2840853838(%ecx,%esi,1),%ecx
+ subl %ebp, %edi
+ roll $12, %ecx
+ addl %ebx, %ecx
+ /* 74 */
+ movl 16(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ebx
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 2840853838(%ebx,%edi,1),%ebx
+ subl %edx, %esi
+ roll $13, %ebx
+ addl %eax, %ebx
+ /* 75 */
+ movl 36(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %eax
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 2840853838(%eax,%esi,1),%eax
+ subl %ecx, %edi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 76 */
+ movl 48(%esp), %esi
+ orl %ebx, %edi
+ addl %esi, %ebp
+ xorl %eax, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 2840853838(%ebp,%edi,1),%ebp
+ subl %ebx, %esi
+ roll $11, %ebp
+ addl %edx, %ebp
+ /* 77 */
+ movl 28(%esp), %edi
+ orl %eax, %esi
+ addl %edi, %edx
+ xorl %ebp, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 2840853838(%edx,%esi,1),%edx
+ subl %eax, %edi
+ roll $8, %edx
+ addl %ecx, %edx
+ /* 78 */
+ movl 64(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %ecx
+ xorl %edx, %edi
+ movl $-1, %esi
+ roll $10, %ebp
+ leal 2840853838(%ecx,%edi,1),%ecx
+ subl %ebp, %esi
+ roll $5, %ecx
+ addl %ebx, %ecx
+ /* 79 */
+ movl 56(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ebx
+ xorl %ecx, %esi
+ movl 108(%esp), %edi
+ roll $10, %edx
+ leal 2840853838(%ebx,%esi,1),%ebx
+ movl %eax, 68(%esp)
+ roll $6, %ebx
+ addl %eax, %ebx
+ movl (%edi), %eax
+ movl %ebx, 72(%esp)
+ movl %ecx, 76(%esp)
+ movl 4(%edi), %ebx
+ movl %edx, 80(%esp)
+ movl 8(%edi), %ecx
+ movl %ebp, 84(%esp)
+ movl 12(%edi), %edx
+ movl 16(%edi), %ebp
+ /* 80 */
+ movl $-1, %edi
+ subl %edx, %edi
+ movl 24(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %eax
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %ecx
+ leal 1352829926(%eax,%edi,1),%eax
+ subl %ecx, %esi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 81 */
+ movl 60(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %ebp
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 1352829926(%ebp,%esi,1),%ebp
+ subl %ebx, %edi
+ roll $9, %ebp
+ addl %edx, %ebp
+ /* 82 */
+ movl 32(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %edx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1352829926(%edx,%edi,1),%edx
+ subl %eax, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 83 */
+ movl 4(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %ecx
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1352829926(%ecx,%esi,1),%ecx
+ subl %ebp, %edi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 84 */
+ movl 40(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ebx
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1352829926(%ebx,%edi,1),%ebx
+ subl %edx, %esi
+ roll $13, %ebx
+ addl %eax, %ebx
+ /* 85 */
+ movl 12(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %eax
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1352829926(%eax,%esi,1),%eax
+ subl %ecx, %edi
+ roll $15, %eax
+ addl %ebp, %eax
+ /* 86 */
+ movl 48(%esp), %esi
+ orl %ebx, %edi
+ addl %esi, %ebp
+ xorl %eax, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 1352829926(%ebp,%edi,1),%ebp
+ subl %ebx, %esi
+ roll $15, %ebp
+ addl %edx, %ebp
+ /* 87 */
+ movl 20(%esp), %edi
+ orl %eax, %esi
+ addl %edi, %edx
+ xorl %ebp, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 1352829926(%edx,%esi,1),%edx
+ subl %eax, %edi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 88 */
+ movl 56(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %ecx
+ xorl %edx, %edi
+ movl $-1, %esi
+ roll $10, %ebp
+ leal 1352829926(%ecx,%edi,1),%ecx
+ subl %ebp, %esi
+ roll $7, %ecx
+ addl %ebx, %ecx
+ /* 89 */
+ movl 28(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ebx
+ xorl %ecx, %esi
+ movl $-1, %edi
+ roll $10, %edx
+ leal 1352829926(%ebx,%esi,1),%ebx
+ subl %edx, %edi
+ roll $7, %ebx
+ addl %eax, %ebx
+ /* 90 */
+ movl 64(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %eax
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %ecx
+ leal 1352829926(%eax,%edi,1),%eax
+ subl %ecx, %esi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 91 */
+ movl 36(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %ebp
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 1352829926(%ebp,%esi,1),%ebp
+ subl %ebx, %edi
+ roll $11, %ebp
+ addl %edx, %ebp
+ /* 92 */
+ movl 8(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %edx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1352829926(%edx,%edi,1),%edx
+ subl %eax, %esi
+ roll $14, %edx
+ addl %ecx, %edx
+ /* 93 */
+ movl 44(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %ecx
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1352829926(%ecx,%esi,1),%ecx
+ subl %ebp, %edi
+ roll $14, %ecx
+ addl %ebx, %ecx
+ /* 94 */
+ movl 16(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ebx
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1352829926(%ebx,%edi,1),%ebx
+ subl %edx, %esi
+ roll $12, %ebx
+ addl %eax, %ebx
+ /* 95 */
+ movl 52(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %eax
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1352829926(%eax,%esi,1),%eax
+ movl %ecx, %esi
+ roll $6, %eax
+ addl %ebp, %eax
+ /* 96 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 28(%esp), %esi
+ roll $10, %ebx
+ leal 1548603684(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $9, %ebp
+ addl %edx, %ebp
+ /* 97 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 48(%esp), %esi
+ roll $10, %eax
+ leal 1548603684(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $13, %edx
+ addl %ecx, %edx
+ /* 98 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 16(%esp), %esi
+ roll $10, %ebp
+ leal 1548603684(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $15, %ecx
+ addl %ebx, %ecx
+ /* 99 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 32(%esp), %esi
+ roll $10, %edx
+ leal 1548603684(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $7, %ebx
+ addl %eax, %ebx
+ /* 100 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 4(%esp), %esi
+ roll $10, %ecx
+ leal 1548603684(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $12, %eax
+ addl %ebp, %eax
+ /* 101 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 56(%esp), %esi
+ roll $10, %ebx
+ leal 1548603684(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $8, %ebp
+ addl %edx, %ebp
+ /* 102 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 24(%esp), %esi
+ roll $10, %eax
+ leal 1548603684(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 103 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 44(%esp), %esi
+ roll $10, %ebp
+ leal 1548603684(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 104 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 60(%esp), %esi
+ roll $10, %edx
+ leal 1548603684(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $7, %ebx
+ addl %eax, %ebx
+ /* 105 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 64(%esp), %esi
+ roll $10, %ecx
+ leal 1548603684(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $7, %eax
+ addl %ebp, %eax
+ /* 106 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 36(%esp), %esi
+ roll $10, %ebx
+ leal 1548603684(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ movl %ebx, %esi
+ roll $12, %ebp
+ addl %edx, %ebp
+ /* 107 */
+ subl %ebx, %edi
+ andl %ebp, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl 52(%esp), %esi
+ roll $10, %eax
+ leal 1548603684(%edx,%edi,),%edx
+ movl $-1, %edi
+ addl %esi, %edx
+ movl %eax, %esi
+ roll $7, %edx
+ addl %ecx, %edx
+ /* 108 */
+ subl %eax, %edi
+ andl %edx, %esi
+ andl %ebp, %edi
+ orl %esi, %edi
+ movl 20(%esp), %esi
+ roll $10, %ebp
+ leal 1548603684(%ecx,%edi,),%ecx
+ movl $-1, %edi
+ addl %esi, %ecx
+ movl %ebp, %esi
+ roll $6, %ecx
+ addl %ebx, %ecx
+ /* 109 */
+ subl %ebp, %edi
+ andl %ecx, %esi
+ andl %edx, %edi
+ orl %esi, %edi
+ movl 40(%esp), %esi
+ roll $10, %edx
+ leal 1548603684(%ebx,%edi,),%ebx
+ movl $-1, %edi
+ addl %esi, %ebx
+ movl %edx, %esi
+ roll $15, %ebx
+ addl %eax, %ebx
+ /* 110 */
+ subl %edx, %edi
+ andl %ebx, %esi
+ andl %ecx, %edi
+ orl %esi, %edi
+ movl 8(%esp), %esi
+ roll $10, %ecx
+ leal 1548603684(%eax,%edi,),%eax
+ movl $-1, %edi
+ addl %esi, %eax
+ movl %ecx, %esi
+ roll $13, %eax
+ addl %ebp, %eax
+ /* 111 */
+ subl %ecx, %edi
+ andl %eax, %esi
+ andl %ebx, %edi
+ orl %esi, %edi
+ movl 12(%esp), %esi
+ roll $10, %ebx
+ leal 1548603684(%ebp,%edi,),%ebp
+ movl $-1, %edi
+ addl %esi, %ebp
+ subl %eax, %edi
+ roll $11, %ebp
+ addl %edx, %ebp
+ /* 112 */
+ movl 64(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %edx
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1836072691(%edx,%edi,1),%edx
+ subl %ebp, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 113 */
+ movl 24(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ecx
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1836072691(%ecx,%esi,1),%ecx
+ subl %edx, %edi
+ roll $7, %ecx
+ addl %ebx, %ecx
+ /* 114 */
+ movl 8(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %ebx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1836072691(%ebx,%edi,1),%ebx
+ subl %ecx, %esi
+ roll $15, %ebx
+ addl %eax, %ebx
+ /* 115 */
+ movl 16(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %eax
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1836072691(%eax,%esi,1),%eax
+ subl %ebx, %edi
+ roll $11, %eax
+ addl %ebp, %eax
+ /* 116 */
+ movl 32(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %ebp
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 1836072691(%ebp,%edi,1),%ebp
+ subl %eax, %esi
+ roll $8, %ebp
+ addl %edx, %ebp
+ /* 117 */
+ movl 60(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %edx
+ xorl %ebx, %esi
+ movl $-1, %edi
+ roll $10, %eax
+ leal 1836072691(%edx,%esi,1),%edx
+ subl %ebp, %edi
+ roll $6, %edx
+ addl %ecx, %edx
+ /* 118 */
+ movl 28(%esp), %esi
+ orl %edx, %edi
+ addl %esi, %ecx
+ xorl %eax, %edi
+ movl $-1, %esi
+ roll $10, %ebp
+ leal 1836072691(%ecx,%edi,1),%ecx
+ subl %edx, %esi
+ roll $6, %ecx
+ addl %ebx, %ecx
+ /* 119 */
+ movl 40(%esp), %edi
+ orl %ecx, %esi
+ addl %edi, %ebx
+ xorl %ebp, %esi
+ movl $-1, %edi
+ roll $10, %edx
+ leal 1836072691(%ebx,%esi,1),%ebx
+ subl %ecx, %edi
+ roll $14, %ebx
+ addl %eax, %ebx
+ /* 120 */
+ movl 48(%esp), %esi
+ orl %ebx, %edi
+ addl %esi, %eax
+ xorl %edx, %edi
+ movl $-1, %esi
+ roll $10, %ecx
+ leal 1836072691(%eax,%edi,1),%eax
+ subl %ebx, %esi
+ roll $12, %eax
+ addl %ebp, %eax
+ /* 121 */
+ movl 36(%esp), %edi
+ orl %eax, %esi
+ addl %edi, %ebp
+ xorl %ecx, %esi
+ movl $-1, %edi
+ roll $10, %ebx
+ leal 1836072691(%ebp,%esi,1),%ebp
+ subl %eax, %edi
+ roll $13, %ebp
+ addl %edx, %ebp
+ /* 122 */
+ movl 52(%esp), %esi
+ orl %ebp, %edi
+ addl %esi, %edx
+ xorl %ebx, %edi
+ movl $-1, %esi
+ roll $10, %eax
+ leal 1836072691(%edx,%edi,1),%edx
+ subl %ebp, %esi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 123 */
+ movl 12(%esp), %edi
+ orl %edx, %esi
+ addl %edi, %ecx
+ xorl %eax, %esi
+ movl $-1, %edi
+ roll $10, %ebp
+ leal 1836072691(%ecx,%esi,1),%ecx
+ subl %edx, %edi
+ roll $14, %ecx
+ addl %ebx, %ecx
+ /* 124 */
+ movl 44(%esp), %esi
+ orl %ecx, %edi
+ addl %esi, %ebx
+ xorl %ebp, %edi
+ movl $-1, %esi
+ roll $10, %edx
+ leal 1836072691(%ebx,%edi,1),%ebx
+ subl %ecx, %esi
+ roll $13, %ebx
+ addl %eax, %ebx
+ /* 125 */
+ movl 4(%esp), %edi
+ orl %ebx, %esi
+ addl %edi, %eax
+ xorl %edx, %esi
+ movl $-1, %edi
+ roll $10, %ecx
+ leal 1836072691(%eax,%esi,1),%eax
+ subl %ebx, %edi
+ roll $13, %eax
+ addl %ebp, %eax
+ /* 126 */
+ movl 20(%esp), %esi
+ orl %eax, %edi
+ addl %esi, %ebp
+ xorl %ecx, %edi
+ movl $-1, %esi
+ roll $10, %ebx
+ leal 1836072691(%ebp,%edi,1),%ebp
+ subl %eax, %esi
+ roll $7, %ebp
+ addl %edx, %ebp
+ /* 127 */
+ movl 56(%esp), %edi
+ orl %ebp, %esi
+ addl %edi, %edx
+ xorl %ebx, %esi
+ movl 36(%esp), %edi
+ roll $10, %eax
+ leal 1836072691(%edx,%esi,1),%edx
+ movl $-1, %esi
+ roll $5, %edx
+ addl %ecx, %edx
+ /* 128 */
+ addl %edi, %ecx
+ movl %ebp, %edi
+ subl %edx, %esi
+ andl %edx, %edi
+ andl %eax, %esi
+ orl %esi, %edi
+ movl 28(%esp), %esi
+ roll $10, %ebp
+ leal 2053994217(%ecx,%edi,1),%ecx
+ movl $-1, %edi
+ roll $15, %ecx
+ addl %ebx, %ecx
+ /* 129 */
+ addl %esi, %ebx
+ movl %edx, %esi
+ subl %ecx, %edi
+ andl %ecx, %esi
+ andl %ebp, %edi
+ orl %edi, %esi
+ movl 20(%esp), %edi
+ roll $10, %edx
+ leal 2053994217(%ebx,%esi,1),%ebx
+ movl $-1, %esi
+ roll $5, %ebx
+ addl %eax, %ebx
+ /* 130 */
+ addl %edi, %eax
+ movl %ecx, %edi
+ subl %ebx, %esi
+ andl %ebx, %edi
+ andl %edx, %esi
+ orl %esi, %edi
+ movl 8(%esp), %esi
+ roll $10, %ecx
+ leal 2053994217(%eax,%edi,1),%eax
+ movl $-1, %edi
+ roll $8, %eax
+ addl %ebp, %eax
+ /* 131 */
+ addl %esi, %ebp
+ movl %ebx, %esi
+ subl %eax, %edi
+ andl %eax, %esi
+ andl %ecx, %edi
+ orl %edi, %esi
+ movl 16(%esp), %edi
+ roll $10, %ebx
+ leal 2053994217(%ebp,%esi,1),%ebp
+ movl $-1, %esi
+ roll $11, %ebp
+ addl %edx, %ebp
+ /* 132 */
+ addl %edi, %edx
+ movl %eax, %edi
+ subl %ebp, %esi
+ andl %ebp, %edi
+ andl %ebx, %esi
+ orl %esi, %edi
+ movl 48(%esp), %esi
+ roll $10, %eax
+ leal 2053994217(%edx,%edi,1),%edx
+ movl $-1, %edi
+ roll $14, %edx
+ addl %ecx, %edx
+ /* 133 */
+ addl %esi, %ecx
+ movl %ebp, %esi
+ subl %edx, %edi
+ andl %edx, %esi
+ andl %eax, %edi
+ orl %edi, %esi
+ movl 64(%esp), %edi
+ roll $10, %ebp
+ leal 2053994217(%ecx,%esi,1),%ecx
+ movl $-1, %esi
+ roll $14, %ecx
+ addl %ebx, %ecx
+ /* 134 */
+ addl %edi, %ebx
+ movl %edx, %edi
+ subl %ecx, %esi
+ andl %ecx, %edi
+ andl %ebp, %esi
+ orl %esi, %edi
+ movl 4(%esp), %esi
+ roll $10, %edx
+ leal 2053994217(%ebx,%edi,1),%ebx
+ movl $-1, %edi
+ roll $6, %ebx
+ addl %eax, %ebx
+ /* 135 */
+ addl %esi, %eax
+ movl %ecx, %esi
+ subl %ebx, %edi
+ andl %ebx, %esi
+ andl %edx, %edi
+ orl %edi, %esi
+ movl 24(%esp), %edi
+ roll $10, %ecx
+ leal 2053994217(%eax,%esi,1),%eax
+ movl $-1, %esi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 136 */
+ addl %edi, %ebp
+ movl %ebx, %edi
+ subl %eax, %esi
+ andl %eax, %edi
+ andl %ecx, %esi
+ orl %esi, %edi
+ movl 52(%esp), %esi
+ roll $10, %ebx
+ leal 2053994217(%ebp,%edi,1),%ebp
+ movl $-1, %edi
+ roll $6, %ebp
+ addl %edx, %ebp
+ /* 137 */
+ addl %esi, %edx
+ movl %eax, %esi
+ subl %ebp, %edi
+ andl %ebp, %esi
+ andl %ebx, %edi
+ orl %edi, %esi
+ movl 12(%esp), %edi
+ roll $10, %eax
+ leal 2053994217(%edx,%esi,1),%edx
+ movl $-1, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 138 */
+ addl %edi, %ecx
+ movl %ebp, %edi
+ subl %edx, %esi
+ andl %edx, %edi
+ andl %eax, %esi
+ orl %esi, %edi
+ movl 56(%esp), %esi
+ roll $10, %ebp
+ leal 2053994217(%ecx,%edi,1),%ecx
+ movl $-1, %edi
+ roll $12, %ecx
+ addl %ebx, %ecx
+ /* 139 */
+ addl %esi, %ebx
+ movl %edx, %esi
+ subl %ecx, %edi
+ andl %ecx, %esi
+ andl %ebp, %edi
+ orl %edi, %esi
+ movl 40(%esp), %edi
+ roll $10, %edx
+ leal 2053994217(%ebx,%esi,1),%ebx
+ movl $-1, %esi
+ roll $9, %ebx
+ addl %eax, %ebx
+ /* 140 */
+ addl %edi, %eax
+ movl %ecx, %edi
+ subl %ebx, %esi
+ andl %ebx, %edi
+ andl %edx, %esi
+ orl %esi, %edi
+ movl 32(%esp), %esi
+ roll $10, %ecx
+ leal 2053994217(%eax,%edi,1),%eax
+ movl $-1, %edi
+ roll $12, %eax
+ addl %ebp, %eax
+ /* 141 */
+ addl %esi, %ebp
+ movl %ebx, %esi
+ subl %eax, %edi
+ andl %eax, %esi
+ andl %ecx, %edi
+ orl %edi, %esi
+ movl 44(%esp), %edi
+ roll $10, %ebx
+ leal 2053994217(%ebp,%esi,1),%ebp
+ movl $-1, %esi
+ roll $5, %ebp
+ addl %edx, %ebp
+ /* 142 */
+ addl %edi, %edx
+ movl %eax, %edi
+ subl %ebp, %esi
+ andl %ebp, %edi
+ andl %ebx, %esi
+ orl %esi, %edi
+ movl 60(%esp), %esi
+ roll $10, %eax
+ leal 2053994217(%edx,%edi,1),%edx
+ movl $-1, %edi
+ roll $15, %edx
+ addl %ecx, %edx
+ /* 143 */
+ addl %esi, %ecx
+ movl %ebp, %esi
+ subl %edx, %edi
+ andl %edx, %esi
+ andl %eax, %edi
+ orl %esi, %edi
+ movl %edx, %esi
+ roll $10, %ebp
+ leal 2053994217(%ecx,%edi,1),%ecx
+ xorl %ebp, %esi
+ roll $8, %ecx
+ addl %ebx, %ecx
+ /* 144 */
+ movl 52(%esp), %edi
+ xorl %ecx, %esi
+ addl %edi, %ebx
+ roll $10, %edx
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $8, %ebx
+ addl %eax, %ebx
+ /* 145 */
+ xorl %edx, %esi
+ movl 64(%esp), %edi
+ xorl %ebx, %esi
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $10, %ecx
+ addl %edi, %eax
+ xorl %ecx, %esi
+ roll $5, %eax
+ addl %ebp, %eax
+ /* 146 */
+ movl 44(%esp), %edi
+ xorl %eax, %esi
+ addl %edi, %ebp
+ roll $10, %ebx
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $12, %ebp
+ addl %edx, %ebp
+ /* 147 */
+ xorl %ebx, %esi
+ movl 20(%esp), %edi
+ xorl %ebp, %esi
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $10, %eax
+ addl %edi, %edx
+ xorl %eax, %esi
+ roll $9, %edx
+ addl %ecx, %edx
+ /* 148 */
+ movl 8(%esp), %edi
+ xorl %edx, %esi
+ addl %edi, %ecx
+ roll $10, %ebp
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $12, %ecx
+ addl %ebx, %ecx
+ /* 149 */
+ xorl %ebp, %esi
+ movl 24(%esp), %edi
+ xorl %ecx, %esi
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $10, %edx
+ addl %edi, %ebx
+ xorl %edx, %esi
+ roll $5, %ebx
+ addl %eax, %ebx
+ /* 150 */
+ movl 36(%esp), %edi
+ xorl %ebx, %esi
+ addl %edi, %eax
+ roll $10, %ecx
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $14, %eax
+ addl %ebp, %eax
+ /* 151 */
+ xorl %ecx, %esi
+ movl 32(%esp), %edi
+ xorl %eax, %esi
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $10, %ebx
+ addl %edi, %ebp
+ xorl %ebx, %esi
+ roll $6, %ebp
+ addl %edx, %ebp
+ /* 152 */
+ movl 28(%esp), %edi
+ xorl %ebp, %esi
+ addl %edi, %edx
+ roll $10, %eax
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $8, %edx
+ addl %ecx, %edx
+ /* 153 */
+ xorl %eax, %esi
+ movl 12(%esp), %edi
+ xorl %edx, %esi
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $10, %ebp
+ addl %edi, %ecx
+ xorl %ebp, %esi
+ roll $13, %ecx
+ addl %ebx, %ecx
+ /* 154 */
+ movl 56(%esp), %edi
+ xorl %ecx, %esi
+ addl %edi, %ebx
+ roll $10, %edx
+ addl %esi, %ebx
+ movl %ecx, %esi
+ roll $6, %ebx
+ addl %eax, %ebx
+ /* 155 */
+ xorl %edx, %esi
+ movl 60(%esp), %edi
+ xorl %ebx, %esi
+ addl %esi, %eax
+ movl %ebx, %esi
+ roll $10, %ecx
+ addl %edi, %eax
+ xorl %ecx, %esi
+ roll $5, %eax
+ addl %ebp, %eax
+ /* 156 */
+ movl 4(%esp), %edi
+ xorl %eax, %esi
+ addl %edi, %ebp
+ roll $10, %ebx
+ addl %esi, %ebp
+ movl %eax, %esi
+ roll $15, %ebp
+ addl %edx, %ebp
+ /* 157 */
+ xorl %ebx, %esi
+ movl 16(%esp), %edi
+ xorl %ebp, %esi
+ addl %esi, %edx
+ movl %ebp, %esi
+ roll $10, %eax
+ addl %edi, %edx
+ xorl %eax, %esi
+ roll $13, %edx
+ addl %ecx, %edx
+ /* 158 */
+ movl 40(%esp), %edi
+ xorl %edx, %esi
+ addl %edi, %ecx
+ roll $10, %ebp
+ addl %esi, %ecx
+ movl %edx, %esi
+ roll $11, %ecx
+ addl %ebx, %ecx
+ /* 159 */
+ xorl %ebp, %esi
+ movl 48(%esp), %edi
+ xorl %ecx, %esi
+ addl %esi, %ebx
+ roll $10, %edx
+ addl %edi, %ebx
+ movl 108(%esp), %edi
+ roll $11, %ebx
+ addl %eax, %ebx
+ movl 4(%edi), %esi
+ addl %esi, %edx
+ movl 76(%esp), %esi
+ addl %esi, %edx
+ movl 8(%edi), %esi
+ addl %esi, %ebp
+ movl 80(%esp), %esi
+ addl %esi, %ebp
+ movl 12(%edi), %esi
+ addl %esi, %eax
+ movl 84(%esp), %esi
+ addl %esi, %eax
+ movl 16(%edi), %esi
+ addl %esi, %ebx
+ movl 68(%esp), %esi
+ addl %esi, %ebx
+ movl (%edi), %esi
+ addl %esi, %ecx
+ movl 72(%esp), %esi
+ addl %esi, %ecx
+ movl %edx, (%edi)
+ movl %ebp, 4(%edi)
+ movl %eax, 8(%edi)
+ movl %ebx, 12(%edi)
+ movl %ecx, 16(%edi)
+ movl (%esp), %edi
+ movl 112(%esp), %esi
+ cmpl %esi, %edi
+ movl 108(%esp), %edi
+ jge .L000start
+ addl $88, %esp
+ popl %ebx
+ popl %ebp
+ popl %edi
+ popl %esi
+ ret
+.ripemd160_block_x86_end:
+ SIZE(ripemd160_block_x86,.ripemd160_block_x86_end-ripemd160_block_x86)
+.ident "desasm.pl"
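
A note on the generated rounds above: the recurring trick is that, in two's
complement, -1 - y == ~y, so the sequence "movl $-1, %reg; subl y, %reg"
materialises a bitwise NOT without a NOT instruction. A minimal C sketch of
the two boolean-function shapes this implements (function names are
illustrative, operand roles vary by round, and a 32-bit unsigned long is
assumed, as on the x86 target):

	/* "or-not" rounds, e.g. constant 1859775393: (x | ~y) ^ z */
	unsigned long f_ornot(x, y, z)
	unsigned long x,y,z;
		{
		unsigned long t = 0xffffffffL - y;	/* subl: t = ~y */
		t |= x;					/* orl */
		return(t ^ z);				/* xorl */
		}

	/* "mux" rounds, e.g. constant 2400959708: (x & y) | (~x & z) */
	unsigned long f_mux(x, y, z)
	unsigned long x,y,z;
		{
		return((x & y) | ((0xffffffffL - x) & z));	/* andl; subl+andl; orl */
		}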
diff --git a/crypto/ripemd/asm/rmd-586.pl b/crypto/ripemd/asm/rmd-586.pl
new file mode 100644
index 0000000000..4c8098ac67
--- /dev/null
+++ b/crypto/ripemd/asm/rmd-586.pl
@@ -0,0 +1,582 @@
+#!/usr/local/bin/perl
+
+# The normal version is
+# ripemd160_block_x86(RIPEMD160_CTX *c, ULONG *X);
+# and the non-normal version ($normal=0, the default used here) is
+# ripemd160_block_x86(RIPEMD160_CTX *c, ULONG *X, int blocks);
+
+$normal=0;
+
+push(@INC,"perlasm","../../perlasm");
+require "x86asm.pl";
+
+&asm_init($ARGV[0],$0);
+
+$A="eax";
+$B="ebx";
+$C="ecx";
+$D="edx";
+$E="ebp";
+$tmp1="esi";
+$tmp2="edi";
+
+$KL1=0x5A827999;
+$KL2=0x6ED9EBA1;
+$KL3=0x8F1BBCDC;
+$KL4=0xA953FD4E;
+$KR0=0x50A28BE6;
+$KR1=0x5C4DD124;
+$KR2=0x6D703EF3;
+$KR3=0x7A6D76E9;
+
+
+@wl=( 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10,11,12,13,14,15,
+ 7, 4,13, 1,10, 6,15, 3,12, 0, 9, 5, 2,14,11, 8,
+ 3,10,14, 4, 9,15, 8, 1, 2, 7, 0, 6,13,11, 5,12,
+ 1, 9,11,10, 0, 8,12, 4,13, 3, 7,15,14, 5, 6, 2,
+ 4, 0, 5, 9, 7,12, 2,10,14, 1, 3, 8,11, 6,15,13,
+ );
+
+@wr=( 5,14, 7, 0, 9, 2,11, 4,13, 6,15, 8, 1,10, 3,12,
+ 6,11, 3, 7, 0,13, 5,10,14,15, 8,12, 4, 9, 1, 2,
+ 15, 5, 1, 3, 7,14, 6, 9,11, 8,12, 2,10, 0, 4,13,
+ 8, 6, 4, 1, 3,11,15, 0, 5,12, 2,13, 9, 7,10,14,
+ 12,15,10, 4, 1, 5, 8, 7, 6, 2,13,14, 0, 3, 9,11,
+ );
+
+@sl=( 11,14,15,12, 5, 8, 7, 9,11,13,14,15, 6, 7, 9, 8,
+ 7, 6, 8,13,11, 9, 7,15, 7,12,15, 9,11, 7,13,12,
+ 11,13, 6, 7,14, 9,13,15,14, 8,13, 6, 5,12, 7, 5,
+ 11,12,14,15,14,15, 9, 8, 9,14, 5, 6, 8, 6, 5,12,
+ 9,15, 5,11, 6, 8,13,12, 5,12,13,14,11, 8, 5, 6,
+ );
+
+@sr=( 8, 9, 9,11,13,15,15, 5, 7, 7, 8,11,14,14,12, 6,
+ 9,13,15, 7,12, 8, 9,11, 7, 7,12, 7, 6,15,13,11,
+ 9, 7,15,11, 8, 6, 6,14,12,13, 5,14,13,13, 7, 5,
+ 15, 5, 8,11,14,14, 6,14, 6, 9,12, 9,12, 5,15, 8,
+ 8, 5,12, 9,12, 5,14, 6, 8,13, 6, 5,15,13,11,11,
+ );
+
+&ripemd160_block("ripemd160_block_x86");
+&asm_finish();
+
+sub Xv
+ {
+ local($n)=@_;
+ return(&swtmp($n+1));
+ # tmp on stack
+ }
+
+sub Np
+ {
+ local($p)=@_;
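+	# return the register that will hold $p's role in the next round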
+ local(%n)=($A,$E,$B,$A,$C,$B,$D,$C,$E,$D);
+ return($n{$p});
+ }
+
+sub RIP1
+ {
+ local($a,$b,$c,$d,$e,$pos,$s,$o,$pos2)=@_;
+
+ &comment($p++);
+ if ($p & 1)
+ {
+ &mov($tmp1, $c) if $o == -1;
+ &xor($tmp1, $d) if $o == -1;
+ &mov($tmp2, &Xv($pos));
+ &xor($tmp1, $b);
+ &add($a, $tmp2);
+ &rotl($c, 10);
+ &add($a, $tmp1);
+ &mov($tmp1, &Np($c)); # NEXT
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ else
+ {
+ &xor($tmp1, $d);
+ &mov($tmp2, &Xv($pos));
+ &xor($tmp1, $b);
+ &add($a, $tmp1);
+ &mov($tmp1, &Np($c)) if $o <= 0;
+ &mov($tmp1, -1) if $o == 1;
+ # XXX if $o == 2;
+ &rotl($c, 10);
+ &add($a, $tmp2);
+ &xor($tmp1, &Np($d)) if $o <= 0;
+ &mov($tmp2, &Xv($pos2)) if $o == 1;
+ &mov($tmp2, &wparam(0)) if $o == 2;
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ }
+
+sub RIP2
+ {
+ local($a,$b,$c,$d,$e,$pos,$pos2,$s,$K,$o)=@_;
+
+# XXXXXX
+ &comment($p++);
+ if ($p & 1)
+ {
+# &mov($tmp2, &Xv($pos)) if $o < -1;
+# &mov($tmp1, -1) if $o < -1;
+
+ &add($a, $tmp2);
+ &mov($tmp2, $c);
+ &sub($tmp1, $b);
+ &and($tmp2, $b);
+ &and($tmp1, $d);
+ &or($tmp2, $tmp1);
+ &mov($tmp1, &Xv($pos2)) if $o <= 0; # XXXXXXXXXXXXXX
+ # XXX
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2,1));
+ &mov($tmp2, -1) if $o <= 0;
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ else
+ {
+ # XXX
+ &add($a, $tmp1);
+ &mov($tmp1, $c);
+ &sub($tmp2, $b);
+ &and($tmp1, $b);
+ &and($tmp2, $d);
+ if ($o != 2)
+ {
+ &or($tmp1, $tmp2);
+ &mov($tmp2, &Xv($pos2)) if $o <= 0;
+ &mov($tmp2, -1) if $o == 1;
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp1,1));
+ &mov($tmp1, -1) if $o <= 0;
+ &sub($tmp2, &Np($c)) if $o == 1;
+ } else {
+ &or($tmp2, $tmp1);
+ &mov($tmp1, &Np($c));
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2,1));
+ &xor($tmp1, &Np($d));
+ }
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ }
+
+sub RIP3
+ {
+ local($a,$b,$c,$d,$e,$pos,$s,$K,$o,$pos2)=@_;
+
+ &comment($p++);
+ if ($p & 1)
+ {
+# &mov($tmp2, -1) if $o < -1;
+# &sub($tmp2, $c) if $o < -1;
+ &mov($tmp1, &Xv($pos));
+ &or($tmp2, $b);
+ &add($a, $tmp1);
+ &xor($tmp2, $d);
+ &mov($tmp1, -1) if $o <= 0; # NEXT
+ # XXX
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2,1));
+ &sub($tmp1, &Np($c)) if $o <= 0; # NEXT
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ else
+ {
+ &mov($tmp2, &Xv($pos));
+ &or($tmp1, $b);
+ &add($a, $tmp2);
+ &xor($tmp1, $d);
+ &mov($tmp2, -1) if $o <= 0; # NEXT
+ &mov($tmp2, -1) if $o == 1;
+ &mov($tmp2, &Xv($pos2)) if $o == 2;
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp1,1));
+ &sub($tmp2, &Np($c)) if $o <= 0; # NEXT
+ &mov($tmp1, &Np($d)) if $o == 1;
+ &mov($tmp1, -1) if $o == 2;
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ }
+
+sub RIP4
+ {
+ local($a,$b,$c,$d,$e,$pos,$s,$K,$o)=@_;
+
+ &comment($p++);
+ if ($p & 1)
+ {
+# &mov($tmp2, -1) if $o == -2;
+# &mov($tmp1, $d) if $o == -2;
+ &sub($tmp2, $d);
+ &and($tmp1, $b);
+ &and($tmp2, $c);
+ &or($tmp2, $tmp1);
+ &mov($tmp1, &Xv($pos));
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2));
+ &mov($tmp2, -1) unless $o > 0; # NEXT
+ # XXX
+ &add($a, $tmp1);
+ &mov($tmp1, &Np($d)) unless $o > 0; # NEXT
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ else
+ {
+ &sub($tmp2, $d);
+ &and($tmp1, $b);
+ &and($tmp2, $c);
+ &or($tmp2, $tmp1);
+ &mov($tmp1, &Xv($pos));
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2));
+ &mov($tmp2, -1) if $o == 0; # NEXT
+ &mov($tmp2, -1) if $o == 1;
+ &mov($tmp2, -1) if $o == 2;
+ # XXX
+ &add($a, $tmp1);
+ &mov($tmp1, &Np($d)) if $o == 0; # NEXT
+ &sub($tmp2, &Np($d)) if $o == 1;
+ &sub($tmp2, &Np($c)) if $o == 2;
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ }
+
+sub RIP5
+ {
+ local($a,$b,$c,$d,$e,$pos,$s,$K,$o)=@_;
+
+ &comment($p++);
+ if ($p & 1)
+ {
+ &mov($tmp2, -1) if $o == -2;
+ &sub($tmp2, $d) if $o == -2;
+ &mov($tmp1, &Xv($pos));
+ &or($tmp2, $c);
+ &add($a, $tmp1);
+ &xor($tmp2, $b);
+ &mov($tmp1, -1) if $o <= 0;
+ # XXX
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp2,1));
+ &sub($tmp1, &Np($d)) if $o <= 0;
+ # XXX
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ else
+ {
+ &mov($tmp2, &Xv($pos));
+ &or($tmp1, $c);
+ &add($a, $tmp2);
+ &xor($tmp1, $b);
+ &mov($tmp2, -1) if $o <= 0;
+ &mov($tmp2, &wparam(0)) if $o == 1; # Middle code
+ &mov($tmp2, -1) if $o == 2;
+ &rotl($c, 10);
+ &lea($a, &DWP($K,$a,$tmp1,1));
+ &sub($tmp2, &Np($d)) if $o <= 0;
+ &mov(&swtmp(1+16), $A) if $o == 1;
+ &mov($tmp1, &Np($d)) if $o == 2;
+ &rotl($a, $s);
+ &add($a, $e);
+ }
+ }
+
+sub ripemd160_block
+ {
+ local($name)=@_;
+
+ &function_begin_B($name,"",3);
+
+ # parameter 1 is the RIPEMD160_CTX structure.
+ # A 0
+ # B 4
+ # C 8
+ # D 12
+ # E 16
+
+ &push("esi");
+ &mov($C, &wparam(2));
+ &push("edi");
+ &mov($tmp1, &wparam(1)); # edi
+ &push("ebp");
+ &add($C, $tmp1); # offset we end at
+ &push("ebx");
+ &sub($C, 64);
+ &stack_push(16+5+1);
+ # XXX
+
+ &mov(&swtmp(0), $C);
+ &mov($tmp2, &wparam(0)); # Done at end of loop
+
+ &set_label("start") unless $normal;
+ &comment("");
+
+ # &mov($tmp1, &wparam(1)); # Done at end of loop
+ # &mov($tmp2, &wparam(0)); # Done at end of loop
+
+ for ($z=0; $z<16; $z+=2)
+ {
+ &mov($A, &DWP( $z*4,$tmp1,"",0));
+ &mov($B, &DWP( ($z+1)*4,$tmp1,"",0));
+ &mov(&swtmp(1+$z), $A);
+ &mov(&swtmp(1+$z+1), $B);
+ }
+ &add($tmp1, 64);
+ &mov($A, &DWP( 0,$tmp2,"",0));
+ &mov(&wparam(1),$tmp1);
+ &mov($B, &DWP( 4,$tmp2,"",0));
+ &mov($C, &DWP( 8,$tmp2,"",0));
+ &mov($D, &DWP(12,$tmp2,"",0));
+ &mov($E, &DWP(16,$tmp2,"",0));
+
+ &RIP1($A,$B,$C,$D,$E,$wl[ 0],$sl[ 0],-1);
+ &RIP1($E,$A,$B,$C,$D,$wl[ 1],$sl[ 1],0);
+ &RIP1($D,$E,$A,$B,$C,$wl[ 2],$sl[ 2],0);
+ &RIP1($C,$D,$E,$A,$B,$wl[ 3],$sl[ 3],0);
+ &RIP1($B,$C,$D,$E,$A,$wl[ 4],$sl[ 4],0);
+ &RIP1($A,$B,$C,$D,$E,$wl[ 5],$sl[ 5],0);
+ &RIP1($E,$A,$B,$C,$D,$wl[ 6],$sl[ 6],0);
+ &RIP1($D,$E,$A,$B,$C,$wl[ 7],$sl[ 7],0);
+ &RIP1($C,$D,$E,$A,$B,$wl[ 8],$sl[ 8],0);
+ &RIP1($B,$C,$D,$E,$A,$wl[ 9],$sl[ 9],0);
+ &RIP1($A,$B,$C,$D,$E,$wl[10],$sl[10],0);
+ &RIP1($E,$A,$B,$C,$D,$wl[11],$sl[11],0);
+ &RIP1($D,$E,$A,$B,$C,$wl[12],$sl[12],0);
+ &RIP1($C,$D,$E,$A,$B,$wl[13],$sl[13],0);
+ &RIP1($B,$C,$D,$E,$A,$wl[14],$sl[14],0);
+ &RIP1($A,$B,$C,$D,$E,$wl[15],$sl[15],1,$wl[16]);
+
+ &RIP2($E,$A,$B,$C,$D,$wl[16],$wl[17],$sl[16],$KL1,-1);
+ &RIP2($D,$E,$A,$B,$C,$wl[17],$wl[18],$sl[17],$KL1,0);
+ &RIP2($C,$D,$E,$A,$B,$wl[18],$wl[19],$sl[18],$KL1,0);
+ &RIP2($B,$C,$D,$E,$A,$wl[19],$wl[20],$sl[19],$KL1,0);
+ &RIP2($A,$B,$C,$D,$E,$wl[20],$wl[21],$sl[20],$KL1,0);
+ &RIP2($E,$A,$B,$C,$D,$wl[21],$wl[22],$sl[21],$KL1,0);
+ &RIP2($D,$E,$A,$B,$C,$wl[22],$wl[23],$sl[22],$KL1,0);
+ &RIP2($C,$D,$E,$A,$B,$wl[23],$wl[24],$sl[23],$KL1,0);
+ &RIP2($B,$C,$D,$E,$A,$wl[24],$wl[25],$sl[24],$KL1,0);
+ &RIP2($A,$B,$C,$D,$E,$wl[25],$wl[26],$sl[25],$KL1,0);
+ &RIP2($E,$A,$B,$C,$D,$wl[26],$wl[27],$sl[26],$KL1,0);
+ &RIP2($D,$E,$A,$B,$C,$wl[27],$wl[28],$sl[27],$KL1,0);
+ &RIP2($C,$D,$E,$A,$B,$wl[28],$wl[29],$sl[28],$KL1,0);
+ &RIP2($B,$C,$D,$E,$A,$wl[29],$wl[30],$sl[29],$KL1,0);
+ &RIP2($A,$B,$C,$D,$E,$wl[30],$wl[31],$sl[30],$KL1,0);
+ &RIP2($E,$A,$B,$C,$D,$wl[31],$wl[32],$sl[31],$KL1,1);
+
+ &RIP3($D,$E,$A,$B,$C,$wl[32],$sl[32],$KL2,-1);
+ &RIP3($C,$D,$E,$A,$B,$wl[33],$sl[33],$KL2,0);
+ &RIP3($B,$C,$D,$E,$A,$wl[34],$sl[34],$KL2,0);
+ &RIP3($A,$B,$C,$D,$E,$wl[35],$sl[35],$KL2,0);
+ &RIP3($E,$A,$B,$C,$D,$wl[36],$sl[36],$KL2,0);
+ &RIP3($D,$E,$A,$B,$C,$wl[37],$sl[37],$KL2,0);
+ &RIP3($C,$D,$E,$A,$B,$wl[38],$sl[38],$KL2,0);
+ &RIP3($B,$C,$D,$E,$A,$wl[39],$sl[39],$KL2,0);
+ &RIP3($A,$B,$C,$D,$E,$wl[40],$sl[40],$KL2,0);
+ &RIP3($E,$A,$B,$C,$D,$wl[41],$sl[41],$KL2,0);
+ &RIP3($D,$E,$A,$B,$C,$wl[42],$sl[42],$KL2,0);
+ &RIP3($C,$D,$E,$A,$B,$wl[43],$sl[43],$KL2,0);
+ &RIP3($B,$C,$D,$E,$A,$wl[44],$sl[44],$KL2,0);
+ &RIP3($A,$B,$C,$D,$E,$wl[45],$sl[45],$KL2,0);
+ &RIP3($E,$A,$B,$C,$D,$wl[46],$sl[46],$KL2,0);
+ &RIP3($D,$E,$A,$B,$C,$wl[47],$sl[47],$KL2,1);
+
+ &RIP4($C,$D,$E,$A,$B,$wl[48],$sl[48],$KL3,-1);
+ &RIP4($B,$C,$D,$E,$A,$wl[49],$sl[49],$KL3,0);
+ &RIP4($A,$B,$C,$D,$E,$wl[50],$sl[50],$KL3,0);
+ &RIP4($E,$A,$B,$C,$D,$wl[51],$sl[51],$KL3,0);
+ &RIP4($D,$E,$A,$B,$C,$wl[52],$sl[52],$KL3,0);
+ &RIP4($C,$D,$E,$A,$B,$wl[53],$sl[53],$KL3,0);
+ &RIP4($B,$C,$D,$E,$A,$wl[54],$sl[54],$KL3,0);
+ &RIP4($A,$B,$C,$D,$E,$wl[55],$sl[55],$KL3,0);
+ &RIP4($E,$A,$B,$C,$D,$wl[56],$sl[56],$KL3,0);
+ &RIP4($D,$E,$A,$B,$C,$wl[57],$sl[57],$KL3,0);
+ &RIP4($C,$D,$E,$A,$B,$wl[58],$sl[58],$KL3,0);
+ &RIP4($B,$C,$D,$E,$A,$wl[59],$sl[59],$KL3,0);
+ &RIP4($A,$B,$C,$D,$E,$wl[60],$sl[60],$KL3,0);
+ &RIP4($E,$A,$B,$C,$D,$wl[61],$sl[61],$KL3,0);
+ &RIP4($D,$E,$A,$B,$C,$wl[62],$sl[62],$KL3,0);
+ &RIP4($C,$D,$E,$A,$B,$wl[63],$sl[63],$KL3,1);
+
+ &RIP5($B,$C,$D,$E,$A,$wl[64],$sl[64],$KL4,-1);
+ &RIP5($A,$B,$C,$D,$E,$wl[65],$sl[65],$KL4,0);
+ &RIP5($E,$A,$B,$C,$D,$wl[66],$sl[66],$KL4,0);
+ &RIP5($D,$E,$A,$B,$C,$wl[67],$sl[67],$KL4,0);
+ &RIP5($C,$D,$E,$A,$B,$wl[68],$sl[68],$KL4,0);
+ &RIP5($B,$C,$D,$E,$A,$wl[69],$sl[69],$KL4,0);
+ &RIP5($A,$B,$C,$D,$E,$wl[70],$sl[70],$KL4,0);
+ &RIP5($E,$A,$B,$C,$D,$wl[71],$sl[71],$KL4,0);
+ &RIP5($D,$E,$A,$B,$C,$wl[72],$sl[72],$KL4,0);
+ &RIP5($C,$D,$E,$A,$B,$wl[73],$sl[73],$KL4,0);
+ &RIP5($B,$C,$D,$E,$A,$wl[74],$sl[74],$KL4,0);
+ &RIP5($A,$B,$C,$D,$E,$wl[75],$sl[75],$KL4,0);
+ &RIP5($E,$A,$B,$C,$D,$wl[76],$sl[76],$KL4,0);
+ &RIP5($D,$E,$A,$B,$C,$wl[77],$sl[77],$KL4,0);
+ &RIP5($C,$D,$E,$A,$B,$wl[78],$sl[78],$KL4,0);
+ &RIP5($B,$C,$D,$E,$A,$wl[79],$sl[79],$KL4,1);
+
+ # &mov($tmp2, &wparam(0)); # moved into last RIP5
+ # &mov(&swtmp(1+16), $A);
+ &mov($A, &DWP( 0,$tmp2,"",0));
+ &mov(&swtmp(1+17), $B);
+ &mov(&swtmp(1+18), $C);
+ &mov($B, &DWP( 4,$tmp2,"",0));
+ &mov(&swtmp(1+19), $D);
+ &mov($C, &DWP( 8,$tmp2,"",0));
+ &mov(&swtmp(1+20), $E);
+ &mov($D, &DWP(12,$tmp2,"",0));
+ &mov($E, &DWP(16,$tmp2,"",0));
+
+ &RIP5($A,$B,$C,$D,$E,$wr[ 0],$sr[ 0],$KR0,-2);
+ &RIP5($E,$A,$B,$C,$D,$wr[ 1],$sr[ 1],$KR0,0);
+ &RIP5($D,$E,$A,$B,$C,$wr[ 2],$sr[ 2],$KR0,0);
+ &RIP5($C,$D,$E,$A,$B,$wr[ 3],$sr[ 3],$KR0,0);
+ &RIP5($B,$C,$D,$E,$A,$wr[ 4],$sr[ 4],$KR0,0);
+ &RIP5($A,$B,$C,$D,$E,$wr[ 5],$sr[ 5],$KR0,0);
+ &RIP5($E,$A,$B,$C,$D,$wr[ 6],$sr[ 6],$KR0,0);
+ &RIP5($D,$E,$A,$B,$C,$wr[ 7],$sr[ 7],$KR0,0);
+ &RIP5($C,$D,$E,$A,$B,$wr[ 8],$sr[ 8],$KR0,0);
+ &RIP5($B,$C,$D,$E,$A,$wr[ 9],$sr[ 9],$KR0,0);
+ &RIP5($A,$B,$C,$D,$E,$wr[10],$sr[10],$KR0,0);
+ &RIP5($E,$A,$B,$C,$D,$wr[11],$sr[11],$KR0,0);
+ &RIP5($D,$E,$A,$B,$C,$wr[12],$sr[12],$KR0,0);
+ &RIP5($C,$D,$E,$A,$B,$wr[13],$sr[13],$KR0,0);
+ &RIP5($B,$C,$D,$E,$A,$wr[14],$sr[14],$KR0,0);
+ &RIP5($A,$B,$C,$D,$E,$wr[15],$sr[15],$KR0,2);
+
+ &RIP4($E,$A,$B,$C,$D,$wr[16],$sr[16],$KR1,-2);
+ &RIP4($D,$E,$A,$B,$C,$wr[17],$sr[17],$KR1,0);
+ &RIP4($C,$D,$E,$A,$B,$wr[18],$sr[18],$KR1,0);
+ &RIP4($B,$C,$D,$E,$A,$wr[19],$sr[19],$KR1,0);
+ &RIP4($A,$B,$C,$D,$E,$wr[20],$sr[20],$KR1,0);
+ &RIP4($E,$A,$B,$C,$D,$wr[21],$sr[21],$KR1,0);
+ &RIP4($D,$E,$A,$B,$C,$wr[22],$sr[22],$KR1,0);
+ &RIP4($C,$D,$E,$A,$B,$wr[23],$sr[23],$KR1,0);
+ &RIP4($B,$C,$D,$E,$A,$wr[24],$sr[24],$KR1,0);
+ &RIP4($A,$B,$C,$D,$E,$wr[25],$sr[25],$KR1,0);
+ &RIP4($E,$A,$B,$C,$D,$wr[26],$sr[26],$KR1,0);
+ &RIP4($D,$E,$A,$B,$C,$wr[27],$sr[27],$KR1,0);
+ &RIP4($C,$D,$E,$A,$B,$wr[28],$sr[28],$KR1,0);
+ &RIP4($B,$C,$D,$E,$A,$wr[29],$sr[29],$KR1,0);
+ &RIP4($A,$B,$C,$D,$E,$wr[30],$sr[30],$KR1,0);
+ &RIP4($E,$A,$B,$C,$D,$wr[31],$sr[31],$KR1,2);
+
+ &RIP3($D,$E,$A,$B,$C,$wr[32],$sr[32],$KR2,-2);
+ &RIP3($C,$D,$E,$A,$B,$wr[33],$sr[33],$KR2,0);
+ &RIP3($B,$C,$D,$E,$A,$wr[34],$sr[34],$KR2,0);
+ &RIP3($A,$B,$C,$D,$E,$wr[35],$sr[35],$KR2,0);
+ &RIP3($E,$A,$B,$C,$D,$wr[36],$sr[36],$KR2,0);
+ &RIP3($D,$E,$A,$B,$C,$wr[37],$sr[37],$KR2,0);
+ &RIP3($C,$D,$E,$A,$B,$wr[38],$sr[38],$KR2,0);
+ &RIP3($B,$C,$D,$E,$A,$wr[39],$sr[39],$KR2,0);
+ &RIP3($A,$B,$C,$D,$E,$wr[40],$sr[40],$KR2,0);
+ &RIP3($E,$A,$B,$C,$D,$wr[41],$sr[41],$KR2,0);
+ &RIP3($D,$E,$A,$B,$C,$wr[42],$sr[42],$KR2,0);
+ &RIP3($C,$D,$E,$A,$B,$wr[43],$sr[43],$KR2,0);
+ &RIP3($B,$C,$D,$E,$A,$wr[44],$sr[44],$KR2,0);
+ &RIP3($A,$B,$C,$D,$E,$wr[45],$sr[45],$KR2,0);
+ &RIP3($E,$A,$B,$C,$D,$wr[46],$sr[46],$KR2,0);
+ &RIP3($D,$E,$A,$B,$C,$wr[47],$sr[47],$KR2,2,$wr[48]);
+
+ &RIP2($C,$D,$E,$A,$B,$wr[48],$wr[49],$sr[48],$KR3,-2);
+ &RIP2($B,$C,$D,$E,$A,$wr[49],$wr[50],$sr[49],$KR3,0);
+ &RIP2($A,$B,$C,$D,$E,$wr[50],$wr[51],$sr[50],$KR3,0);
+ &RIP2($E,$A,$B,$C,$D,$wr[51],$wr[52],$sr[51],$KR3,0);
+ &RIP2($D,$E,$A,$B,$C,$wr[52],$wr[53],$sr[52],$KR3,0);
+ &RIP2($C,$D,$E,$A,$B,$wr[53],$wr[54],$sr[53],$KR3,0);
+ &RIP2($B,$C,$D,$E,$A,$wr[54],$wr[55],$sr[54],$KR3,0);
+ &RIP2($A,$B,$C,$D,$E,$wr[55],$wr[56],$sr[55],$KR3,0);
+ &RIP2($E,$A,$B,$C,$D,$wr[56],$wr[57],$sr[56],$KR3,0);
+ &RIP2($D,$E,$A,$B,$C,$wr[57],$wr[58],$sr[57],$KR3,0);
+ &RIP2($C,$D,$E,$A,$B,$wr[58],$wr[59],$sr[58],$KR3,0);
+ &RIP2($B,$C,$D,$E,$A,$wr[59],$wr[60],$sr[59],$KR3,0);
+ &RIP2($A,$B,$C,$D,$E,$wr[60],$wr[61],$sr[60],$KR3,0);
+ &RIP2($E,$A,$B,$C,$D,$wr[61],$wr[62],$sr[61],$KR3,0);
+ &RIP2($D,$E,$A,$B,$C,$wr[62],$wr[63],$sr[62],$KR3,0);
+ &RIP2($C,$D,$E,$A,$B,$wr[63],$wr[64],$sr[63],$KR3,2);
+
+ &RIP1($B,$C,$D,$E,$A,$wr[64],$sr[64],-2);
+ &RIP1($A,$B,$C,$D,$E,$wr[65],$sr[65],0);
+ &RIP1($E,$A,$B,$C,$D,$wr[66],$sr[66],0);
+ &RIP1($D,$E,$A,$B,$C,$wr[67],$sr[67],0);
+ &RIP1($C,$D,$E,$A,$B,$wr[68],$sr[68],0);
+ &RIP1($B,$C,$D,$E,$A,$wr[69],$sr[69],0);
+ &RIP1($A,$B,$C,$D,$E,$wr[70],$sr[70],0);
+ &RIP1($E,$A,$B,$C,$D,$wr[71],$sr[71],0);
+ &RIP1($D,$E,$A,$B,$C,$wr[72],$sr[72],0);
+ &RIP1($C,$D,$E,$A,$B,$wr[73],$sr[73],0);
+ &RIP1($B,$C,$D,$E,$A,$wr[74],$sr[74],0);
+ &RIP1($A,$B,$C,$D,$E,$wr[75],$sr[75],0);
+ &RIP1($E,$A,$B,$C,$D,$wr[76],$sr[76],0);
+ &RIP1($D,$E,$A,$B,$C,$wr[77],$sr[77],0);
+ &RIP1($C,$D,$E,$A,$B,$wr[78],$sr[78],0);
+ &RIP1($B,$C,$D,$E,$A,$wr[79],$sr[79],2);
+
+ # &mov($tmp2, &wparam(0)); # Moved into last round
+
+ &mov($tmp1, &DWP( 4,$tmp2,"",0)); # ctx->B
+ &add($D, $tmp1);
+ &mov($tmp1, &swtmp(1+18)); # $c
+ &add($D, $tmp1);
+
+ &mov($tmp1, &DWP( 8,$tmp2,"",0)); # ctx->C
+ &add($E, $tmp1);
+ &mov($tmp1, &swtmp(1+19)); # $d
+ &add($E, $tmp1);
+
+ &mov($tmp1, &DWP(12,$tmp2,"",0)); # ctx->D
+ &add($A, $tmp1);
+ &mov($tmp1, &swtmp(1+20)); # $e
+ &add($A, $tmp1);
+
+
+ &mov($tmp1, &DWP(16,$tmp2,"",0)); # ctx->E
+ &add($B, $tmp1);
+ &mov($tmp1, &swtmp(1+16)); # $a
+ &add($B, $tmp1);
+
+ &mov($tmp1, &DWP( 0,$tmp2,"",0)); # ctx->A
+ &add($C, $tmp1);
+ &mov($tmp1, &swtmp(1+17)); # $b
+ &add($C, $tmp1);
+
+ &mov(&DWP( 0,$tmp2,"",0), $D);
+ &mov(&DWP( 4,$tmp2,"",0), $E);
+ &mov(&DWP( 8,$tmp2,"",0), $A);
+ &mov(&DWP(12,$tmp2,"",0), $B);
+ &mov(&DWP(16,$tmp2,"",0), $C);
+
+ &mov($tmp2, &swtmp(0));
+ &mov($tmp1, &wparam(1));
+
+ &cmp($tmp2,$tmp1);
+ &mov($tmp2, &wparam(0));
+
+ # XXX
+ &jge(&label("start"));
+
+ &stack_pop(16+5+1);
+
+ &pop("ebx");
+ &pop("ebp");
+ &pop("edi");
+ &pop("esi");
+ &ret();
+ &function_end_B($name);
+ }
+
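
For orientation: @wl/@sl and @wr/@sr above are RIPEMD-160's message-word
order and per-step rotate amounts for the left and right lines, and the
RIP1..RIP5 subs each emit one instruction-interleaved step. A hedged C
sketch of the step RIP1 generates (names illustrative; a 32-bit unsigned
long is assumed):

	#define ROTL(x,n)	(((x)<<(n))|((x)>>(32-(n))))

	/* rounds 0..15 of the left line: f(x,y,z) = x^y^z, no constant */
	void rip1_step(a, b, c, d, e, x, s)
	unsigned long *a,b,*c,d,e,x;
	int s;
		{
		*a += (b ^ *c ^ d) + x;		/* x is X[wl[i]] */
		*a  = ROTL(*a, s) + e;		/* s is sl[i] */
		*c  = ROTL(*c, 10);		/* every step rotates c by 10 */
		}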
diff --git a/crypto/ripemd/ripemd.h b/crypto/ripemd/ripemd.h
new file mode 100644
index 0000000000..a3bc6e3ab2
--- /dev/null
+++ b/crypto/ripemd/ripemd.h
@@ -0,0 +1,99 @@
+/* crypto/ripemd/ripemd.h */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#ifndef HEADER_RIPEMD_H
+#define HEADER_RIPEMD_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define RIPEMD160_CBLOCK 64
+#define RIPEMD160_LBLOCK 16
+#define RIPEMD160_BLOCK 16
+#define RIPEMD160_LAST_BLOCK 56
+#define RIPEMD160_LENGTH_BLOCK 8
+#define RIPEMD160_DIGEST_LENGTH 20
+
+typedef struct RIPEMD160state_st
+ {
+ unsigned long A,B,C,D,E;
+ unsigned long Nl,Nh;
+ unsigned long data[RIPEMD160_LBLOCK];
+ int num;
+ } RIPEMD160_CTX;
+
+#ifndef NOPROTO
+void RIPEMD160_Init(RIPEMD160_CTX *c);
+void RIPEMD160_Update(RIPEMD160_CTX *c, unsigned char *data, unsigned long len);
+void RIPEMD160_Final(unsigned char *md, RIPEMD160_CTX *c);
+unsigned char *RIPEMD160(unsigned char *d, unsigned long n, unsigned char *md);
+void RIPEMD160_Transform(RIPEMD160_CTX *c, unsigned char *b);
+#else
+void RIPEMD160_Init();
+void RIPEMD160_Update();
+void RIPEMD160_Final();
+unsigned char *RIPEMD160();
+void RIPEMD160_Transform();
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
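
A hedged usage sketch for the one-shot entry point declared above (the md
buffer must hold RIPEMD160_DIGEST_LENGTH bytes; the printing helper is
illustrative, not part of the library):

	#include <stdio.h>
	#include "ripemd.h"

	void print_rmd160(d, n)
	unsigned char *d;
	unsigned long n;
		{
		unsigned char md[RIPEMD160_DIGEST_LENGTH];
		int i;

		RIPEMD160(d, n, md);	/* fills md (and also returns it) */
		for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
			printf("%02x", md[i]);
		printf("\n");
		}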
diff --git a/crypto/ripemd/rmd160.c b/crypto/ripemd/rmd160.c
new file mode 100644
index 0000000000..3fa1b8096e
--- /dev/null
+++ b/crypto/ripemd/rmd160.c
@@ -0,0 +1,135 @@
+/* crypto/ripemd/rmd160.c */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include "ripemd.h"
+
+#define BUFSIZE 1024*16
+
+#ifndef NOPROTO
+void do_fp(FILE *f);
+void pt(unsigned char *md);
+int read(int, void *, unsigned int);
+#else
+void do_fp();
+void pt();
+int read();
+#endif
+
+int main(argc, argv)
+int argc;
+char **argv;
+ {
+ int i,err=0;
+ FILE *IN;
+
+ if (argc == 1)
+ {
+ do_fp(stdin);
+ }
+ else
+ {
+ for (i=1; i<argc; i++)
+ {
+ IN=fopen(argv[i],"r");
+ if (IN == NULL)
+ {
+ perror(argv[i]);
+ err++;
+ continue;
+ }
+ printf("RIPEMD160(%s)= ",argv[i]);
+ do_fp(IN);
+ fclose(IN);
+ }
+ }
+ exit(err);
+ }
+
+void do_fp(f)
+FILE *f;
+ {
+ RIPEMD160_CTX c;
+ unsigned char md[RIPEMD160_DIGEST_LENGTH];
+ int fd;
+ int i;
+ static unsigned char buf[BUFSIZE];
+
+ fd=fileno(f);
+ RIPEMD160_Init(&c);
+ for (;;)
+ {
+ i=read(fd,buf,BUFSIZE);
+ if (i <= 0) break;
+ RIPEMD160_Update(&c,buf,(unsigned long)i);
+ }
+ RIPEMD160_Final(&(md[0]),&c);
+ pt(md);
+ }
+
+void pt(md)
+unsigned char *md;
+ {
+ int i;
+
+ for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
+ printf("%02x",md[i]);
+ printf("\n");
+ }
+
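
As linked into apps above, the tool hashes stdin when run without
arguments; running it on empty input (binary name assumed) prints
9c1185a5c5e9fc54612808977ee8f548b2258d31, the published RIPEMD-160
test vector for the empty string.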
diff --git a/crypto/ripemd/rmd_dgst.c b/crypto/ripemd/rmd_dgst.c
new file mode 100644
index 0000000000..210de1977d
--- /dev/null
+++ b/crypto/ripemd/rmd_dgst.c
@@ -0,0 +1,535 @@
+/* crypto/ripemd/rmd_dgst.c */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to. The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#include <stdio.h>
+#include "rmd_locl.h"
+
+char *RMD160_version="RIPEMD160 part of SSLeay 0.9.0b 29-Jun-1998";
+
+#ifndef NOPROTO
+# ifdef RMD160_ASM
+ void ripemd160_block_x86(RIPEMD160_CTX *c, unsigned long *p,int num);
+# define ripemd160_block ripemd160_block_x86
+# else
+ void ripemd160_block(RIPEMD160_CTX *c, unsigned long *p,int num);
+# endif
+#else
+# ifdef RMD160_ASM
+ void ripemd160_block_x86();
+# define ripemd160_block ripemd160_block_x86
+# else
+	 void ripemd160_block();
+# endif
+#endif
+
+void RIPEMD160_Init(c)
+RIPEMD160_CTX *c;
+ {
+ c->A=RIPEMD160_A;
+ c->B=RIPEMD160_B;
+ c->C=RIPEMD160_C;
+ c->D=RIPEMD160_D;
+ c->E=RIPEMD160_E;
+ c->Nl=0;
+ c->Nh=0;
+ c->num=0;
+ }
+
+void RIPEMD160_Update(c, data, len)
+RIPEMD160_CTX *c;
+register unsigned char *data;
+unsigned long len;
+ {
+ register ULONG *p;
+ int sw,sc;
+ ULONG l;
+
+ if (len == 0) return;
+
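+	/* update the 64-bit message bit count: Nl holds the low 32 bits,
+	 * overflow and len>>29 carry into Nh */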
+ l=(c->Nl+(len<<3))&0xffffffffL;
+ if (l < c->Nl) /* overflow */
+ c->Nh++;
+ c->Nh+=(len>>29);
+ c->Nl=l;
+
+ if (c->num != 0)
+ {
+ p=c->data;
+ sw=c->num>>2;
+ sc=c->num&0x03;
+
+ if ((c->num+len) >= RIPEMD160_CBLOCK)
+ {
+ l= p[sw];
+ p_c2l(data,l,sc);
+ p[sw++]=l;
+ for (; sw<RIPEMD160_LBLOCK; sw++)
+ {
+ c2l(data,l);
+ p[sw]=l;
+ }
+ len-=(RIPEMD160_CBLOCK-c->num);
+
+ ripemd160_block(c,p,64);
+ c->num=0;
+ /* drop through and do the rest */
+ }
+ else
+ {
+ int ew,ec;
+
+ c->num+=(int)len;
+		if ((sc+len) < 4) /* ugly: append chars to a partial word */
+ {
+ l= p[sw];
+ p_c2l_p(data,l,sc,len);
+ p[sw]=l;
+ }
+ else
+ {
+ ew=(c->num>>2);
+ ec=(c->num&0x03);
+ l= p[sw];
+ p_c2l(data,l,sc);
+ p[sw++]=l;
+ for (; sw < ew; sw++)
+ { c2l(data,l); p[sw]=l; }
+ if (ec)
+ {
+ c2l_p(data,l,ec);
+ p[sw]=l;
+ }
+ }
+ return;
+ }
+ }
+	/* We can now process the input data in blocks of RIPEMD160_CBLOCK
+	 * chars, saving any leftover bytes to c->data. */
+#ifdef L_ENDIAN
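+	/* little-endian fast path: word-aligned input can be hashed in
+	 * place, without copying through c->data */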
+ if ((((unsigned long)data)%sizeof(ULONG)) == 0)
+ {
+ sw=(int)len/RIPEMD160_CBLOCK;
+ if (sw > 0)
+ {
+ sw*=RIPEMD160_CBLOCK;
+ ripemd160_block(c,(ULONG *)data,sw);
+ data+=sw;
+ len-=sw;
+ }
+ }
+#endif
+ p=c->data;
+ while (len >= RIPEMD160_CBLOCK)
+ {
+#if defined(L_ENDIAN) || defined(B_ENDIAN)
+ if (p != (unsigned long *)data)
+ memcpy(p,data,RIPEMD160_CBLOCK);
+ data+=RIPEMD160_CBLOCK;
+#ifdef B_ENDIAN
+ for (sw=(RIPEMD160_LBLOCK/4); sw; sw--)
+ {
+ Endian_Reverse32(p[0]);
+ Endian_Reverse32(p[1]);
+ Endian_Reverse32(p[2]);
+ Endian_Reverse32(p[3]);
+ p+=4;
+ }
+#endif
+#else
+ for (sw=(RIPEMD160_LBLOCK/4); sw; sw--)
+ {
+ c2l(data,l); *(p++)=l;
+ c2l(data,l); *(p++)=l;
+ c2l(data,l); *(p++)=l;
+ c2l(data,l); *(p++)=l;
+ }
+#endif
+ p=c->data;
+ ripemd160_block(c,p,64);
+ len-=RIPEMD160_CBLOCK;
+ }
+ sc=(int)len;
+ c->num=sc;
+ if (sc)
+ {
+ sw=sc>>2; /* words to copy */
+#ifdef L_ENDIAN
+ p[sw]=0;
+ memcpy(p,data,sc);
+#else
+ sc&=0x03;
+ for ( ; sw; sw--)
+ { c2l(data,l); *(p++)=l; }
+ c2l_p(data,l,sc);
+ *p=l;
+#endif
+ }
+ }
+
+void RIPEMD160_Transform(c,b)
+RIPEMD160_CTX *c;
+unsigned char *b;
+ {
+ ULONG p[16];
+#if !defined(L_ENDIAN)
+ ULONG *q;
+ int i;
+#endif
+
+#if defined(B_ENDIAN) || defined(L_ENDIAN)
+ memcpy(p,b,64);
+#ifdef B_ENDIAN
+ q=p;
+ for (i=(RIPEMD160_LBLOCK/4); i; i--)
+ {
+ Endian_Reverse32(q[0]);
+ Endian_Reverse32(q[1]);
+ Endian_Reverse32(q[2]);
+ Endian_Reverse32(q[3]);
+ q+=4;
+ }
+#endif
+#else
+ q=p;
+ for (i=(RIPEMD160_LBLOCK/4); i; i--)
+ {
+ ULONG l;
+ c2l(b,l); *(q++)=l;
+ c2l(b,l); *(q++)=l;
+ c2l(b,l); *(q++)=l;
+ c2l(b,l); *(q++)=l;
+ }
+#endif
+ ripemd160_block(c,p,64);
+ }
+
+#ifndef RMD160_ASM
+
+void ripemd160_block(ctx, X, num)
+RIPEMD160_CTX *ctx;
+register ULONG *X;
+int num;
+ {
+ register ULONG A,B,C,D,E;
+ ULONG a,b,c,d,e;
+
+ for (;;)
+ {
+ A=ctx->A; B=ctx->B; C=ctx->C; D=ctx->D; E=ctx->E;
+
+ RIP1(A,B,C,D,E,WL00,SL00);
+ RIP1(E,A,B,C,D,WL01,SL01);
+ RIP1(D,E,A,B,C,WL02,SL02);
+ RIP1(C,D,E,A,B,WL03,SL03);
+ RIP1(B,C,D,E,A,WL04,SL04);
+ RIP1(A,B,C,D,E,WL05,SL05);
+ RIP1(E,A,B,C,D,WL06,SL06);
+ RIP1(D,E,A,B,C,WL07,SL07);
+ RIP1(C,D,E,A,B,WL08,SL08);
+ RIP1(B,C,D,E,A,WL09,SL09);
+ RIP1(A,B,C,D,E,WL10,SL10);
+ RIP1(E,A,B,C,D,WL11,SL11);
+ RIP1(D,E,A,B,C,WL12,SL12);
+ RIP1(C,D,E,A,B,WL13,SL13);
+ RIP1(B,C,D,E,A,WL14,SL14);
+ RIP1(A,B,C,D,E,WL15,SL15);
+
+ RIP2(E,A,B,C,D,WL16,SL16,KL1);
+ RIP2(D,E,A,B,C,WL17,SL17,KL1);
+ RIP2(C,D,E,A,B,WL18,SL18,KL1);
+ RIP2(B,C,D,E,A,WL19,SL19,KL1);
+ RIP2(A,B,C,D,E,WL20,SL20,KL1);
+ RIP2(E,A,B,C,D,WL21,SL21,KL1);
+ RIP2(D,E,A,B,C,WL22,SL22,KL1);
+ RIP2(C,D,E,A,B,WL23,SL23,KL1);
+ RIP2(B,C,D,E,A,WL24,SL24,KL1);
+ RIP2(A,B,C,D,E,WL25,SL25,KL1);
+ RIP2(E,A,B,C,D,WL26,SL26,KL1);
+ RIP2(D,E,A,B,C,WL27,SL27,KL1);
+ RIP2(C,D,E,A,B,WL28,SL28,KL1);
+ RIP2(B,C,D,E,A,WL29,SL29,KL1);
+ RIP2(A,B,C,D,E,WL30,SL30,KL1);
+ RIP2(E,A,B,C,D,WL31,SL31,KL1);
+
+ RIP3(D,E,A,B,C,WL32,SL32,KL2);
+ RIP3(C,D,E,A,B,WL33,SL33,KL2);
+ RIP3(B,C,D,E,A,WL34,SL34,KL2);
+ RIP3(A,B,C,D,E,WL35,SL35,KL2);
+ RIP3(E,A,B,C,D,WL36,SL36,KL2);
+ RIP3(D,E,A,B,C,WL37,SL37,KL2);
+ RIP3(C,D,E,A,B,WL38,SL38,KL2);
+ RIP3(B,C,D,E,A,WL39,SL39,KL2);
+ RIP3(A,B,C,D,E,WL40,SL40,KL2);
+ RIP3(E,A,B,C,D,WL41,SL41,KL2);
+ RIP3(D,E,A,B,C,WL42,SL42,KL2);
+ RIP3(C,D,E,A,B,WL43,SL43,KL2);
+ RIP3(B,C,D,E,A,WL44,SL44,KL2);
+ RIP3(A,B,C,D,E,WL45,SL45,KL2);
+ RIP3(E,A,B,C,D,WL46,SL46,KL2);
+ RIP3(D,E,A,B,C,WL47,SL47,KL2);
+
+ RIP4(C,D,E,A,B,WL48,SL48,KL3);
+ RIP4(B,C,D,E,A,WL49,SL49,KL3);
+ RIP4(A,B,C,D,E,WL50,SL50,KL3);
+ RIP4(E,A,B,C,D,WL51,SL51,KL3);
+ RIP4(D,E,A,B,C,WL52,SL52,KL3);
+ RIP4(C,D,E,A,B,WL53,SL53,KL3);
+ RIP4(B,C,D,E,A,WL54,SL54,KL3);
+ RIP4(A,B,C,D,E,WL55,SL55,KL3);
+ RIP4(E,A,B,C,D,WL56,SL56,KL3);
+ RIP4(D,E,A,B,C,WL57,SL57,KL3);
+ RIP4(C,D,E,A,B,WL58,SL58,KL3);
+ RIP4(B,C,D,E,A,WL59,SL59,KL3);
+ RIP4(A,B,C,D,E,WL60,SL60,KL3);
+ RIP4(E,A,B,C,D,WL61,SL61,KL3);
+ RIP4(D,E,A,B,C,WL62,SL62,KL3);
+ RIP4(C,D,E,A,B,WL63,SL63,KL3);
+
+ RIP5(B,C,D,E,A,WL64,SL64,KL4);
+ RIP5(A,B,C,D,E,WL65,SL65,KL4);
+ RIP5(E,A,B,C,D,WL66,SL66,KL4);
+ RIP5(D,E,A,B,C,WL67,SL67,KL4);
+ RIP5(C,D,E,A,B,WL68,SL68,KL4);
+ RIP5(B,C,D,E,A,WL69,SL69,KL4);
+ RIP5(A,B,C,D,E,WL70,SL70,KL4);
+ RIP5(E,A,B,C,D,WL71,SL71,KL4);
+ RIP5(D,E,A,B,C,WL72,SL72,KL4);
+ RIP5(C,D,E,A,B,WL73,SL73,KL4);
+ RIP5(B,C,D,E,A,WL74,SL74,KL4);
+ RIP5(A,B,C,D,E,WL75,SL75,KL4);
+ RIP5(E,A,B,C,D,WL76,SL76,KL4);
+ RIP5(D,E,A,B,C,WL77,SL77,KL4);
+ RIP5(C,D,E,A,B,WL78,SL78,KL4);
+ RIP5(B,C,D,E,A,WL79,SL79,KL4);
+
+ a=A; b=B; c=C; d=D; e=E;
+ /* Do other half */
+ A=ctx->A; B=ctx->B; C=ctx->C; D=ctx->D; E=ctx->E;
+
+ RIP5(A,B,C,D,E,WR00,SR00,KR0);
+ RIP5(E,A,B,C,D,WR01,SR01,KR0);
+ RIP5(D,E,A,B,C,WR02,SR02,KR0);
+ RIP5(C,D,E,A,B,WR03,SR03,KR0);
+ RIP5(B,C,D,E,A,WR04,SR04,KR0);
+ RIP5(A,B,C,D,E,WR05,SR05,KR0);
+ RIP5(E,A,B,C,D,WR06,SR06,KR0);
+ RIP5(D,E,A,B,C,WR07,SR07,KR0);
+ RIP5(C,D,E,A,B,WR08,SR08,KR0);
+ RIP5(B,C,D,E,A,WR09,SR09,KR0);
+ RIP5(A,B,C,D,E,WR10,SR10,KR0);
+ RIP5(E,A,B,C,D,WR11,SR11,KR0);
+ RIP5(D,E,A,B,C,WR12,SR12,KR0);
+ RIP5(C,D,E,A,B,WR13,SR13,KR0);
+ RIP5(B,C,D,E,A,WR14,SR14,KR0);
+ RIP5(A,B,C,D,E,WR15,SR15,KR0);
+
+ RIP4(E,A,B,C,D,WR16,SR16,KR1);
+ RIP4(D,E,A,B,C,WR17,SR17,KR1);
+ RIP4(C,D,E,A,B,WR18,SR18,KR1);
+ RIP4(B,C,D,E,A,WR19,SR19,KR1);
+ RIP4(A,B,C,D,E,WR20,SR20,KR1);
+ RIP4(E,A,B,C,D,WR21,SR21,KR1);
+ RIP4(D,E,A,B,C,WR22,SR22,KR1);
+ RIP4(C,D,E,A,B,WR23,SR23,KR1);
+ RIP4(B,C,D,E,A,WR24,SR24,KR1);
+ RIP4(A,B,C,D,E,WR25,SR25,KR1);
+ RIP4(E,A,B,C,D,WR26,SR26,KR1);
+ RIP4(D,E,A,B,C,WR27,SR27,KR1);
+ RIP4(C,D,E,A,B,WR28,SR28,KR1);
+ RIP4(B,C,D,E,A,WR29,SR29,KR1);
+ RIP4(A,B,C,D,E,WR30,SR30,KR1);
+ RIP4(E,A,B,C,D,WR31,SR31,KR1);
+
+ RIP3(D,E,A,B,C,WR32,SR32,KR2);
+ RIP3(C,D,E,A,B,WR33,SR33,KR2);
+ RIP3(B,C,D,E,A,WR34,SR34,KR2);
+ RIP3(A,B,C,D,E,WR35,SR35,KR2);
+ RIP3(E,A,B,C,D,WR36,SR36,KR2);
+ RIP3(D,E,A,B,C,WR37,SR37,KR2);
+ RIP3(C,D,E,A,B,WR38,SR38,KR2);
+ RIP3(B,C,D,E,A,WR39,SR39,KR2);
+ RIP3(A,B,C,D,E,WR40,SR40,KR2);
+ RIP3(E,A,B,C,D,WR41,SR41,KR2);
+ RIP3(D,E,A,B,C,WR42,SR42,KR2);
+ RIP3(C,D,E,A,B,WR43,SR43,KR2);
+ RIP3(B,C,D,E,A,WR44,SR44,KR2);
+ RIP3(A,B,C,D,E,WR45,SR45,KR2);
+ RIP3(E,A,B,C,D,WR46,SR46,KR2);
+ RIP3(D,E,A,B,C,WR47,SR47,KR2);
+
+ RIP2(C,D,E,A,B,WR48,SR48,KR3);
+ RIP2(B,C,D,E,A,WR49,SR49,KR3);
+ RIP2(A,B,C,D,E,WR50,SR50,KR3);
+ RIP2(E,A,B,C,D,WR51,SR51,KR3);
+ RIP2(D,E,A,B,C,WR52,SR52,KR3);
+ RIP2(C,D,E,A,B,WR53,SR53,KR3);
+ RIP2(B,C,D,E,A,WR54,SR54,KR3);
+ RIP2(A,B,C,D,E,WR55,SR55,KR3);
+ RIP2(E,A,B,C,D,WR56,SR56,KR3);
+ RIP2(D,E,A,B,C,WR57,SR57,KR3);
+ RIP2(C,D,E,A,B,WR58,SR58,KR3);
+ RIP2(B,C,D,E,A,WR59,SR59,KR3);
+ RIP2(A,B,C,D,E,WR60,SR60,KR3);
+ RIP2(E,A,B,C,D,WR61,SR61,KR3);
+ RIP2(D,E,A,B,C,WR62,SR62,KR3);
+ RIP2(C,D,E,A,B,WR63,SR63,KR3);
+
+ RIP1(B,C,D,E,A,WR64,SR64);
+ RIP1(A,B,C,D,E,WR65,SR65);
+ RIP1(E,A,B,C,D,WR66,SR66);
+ RIP1(D,E,A,B,C,WR67,SR67);
+ RIP1(C,D,E,A,B,WR68,SR68);
+ RIP1(B,C,D,E,A,WR69,SR69);
+ RIP1(A,B,C,D,E,WR70,SR70);
+ RIP1(E,A,B,C,D,WR71,SR71);
+ RIP1(D,E,A,B,C,WR72,SR72);
+ RIP1(C,D,E,A,B,WR73,SR73);
+ RIP1(B,C,D,E,A,WR74,SR74);
+ RIP1(A,B,C,D,E,WR75,SR75);
+ RIP1(E,A,B,C,D,WR76,SR76);
+ RIP1(D,E,A,B,C,WR77,SR77);
+ RIP1(C,D,E,A,B,WR78,SR78);
+ RIP1(B,C,D,E,A,WR79,SR79);
+
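+		/* Fold the two parallel lines back into the chaining values:
+		 * a..e hold the left line saved above, A..E the right line.
+		 * The one-word rotation of the outputs is per the spec. */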
+ D =ctx->B+c+D;
+ ctx->B=ctx->C+d+E;
+ ctx->C=ctx->D+e+A;
+ ctx->D=ctx->E+a+B;
+ ctx->E=ctx->A+b+C;
+ ctx->A=D;
+
+ X+=16;
+ num-=64;
+ if (num <= 0) break;
+ }
+ }
+#endif
+
+void RIPEMD160_Final(md, c)
+unsigned char *md;
+RIPEMD160_CTX *c;
+ {
+ register int i,j;
+ register ULONG l;
+ register ULONG *p;
+ static unsigned char end[4]={0x80,0x00,0x00,0x00};
+ unsigned char *cp=end;
+
+	/* c->data always has room for at least one more byte here,
+	 * since c->num < RIPEMD160_CBLOCK after any Update call. */
+ p=c->data;
+ j=c->num;
+ i=j>>2;
+
+ /* purify often complains about the following line as an
+ * Uninitialized Memory Read. While this can be true, the
+ * following p_c2l macro will reset l when that case is true.
+ * This is because j&0x03 contains the number of 'valid' bytes
+ * already in p[i]. If and only if j&0x03 == 0, the UMR will
+ * occur but this is also the only time p_c2l will do
+ * l= *(cp++) instead of l|= *(cp++)
+	 * Many thanks to Alex Tang <altitude@cic.net> for picking up this
+ * 'potential bug' */
+#ifdef PURIFY
+ if ((j&0x03) == 0) p[i]=0;
+#endif
+ l=p[i];
+ p_c2l(cp,l,j&0x03);
+ p[i]=l;
+ i++;
+ /* i is the next 'undefined word' */
+ if (c->num >= RIPEMD160_LAST_BLOCK)
+ {
+ for (; i<RIPEMD160_LBLOCK; i++)
+ p[i]=0;
+ ripemd160_block(c,p,64);
+ i=0;
+ }
+ for (; i<(RIPEMD160_LBLOCK-2); i++)
+ p[i]=0;
+ p[RIPEMD160_LBLOCK-2]=c->Nl;
+ p[RIPEMD160_LBLOCK-1]=c->Nh;
+ ripemd160_block(c,p,64);
+ cp=md;
+ l=c->A; l2c(l,cp);
+ l=c->B; l2c(l,cp);
+ l=c->C; l2c(l,cp);
+ l=c->D; l2c(l,cp);
+ l=c->E; l2c(l,cp);
+
+ /* clear stuff, ripemd160_block may be leaving some stuff on the stack
+ * but I'm not worried :-) */
+ c->num=0;
+/* memset((char *)&c,0,sizeof(c));*/
+ }
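Init/Update/Final together give the usual streaming interface, and the digest is independent of how the input is split across Update calls. A minimal caller, assuming it is compiled against this directory's ripemd.h; the expected output is the 'abc' vector from rmdtest.c further down:

#include <stdio.h>
#include "ripemd.h"

int main()
	{
	RIPEMD160_CTX c;
	unsigned char md[RIPEMD160_DIGEST_LENGTH];
	int i;

	RIPEMD160_Init(&c);
	RIPEMD160_Update(&c,(unsigned char *)"ab",(unsigned long)2);
	RIPEMD160_Update(&c,(unsigned char *)"c",(unsigned long)1); /* split on purpose */
	RIPEMD160_Final(md,&c);
	for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
		printf("%02x",md[i]);
	printf("\n"); /* expect 8eb208f7e05d987a9b044a8e98c6b087f15a0bfc */
	return(0);
	}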
+
+#ifdef undef
+void printit(l)
+unsigned long *l;
+ {
+ int i,ii;
+
+ for (i=0; i<2; i++)
+ {
+ for (ii=0; ii<8; ii++)
+ {
+ fprintf(stderr,"%08lx ",l[i*8+ii]);
+ }
+ fprintf(stderr,"\n");
+ }
+ }
+#endif
diff --git a/crypto/ripemd/rmd_locl.h b/crypto/ripemd/rmd_locl.h
new file mode 100644
index 0000000000..a1feccf7c1
--- /dev/null
+++ b/crypto/ripemd/rmd_locl.h
@@ -0,0 +1,226 @@
+/* crypto/ripemd/rmd_locl.h */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to.  The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include "ripemd.h"
+
+#define ULONG unsigned long
+#define UCHAR unsigned char
+#define UINT unsigned int
+
+#ifdef NOCONST
+#define const
+#endif
+
+#undef c2nl
+#define c2nl(c,l) (l =(((unsigned long)(*((c)++)))<<24), \
+ l|=(((unsigned long)(*((c)++)))<<16), \
+ l|=(((unsigned long)(*((c)++)))<< 8), \
+ l|=(((unsigned long)(*((c)++))) ))
+
+#undef p_c2nl
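+/* The cases in p_c2nl (and in p_c2l further down) fall through on
+ * purpose: n is the byte offset within the word at which packing
+ * starts, so all later bytes must be added in as well. */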
+#define p_c2nl(c,l,n) { \
+ switch (n) { \
+ case 0: l =((unsigned long)(*((c)++)))<<24; \
+ case 1: l|=((unsigned long)(*((c)++)))<<16; \
+ case 2: l|=((unsigned long)(*((c)++)))<< 8; \
+ case 3: l|=((unsigned long)(*((c)++))); \
+ } \
+ }
+
+#undef c2nl_p
+/* NOTE the pointer is not incremented at the end of this */
+#define c2nl_p(c,l,n) { \
+ l=0; \
+ (c)+=n; \
+ switch (n) { \
+ case 3: l =((unsigned long)(*(--(c))))<< 8; \
+ case 2: l|=((unsigned long)(*(--(c))))<<16; \
+ case 1: l|=((unsigned long)(*(--(c))))<<24; \
+ } \
+ }
+
+#undef p_c2nl_p
+#define p_c2nl_p(c,l,sc,len) { \
+ switch (sc) \
+ { \
+ case 0: l =((unsigned long)(*((c)++)))<<24; \
+ if (--len == 0) break; \
+ case 1: l|=((unsigned long)(*((c)++)))<<16; \
+ if (--len == 0) break; \
+ case 2: l|=((unsigned long)(*((c)++)))<< 8; \
+ } \
+ }
+
+#undef nl2c
+#define nl2c(l,c) (*((c)++)=(unsigned char)(((l)>>24)&0xff), \
+ *((c)++)=(unsigned char)(((l)>>16)&0xff), \
+ *((c)++)=(unsigned char)(((l)>> 8)&0xff), \
+ *((c)++)=(unsigned char)(((l) )&0xff))
+
+#undef c2l
+#define c2l(c,l) (l =(((unsigned long)(*((c)++))) ), \
+ l|=(((unsigned long)(*((c)++)))<< 8), \
+ l|=(((unsigned long)(*((c)++)))<<16), \
+ l|=(((unsigned long)(*((c)++)))<<24))
+
+#undef p_c2l
+#define p_c2l(c,l,n) { \
+ switch (n) { \
+ case 0: l =((unsigned long)(*((c)++))); \
+ case 1: l|=((unsigned long)(*((c)++)))<< 8; \
+ case 2: l|=((unsigned long)(*((c)++)))<<16; \
+ case 3: l|=((unsigned long)(*((c)++)))<<24; \
+ } \
+ }
+
+#undef c2l_p
+/* NOTE the pointer is not incremented at the end of this */
+#define c2l_p(c,l,n) { \
+ l=0; \
+ (c)+=n; \
+ switch (n) { \
+ case 3: l =((unsigned long)(*(--(c))))<<16; \
+ case 2: l|=((unsigned long)(*(--(c))))<< 8; \
+ case 1: l|=((unsigned long)(*(--(c)))); \
+ } \
+ }
+
+#undef p_c2l_p
+#define p_c2l_p(c,l,sc,len) { \
+ switch (sc) \
+ { \
+ case 0: l =((unsigned long)(*((c)++))); \
+ if (--len == 0) break; \
+ case 1: l|=((unsigned long)(*((c)++)))<< 8; \
+ if (--len == 0) break; \
+ case 2: l|=((unsigned long)(*((c)++)))<<16; \
+ } \
+ }
+
+#undef l2c
+#define l2c(l,c) (*((c)++)=(unsigned char)(((l) )&0xff), \
+ *((c)++)=(unsigned char)(((l)>> 8)&0xff), \
+ *((c)++)=(unsigned char)(((l)>>16)&0xff), \
+ *((c)++)=(unsigned char)(((l)>>24)&0xff))
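The c2l/l2c pair converts between a byte stream and 32-bit words, least-significant byte first; RIPEMD-160 is a little-endian hash, unlike the big-endian c2nl/nl2c above. A standalone round-trip check, using local copies of the two macros:

#include <stdio.h>

/* local copies of the c2l/l2c macros above, for a standalone check */
#define c2l(c,l)	(l =(((unsigned long)(*((c)++)))    ), \
			 l|=(((unsigned long)(*((c)++)))<< 8), \
			 l|=(((unsigned long)(*((c)++)))<<16), \
			 l|=(((unsigned long)(*((c)++)))<<24))
#define l2c(l,c)	(*((c)++)=(unsigned char)(((l)    )&0xff), \
			 *((c)++)=(unsigned char)(((l)>> 8)&0xff), \
			 *((c)++)=(unsigned char)(((l)>>16)&0xff), \
			 *((c)++)=(unsigned char)(((l)>>24)&0xff))

int main()
	{
	unsigned char in[4]={0x01,0x02,0x03,0x04},out[4],*p=in,*q=out;
	unsigned long l;

	c2l(p,l);		/* little-endian: expect 04030201 */
	printf("%08lx\n",l);
	l2c(l,q);		/* round-trips back to 01 02 03 04 */
	printf("%02x %02x %02x %02x\n",out[0],out[1],out[2],out[3]);
	return(0);
	}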
+
+#undef ROTATE
+#if defined(WIN32)
+#define ROTATE(a,n) _lrotl(a,n)
+#else
+#define ROTATE(a,n) (((a)<<(n))|(((a)&0xffffffff)>>(32-(n))))
+#endif
+
+/* A nice byte order reversal from Wei Dai <weidai@eskimo.com> */
+#if defined(WIN32)
+/* 5 instructions with rotate instruction, else 9 */
+#define Endian_Reverse32(a) \
+ { \
+ unsigned long l=(a); \
+ (a)=((ROTATE(l,8)&0x00FF00FF)|(ROTATE(l,24)&0xFF00FF00)); \
+ }
+#else
+/* 6 instructions with rotate instruction, else 8 */
+#define Endian_Reverse32(a) \
+ { \
+ unsigned long l=(a); \
+ l=(((l&0xFF00FF00)>>8L)|((l&0x00FF00FF)<<8L)); \
+ (a)=ROTATE(l,16L); \
+ }
+#endif
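A standalone check of the generic variant below: it turns 0x11223344 into 0x44332211. Note that this 1998 code assumes a 32-bit unsigned long, so on platforms where long is 64 bits the result needs masking, as in this sketch:

#include <stdio.h>

#define ROTATE(a,n)	(((a)<<(n))|(((a)&0xffffffff)>>(32-(n))))
#define Endian_Reverse32(a) \
	{ \
	unsigned long l=(a); \
	l=(((l&0xFF00FF00)>>8L)|((l&0x00FF00FF)<<8L)); \
	(a)=ROTATE(l,16L); \
	}

int main()
	{
	unsigned long v=0x11223344L;

	Endian_Reverse32(v);
	v&=0xffffffffL;		/* needed where long is 64 bits */
	printf("%08lx\n",v);	/* expect 44332211 */
	return(0);
	}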
+
+#define F1(x,y,z)	((x)^(y)^(z))
+#define F2(x,y,z)	(((x)&(y))|((~(x))&(z)))
+#define F3(x,y,z)	(((x)|(~(y)))^(z))
+#define F4(x,y,z)	(((x)&(z))|((y)&(~(z))))
+#define F5(x,y,z)	((x)^((y)|(~(z))))
+
+#define RIPEMD160_A 0x67452301L
+#define RIPEMD160_B 0xEFCDAB89L
+#define RIPEMD160_C 0x98BADCFEL
+#define RIPEMD160_D 0x10325476L
+#define RIPEMD160_E 0xC3D2E1F0L
+
+#include "rmdconst.h"
+
+#define RIP1(a,b,c,d,e,w,s) { \
+ a+=F1(b,c,d)+X[w]; \
+ a=ROTATE(a,s)+e; \
+ c=ROTATE(c,10); }
+
+#define RIP2(a,b,c,d,e,w,s,K) { \
+ a+=F2(b,c,d)+X[w]+K; \
+ a=ROTATE(a,s)+e; \
+ c=ROTATE(c,10); }
+
+#define RIP3(a,b,c,d,e,w,s,K) { \
+ a+=F3(b,c,d)+X[w]+K; \
+ a=ROTATE(a,s)+e; \
+ c=ROTATE(c,10); }
+
+#define RIP4(a,b,c,d,e,w,s,K) { \
+ a+=F4(b,c,d)+X[w]+K; \
+ a=ROTATE(a,s)+e; \
+ c=ROTATE(c,10); }
+
+#define RIP5(a,b,c,d,e,w,s,K) { \
+ a+=F5(b,c,d)+X[w]+K; \
+ a=ROTATE(a,s)+e; \
+ c=ROTATE(c,10); }
+
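Each RIPn macro above is one RIPEMD-160 step over a rotating five-register window: add the boolean function of three registers, the message word X[w] and (for RIP2..RIP5) a round constant, rotate left by the per-step amount s, add the fifth register, then rotate the middle register by 10. With WL00=0 and SL00=11 from rmdconst.h, the first step RIP1(A,B,C,D,E,WL00,SL00) expands to:

	A += ((B)^(C)^(D)) + X[0];	/* F1; these rounds use no constant (KL0 is 0) */
	A  = ROTATE(A,11) + E;
	C  = ROTATE(C,10);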
diff --git a/crypto/ripemd/rmd_one.c b/crypto/ripemd/rmd_one.c
new file mode 100644
index 0000000000..a7626dbcda
--- /dev/null
+++ b/crypto/ripemd/rmd_one.c
@@ -0,0 +1,77 @@
+/* crypto/ripemd/rmd_one.c */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to.  The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#include <stdio.h>
+#include "rmd_locl.h"
+
+unsigned char *RIPEMD160(d, n, md)
+unsigned char *d;
+unsigned long n;
+unsigned char *md;
+ {
+ RIPEMD160_CTX c;
+ static unsigned char m[RIPEMD160_DIGEST_LENGTH];
+
+ if (md == NULL) md=m;
+ RIPEMD160_Init(&c);
+ RIPEMD160_Update(&c,d,n);
+ RIPEMD160_Final(md,&c);
+ memset(&c,0,sizeof(c)); /* security consideration */
+ return(md);
+ }
+
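RIPEMD160() above is the one-shot convenience wrapper. One caveat: when md is NULL the digest is written to a function-local static buffer, so that form is not reentrant and each NULL-md call overwrites the last result. Passing your own buffer, as in this sketch, avoids the issue:

#include <stdio.h>
#include <string.h>
#include "ripemd.h"

int main()
	{
	unsigned char md[RIPEMD160_DIGEST_LENGTH];
	int i;

	/* pass our own buffer rather than relying on the static one */
	RIPEMD160((unsigned char *)"abc",(unsigned long)strlen("abc"),md);
	for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
		printf("%02x",md[i]);
	printf("\n"); /* expect 8eb208f7e05d987a9b044a8e98c6b087f15a0bfc */
	return(0);
	}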
diff --git a/crypto/ripemd/rmdconst.h b/crypto/ripemd/rmdconst.h
new file mode 100644
index 0000000000..59c48dead1
--- /dev/null
+++ b/crypto/ripemd/rmdconst.h
@@ -0,0 +1,399 @@
+/* crypto/ripemd/rmdconst.h */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to.  The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+#define KL0 0x00000000L
+#define KL1 0x5A827999L
+#define KL2 0x6ED9EBA1L
+#define KL3 0x8F1BBCDCL
+#define KL4 0xA953FD4EL
+
+#define KR0 0x50A28BE6L
+#define KR1 0x5C4DD124L
+#define KR2 0x6D703EF3L
+#define KR3 0x7A6D76E9L
+#define KR4 0x00000000L
+
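These round constants are 'nothing up my sleeve' numbers: per the RIPEMD-160 design document, KL1..KL4 are the integer parts of 2^30 times the square roots of 2, 3, 5 and 7, and KR0..KR3 the integer parts of 2^30 times the corresponding cube roots (KL0 and KR4 are simply zero). A quick floating-point check, strictly illustrative:

#include <stdio.h>
#include <math.h>

/* prints 5a827999 6ed9eba1 8f1bbcdc a953fd4e and
 *        50a28be6 5c4dd124 6d703ef3 7a6d76e9; link with -lm */
int main()
	{
	double two30=1073741824.0, p[4]={2.0,3.0,5.0,7.0};
	int i;

	for (i=0; i<4; i++)
		printf("KL%d=%08lx KR%d=%08lx\n",
			i+1,(unsigned long)(two30*sqrt(p[i])),
			i,(unsigned long)(two30*pow(p[i],1.0/3.0)));
	return(0);
	}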
+#define WL00 0
+#define SL00 11
+#define WL01 1
+#define SL01 14
+#define WL02 2
+#define SL02 15
+#define WL03 3
+#define SL03 12
+#define WL04 4
+#define SL04 5
+#define WL05 5
+#define SL05 8
+#define WL06 6
+#define SL06 7
+#define WL07 7
+#define SL07 9
+#define WL08 8
+#define SL08 11
+#define WL09 9
+#define SL09 13
+#define WL10 10
+#define SL10 14
+#define WL11 11
+#define SL11 15
+#define WL12 12
+#define SL12 6
+#define WL13 13
+#define SL13 7
+#define WL14 14
+#define SL14 9
+#define WL15 15
+#define SL15 8
+
+#define WL16 7
+#define SL16 7
+#define WL17 4
+#define SL17 6
+#define WL18 13
+#define SL18 8
+#define WL19 1
+#define SL19 13
+#define WL20 10
+#define SL20 11
+#define WL21 6
+#define SL21 9
+#define WL22 15
+#define SL22 7
+#define WL23 3
+#define SL23 15
+#define WL24 12
+#define SL24 7
+#define WL25 0
+#define SL25 12
+#define WL26 9
+#define SL26 15
+#define WL27 5
+#define SL27 9
+#define WL28 2
+#define SL28 11
+#define WL29 14
+#define SL29 7
+#define WL30 11
+#define SL30 13
+#define WL31 8
+#define SL31 12
+
+#define WL32 3
+#define SL32 11
+#define WL33 10
+#define SL33 13
+#define WL34 14
+#define SL34 6
+#define WL35 4
+#define SL35 7
+#define WL36 9
+#define SL36 14
+#define WL37 15
+#define SL37 9
+#define WL38 8
+#define SL38 13
+#define WL39 1
+#define SL39 15
+#define WL40 2
+#define SL40 14
+#define WL41 7
+#define SL41 8
+#define WL42 0
+#define SL42 13
+#define WL43 6
+#define SL43 6
+#define WL44 13
+#define SL44 5
+#define WL45 11
+#define SL45 12
+#define WL46 5
+#define SL46 7
+#define WL47 12
+#define SL47 5
+
+#define WL48 1
+#define SL48 11
+#define WL49 9
+#define SL49 12
+#define WL50 11
+#define SL50 14
+#define WL51 10
+#define SL51 15
+#define WL52 0
+#define SL52 14
+#define WL53 8
+#define SL53 15
+#define WL54 12
+#define SL54 9
+#define WL55 4
+#define SL55 8
+#define WL56 13
+#define SL56 9
+#define WL57 3
+#define SL57 14
+#define WL58 7
+#define SL58 5
+#define WL59 15
+#define SL59 6
+#define WL60 14
+#define SL60 8
+#define WL61 5
+#define SL61 6
+#define WL62 6
+#define SL62 5
+#define WL63 2
+#define SL63 12
+
+#define WL64 4
+#define SL64 9
+#define WL65 0
+#define SL65 15
+#define WL66 5
+#define SL66 5
+#define WL67 9
+#define SL67 11
+#define WL68 7
+#define SL68 6
+#define WL69 12
+#define SL69 8
+#define WL70 2
+#define SL70 13
+#define WL71 10
+#define SL71 12
+#define WL72 14
+#define SL72 5
+#define WL73 1
+#define SL73 12
+#define WL74 3
+#define SL74 13
+#define WL75 8
+#define SL75 14
+#define WL76 11
+#define SL76 11
+#define WL77 6
+#define SL77 8
+#define WL78 15
+#define SL78 5
+#define WL79 13
+#define SL79 6
+
+#define WR00 5
+#define SR00 8
+#define WR01 14
+#define SR01 9
+#define WR02 7
+#define SR02 9
+#define WR03 0
+#define SR03 11
+#define WR04 9
+#define SR04 13
+#define WR05 2
+#define SR05 15
+#define WR06 11
+#define SR06 15
+#define WR07 4
+#define SR07 5
+#define WR08 13
+#define SR08 7
+#define WR09 6
+#define SR09 7
+#define WR10 15
+#define SR10 8
+#define WR11 8
+#define SR11 11
+#define WR12 1
+#define SR12 14
+#define WR13 10
+#define SR13 14
+#define WR14 3
+#define SR14 12
+#define WR15 12
+#define SR15 6
+
+#define WR16 6
+#define SR16 9
+#define WR17 11
+#define SR17 13
+#define WR18 3
+#define SR18 15
+#define WR19 7
+#define SR19 7
+#define WR20 0
+#define SR20 12
+#define WR21 13
+#define SR21 8
+#define WR22 5
+#define SR22 9
+#define WR23 10
+#define SR23 11
+#define WR24 14
+#define SR24 7
+#define WR25 15
+#define SR25 7
+#define WR26 8
+#define SR26 12
+#define WR27 12
+#define SR27 7
+#define WR28 4
+#define SR28 6
+#define WR29 9
+#define SR29 15
+#define WR30 1
+#define SR30 13
+#define WR31 2
+#define SR31 11
+
+#define WR32 15
+#define SR32 9
+#define WR33 5
+#define SR33 7
+#define WR34 1
+#define SR34 15
+#define WR35 3
+#define SR35 11
+#define WR36 7
+#define SR36 8
+#define WR37 14
+#define SR37 6
+#define WR38 6
+#define SR38 6
+#define WR39 9
+#define SR39 14
+#define WR40 11
+#define SR40 12
+#define WR41 8
+#define SR41 13
+#define WR42 12
+#define SR42 5
+#define WR43 2
+#define SR43 14
+#define WR44 10
+#define SR44 13
+#define WR45 0
+#define SR45 13
+#define WR46 4
+#define SR46 7
+#define WR47 13
+#define SR47 5
+
+#define WR48 8
+#define SR48 15
+#define WR49 6
+#define SR49 5
+#define WR50 4
+#define SR50 8
+#define WR51 1
+#define SR51 11
+#define WR52 3
+#define SR52 14
+#define WR53 11
+#define SR53 14
+#define WR54 15
+#define SR54 6
+#define WR55 0
+#define SR55 14
+#define WR56 5
+#define SR56 6
+#define WR57 12
+#define SR57 9
+#define WR58 2
+#define SR58 12
+#define WR59 13
+#define SR59 9
+#define WR60 9
+#define SR60 12
+#define WR61 7
+#define SR61 5
+#define WR62 10
+#define SR62 15
+#define WR63 14
+#define SR63 8
+
+#define WR64 12
+#define SR64 8
+#define WR65 15
+#define SR65 5
+#define WR66 10
+#define SR66 12
+#define WR67 4
+#define SR67 9
+#define WR68 1
+#define SR68 12
+#define WR69 5
+#define SR69 5
+#define WR70 8
+#define SR70 14
+#define WR71 7
+#define SR71 6
+#define WR72 6
+#define SR72 8
+#define WR73 2
+#define SR73 13
+#define WR74 13
+#define SR74 6
+#define WR75 14
+#define SR75 5
+#define WR76 0
+#define SR76 15
+#define WR77 3
+#define SR77 13
+#define WR78 9
+#define SR78 11
+#define WR79 11
+#define SR79 11
+
diff --git a/crypto/ripemd/rmdtest.c b/crypto/ripemd/rmdtest.c
new file mode 100644
index 0000000000..6a0297f975
--- /dev/null
+++ b/crypto/ripemd/rmdtest.c
@@ -0,0 +1,133 @@
+/* crypto/ripemd/rmdtest.c */
+/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
+ * All rights reserved.
+ *
+ * This package is an SSL implementation written
+ * by Eric Young (eay@cryptsoft.com).
+ * The implementation was written so as to conform with Netscape's SSL.
+ *
+ * This library is free for commercial and non-commercial use as long as
+ * the following conditions are adhered to.  The following conditions
+ * apply to all code found in this distribution, be it the RC4, RSA,
+ * lhash, DES, etc., code; not just the SSL code. The SSL documentation
+ * included with this distribution is covered by the same copyright terms
+ * except that the holder is Tim Hudson (tjh@cryptsoft.com).
+ *
+ * Copyright remains Eric Young's, and as such any Copyright notices in
+ * the code are not to be removed.
+ * If this package is used in a product, Eric Young should be given attribution
+ * as the author of the parts of the library used.
+ * This can be in the form of a textual message at program startup or
+ * in documentation (online or textual) provided with the package.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. All advertising materials mentioning features or use of this software
+ * must display the following acknowledgement:
+ * "This product includes cryptographic software written by
+ * Eric Young (eay@cryptsoft.com)"
+ * The word 'cryptographic' can be left out if the routines from the library
+ * being used are not cryptographic related :-).
+ * 4. If you include any Windows specific code (or a derivative thereof) from
+ * the apps directory (application code) you must include an acknowledgement:
+ * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
+ *
+ * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ *
+ * The licence and distribution terms for any publicly available version or
+ * derivative of this code cannot be changed. i.e. this code cannot simply be
+ * copied and put under another distribution licence
+ * [including the GNU Public Licence.]
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include "ripemd.h"
+
+char *test[]={
+ "",
+ "a",
+ "abc",
+ "message digest",
+ "abcdefghijklmnopqrstuvwxyz",
+ "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
+ "12345678901234567890123456789012345678901234567890123456789012345678901234567890",
+ NULL,
+ };
+
+char *ret[]={
+ "9c1185a5c5e9fc54612808977ee8f548b2258d31",
+ "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe",
+ "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc",
+ "5d0689ef49d2fae572b881b123a85ffa21595f36",
+ "f71c27109c692c1b56bbdceb5b9d2865b3708dbc",
+ "12a053384a9c0c88e405a06c27dcf49ada62eb2b",
+ "b0e20b6e3116640286ed3a87a5713079b21f5189",
+ "9b752e45573d4b39f4dbd3323cab82bf63326bfb",
+ };
+
+#ifndef NOPROTO
+static char *pt(unsigned char *md);
+#else
+static char *pt();
+#endif
+
+int main(argc,argv)
+int argc;
+char *argv[];
+ {
+ int i,err=0;
+ unsigned char **P,**R;
+ char *p;
+
+ P=(unsigned char **)test;
+ R=(unsigned char **)ret;
+ i=1;
+ while (*P != NULL)
+ {
+ p=pt(RIPEMD160(&(P[0][0]),(unsigned long)strlen((char *)*P),NULL));
+ if (strcmp(p,(char *)*R) != 0)
+ {
+ printf("error calculating RIPEMD160 on '%s'\n",*P);
+ printf("got %s instead of %s\n",p,*R);
+ err++;
+ }
+ else
+ printf("test %d ok\n",i);
+ i++;
+ R++;
+ P++;
+ }
+ exit(err);
+ return(0);
+ }
+
+static char *pt(md)
+unsigned char *md;
+ {
+ int i;
+ static char buf[80];
+
+ for (i=0; i<RIPEMD160_DIGEST_LENGTH; i++)
+ sprintf(&(buf[i*2]),"%02x",md[i]);
+ return(buf);
+ }
+