Diffstat (limited to 'crypto/cast/asm/cx86unix.cpp')
-rw-r--r--  crypto/cast/asm/cx86unix.cpp  1010
1 file changed, 1010 insertions(+), 0 deletions(-)
diff --git a/crypto/cast/asm/cx86unix.cpp b/crypto/cast/asm/cx86unix.cpp
new file mode 100644
index 0000000000..035692a5af
--- /dev/null
+++ b/crypto/cast/asm/cx86unix.cpp
@@ -0,0 +1,1010 @@
+/* Run the C pre-processor over this file with one of the following defined
+ * ELF - elf object files,
+ * OUT - a.out object files,
+ * BSDI - BSDI style a.out object files
+ * SOL - Solaris style elf
+ */
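+/* For example, the build would pre-process this file and hand the result
+ * straight to the assembler.  The exact commands below are an assumption
+ * (they follow the historical OpenSSL Makefile pattern, not anything
+ * recorded in this file); the sed step is what the "strange comment
+ * lines" remark for Solaris is about:
+ *
+ *     cc -E -DELF cx86unix.cpp | as -o cx86-elf.o
+ *     cc -E -DSOL cx86unix.cpp | sed 's/^#.*//' | as -o cx86-sol.o
+ */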
+
+#define TYPE(a,b) .type a,b
+#define SIZE(a,b) .size a,b
+
+#if defined(OUT) || defined(BSDI)
+#define CAST_S_table0 _CAST_S_table0
+#define CAST_S_table1 _CAST_S_table1
+#define CAST_S_table2 _CAST_S_table2
+#define CAST_S_table3 _CAST_S_table3
+#define CAST_encrypt _CAST_encrypt
+#define CAST_decrypt _CAST_decrypt
+#define CAST_cbc_encrypt _CAST_cbc_encrypt
+
+#endif
+
+#ifdef OUT
+#define OK 1
+#define ALIGN 4
+#endif
+
+#ifdef BSDI
+#define OK 1
+#define ALIGN 4
+#undef SIZE
+#undef TYPE
+#define SIZE(a,b)
+#define TYPE(a,b)
+#endif
+
+#if defined(ELF) || defined(SOL)
+#define OK 1
+#define ALIGN 16
+#endif
+
+#ifndef OK
+You need to define one of
+ELF - elf systems - linux-elf, NetBSD and DG-UX
+OUT - a.out systems - linux-a.out and FreeBSD
+SOL - solaris systems, which are elf with strange comment lines
+BSDI - a.out with a very primitive version of as.
+#endif
+
+/* Let the Assembler begin :-) */
+ /* Don't even think of reading this code */
+ /* It was automatically generated by cast-586.pl */
+ /* Which is a perl program used to generate the x86 assembler for */
+ /* any of elf, a.out, BSDI, Win32, or Solaris */
+ /* eric <eay@cryptsoft.com> */
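+ /* Hypothetical regeneration commands (assuming the usual argument
+  * convention of the perlasm scripts of this era; not recorded in
+  * this file):
+  *     perl cast-586.pl cpp > cx86unix.cpp
+  *     perl cast-586.pl elf > cast-586.s
+  */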
+
+ .file "cast-586.s"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align ALIGN
+.globl CAST_encrypt
+ TYPE(CAST_encrypt,@function)
+CAST_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
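+ /* Stack picture (hedged; C prototype per OpenSSL's cast.h:
+  *   void CAST_encrypt(CAST_LONG *data, CAST_KEY *key)):
+  * the two movl above run after only %ebp/%ebx are pushed, so data sits
+  * at 12(%esp) and key at 16(%esp); once %esi/%edi are pushed as well,
+  * data is re-fetched at 20(%esp) for the final store. */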
+ /* Load the 2 words */
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ /* round 0 */
+ movl (%ebp), %edx
+ movl 4(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
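+ /*
+  * What one round computes, as a C sketch mirroring the asm above (and
+  * the E_CAST macro in OpenSSL's cast_lcl.h; the macro name is background
+  * knowledge, not taken from this file).  For round 0, with L = %edi,
+  * R = %esi and K the 32 subkey words at (%ebp):
+  *
+  *   t = ROTL32(K[0] + R, K[1] & 31);
+  *   L ^= (((CAST_S_table0[(t >>  8) & 0xff]
+  *           ^ CAST_S_table1[ t        & 0xff])
+  *          - CAST_S_table2[ t >> 24         ])
+  *         + CAST_S_table3[(t >> 16) & 0xff]);
+  *
+  * Rounds cycle through three types: the subkey is combined with the
+  * half-block by add/xor/sub, and the four S-box results by the matching
+  * rotation of the xor/sub/add sequence.
+  */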
+ /* round 1 */
+ movl 8(%ebp), %edx
+ movl 12(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 2 */
+ movl 16(%ebp), %edx
+ movl 20(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 3 */
+ movl 24(%ebp), %edx
+ movl 28(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 4 */
+ movl 32(%ebp), %edx
+ movl 36(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 5 */
+ movl 40(%ebp), %edx
+ movl 44(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 6 */
+ movl 48(%ebp), %edx
+ movl 52(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 7 */
+ movl 56(%ebp), %edx
+ movl 60(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 8 */
+ movl 64(%ebp), %edx
+ movl 68(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 9 */
+ movl 72(%ebp), %edx
+ movl 76(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 10 */
+ movl 80(%ebp), %edx
+ movl 84(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 11 */
+ movl 88(%ebp), %edx
+ movl 92(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 12 */
+ movl 96(%ebp), %edx
+ movl 100(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 13 */
+ movl 104(%ebp), %edx
+ movl 108(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 14 */
+ movl 112(%ebp), %edx
+ movl 116(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 15 */
+ movl 120(%ebp), %edx
+ movl 124(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ movl 20(%esp), %eax
+ xorl %ecx, %esi
+ nop
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.CAST_encrypt_end:
+ SIZE(CAST_encrypt,.CAST_encrypt_end-CAST_encrypt)
+.ident "CAST_encrypt"
+.text
+ .align ALIGN
+.globl CAST_decrypt
+ TYPE(CAST_decrypt,@function)
+CAST_decrypt:
+
+ pushl %ebp
+ pushl %ebx
+ movl 12(%esp), %ebx
+ movl 16(%esp), %ebp
+ pushl %esi
+ pushl %edi
+ /* Load the 2 words */
+ movl (%ebx), %edi
+ movl 4(%ebx), %esi
+ xorl %eax, %eax
+ /* round 15 */
+ movl 120(%ebp), %edx
+ movl 124(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 14 */
+ movl 112(%ebp), %edx
+ movl 116(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 13 */
+ movl 104(%ebp), %edx
+ movl 108(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 12 */
+ movl 96(%ebp), %edx
+ movl 100(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 11 */
+ movl 88(%ebp), %edx
+ movl 92(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 10 */
+ movl 80(%ebp), %edx
+ movl 84(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 9 */
+ movl 72(%ebp), %edx
+ movl 76(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 8 */
+ movl 64(%ebp), %edx
+ movl 68(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 7 */
+ movl 56(%ebp), %edx
+ movl 60(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 6 */
+ movl 48(%ebp), %edx
+ movl 52(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 5 */
+ movl 40(%ebp), %edx
+ movl 44(%ebp), %ecx
+ subl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 4 */
+ movl 32(%ebp), %edx
+ movl 36(%ebp), %ecx
+ xorl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 3 */
+ movl 24(%ebp), %edx
+ movl 28(%ebp), %ecx
+ addl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 2 */
+ movl 16(%ebp), %edx
+ movl 20(%ebp), %ecx
+ subl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ subl %ebx, %ecx
+ xorl %ecx, %esi
+ /* round 1 */
+ movl 8(%ebp), %edx
+ movl 12(%ebp), %ecx
+ xorl %esi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ addl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ xorl %ebx, %ecx
+ xorl %ecx, %edi
+ /* round 0 */
+ movl (%ebp), %edx
+ movl 4(%ebp), %ecx
+ addl %edi, %edx
+ roll %cl, %edx
+ movl %edx, %ebx
+ xorl %ecx, %ecx
+ movb %dh, %cl
+ andl $255, %ebx
+ shrl $16, %edx
+ xorl %eax, %eax
+ movb %dh, %al
+ andl $255, %edx
+ movl CAST_S_table0(,%ecx,4),%ecx
+ movl CAST_S_table1(,%ebx,4),%ebx
+ xorl %ebx, %ecx
+ movl CAST_S_table2(,%eax,4),%ebx
+ subl %ebx, %ecx
+ movl CAST_S_table3(,%edx,4),%ebx
+ addl %ebx, %ecx
+ movl 20(%esp), %eax
+ xorl %ecx, %esi
+ nop
+ movl %edi, 4(%eax)
+ movl %esi, (%eax)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.CAST_decrypt_end:
+ SIZE(CAST_decrypt,.CAST_decrypt_end-CAST_decrypt)
+.ident "CAST_decrypt"
+.text
+ .align ALIGN
+.globl CAST_cbc_encrypt
+ TYPE(CAST_cbc_encrypt,@function)
+CAST_cbc_encrypt:
+
+ pushl %ebp
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl 28(%esp), %ebp
+ /* getting iv ptr from parameter 4 */
+ movl 36(%esp), %ebx
+ movl (%ebx), %esi
+ movl 4(%ebx), %edi
+ pushl %edi
+ pushl %esi
+ pushl %edi
+ pushl %esi
+ movl %esp, %ebx
+ movl 36(%esp), %esi
+ movl 40(%esp), %edi
+ /* getting encrypt flag from parameter 5 */
+ movl 56(%esp), %ecx
+ /* get and push parameter 3 */
+ movl 48(%esp), %eax
+ pushl %eax
+ pushl %ebx
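+ /*
+  * Stack recap (hedged; C prototype per OpenSSL's cast.h of the period:
+  *   void CAST_cbc_encrypt(in, out, length, schedule, ivec, enc)):
+  * the four IV pushes above copied the 8-byte IV twice -- after the two
+  * pushes just made, 8(%esp)/12(%esp) is the working block handed to
+  * CAST_encrypt/CAST_decrypt, and 16(%esp)/20(%esp) holds the chaining
+  * value (the IV, then the previous ciphertext block).  The last two
+  * pushes are that block's address (%ebx) and the key schedule, i.e.
+  * the argument frame for the calls below.
+  */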
+ cmpl $0, %ecx
+ jz .L000decrypt
+ andl $4294967288, %ebp
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+ jz .L001encrypt_finish
+.L002encrypt_loop:
+ movl (%esi), %ecx
+ movl 4(%esi), %edx
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
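+ /* 0x0F,0xC8+r encodes bswap: 15,200 is bswapl %eax and 15,203 is
+  * bswapl %ebx.  Emitting raw bytes avoids the mnemonic, which the
+  * older a.out-era assemblers targeted here did not accept (an
+  * inference; the file itself only gives the byte comments). */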
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L002encrypt_loop
+.L001encrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ xorl %ecx, %ecx
+ xorl %edx, %edx
+ movl .L004cbc_enc_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
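+ /*
+  * The tail is handled Duff's-device style: %ebp = length & 7 indexes
+  * .L004cbc_enc_jmp_table, and the ej7..ej1 labels below fall through,
+  * each adding one more input byte to %edx/%ecx (zeroed above) before
+  * the final encrypt.  A C sketch of the fall-through (hedged):
+  *
+  *   switch (length & 7) {              // every case falls through
+  *   case 7: d |= in[6] << 16;
+  *   case 6: d |= in[5] << 8;
+  *   case 5: d |= in[4];
+  *   case 4: c  = *(CAST_LONG *)in; break;
+  *   case 3: c |= in[2] << 16;
+  *   case 2: c |= in[1] << 8;
+  *   case 1: c |= in[0];
+  *   }
+  */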
+.L005ej7:
+ xorl %edx, %edx
+ movb 6(%esi), %dh
+ sall $8, %edx
+.L006ej6:
+ movb 5(%esi), %dh
+.L007ej5:
+ movb 4(%esi), %dl
+.L008ej4:
+ movl (%esi), %ecx
+ jmp .L009ejend
+.L010ej3:
+ xorl %ecx, %ecx
+ movb 2(%esi), %ch
+ sall $8, %ecx
+.L011ej2:
+ movb 1(%esi), %ch
+.L012ej1:
+ movb (%esi), %cl
+.L009ejend:
+ xorl %ecx, %eax
+ xorl %edx, %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_encrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl %eax, (%edi)
+ movl %ebx, 4(%edi)
+ jmp .L003finish
+.align ALIGN
+.L000decrypt:
+ andl $4294967288, %ebp
+ movl 16(%esp), %eax
+ movl 20(%esp), %ebx
+ jz .L013decrypt_finish
+.L014decrypt_loop:
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl %ecx, (%edi)
+ movl %edx, 4(%edi)
+ movl %eax, 16(%esp)
+ movl %ebx, 20(%esp)
+ addl $8, %esi
+ addl $8, %edi
+ subl $8, %ebp
+ jnz .L014decrypt_loop
+.L013decrypt_finish:
+ movl 52(%esp), %ebp
+ andl $7, %ebp
+ jz .L003finish
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl %eax, 8(%esp)
+ movl %ebx, 12(%esp)
+ call CAST_decrypt
+ movl 8(%esp), %eax
+ movl 12(%esp), %ebx
+.byte 15
+.byte 200 /* bswapl %eax */
+.byte 15
+.byte 203 /* bswapl %ebx */
+ movl 16(%esp), %ecx
+ movl 20(%esp), %edx
+ xorl %eax, %ecx
+ xorl %ebx, %edx
+ movl (%esi), %eax
+ movl 4(%esi), %ebx
+ movl .L023cbc_dec_jmp_table(,%ebp,4),%ebp
+ jmp *%ebp
+.L015dj7:
+ rorl $16, %edx
+ movb %dl, 6(%edi)
+ shrl $16, %edx
+.L016dj6:
+ movb %dh, 5(%edi)
+.L017dj5:
+ movb %dl, 4(%edi)
+.L018dj4:
+ movl %ecx, (%edi)
+ jmp .L019djend
+.L020dj3:
+ rorl $16, %ecx
+ movb %cl, 2(%edi)
+ shrl $16, %ecx
+.L021dj2:
+ movb %ch, 1(%edi)
+.L022dj1:
+ movb %cl, (%edi)
+.L019djend:
+ jmp .L003finish
+.align ALIGN
+.L003finish:
+ movl 60(%esp), %ecx
+ addl $24, %esp
+ movl %eax, (%ecx)
+ movl %ebx, 4(%ecx)
+ popl %edi
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
+.align ALIGN
+.L004cbc_enc_jmp_table:
+ .long 0
+ .long .L012ej1
+ .long .L011ej2
+ .long .L010ej3
+ .long .L008ej4
+ .long .L007ej5
+ .long .L006ej6
+ .long .L005ej7
+.align ALIGN
+.L023cbc_dec_jmp_table:
+ .long 0
+ .long .L022dj1
+ .long .L021dj2
+ .long .L020dj3
+ .long .L018dj4
+ .long .L017dj5
+ .long .L016dj6
+ .long .L015dj7
+.CAST_cbc_encrypt_end:
+ SIZE(CAST_cbc_encrypt,.CAST_cbc_encrypt_end-CAST_cbc_encrypt)
+.ident "desasm.pl"
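+/* Public C interfaces implemented above, as declared in OpenSSL's cast.h
+ * of the period (reproduced here as a hedged reference, not part of the
+ * generated output):
+ *   void CAST_encrypt(CAST_LONG *data, CAST_KEY *key);
+ *   void CAST_decrypt(CAST_LONG *data, CAST_KEY *key);
+ *   void CAST_cbc_encrypt(unsigned char *in, unsigned char *out,
+ *                         long length, CAST_KEY *ks,
+ *                         unsigned char *iv, int enc);
+ */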