author     Ben Laurie <ben@openssl.org>  1999-03-07 15:08:04 +0000
committer  Ben Laurie <ben@openssl.org>  1999-03-07 15:08:04 +0000
commit     05dc84b82bf5390bf565eda8f32d6f3e701d3b27 (patch)
tree       58d1729981bc57ab2eaaf07b753b6c6a585df8df /crypto
parent     7d3ce7ba371aa6df39ec5501282dd6e11b70bbbd (diff)
Fix DWP when only given three parameters.
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/perlasm/x86unix.pl         7
-rw-r--r--  crypto/rc4/Makefile.ssl           2
-rw-r--r--  crypto/rc4/asm/rx86unix.cpp     358
-rw-r--r--  crypto/ripemd/Makefile.ssl        2
-rw-r--r--  crypto/ripemd/asm/rm86unix.cpp 2016
-rw-r--r--  crypto/ripemd/asm/rmd-586.pl      2
6 files changed, 9 insertions, 2378 deletions
diff --git a/crypto/perlasm/x86unix.pl b/crypto/perlasm/x86unix.pl
index deb1185fc9..efaef87a0b 100644
--- a/crypto/perlasm/x86unix.pl
+++ b/crypto/perlasm/x86unix.pl
@@ -90,7 +90,12 @@ sub main'DWP
 	$reg2="$regs{$reg2}" if defined($regs{$reg2});
 	$ret.=$addr if ($addr ne "") && ($addr ne 0);
 	if ($reg2 ne "")
-		{ $ret.="($reg1,$reg2,$idx)"; }
+		{
+		if($idx ne "")
+			{ $ret.="($reg1,$reg2,$idx)"; }
+		else
+			{ $ret.="($reg1,$reg2)"; }
+		}
 	else
 		{ $ret.="($reg1)" }
 	return($ret);
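
DWP is the perlasm helper that formats an x86 memory operand for the AT&T-syntax unix back end. Before this patch, a three-parameter call such as &DWP(-8,"ebx","esi") left $idx empty but still emitted the trailing scale field, producing an operand with a dangling comma; the stale generated file deleted below still shows the symptom (leal -8(%ebx,%esi,), %ebx). What follows is a minimal standalone sketch of the fixed logic; the function name dwp and the sample operands are illustrative, and the real helper additionally maps abstract register names through the %regs table:

#!/usr/bin/perl
# Hypothetical standalone reduction of the fixed DWP logic.
sub dwp {
    my ($addr, $reg1, $reg2, $idx) = @_;
    $reg2 = "" unless defined $reg2;
    $idx  = "" unless defined $idx;
    my $ret = "";
    $ret .= $addr if ($addr ne "") && ($addr ne 0);
    if ($reg2 ne "") {
        # Only emit the scale when an index multiplier was given;
        # this inner branch is what the patch adds.
        if ($idx ne "") { $ret .= "($reg1,$reg2,$idx)"; }
        else            { $ret .= "($reg1,$reg2)"; }
    }
    else { $ret .= "($reg1)"; }
    return $ret;
}

print dwp(0, "%ebp", "%ecx", 4), "\n";  # (%ebp,%ecx,4)
print dwp(0, "%ebp", "%ecx"), "\n";     # (%ebp,%ecx) -- the three-parameter case
print dwp(8, "%esp"), "\n";             # 8(%esp)

Run under perl, the second call now prints (%ebp,%ecx) where the pre-patch logic would have printed (%ebp,%ecx,).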
diff --git a/crypto/rc4/Makefile.ssl b/crypto/rc4/Makefile.ssl
index 47539bd418..c1ce04590c 100644
--- a/crypto/rc4/Makefile.ssl
+++ b/crypto/rc4/Makefile.ssl
@@ -65,7 +65,7 @@ asm/rx86-out.o: asm/rx86unix.cpp
 asm/rx86bsdi.o: asm/rx86unix.cpp
 	$(CPP) -DBSDI asm/rx86unix.cpp | sed 's/ :/:/' | as -o asm/rx86bsdi.o
 
-asm/rx86unix.cpp:
+asm/rx86unix.cpp: asm/rc4-586.pl
 	(cd asm; perl rc4-586.pl cpp >rx86unix.cpp)
 
 files:
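
The point of this one-line change: asm/rx86unix.cpp previously had no prerequisites, so once a checked-in copy existed make considered it permanently up to date, and edits to the generator script never reached the assembler output. After the patch the rule reads

asm/rx86unix.cpp: asm/rc4-586.pl
	(cd asm; perl rc4-586.pl cpp >rx86unix.cpp)

so touching rc4-586.pl invalidates the generated file and it is regenerated on the next build. The ripemd Makefile below gets the identical fix, and presumably this is also why the two generated .cpp files are dropped from the tree in this commit: they are build products, and the committed copies had gone stale.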
diff --git a/crypto/rc4/asm/rx86unix.cpp b/crypto/rc4/asm/rx86unix.cpp
deleted file mode 100644
index ec1d72a110..0000000000
--- a/crypto/rc4/asm/rx86unix.cpp
+++ /dev/null
@@ -1,358 +0,0 @@
-/* Run the C pre-processor over this file with one of the following defined
- * ELF - elf object files,
- * OUT - a.out object files,
- * BSDI - BSDI style a.out object files
- * SOL - Solaris style elf
- */
-
-#define TYPE(a,b) .type a,b
-#define SIZE(a,b) .size a,b
-
-#if defined(OUT) || defined(BSDI)
-#define RC4 _RC4
-
-#endif
-
-#ifdef OUT
-#define OK 1
-#define ALIGN 4
-#endif
-
-#ifdef BSDI
-#define OK 1
-#define ALIGN 4
-#undef SIZE
-#undef TYPE
-#define SIZE(a,b)
-#define TYPE(a,b)
-#endif
-
-#if defined(ELF) || defined(SOL)
-#define OK 1
-#define ALIGN 16
-#endif
-
-#ifndef OK
-You need to define one of
-ELF - elf systems - linux-elf, NetBSD and DG-UX
-OUT - a.out systems - linux-a.out and FreeBSD
-SOL - solaris systems, which are elf with strange comment lines
-BSDI - a.out with a very primative version of as.
-#endif
-
-/* Let the Assembler begin :-) */
- /* Don't even think of reading this code */
- /* It was automatically generated by rc4-586.pl */
- /* Which is a perl program used to generate the x86 assember for */
- /* any of elf, a.out, BSDI,Win32, or Solaris */
- /* eric <eay@cryptsoft.com> */
-
- .file "rc4-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align ALIGN
-.globl RC4
- TYPE(RC4,@function)
-RC4:
-
- pushl %ebp
- pushl %ebx
- movl 12(%esp), %ebp
- movl 16(%esp), %ebx
- pushl %esi
- pushl %edi
- movl (%ebp), %ecx
- movl 4(%ebp), %edx
- movl 28(%esp), %esi
- incl %ecx
- subl $12, %esp
- addl $8, %ebp
- andl $255, %ecx
- leal -8(%ebx,%esi,), %ebx
- movl 44(%esp), %edi
- movl %ebx, 8(%esp)
- movl (%ebp,%ecx,4), %eax
- cmpl %esi, %ebx
- jl .L000end
-.L001start:
- addl $8, %esi
- /* Round 0 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, (%esp)
- /* Round 1 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 1(%esp)
- /* Round 2 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 2(%esp)
- /* Round 3 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 3(%esp)
- /* Round 4 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 4(%esp)
- /* Round 5 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 5(%esp)
- /* Round 6 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb %bl, 6(%esp)
- /* Round 7 */
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- addl $8, %edi
- movb %bl, 7(%esp)
- /* apply the cipher text */
- movl (%esp), %eax
- movl -8(%esi), %ebx
- xorl %ebx, %eax
- movl -4(%esi), %ebx
- movl %eax, -8(%edi)
- movl 4(%esp), %eax
- xorl %ebx, %eax
- movl 8(%esp), %ebx
- movl %eax, -4(%edi)
- movl (%ebp,%ecx,4), %eax
- cmpl %ebx, %esi
- jle .L001start
-.L000end:
- /* Round 0 */
- addl $8, %ebx
- incl %esi
- cmpl %esi, %ebx
- jl .L002finished
- movl %ebx, 8(%esp)
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, (%edi)
- /* Round 1 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 1(%edi)
- /* Round 2 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 2(%edi)
- /* Round 3 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 3(%edi)
- /* Round 4 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 4(%edi)
- /* Round 5 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movl (%ebp,%ecx,4), %eax
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 5(%edi)
- /* Round 6 */
- movl 8(%esp), %ebx
- cmpl %esi, %ebx
- jle .L002finished
- incl %esi
- addl %eax, %edx
- andl $255, %edx
- incl %ecx
- movl (%ebp,%edx,4), %ebx
- movl %ebx, -4(%ebp,%ecx,4)
- addl %eax, %ebx
- andl $255, %ecx
- andl $255, %ebx
- movl %eax, (%ebp,%edx,4)
- nop
- movl (%ebp,%ebx,4), %ebx
- movb -1(%esi), %bh
- xorb %bh, %bl
- movb %bl, 6(%edi)
-.L002finished:
- decl %ecx
- addl $12, %esp
- movl %edx, -4(%ebp)
- movb %cl, -8(%ebp)
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.RC4_end:
- SIZE(RC4,.RC4_end-RC4)
-.ident "RC4"
diff --git a/crypto/ripemd/Makefile.ssl b/crypto/ripemd/Makefile.ssl
index 83fdd1174a..926ead934e 100644
--- a/crypto/ripemd/Makefile.ssl
+++ b/crypto/ripemd/Makefile.ssl
@@ -61,7 +61,7 @@ asm/rm86-out.o: asm/rm86unix.cpp
 asm/rm86bsdi.o: asm/rm86unix.cpp
 	$(CPP) -DBSDI asm/rm86unix.cpp | sed 's/ :/:/' | as -o asm/rm86bsdi.o
 
-asm/rm86unix.cpp:
+asm/rm86unix.cpp: asm/rmd-586.pl
 	(cd asm; perl rmd-586.pl cpp >rm86unix.cpp)
 
 files:
diff --git a/crypto/ripemd/asm/rm86unix.cpp b/crypto/ripemd/asm/rm86unix.cpp
deleted file mode 100644
index f90f6f2fd6..0000000000
--- a/crypto/ripemd/asm/rm86unix.cpp
+++ /dev/null
@@ -1,2016 +0,0 @@
-/* Run the C pre-processor over this file with one of the following defined
- * ELF - elf object files,
- * OUT - a.out object files,
- * BSDI - BSDI style a.out object files
- * SOL - Solaris style elf
- */
-
-#define TYPE(a,b) .type a,b
-#define SIZE(a,b) .size a,b
-
-#if defined(OUT) || defined(BSDI)
-#define ripemd160_block_x86 _ripemd160_block_x86
-
-#endif
-
-#ifdef OUT
-#define OK 1
-#define ALIGN 4
-#endif
-
-#ifdef BSDI
-#define OK 1
-#define ALIGN 4
-#undef SIZE
-#undef TYPE
-#define SIZE(a,b)
-#define TYPE(a,b)
-#endif
-
-#if defined(ELF) || defined(SOL)
-#define OK 1
-#define ALIGN 16
-#endif
-
-#ifndef OK
-You need to define one of
-ELF - elf systems - linux-elf, NetBSD and DG-UX
-OUT - a.out systems - linux-a.out and FreeBSD
-SOL - solaris systems, which are elf with strange comment lines
-BSDI - a.out with a very primative version of as.
-#endif
-
-/* Let the Assembler begin :-) */
- /* Don't even think of reading this code */
- /* It was automatically generated by rmd-586.pl */
- /* Which is a perl program used to generate the x86 assember for */
- /* any of elf, a.out, BSDI,Win32, or Solaris */
- /* eric <eay@cryptsoft.com> */
-
- .file "rmd-586.s"
- .version "01.01"
-gcc2_compiled.:
-.text
- .align ALIGN
-.globl ripemd160_block_x86
- TYPE(ripemd160_block_x86,@function)
-ripemd160_block_x86:
- pushl %esi
- movl 16(%esp), %ecx
- pushl %edi
- movl 16(%esp), %esi
- pushl %ebp
- addl %esi, %ecx
- pushl %ebx
- subl $64, %ecx
- subl $88, %esp
- movl %ecx, (%esp)
- movl 108(%esp), %edi
-.L000start:
-
- movl (%esi), %eax
- movl 4(%esi), %ebx
- movl %eax, 4(%esp)
- movl %ebx, 8(%esp)
- movl 8(%esi), %eax
- movl 12(%esi), %ebx
- movl %eax, 12(%esp)
- movl %ebx, 16(%esp)
- movl 16(%esi), %eax
- movl 20(%esi), %ebx
- movl %eax, 20(%esp)
- movl %ebx, 24(%esp)
- movl 24(%esi), %eax
- movl 28(%esi), %ebx
- movl %eax, 28(%esp)
- movl %ebx, 32(%esp)
- movl 32(%esi), %eax
- movl 36(%esi), %ebx
- movl %eax, 36(%esp)
- movl %ebx, 40(%esp)
- movl 40(%esi), %eax
- movl 44(%esi), %ebx
- movl %eax, 44(%esp)
- movl %ebx, 48(%esp)
- movl 48(%esi), %eax
- movl 52(%esi), %ebx
- movl %eax, 52(%esp)
- movl %ebx, 56(%esp)
- movl 56(%esi), %eax
- movl 60(%esi), %ebx
- movl %eax, 60(%esp)
- movl %ebx, 64(%esp)
- addl $64, %esi
- movl (%edi), %eax
- movl %esi, 112(%esp)
- movl 4(%edi), %ebx
- movl 8(%edi), %ecx
- movl 12(%edi), %edx
- movl 16(%edi), %ebp
- /* 0 */
- movl %ecx, %esi
- xorl %edx, %esi
- movl 4(%esp), %edi
- xorl %ebx, %esi
- addl %edi, %eax
- roll $10, %ecx
- addl %esi, %eax
- movl %ebx, %esi
- roll $11, %eax
- addl %ebp, %eax
- /* 1 */
- xorl %ecx, %esi
- movl 8(%esp), %edi
- xorl %eax, %esi
- addl %esi, %ebp
- movl %eax, %esi
- roll $10, %ebx
- addl %edi, %ebp
- xorl %ebx, %esi
- roll $14, %ebp
- addl %edx, %ebp
- /* 2 */
- movl 12(%esp), %edi
- xorl %ebp, %esi
- addl %edi, %edx
- roll $10, %eax
- addl %esi, %edx
- movl %ebp, %esi
- roll $15, %edx
- addl %ecx, %edx
- /* 3 */
- xorl %eax, %esi
- movl 16(%esp), %edi
- xorl %edx, %esi
- addl %esi, %ecx
- movl %edx, %esi
- roll $10, %ebp
- addl %edi, %ecx
- xorl %ebp, %esi
- roll $12, %ecx
- addl %ebx, %ecx
- /* 4 */
- movl 20(%esp), %edi
- xorl %ecx, %esi
- addl %edi, %ebx
- roll $10, %edx
- addl %esi, %ebx
- movl %ecx, %esi
- roll $5, %ebx
- addl %eax, %ebx
- /* 5 */
- xorl %edx, %esi
- movl 24(%esp), %edi
- xorl %ebx, %esi
- addl %esi, %eax
- movl %ebx, %esi
- roll $10, %ecx
- addl %edi, %eax
- xorl %ecx, %esi
- roll $8, %eax
- addl %ebp, %eax
- /* 6 */
- movl 28(%esp), %edi
- xorl %eax, %esi
- addl %edi, %ebp
- roll $10, %ebx
- addl %esi, %ebp
- movl %eax, %esi
- roll $7, %ebp
- addl %edx, %ebp
- /* 7 */
- xorl %ebx, %esi
- movl 32(%esp), %edi
- xorl %ebp, %esi
- addl %esi, %edx
- movl %ebp, %esi
- roll $10, %eax
- addl %edi, %edx
- xorl %eax, %esi
- roll $9, %edx
- addl %ecx, %edx
- /* 8 */
- movl 36(%esp), %edi
- xorl %edx, %esi
- addl %edi, %ecx
- roll $10, %ebp
- addl %esi, %ecx
- movl %edx, %esi
- roll $11, %ecx
- addl %ebx, %ecx
- /* 9 */
- xorl %ebp, %esi
- movl 40(%esp), %edi
- xorl %ecx, %esi
- addl %esi, %ebx
- movl %ecx, %esi
- roll $10, %edx
- addl %edi, %ebx
- xorl %edx, %esi
- roll $13, %ebx
- addl %eax, %ebx
- /* 10 */
- movl 44(%esp), %edi
- xorl %ebx, %esi
- addl %edi, %eax
- roll $10, %ecx
- addl %esi, %eax
- movl %ebx, %esi
- roll $14, %eax
- addl %ebp, %eax
- /* 11 */
- xorl %ecx, %esi
- movl 48(%esp), %edi
- xorl %eax, %esi
- addl %esi, %ebp
- movl %eax, %esi
- roll $10, %ebx
- addl %edi, %ebp
- xorl %ebx, %esi
- roll $15, %ebp
- addl %edx, %ebp
- /* 12 */
- movl 52(%esp), %edi
- xorl %ebp, %esi
- addl %edi, %edx
- roll $10, %eax
- addl %esi, %edx
- movl %ebp, %esi
- roll $6, %edx
- addl %ecx, %edx
- /* 13 */
- xorl %eax, %esi
- movl 56(%esp), %edi
- xorl %edx, %esi
- addl %esi, %ecx
- movl %edx, %esi
- roll $10, %ebp
- addl %edi, %ecx
- xorl %ebp, %esi
- roll $7, %ecx
- addl %ebx, %ecx
- /* 14 */
- movl 60(%esp), %edi
- xorl %ecx, %esi
- addl %edi, %ebx
- roll $10, %edx
- addl %esi, %ebx
- movl %ecx, %esi
- roll $9, %ebx
- addl %eax, %ebx
- /* 15 */
- xorl %edx, %esi
- movl 64(%esp), %edi
- xorl %ebx, %esi
- addl %esi, %eax
- movl $-1, %esi
- roll $10, %ecx
- addl %edi, %eax
- movl 32(%esp), %edi
- roll $8, %eax
- addl %ebp, %eax
- /* 16 */
- addl %edi, %ebp
- movl %ebx, %edi
- subl %eax, %esi
- andl %eax, %edi
- andl %ecx, %esi
- orl %esi, %edi
- movl 20(%esp), %esi
- roll $10, %ebx
- leal 1518500249(%ebp,%edi,1),%ebp
- movl $-1, %edi
- roll $7, %ebp
- addl %edx, %ebp
- /* 17 */
- addl %esi, %edx
- movl %eax, %esi
- subl %ebp, %edi
- andl %ebp, %esi
- andl %ebx, %edi
- orl %edi, %esi
- movl 56(%esp), %edi
- roll $10, %eax
- leal 1518500249(%edx,%esi,1),%edx
- movl $-1, %esi
- roll $6, %edx
- addl %ecx, %edx
- /* 18 */
- addl %edi, %ecx
- movl %ebp, %edi
- subl %edx, %esi
- andl %edx, %edi
- andl %eax, %esi
- orl %esi, %edi
- movl 8(%esp), %esi
- roll $10, %ebp
- leal 1518500249(%ecx,%edi,1),%ecx
- movl $-1, %edi
- roll $8, %ecx
- addl %ebx, %ecx
- /* 19 */
- addl %esi, %ebx
- movl %edx, %esi
- subl %ecx, %edi
- andl %ecx, %esi
- andl %ebp, %edi
- orl %edi, %esi
- movl 44(%esp), %edi
- roll $10, %edx
- leal 1518500249(%ebx,%esi,1),%ebx
- movl $-1, %esi
- roll $13, %ebx
- addl %eax, %ebx
- /* 20 */
- addl %edi, %eax
- movl %ecx, %edi
- subl %ebx, %esi
- andl %ebx, %edi
- andl %edx, %esi
- orl %esi, %edi
- movl 28(%esp), %esi
- roll $10, %ecx
- leal 1518500249(%eax,%edi,1),%eax
- movl $-1, %edi
- roll $11, %eax
- addl %ebp, %eax
- /* 21 */
- addl %esi, %ebp
- movl %ebx, %esi
- subl %eax, %edi
- andl %eax, %esi
- andl %ecx, %edi
- orl %edi, %esi
- movl 64(%esp), %edi
- roll $10, %ebx
- leal 1518500249(%ebp,%esi,1),%ebp
- movl $-1, %esi
- roll $9, %ebp
- addl %edx, %ebp
- /* 22 */
- addl %edi, %edx
- movl %eax, %edi
- subl %ebp, %esi
- andl %ebp, %edi
- andl %ebx, %esi
- orl %esi, %edi
- movl 16(%esp), %esi
- roll $10, %eax
- leal 1518500249(%edx,%edi,1),%edx
- movl $-1, %edi
- roll $7, %edx
- addl %ecx, %edx
- /* 23 */
- addl %esi, %ecx
- movl %ebp, %esi
- subl %edx, %edi
- andl %edx, %esi
- andl %eax, %edi
- orl %edi, %esi
- movl 52(%esp), %edi
- roll $10, %ebp
- leal 1518500249(%ecx,%esi,1),%ecx
- movl $-1, %esi
- roll $15, %ecx
- addl %ebx, %ecx
- /* 24 */
- addl %edi, %ebx
- movl %edx, %edi
- subl %ecx, %esi
- andl %ecx, %edi
- andl %ebp, %esi
- orl %esi, %edi
- movl 4(%esp), %esi
- roll $10, %edx
- leal 1518500249(%ebx,%edi,1),%ebx
- movl $-1, %edi
- roll $7, %ebx
- addl %eax, %ebx
- /* 25 */
- addl %esi, %eax
- movl %ecx, %esi
- subl %ebx, %edi
- andl %ebx, %esi
- andl %edx, %edi
- orl %edi, %esi
- movl 40(%esp), %edi
- roll $10, %ecx
- leal 1518500249(%eax,%esi,1),%eax
- movl $-1, %esi
- roll $12, %eax
- addl %ebp, %eax
- /* 26 */
- addl %edi, %ebp
- movl %ebx, %edi
- subl %eax, %esi
- andl %eax, %edi
- andl %ecx, %esi
- orl %esi, %edi
- movl 24(%esp), %esi
- roll $10, %ebx
- leal 1518500249(%ebp,%edi,1),%ebp
- movl $-1, %edi
- roll $15, %ebp
- addl %edx, %ebp
- /* 27 */
- addl %esi, %edx
- movl %eax, %esi
- subl %ebp, %edi
- andl %ebp, %esi
- andl %ebx, %edi
- orl %edi, %esi
- movl 12(%esp), %edi
- roll $10, %eax
- leal 1518500249(%edx,%esi,1),%edx
- movl $-1, %esi
- roll $9, %edx
- addl %ecx, %edx
- /* 28 */
- addl %edi, %ecx
- movl %ebp, %edi
- subl %edx, %esi
- andl %edx, %edi
- andl %eax, %esi
- orl %esi, %edi
- movl 60(%esp), %esi
- roll $10, %ebp
- leal 1518500249(%ecx,%edi,1),%ecx
- movl $-1, %edi
- roll $11, %ecx
- addl %ebx, %ecx
- /* 29 */
- addl %esi, %ebx
- movl %edx, %esi
- subl %ecx, %edi
- andl %ecx, %esi
- andl %ebp, %edi
- orl %edi, %esi
- movl 48(%esp), %edi
- roll $10, %edx
- leal 1518500249(%ebx,%esi,1),%ebx
- movl $-1, %esi
- roll $7, %ebx
- addl %eax, %ebx
- /* 30 */
- addl %edi, %eax
- movl %ecx, %edi
- subl %ebx, %esi
- andl %ebx, %edi
- andl %edx, %esi
- orl %esi, %edi
- movl 36(%esp), %esi
- roll $10, %ecx
- leal 1518500249(%eax,%edi,1),%eax
- movl $-1, %edi
- roll $13, %eax
- addl %ebp, %eax
- /* 31 */
- addl %esi, %ebp
- movl %ebx, %esi
- subl %eax, %edi
- andl %eax, %esi
- andl %ecx, %edi
- orl %edi, %esi
- movl $-1, %edi
- roll $10, %ebx
- leal 1518500249(%ebp,%esi,1),%ebp
- subl %eax, %edi
- roll $12, %ebp
- addl %edx, %ebp
- /* 32 */
- movl 16(%esp), %esi
- orl %ebp, %edi
- addl %esi, %edx
- xorl %ebx, %edi
- movl $-1, %esi
- roll $10, %eax
- leal 1859775393(%edx,%edi,1),%edx
- subl %ebp, %esi
- roll $11, %edx
- addl %ecx, %edx
- /* 33 */
- movl 44(%esp), %edi
- orl %edx, %esi
- addl %edi, %ecx
- xorl %eax, %esi
- movl $-1, %edi
- roll $10, %ebp
- leal 1859775393(%ecx,%esi,1),%ecx
- subl %edx, %edi
- roll $13, %ecx
- addl %ebx, %ecx
- /* 34 */
- movl 60(%esp), %esi
- orl %ecx, %edi
- addl %esi, %ebx
- xorl %ebp, %edi
- movl $-1, %esi
- roll $10, %edx
- leal 1859775393(%ebx,%edi,1),%ebx
- subl %ecx, %esi
- roll $6, %ebx
- addl %eax, %ebx
- /* 35 */
- movl 20(%esp), %edi
- orl %ebx, %esi
- addl %edi, %eax
- xorl %edx, %esi
- movl $-1, %edi
- roll $10, %ecx
- leal 1859775393(%eax,%esi,1),%eax
- subl %ebx, %edi
- roll $7, %eax
- addl %ebp, %eax
- /* 36 */
- movl 40(%esp), %esi
- orl %eax, %edi
- addl %esi, %ebp
- xorl %ecx, %edi
- movl $-1, %esi
- roll $10, %ebx
- leal 1859775393(%ebp,%edi,1),%ebp
- subl %eax, %esi
- roll $14, %ebp
- addl %edx, %ebp
- /* 37 */
- movl 64(%esp), %edi
- orl %ebp, %esi
- addl %edi, %edx
- xorl %ebx, %esi
- movl $-1, %edi
- roll $10, %eax
- leal 1859775393(%edx,%esi,1),%edx
- subl %ebp, %edi
- roll $9, %edx
- addl %ecx, %edx
- /* 38 */
- movl 36(%esp), %esi
- orl %edx, %edi
- addl %esi, %ecx
- xorl %eax, %edi
- movl $-1, %esi
- roll $10, %ebp
- leal 1859775393(%ecx,%edi,1),%ecx
- subl %edx, %esi
- roll $13, %ecx
- addl %ebx, %ecx
- /* 39 */
- movl 8(%esp), %edi
- orl %ecx, %esi
- addl %edi, %ebx
- xorl %ebp, %esi
- movl $-1, %edi
- roll $10, %edx
- leal 1859775393(%ebx,%esi,1),%ebx
- subl %ecx, %edi
- roll $15, %ebx
- addl %eax, %ebx
- /* 40 */
- movl 12(%esp), %esi
- orl %ebx, %edi
- addl %esi, %eax
- xorl %edx, %edi
- movl $-1, %esi
- roll $10, %ecx
- leal 1859775393(%eax,%edi,1),%eax
- subl %ebx, %esi
- roll $14, %eax
- addl %ebp, %eax
- /* 41 */
- movl 32(%esp), %edi
- orl %eax, %esi
- addl %edi, %ebp
- xorl %ecx, %esi
- movl $-1, %edi
- roll $10, %ebx
- leal 1859775393(%ebp,%esi,1),%ebp
- subl %eax, %edi
- roll $8, %ebp
- addl %edx, %ebp
- /* 42 */
- movl 4(%esp), %esi
- orl %ebp, %edi
- addl %esi, %edx
- xorl %ebx, %edi
- movl $-1, %esi
- roll $10, %eax
- leal 1859775393(%edx,%edi,1),%edx
- subl %ebp, %esi
- roll $13, %edx
- addl %ecx, %edx
- /* 43 */
- movl 28(%esp), %edi
- orl %edx, %esi
- addl %edi, %ecx
- xorl %eax, %esi
- movl $-1, %edi
- roll $10, %ebp
- leal 1859775393(%ecx,%esi,1),%ecx
- subl %edx, %edi
- roll $6, %ecx
- addl %ebx, %ecx
- /* 44 */
- movl 56(%esp), %esi
- orl %ecx, %edi
- addl %esi, %ebx
- xorl %ebp, %edi
- movl $-1, %esi
- roll $10, %edx
- leal 1859775393(%ebx,%edi,1),%ebx
- subl %ecx, %esi
- roll $5, %ebx
- addl %eax, %ebx
- /* 45 */
- movl 48(%esp), %edi
- orl %ebx, %esi
- addl %edi, %eax
- xorl %edx, %esi
- movl $-1, %edi
- roll $10, %ecx
- leal 1859775393(%eax,%esi,1),%eax
- subl %ebx, %edi
- roll $12, %eax
- addl %ebp, %eax
- /* 46 */
- movl 24(%esp), %esi
- orl %eax, %edi
- addl %esi, %ebp
- xorl %ecx, %edi
- movl $-1, %esi
- roll $10, %ebx
- leal 1859775393(%ebp,%edi,1),%ebp
- subl %eax, %esi
- roll $7, %ebp
- addl %edx, %ebp
- /* 47 */
- movl 52(%esp), %edi
- orl %ebp, %esi
- addl %edi, %edx
- xorl %ebx, %esi
- movl $-1, %edi
- roll $10, %eax
- leal 1859775393(%edx,%esi,1),%edx
- movl %eax, %esi
- roll $5, %edx
- addl %ecx, %edx
- /* 48 */
- subl %eax, %edi
- andl %edx, %esi
- andl %ebp, %edi
- orl %esi, %edi
- movl 8(%esp), %esi
- roll $10, %ebp
- leal 2400959708(%ecx,%edi,),%ecx
- movl $-1, %edi
- addl %esi, %ecx
- movl %ebp, %esi
- roll $11, %ecx
- addl %ebx, %ecx
- /* 49 */
- subl %ebp, %edi
- andl %ecx, %esi
- andl %edx, %edi
- orl %esi, %edi
- movl 40(%esp), %esi
- roll $10, %edx
- leal 2400959708(%ebx,%edi,),%ebx
- movl $-1, %edi
- addl %esi, %ebx
- movl %edx, %esi
- roll $12, %ebx
- addl %eax, %ebx
- /* 50 */
- subl %edx, %edi
- andl %ebx, %esi
- andl %ecx, %edi
- orl %esi, %edi
- movl 48(%esp), %esi
- roll $10, %ecx
- leal 2400959708(%eax,%edi,),%eax
- movl $-1, %edi
- addl %esi, %eax
- movl %ecx, %esi
- roll $14, %eax
- addl %ebp, %eax
- /* 51 */
- subl %ecx, %edi
- andl %eax, %esi
- andl %ebx, %edi
- orl %esi, %edi
- movl 44(%esp), %esi
- roll $10, %ebx
- leal 2400959708(%ebp,%edi,),%ebp
- movl $-1, %edi
- addl %esi, %ebp
- movl %ebx, %esi
- roll $15, %ebp
- addl %edx, %ebp
- /* 52 */
- subl %ebx, %edi
- andl %ebp, %esi
- andl %eax, %edi
- orl %esi, %edi
- movl 4(%esp), %esi
- roll $10, %eax
- leal 2400959708(%edx,%edi,),%edx
- movl $-1, %edi
- addl %esi, %edx
- movl %eax, %esi
- roll $14, %edx
- addl %ecx, %edx
- /* 53 */
- subl %eax, %edi
- andl %edx, %esi
- andl %ebp, %edi
- orl %esi, %edi
- movl 36(%esp), %esi
- roll $10, %ebp
- leal 2400959708(%ecx,%edi,),%ecx
- movl $-1, %edi
- addl %esi, %ecx
- movl %ebp, %esi
- roll $15, %ecx
- addl %ebx, %ecx
- /* 54 */
- subl %ebp, %edi
- andl %ecx, %esi
- andl %edx, %edi
- orl %esi, %edi
- movl 52(%esp), %esi
- roll $10, %edx
- leal 2400959708(%ebx,%edi,),%ebx
- movl $-1, %edi
- addl %esi, %ebx
- movl %edx, %esi
- roll $9, %ebx
- addl %eax, %ebx
- /* 55 */
- subl %edx, %edi
- andl %ebx, %esi
- andl %ecx, %edi
- orl %esi, %edi
- movl 20(%esp), %esi
- roll $10, %ecx
- leal 2400959708(%eax,%edi,),%eax
- movl $-1, %edi
- addl %esi, %eax
- movl %ecx, %esi
- roll $8, %eax
- addl %ebp, %eax
- /* 56 */
- subl %ecx, %edi
- andl %eax, %esi
- andl %ebx, %edi
- orl %esi, %edi
- movl 56(%esp), %esi
- roll $10, %ebx
- leal 2400959708(%ebp,%edi,),%ebp
- movl $-1, %edi
- addl %esi, %ebp
- movl %ebx, %esi
- roll $9, %ebp
- addl %edx, %ebp
- /* 57 */
- subl %ebx, %edi
- andl %ebp, %esi
- andl %eax, %edi
- orl %esi, %edi
- movl 16(%esp), %esi
- roll $10, %eax
- leal 2400959708(%edx,%edi,),%edx
- movl $-1, %edi
- addl %esi, %edx
- movl %eax, %esi
- roll $14, %edx
- addl %ecx, %edx
- /* 58 */
- subl %eax, %edi
- andl %edx, %esi
- andl %ebp, %edi
- orl %esi, %edi
- movl 32(%esp), %esi
- roll $10, %ebp
- leal 2400959708(%ecx,%edi,),%ecx
- movl $-1, %edi
- addl %esi, %ecx
- movl %ebp, %esi
- roll $5, %ecx
- addl %ebx, %ecx
- /* 59 */
- subl %ebp, %edi
- andl %ecx, %esi
- andl %edx, %edi
- orl %esi, %edi
- movl 64(%esp), %esi
- roll $10, %edx
- leal 2400959708(%ebx,%edi,),%ebx
- movl $-1, %edi
- addl %esi, %ebx
- movl %edx, %esi
- roll $6, %ebx
- addl %eax, %ebx
- /* 60 */
- subl %edx, %edi
- andl %ebx, %esi
- andl %ecx, %edi
- orl %esi, %edi
- movl 60(%esp), %esi
- roll $10, %ecx
- leal 2400959708(%eax,%edi,),%eax
- movl $-1, %edi
- addl %esi, %eax
- movl %ecx, %esi
- roll $8, %eax
- addl %ebp, %eax
- /* 61 */
- subl %ecx, %edi
- andl %eax, %esi
- andl %ebx, %edi
- orl %esi, %edi
- movl 24(%esp), %esi
- roll $10, %ebx
- leal 2400959708(%ebp,%edi,),%ebp
- movl $-1, %edi
- addl %esi, %ebp
- movl %ebx, %esi
- roll $6, %ebp
- addl %edx, %ebp
- /* 62 */
- subl %ebx, %edi
- andl %ebp, %esi
- andl %eax, %edi
- orl %esi, %edi
- movl 28(%esp), %esi
- roll $10, %eax
- leal 2400959708(%edx,%edi,),%edx
- movl $-1, %edi
- addl %esi, %edx
- movl %eax, %esi
- roll $5, %edx
- addl %ecx, %edx
- /* 63 */
- subl %eax, %edi
- andl %edx, %esi
- andl %ebp, %edi
- orl %esi, %edi
- movl 12(%esp), %esi
- roll $10, %ebp
- leal 2400959708(%ecx,%edi,),%ecx
- movl $-1, %edi
- addl %esi, %ecx
- subl %ebp, %edi
- roll $12, %ecx
- addl %ebx, %ecx
- /* 64 */
- movl 20(%esp), %esi
- orl %edx, %edi
- addl %esi, %ebx
- xorl %ecx, %edi
- movl $-1, %esi
- roll $10, %edx