Diffstat (limited to 'crypto/bn/asm/pa-risc.s')
-rw-r--r--  crypto/bn/asm/pa-risc.s  710
1 file changed, 710 insertions, 0 deletions
diff --git a/crypto/bn/asm/pa-risc.s b/crypto/bn/asm/pa-risc.s
new file mode 100644
index 0000000000..c49c433a83
--- /dev/null
+++ b/crypto/bn/asm/pa-risc.s
@@ -0,0 +1,710 @@
+ .SPACE $PRIVATE$
+ .SUBSPA $DATA$,QUAD=1,ALIGN=8,ACCESS=31
+ .SUBSPA $BSS$,QUAD=1,ALIGN=8,ACCESS=31,ZERO,SORT=82
+ .SPACE $TEXT$
+ .SUBSPA $LIT$,QUAD=0,ALIGN=8,ACCESS=44
+ .SUBSPA $CODE$,QUAD=0,ALIGN=8,ACCESS=44,CODE_ONLY
+ .IMPORT $global$,DATA
+ .IMPORT $$dyncall,MILLICODE
+; gcc_compiled.:
+ .SPACE $TEXT$
+ .SUBSPA $CODE$
+
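+; bn_mul_add_word: arguments arrive per the PA-RISC calling convention as
+; %r26 = rp, %r25 = ap, %r24 = num, %r23 = w.  For each of the num words it
+; computes rp[i] += ap[i] * w with carry propagation and returns the final
+; carry in %r28.  The loop below is unrolled four words per pass.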
+ .align 4
+ .EXPORT bn_mul_add_word,ENTRY,PRIV_LEV=3,ARGW0=GR,ARGW1=GR,ARGW2=GR,ARGW3=GR,RTNVAL=GR
+bn_mul_add_word
+ .PROC
+ .CALLINFO FRAME=0,CALLS,SAVE_RP
+ .ENTRY
+ stw %r2,-20(0,%r30)
+ ldi 0,%r28
+ extru %r23,31,16,%r2
+ stw %r2,-16(0,%r30)
+ extru %r23,15,16,%r23
+ ldil L'65536,%r31
+ fldws -16(0,%r30),%fr11R
+ stw %r23,-16(0,%r30)
+ ldo 12(%r25),%r29
+ ldo 12(%r26),%r23
+ fldws -16(0,%r30),%fr11L
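+; Per word: split ap[i] into 16-bit halves, form the four partial products
+; on the FPU with xmpyu against the halves of w (held in %fr11L/%fr11R),
+; recombine them, then add the running carry (%r28) and the old rp[i].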
+L$0002
+ ldw 0(0,%r25),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0005
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi 1,%r19,%r19
+ ldw 0(0,%r26),%r28
+ addl %r20,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0003
+ stw %r20,0(0,%r26)
+ ldw -8(0,%r29),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0010
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi 1,%r19,%r19
+ ldw -8(0,%r23),%r28
+ addl %r20,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0003
+ stw %r20,-8(0,%r23)
+ ldw -4(0,%r29),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0015
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi 1,%r19,%r19
+ ldw -4(0,%r23),%r28
+ addl %r20,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0003
+ stw %r20,-4(0,%r23)
+ ldw 0(0,%r29),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0020
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi 1,%r19,%r19
+ ldw 0(0,%r23),%r28
+ addl %r20,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0003
+ stw %r20,0(0,%r23)
+ ldo 16(%r29),%r29
+ ldo 16(%r25),%r25
+ ldo 16(%r23),%r23
+ bl L$0002,0
+ ldo 16(%r26),%r26
+L$0003
+ ldw -20(0,%r30),%r2
+ bv,n 0(%r2)
+ .EXIT
+ .PROCEND
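+; bn_mul_word: same register usage as bn_mul_add_word (%r26 = rp, %r25 = ap,
+; %r24 = num, %r23 = w), but rp[i] is overwritten with ap[i] * w plus the
+; carry from the previous word rather than accumulated into.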
+ .align 4
+ .EXPORT bn_mul_word,ENTRY,PRIV_LEV=3,ARGW0=GR,ARGW1=GR,ARGW2=GR,ARGW3=GR,RTNVAL=GR
+bn_mul_word
+ .PROC
+ .CALLINFO FRAME=0,CALLS,SAVE_RP
+ .ENTRY
+ stw %r2,-20(0,%r30)
+ ldi 0,%r28
+ extru %r23,31,16,%r2
+ stw %r2,-16(0,%r30)
+ extru %r23,15,16,%r23
+ ldil L'65536,%r31
+ fldws -16(0,%r30),%fr11R
+ stw %r23,-16(0,%r30)
+ ldo 12(%r26),%r29
+ ldo 12(%r25),%r23
+ fldws -16(0,%r30),%fr11L
+L$0026
+ ldw 0(0,%r25),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0029
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0027
+ stw %r20,0(0,%r26)
+ ldw -8(0,%r23),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0033
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0027
+ stw %r20,-8(0,%r29)
+ ldw -4(0,%r23),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0037
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0027
+ stw %r20,-4(0,%r29)
+ ldw 0(0,%r23),%r19
+ extru %r19,31,16,%r20
+ stw %r20,-16(0,%r30)
+ extru %r19,15,16,%r19
+ fldws -16(0,%r30),%fr22L
+ stw %r19,-16(0,%r30)
+ xmpyu %fr22L,%fr11R,%fr8
+ fldws -16(0,%r30),%fr22L
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr11R,%fr22L,%fr10
+ ldw -16(0,%r30),%r2
+ stw %r20,-16(0,%r30)
+ xmpyu %fr22L,%fr11L,%fr9
+ fldws -16(0,%r30),%fr22L
+ fstws %fr10R,-16(0,%r30)
+ copy %r2,%r22
+ ldw -16(0,%r30),%r2
+ fstws %fr9R,-16(0,%r30)
+ xmpyu %fr11L,%fr22L,%fr8
+ copy %r2,%r19
+ ldw -16(0,%r30),%r2
+ fstws %fr8R,-16(0,%r30)
+ copy %r2,%r20
+ ldw -16(0,%r30),%r2
+ addl %r2,%r19,%r21
+ comclr,<<= %r19,%r21,0
+ addl %r20,%r31,%r20
+L$0041
+ extru %r21,15,16,%r19
+ addl %r20,%r19,%r20
+ zdep %r21,15,16,%r19
+ addl %r22,%r19,%r22
+ comclr,<<= %r19,%r22,0
+ addi,tr 1,%r20,%r19
+ copy %r20,%r19
+ addl %r22,%r28,%r20
+ comclr,<<= %r28,%r20,0
+ addi,tr 1,%r19,%r28
+ copy %r19,%r28
+ addib,= -1,%r24,L$0027
+ stw %r20,0(0,%r29)
+ ldo 16(%r23),%r23
+ ldo 16(%r25),%r25
+ ldo 16(%r29),%r29
+ bl L$0026,0
+ ldo 16(%r26),%r26
+L$0027
+ ldw -20(0,%r30),%r2
+ bv,n 0(%r2)
+ .EXIT
+ .PROCEND
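+; bn_sqr_words: %r26 = rp, %r25 = ap, %r24 = num; stores the 64-bit square
+; of each ap[i] as a low/high pair at rp[2*i] and rp[2*i+1], again using
+; xmpyu on 16-bit halves and unrolling four input words per pass.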
+ .align 4
+ .EXPORT bn_sqr_words,ENTRY,PRIV_LEV=3,ARGW0=GR,ARGW1=GR,ARGW2=GR
+bn_sqr_words
+ .PROC
+ .CALLINFO FRAME=0,NO_CALLS
+ .ENTRY
+ ldo 28(%r26),%r23
+ ldo 12(%r25),%r28
+L$0046
+ ldw 0(0,%r25),%r21
+ extru %r21,31,16,%r22
+ stw %r22,-16(0,%r30)
+ extru %r21,15,16,%r21
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ stw %r22,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ stw %r21,-16(0,%r30)
+ copy %r29,%r19
+ xmpyu %fr10L,%fr10R,%fr8
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ fstws %fr8R,-16(0,%r30)
+ extru %r19,16,17,%r20
+ zdep %r19,14,15,%r19
+ ldw -16(0,%r30),%r29
+ xmpyu %fr10L,%fr10R,%fr9
+ addl %r29,%r19,%r22
+ stw %r22,0(0,%r26)
+ fstws %fr9R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ addl %r29,%r20,%r21
+ comclr,<<= %r19,%r22,0
+ addi 1,%r21,%r21
+ addib,= -1,%r24,L$0057
+ stw %r21,-24(0,%r23)
+ ldw -8(0,%r28),%r21
+ extru %r21,31,16,%r22
+ stw %r22,-16(0,%r30)
+ extru %r21,15,16,%r21
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ stw %r22,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ stw %r21,-16(0,%r30)
+ copy %r29,%r19
+ xmpyu %fr10L,%fr10R,%fr8
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ fstws %fr8R,-16(0,%r30)
+ extru %r19,16,17,%r20
+ zdep %r19,14,15,%r19
+ ldw -16(0,%r30),%r29
+ xmpyu %fr10L,%fr10R,%fr9
+ addl %r29,%r19,%r22
+ stw %r22,-20(0,%r23)
+ fstws %fr9R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ addl %r29,%r20,%r21
+ comclr,<<= %r19,%r22,0
+ addi 1,%r21,%r21
+ addib,= -1,%r24,L$0057
+ stw %r21,-16(0,%r23)
+ ldw -4(0,%r28),%r21
+ extru %r21,31,16,%r22
+ stw %r22,-16(0,%r30)
+ extru %r21,15,16,%r21
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ stw %r22,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ stw %r21,-16(0,%r30)
+ copy %r29,%r19
+ xmpyu %fr10L,%fr10R,%fr8
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ fstws %fr8R,-16(0,%r30)
+ extru %r19,16,17,%r20
+ zdep %r19,14,15,%r19
+ ldw -16(0,%r30),%r29
+ xmpyu %fr10L,%fr10R,%fr9
+ addl %r29,%r19,%r22
+ stw %r22,-12(0,%r23)
+ fstws %fr9R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ addl %r29,%r20,%r21
+ comclr,<<= %r19,%r22,0
+ addi 1,%r21,%r21
+ addib,= -1,%r24,L$0057
+ stw %r21,-8(0,%r23)
+ ldw 0(0,%r28),%r21
+ extru %r21,31,16,%r22
+ stw %r22,-16(0,%r30)
+ extru %r21,15,16,%r21
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ stw %r22,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ stw %r21,-16(0,%r30)
+ copy %r29,%r19
+ xmpyu %fr10L,%fr10R,%fr8
+ fldws -16(0,%r30),%fr10L
+ stw %r21,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ fstws %fr8R,-16(0,%r30)
+ extru %r19,16,17,%r20
+ zdep %r19,14,15,%r19
+ ldw -16(0,%r30),%r29
+ xmpyu %fr10L,%fr10R,%fr9
+ addl %r29,%r19,%r22
+ stw %r22,-4(0,%r23)
+ fstws %fr9R,-16(0,%r30)
+ ldw -16(0,%r30),%r29
+ addl %r29,%r20,%r21
+ comclr,<<= %r19,%r22,0
+ addi 1,%r21,%r21
+ addib,= -1,%r24,L$0057
+ stw %r21,0(0,%r23)
+ ldo 16(%r28),%r28
+ ldo 16(%r25),%r25
+ ldo 32(%r23),%r23
+ bl L$0046,0
+ ldo 32(%r26),%r26
+L$0057
+ bv,n 0(%r2)
+ .EXIT
+ .PROCEND
+ .IMPORT BN_num_bits_word,CODE
+ .IMPORT fprintf,CODE
+ .IMPORT __iob,DATA
+ .SPACE $TEXT$
+ .SUBSPA $LIT$
+
+ .align 4
+L$C0000
+ .STRING "Division would overflow\x0a\x00"
+ .IMPORT abort,CODE
+ .SPACE $TEXT$
+ .SUBSPA $CODE$
+
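+; bn_div64: %r26 = h, %r25 = l, %r24 = d; returns in %r28 the 32-bit quotient
+; of the 64-bit value h:l divided by d (-1 if d is zero).  If the quotient
+; would not fit in 32 bits it prints the message above and calls abort.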
+ .align 4
+ .EXPORT bn_div64,ENTRY,PRIV_LEV=3,ARGW0=GR,ARGW1=GR,ARGW2=GR,RTNVAL=GR
+bn_div64
+ .PROC
+ .CALLINFO FRAME=128,CALLS,SAVE_RP,ENTRY_GR=8
+ .ENTRY
+ stw %r2,-20(0,%r30)
+ stwm %r8,128(0,%r30)
+ stw %r7,-124(0,%r30)
+ stw %r4,-112(0,%r30)
+ stw %r3,-108(0,%r30)
+ copy %r26,%r3
+ copy %r25,%r4
+ stw %r6,-120(0,%r30)
+ ldi 0,%r7
+ stw %r5,-116(0,%r30)
+ movb,<> %r24,%r5,L$0059
+ ldi 2,%r6
+ bl L$0076,0
+ ldi -1,%r28
+L$0059
+ .CALL ARGW0=GR
+ bl BN_num_bits_word,%r2
+ copy %r5,%r26
+ ldi 32,%r19
+ comb,= %r19,%r28,L$0060
+ subi 31,%r28,%r19
+ mtsar %r19
+ zvdepi 1,32,%r19
+ comb,>>= %r19,%r3,L$0060
+ addil LR'__iob-$global$+32,%r27
+ ldo RR'__iob-$global$+32(%r1),%r26
+ ldil LR'L$C0000,%r25
+ .CALL ARGW0=GR,ARGW1=GR
+ bl fprintf,%r2
+ ldo RR'L$C0000(%r25),%r25
+ .CALL
+ bl abort,%r2
+ nop
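+; Normalize: if h >= d, subtract d once, then shift d (and the dividend h:l)
+; left by 32 - BN_num_bits_word(d) so the divisor's top bit is set.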
+L$0060
+ comb,>> %r5,%r3,L$0061
+ subi 32,%r28,%r28
+ sub %r3,%r5,%r3
+L$0061
+ comib,= 0,%r28,L$0062
+ subi 31,%r28,%r19
+ mtsar %r19
+ zvdep %r5,32,%r5
+ zvdep %r3,32,%r21
+ subi 32,%r28,%r20
+ mtsar %r20
+ vshd 0,%r4,%r20
+ or %r21,%r20,%r3
+ mtsar %r19
+ zvdep %r4,32,%r4
+L$0062
+ extru %r5,15,16,%r23
+ extru %r5,31,16,%r28
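+; Main loop: estimate a 16-bit quotient digit by dividing the high part of
+; the remainder by the top half of d via $$divU (0xffff when the top halves
+; match), multiply back and correct the estimate, then repeat once more;
+; %r6 counts the two digits, which are combined into the result in %r28.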
+L$0063
+ extru %r3,15,16,%r19
+ comb,<> %r23,%r19,L$0066
+ copy %r3,%r26
+ bl L$0067,0
+ zdepi -1,31,16,%r29
+L$0066
+ .IMPORT $$divU,MILLICODE
+ bl $$divU,%r31
+ copy %r23,%r25
+L$0067
+ stw %r29,-16(0,%r30)
+ fldws -16(0,%r30),%fr10L
+ stw %r28,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ stw %r23,-16(0,%r30)
+ xmpyu %fr10L,%fr10R,%fr8
+ fldws -16(0,%r30),%fr10R
+ fstws %fr8R,-16(0,%r30)
+ xmpyu %fr10L,%fr10R,%fr9
+ ldw -16(0,%r30),%r8
+ fstws %fr9R,-16(0,%r30)
+ copy %r8,%r22
+ ldw -16(0,%r30),%r8
+ extru %r4,15,16,%r24
+ copy %r8,%r21
+L$0068
+ sub %r3,%r21,%r20
+ copy %r20,%r19
+ depi 0,31,16,%r19
+ comib,<> 0,%r19,L$0069
+ zdep %r20,15,16,%r19
+ addl %r19,%r24,%r19
+ comb,>>= %r19,%r22,L$0069
+ sub %r22,%r28,%r22
+ sub %r21,%r23,%r21
+ bl L$0068,0
+ ldo -1(%r29),%r29
+L$0069
+ stw %r29,-16(0,%r30)
+ fldws -16(0,%r30),%fr10L
+ stw %r28,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ ldw -16(0,%r30),%r8
+ stw %r23,-16(0,%r30)
+ fldws -16(0,%r30),%fr10R
+ copy %r8,%r19
+ xmpyu %fr10L,%fr10R,%fr8
+ fstws %fr8R,-16(0,%r30)
+ extru %r19,15,16,%r20
+ ldw -16(0,%r30),%r8
+ zdep %r19,15,16,%r19
+ addl %r8,%r20,%r20
+ comclr,<<= %r19,%r4,0
+ addi 1,%r20,%r20
+ comb,<<= %r20,%r3,L$0074
+ sub %r4,%r19,%r4
+ addl %r3,%r5,%r3
+ ldo -1(%r29),%r29
+L$0074
+ addib,= -1,%r6,L$0064
+ sub %r3,%r20,%r3
+ zdep %r29,15,16,%r7
+ shd %r3,%r4,16,%r3
+ bl L$0063,0
+ zdep %r4,15,16,%r4
+L$0064
+ or %r7,%r29,%r28
+L$0076
+ ldw -148(0,%r30),%r2
+ ldw -124(0,%r30),%r7
+ ldw -120(0,%r30),%r6
+ ldw -116(0,%r30),%r5
+ ldw -112(0,%r30),%r4
+ ldw -108(0,%r30),%r3
+ bv 0(%r2)
+ ldwm -128(0,%r30),%r8
+ .EXIT
+ .PROCEND
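
For reference, the word-level operations these four routines implement, sketched
in portable C.  The BN_ULONG/BN_ULLONG names and prototypes follow the OpenSSL
convention of this file's era and are assumptions made for illustration only;
the assembly above is the authoritative version (notably, it forms each 32x32
product from four 16x16 xmpyu multiplies instead of relying on a 64-bit type).

    #include <stdint.h>

    typedef uint32_t BN_ULONG;   /* one bignum word, as handled by the assembly */
    typedef uint64_t BN_ULLONG;  /* double-word accumulator used for the sketch */

    /* rp[i] += ap[i] * w; the carry out of the last word is returned. */
    BN_ULONG bn_mul_add_word(BN_ULONG *rp, const BN_ULONG *ap, int num, BN_ULONG w)
    {
        BN_ULONG c = 0;
        while (num-- > 0) {
            BN_ULLONG t = (BN_ULLONG)*ap++ * w + *rp + c;
            *rp++ = (BN_ULONG)t;
            c = (BN_ULONG)(t >> 32);
        }
        return c;
    }

    /* rp[i] = ap[i] * w plus the carry from the previous word. */
    BN_ULONG bn_mul_word(BN_ULONG *rp, const BN_ULONG *ap, int num, BN_ULONG w)
    {
        BN_ULONG c = 0;
        while (num-- > 0) {
            BN_ULLONG t = (BN_ULLONG)*ap++ * w + c;
            *rp++ = (BN_ULONG)t;
            c = (BN_ULONG)(t >> 32);
        }
        return c;
    }

    /* rp[2i] and rp[2i+1] receive the low and high words of ap[i] squared. */
    void bn_sqr_words(BN_ULONG *rp, const BN_ULONG *ap, int num)
    {
        while (num-- > 0) {
            BN_ULLONG t = (BN_ULLONG)*ap * *ap;
            ap++;
            *rp++ = (BN_ULONG)t;
            *rp++ = (BN_ULONG)(t >> 32);
        }
    }

    /* Quotient of the 64-bit value h:l by d; the assembly aborts when the
     * quotient would not fit in 32 bits and returns (BN_ULONG)-1 for d == 0. */
    BN_ULONG bn_div64(BN_ULONG h, BN_ULONG l, BN_ULONG d)
    {
        if (d == 0)
            return (BN_ULONG)-1;
        return (BN_ULONG)((((BN_ULLONG)h << 32) | l) / d);
    }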