author     Ralf Baechle <ralf@linux-mips.org>  1995-11-14 08:00:00 +0000
committer  <ralf@linux-mips.org>                1995-11-14 08:00:00 +0000
commit     e7c2a72e2680827d6a733931273a93461c0d8d1b (patch)
tree       c9abeda78ef7504062bb2e816bcf3e3c9d680112 /arch/i386/math-emu/reg_norm.S
parent     ec6044459060a8c9ce7f64405c465d141898548c (diff)
Import of Linux/MIPS 1.3.0
Diffstat (limited to 'arch/i386/math-emu/reg_norm.S')
-rw-r--r--  arch/i386/math-emu/reg_norm.S  150
1 files changed, 150 insertions, 0 deletions
diff --git a/arch/i386/math-emu/reg_norm.S b/arch/i386/math-emu/reg_norm.S
new file mode 100644
index 000000000..9b7a9d77d
--- /dev/null
+++ b/arch/i386/math-emu/reg_norm.S
@@ -0,0 +1,150 @@
+/*---------------------------------------------------------------------------+
+ | reg_norm.S |
+ | |
+ | Copyright (C) 1992,1993,1994 |
+ | W. Metzenthen, 22 Parker St, Ormond, Vic 3163, |
+ | Australia. E-mail billm@vaxc.cc.monash.edu.au |
+ | |
+ | Normalize the value in a FPU_REG. |
+ | |
+ | Call from C as: |
+ | void normalize(FPU_REG *n) |
+ | |
+ | void normalize_nuo(FPU_REG *n) |
+ | |
+ +---------------------------------------------------------------------------*/
+
+#include "fpu_asm.h"
+
+
+.text
+
+ .align 2,144
+.globl _normalize
+
+_normalize:
+ pushl %ebp
+ movl %esp,%ebp
+ pushl %ebx
+
+ movl PARAM1,%ebx
+
+#ifdef PARANOID
+ cmpb TW_Valid,TAG(%ebx)
+ je L_ok
+
+ pushl $0x220
+ call _exception
+ addl $4,%esp
+
+L_ok:
+#endif /* PARANOID */
+
+ movl SIGH(%ebx),%edx
+ movl SIGL(%ebx),%eax
+
+ orl %edx,%edx /* ms bits */
+ js L_done /* Already normalized */
+ jnz L_shift_1 /* Shift left 1 - 31 bits */
+
+ orl %eax,%eax
+ jz L_zero /* The contents are zero */
+
+ movl %eax,%edx
+ xorl %eax,%eax
+ subl $32,EXP(%ebx) /* This can cause an underflow */
+
+/* We need to shift left by 1 - 31 bits */
+L_shift_1:
+ bsrl %edx,%ecx /* get the required shift in %ecx */
+ subl $31,%ecx
+ negl %ecx
+ shld %cl,%eax,%edx
+ shl %cl,%eax
+ subl %ecx,EXP(%ebx) /* This can cause an underflow */
+
+ movl %edx,SIGH(%ebx)
+ movl %eax,SIGL(%ebx)
+
+L_done:
+ cmpl EXP_OVER,EXP(%ebx)
+ jge L_overflow
+
+ cmpl EXP_UNDER,EXP(%ebx)
+ jle L_underflow
+
+L_exit:
+ popl %ebx
+ leave
+ ret
+
+
+L_zero:
+ movl EXP_UNDER,EXP(%ebx)
+ movb TW_Zero,TAG(%ebx)
+ jmp L_exit
+
+L_underflow:
+ push %ebx
+ call _arith_underflow
+ pop %ebx
+ jmp L_exit
+
+L_overflow:
+ push %ebx
+ call _arith_overflow
+ pop %ebx
+ jmp L_exit
+
+
+
+/* Normalise without reporting underflow or overflow */
+ .align 2,144
+.globl _normalize_nuo
+
+_normalize_nuo:
+ pushl %ebp
+ movl %esp,%ebp
+ pushl %ebx
+
+ movl PARAM1,%ebx
+
+#ifdef PARANOID
+ cmpb TW_Valid,TAG(%ebx)
+ je L_ok_nuo
+
+ pushl $0x221
+ call _exception
+ addl $4,%esp
+
+L_ok_nuo:
+#endif /* PARANOID */
+
+ movl SIGH(%ebx),%edx
+ movl SIGL(%ebx),%eax
+
+ orl %edx,%edx /* ms bits */
+ js L_exit /* Already normalized */
+ jnz L_nuo_shift_1 /* Shift left 1 - 31 bits */
+
+ orl %eax,%eax
+ jz L_zero /* The contents are zero */
+
+ movl %eax,%edx
+ xorl %eax,%eax
+ subl $32,EXP(%ebx) /* This can cause an underflow */
+
+/* We need to shift left by 1 - 31 bits */
+L_nuo_shift_1:
+ bsrl %edx,%ecx /* get the required shift in %ecx */
+ subl $31,%ecx
+ negl %ecx
+ shld %cl,%eax,%edx
+ shl %cl,%eax
+ subl %ecx,EXP(%ebx) /* This can cause an underflow */
+
+ movl %edx,SIGH(%ebx)
+ movl %eax,SIGL(%ebx)
+ jmp L_exit
+
+
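For reference, the routine above normalizes an extended-precision significand held in two 32-bit halves: if the high word is clear it first promotes the low word by 32 bits, then uses bsrl/shld to shift out the remaining leading zeros and subtracts the same count from the exponent; normalize() additionally reports over/underflow, while normalize_nuo() does not. Below is a rough, editor-supplied C sketch of that logic under a simplified register layout. The struct and field names (fpu_reg_sketch, sigh, sigl, exp) are illustrative assumptions, not the kernel's FPU_REG definition, and tag handling plus the arith_underflow/arith_overflow paths are omitted.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for FPU_REG: 64-bit significand in sigh:sigl,
 * signed exponent in exp.  Field names are assumptions for this sketch. */
struct fpu_reg_sketch {
	int32_t  exp;
	uint32_t sigl;   /* low 32 bits of the significand */
	uint32_t sigh;   /* high 32 bits of the significand */
};

static void normalize_sketch(struct fpu_reg_sketch *n)
{
	/* Top bit already set: the value is normalized (the 'js L_done' case). */
	if (n->sigh & 0x80000000u)
		return;

	/* Whole significand zero: nothing to shift (the L_zero case). */
	if (n->sigh == 0 && n->sigl == 0)
		return;

	/* High word empty: move the low word up by 32 bits, as the asm does
	 * before falling into the 1..31-bit shift. */
	if (n->sigh == 0) {
		n->sigh = n->sigl;
		n->sigl = 0;
		n->exp -= 32;    /* may underflow, as the asm comments note */
	}

	/* Shift left until bit 63 is set; this mirrors the bsrl/shld/shl
	 * sequence, which applies a shift of 31 - bsrl(sigh) in one step. */
	while (!(n->sigh & 0x80000000u)) {
		n->sigh = (n->sigh << 1) | (n->sigl >> 31);
		n->sigl <<= 1;
		n->exp--;
	}
}

int main(void)
{
	struct fpu_reg_sketch r = { .exp = 0, .sigl = 0, .sigh = 0x00001234u };
	normalize_sketch(&r);
	printf("sigh=%08x sigl=%08x exp=%d\n", r.sigh, r.sigl, r.exp);
	return 0;
}

In the assembly the shift count is computed once with bsrl and applied with a single shld/shl pair, so the exponent is adjusted by the full count in one subl rather than in a loop as in the sketch.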