summary | refs | log | tree | commit | diff | stats
path: root/arch/sparc64/kernel/etrap.S
diff options
context:
space:
mode:
Diffstat (limited to 'arch/sparc64/kernel/etrap.S')
-rw-r--r--  arch/sparc64/kernel/etrap.S  109
1 files changed, 55 insertions, 54 deletions
diff --git a/arch/sparc64/kernel/etrap.S b/arch/sparc64/kernel/etrap.S
index 7d293a88b..f936b3071 100644
--- a/arch/sparc64/kernel/etrap.S
+++ b/arch/sparc64/kernel/etrap.S
@@ -1,34 +1,39 @@
-/* $Id: etrap.S,v 1.11 1997/04/14 17:04:45 jj Exp $
+/* $Id: etrap.S,v 1.17 1997/05/18 22:52:09 davem Exp $
* etrap.S: Preparing for entry into the kernel on Sparc V9.
*
- * Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
+ * Copyright (C) 1996, 1997 David S. Miller (davem@caip.rutgers.edu)
* Copyright (C) 1997 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
*/
#include <asm/asi.h>
#include <asm/pstate.h>
#include <asm/ptrace.h>
+#include <asm/page.h>
#include <asm/spitfire.h>
#include <asm/head.h>
-/* We assume that pstate, when entering this, has AG and IE bits set, MG and IG clear */
+	/* We assume that pstate, when entering this, has AG and
+	 * IE bits set, MG and IG clear.
+	 *
+	 * We also guarantee for the caller that AG %g4 and %g5 will have
+	 * their values preserved and left in %l4 and %l5 respectively
+	 * for him (fault handling needs this).
+	 */
.text
.align 32
.globl etrap, etrap_irq
etrap:
- rdpr %pil, %g4
+ rdpr %pil, %g2
etrap_irq:
rdpr %tstate, %g1
- sllx %g4, 20, %g4
- rdpr %tpc, %g2
- or %g1, %g4, %g1
- rdpr %tnpc, %g3
+ sllx %g2, 20, %g2
+ or %g1, %g2, %g1
/* What happens more often? etrap when already in priv or from userland? */
andcc %g1, TSTATE_PRIV, %g0
bne,a,pn %xcc, 1f
- sub %sp, REGWIN_SZ + TRACEREG_SZ - STACK_BIAS, %g5
+ sub %sp, REGWIN_SZ + TRACEREG_SZ - STACK_BIAS, %g2
/* Just when going from userland to privileged mode,
* we have to change this stuff.
@@ -38,41 +43,48 @@ etrap_irq:
* trap level until PRIMARY_CONTEXT is set to zero, else
* we fall out of NUCLEUS too soon and crash hard.
*/
- rdpr %wstate, %g5
- mov PRIMARY_CONTEXT, %g7
- ldxa [%g7] ASI_DMMU, %g4
- mov SECONDARY_CONTEXT, %g6
- stxa %g0, [%g7] ASI_DMMU
- stxa %g4, [%g6] ASI_DMMU
- wrpr %g0, 0x0, %tl
+ mov PRIMARY_CONTEXT, %g1
+ ldxa [%g1] ASI_DMMU, %g2
+ stxa %g0, [%g1] ASI_DMMU
- sll %g5, 3, %g5
- sethi %uhi(KERNBASE), %g4
- or %g4, %ulo(KERNBASE), %g4
- sethi %hi(current_set), %g6
- or %g6, %lo(current_set), %g6
- sllx %g4, 32, %g4
- wrpr %g5, %wstate
- rdpr %canrestore, %g5
- ldx [%g6 + %g4], %g6
-#ifdef __SMP__
-/* FIXME: Fix the above insn for SMP */
-#endif
- wrpr %g0, 0, %canrestore
- wrpr %g5, 0, %otherwin
- ba,pt %xcc, 2f
- ldx [%g6 + AOFF_task_saved_kernel_stack], %g5
+ mov SECONDARY_CONTEXT, %g1
+ stxa %g2, [%g1] ASI_DMMU
+
+ rd %pic, %g1
+ sethi %hi((PAGE_SIZE<<1)-TRACEREG_SZ-REGWIN_SZ), %g2
+ or %g2, %lo((PAGE_SIZE<<1)-TRACEREG_SZ-REGWIN_SZ), %g2
+ add %g1, %g2, %g2
+ rdpr %tstate, %g1
1:
+ stx %g1, [%g2 + REGWIN_SZ + PT_V9_TSTATE]
+ rdpr %tpc, %g1
+ rdpr %tnpc, %g3
+ stx %g1, [%g2 + REGWIN_SZ + PT_V9_TPC]
+ rd %y, %g1
+ stx %g3, [%g2 + REGWIN_SZ + PT_V9_TNPC]
+ stx %g1, [%g2 + REGWIN_SZ + PT_V9_Y]
+
wrpr %g0, 0x0, %tl
-2:
- rd %y, %g4
- stx %g1, [%g5 + REGWIN_SZ + PT_V9_TSTATE]
- stx %g2, [%g5 + REGWIN_SZ + PT_V9_TPC]
- stx %g3, [%g5 + REGWIN_SZ + PT_V9_TNPC]
- stx %g4, [%g5 + REGWIN_SZ + PT_V9_Y]
rdpr %pstate, %g1
- save %g5, -STACK_BIAS, %sp
+ save %g2, -STACK_BIAS, %sp
+
+	/* Must guarantee that here the andcc of TSTATE_PRIV at the top is
+	 * still valid in the %ccr register.  Don't show this trick to your
+	 * mom. -DaveM
+	 */
+ bne,pn %xcc, 1f
+ rdpr %canrestore, %g3
+ wrpr %g0, 0, %canrestore
+ wrpr %g3, 0, %otherwin
+
+ rdpr %wstate, %g6
+ sll %g6, 3, %g6
+ wrpr %g6, %wstate
+
+1:
mov %g1, %l1
+ mov %g4, %l4
+ mov %g5, %l5
mov %g7, %l2
wrpr %l1, PSTATE_AG, %pstate
stx %g1, [%sp + STACK_BIAS + REGWIN_SZ + PT_V9_G1]
@@ -91,24 +103,13 @@ etrap_irq:
stx %i6, [%sp + STACK_BIAS + REGWIN_SZ + PT_V9_I6]
stx %i7, [%sp + STACK_BIAS + REGWIN_SZ + PT_V9_I7]
wrpr %l1, (PSTATE_IE | PSTATE_AG), %pstate
- sethi %uhi(KERNBASE), %g4
- or %g4, %ulo(KERNBASE), %g4
- sethi %hi(current_set), %g6
- or %g6, %lo(current_set), %g6
- sllx %g4, 32, %g4
+ srlx %sp, 43, %g4
+ rd %pic, %g6
jmpl %l2 + 0x4, %g0
- ldx [%g6 + %g4], %g6
-#ifdef __SMP__
-/* FIXME: Fix the above insn for SMP */
-#endif
+ sllx %g4, 43, %g4
.globl etraptl1
etraptl1:
- rdpr %tl, %g4
rdpr %tstate, %g1
- sub %g4, 1, %g4
- rdpr %tpc, %g2
- rdpr %tnpc, %g3
- wrpr %g4, 0x0, %tl
ba,pt %xcc, 1b
- sub %sp, REGWIN_SZ + TRACEREG_SZ - STACK_BIAS, %g5
+ sub %sp, REGWIN_SZ + TRACEREG_SZ - STACK_BIAS, %g2