/* $Id: locks.S,v 1.5 1997/07/31 05:28:16 davem Exp $
* locks.S: SMP low-level lock primitives on Sparc64.
*
* Copyright (C) 1996 David S. Miller (davem@caip.rutgers.edu)
*/
#include <asm/asm_offsets.h>
#include <asm/ptrace.h>
#include <asm/smp.h>
.text
.align 32
/*
 * ___lk_busy_spin: out-of-line wait loop for ___lock_kernel.
 *
 * Spins reading the kernel-lock byte at [%g1 + 0] (read-only, so the
 * cache line is not bounced by atomic writes) until it reads zero,
 * then branches back to the "1:" retest point inside ___lock_kernel
 * with a fresh atomic ldstub attempt in the delay slot.
 *
 * In:      %g1 = address of kernel lock (byte 0 = lock byte,
 *                byte 1 = owner cpu id — see ___lock_kernel)
 * Scratch: %g2 = observed lock byte / ldstub result
 */
___lk_busy_spin:
ldub [%g1 + 0], %g2		! non-atomic peek at the lock byte
brnz,pt %g2, ___lk_busy_spin	! still held -> keep spinning
membar #LoadLoad		! (delay slot) order the polling loads
b,pt %xcc, 1f			! looks free: go retest in ___lock_kernel
ldstub [%g1 + 0], %g2		! (delay slot) atomic try: %g2 = old byte, byte set to 0xff
/*
 * ___lock_kernel: acquire the (recursive) kernel lock.
 *
 * In:      %g1 = address of kernel lock (byte 0 = lock byte,
 *                byte 1 = owner cpu id)
 *          %g2 = current->lock_depth (loaded by the caller)
 *          %g6 = current task struct
 *          %g5 = caller's original %o7 (restored on return — the
 *                calling stub is expected to have saved it there)
 * Scratch: %g2, %g3 (saved %pil)
 *
 * lock_depth counts recursion negatively: 0 = not held, -1 = held
 * once, -2 = held twice, etc.  If the lock is already held by this
 * task we only store the decremented depth; otherwise we raise PIL,
 * spin for the lock byte, and record ownership.
 */
.globl ___lock_kernel
___lock_kernel:
addcc %g2, -1, %g2		! new depth; carry set iff old depth != 0
rdpr %pil, %g3			! remember current interrupt level
bcs,a,pn %icc, 9f		! already held by us -> recursion fast path
stw %g2, [%g6 + AOFF_task_lock_depth]	! (annulled unless branch taken)
wrpr %g0, 15, %pil		! first acquire: block interrupts (PIL 15)
ldstub [%g1 + 0], %g2		! atomic test-and-set of the lock byte
1: brnz,pn %g2, ___lk_busy_spin	! was already set -> spin out of line
membar #StoreLoad | #StoreStore	! (delay slot) acquire barrier after ldstub
lduw [%g6 + AOFF_task_processor], %g2
stb %g2, [%g1 + 1]		! record our cpu id as the owner
2: mov -1, %g2			! NOTE(review): label 2 looks unreferenced here —
				! presumably an entry point used elsewhere; confirm
stw %g2, [%g6 + AOFF_task_lock_depth]	! depth = -1 (held once)
wrpr %g3, 0, %pil		! restore caller's interrupt level
9: jmpl %o7 + 0x8, %g0		! return
mov %g5, %o7			! (delay slot) restore caller's %o7
/*
 * ___lock_reacquire_kernel: take the kernel lock back at a given depth
 * (used when a task that released the lock must restore its previous
 * recursion count, e.g. around a schedule point — TODO confirm caller).
 *
 * In:      %g1 = address of kernel lock (byte 0 = lock byte,
 *                byte 1 = owner cpu id)
 *          %g2 = lock_depth value to reinstate
 *          %g6 = current task struct
 *          %g5 = caller's original %o7 (restored on return)
 * Scratch: %g2, %g3 (saved %pil)
 *
 * Unlike ___lock_kernel there is no recursion fast path: we always
 * raise PIL, store the requested depth, and spin until the lock byte
 * is ours, then record ownership and restore PIL.
 */
.globl ___lock_reacquire_kernel
___lock_reacquire_kernel:
rdpr %pil, %g3			! remember current interrupt level
wrpr %g0, 15, %pil		! block interrupts while acquiring
stw %g2, [%g6 + AOFF_task_lock_depth]	! reinstate the saved depth
ldstub [%g1 + 0], %g2		! atomic test-and-set of the lock byte
1: brz,pt %g2, 3f		! got it first try -> done
membar #StoreLoad | #StoreStore	! (delay slot) acquire barrier after ldstub
2: ldub [%g1 + 0], %g2		! read-only spin until it looks free
brnz,pt %g2, 2b
membar #LoadLoad		! (delay slot) order the polling loads
b,pt %xcc, 1b			! retry the atomic grab
ldstub [%g1 + 0], %g2		! (delay slot) atomic try again
3: lduw [%g6 + AOFF_task_processor], %g2
stb %g2, [%g1 + 1]		! record our cpu id as the owner
wrpr %g3, 0, %pil		! restore caller's interrupt level
jmpl %o7 + 0x8, %g0		! return
mov %g5, %o7			! (delay slot) restore caller's %o7
/* Owner-byte value meaning "no cpu holds the lock". */
#undef NO_PROC_ID
#define NO_PROC_ID 0xff
/*
 * ___unlock_kernel: drop one level of the recursive kernel lock.
 *
 * In:      %g1 = address of kernel lock (byte 0 = lock byte,
 *                byte 1 = owner cpu id)
 *          %g2 = current->lock_depth (negative while held)
 *          %g6 = current task struct
 *          %g5 = caller's original %o7 (restored on return)
 * Scratch: %g2, %g3 (saved %pil)
 *
 * Incrementing the (negative) depth toward zero: if it is still
 * nonzero this was a nested unlock and we only store the new depth;
 * if it reaches zero we clear the owner byte, issue a release
 * barrier, and free the lock byte.
 *
 * Fix: the PIL raise below used the two-operand alias "wrpr 15, %pil";
 * normalized to the explicit "wrpr %g0, 15, %pil" (identical encoding,
 * rs1 = %g0 implied) so all three routines in this file match.
 */
.globl ___unlock_kernel
___unlock_kernel:
addcc %g2, 1, %g2		! new depth; zero means final release
rdpr %pil, %g3			! remember current interrupt level
bne,a,pn %icc, 1f		! still nested -> just store the depth
stw %g2, [%g6 + AOFF_task_lock_depth]	! (annulled unless branch taken)
wrpr %g0, 15, %pil		! final release: block interrupts
mov NO_PROC_ID, %g2
stb %g2, [%g1 + 1]		! owner byte = NO_PROC_ID (nobody)
membar #StoreStore | #LoadStore	! release barrier before freeing the lock
stb %g0, [%g1 + 0]		! clear the lock byte
stw %g0, [%g6 + AOFF_task_lock_depth]	! depth = 0 (not held)
wrpr %g3, 0, %pil		! restore caller's interrupt level
1: jmpl %o7 + 0x8, %g0		! return
mov %g5, %o7			! (delay slot) restore caller's %o7