#ifndef _SPARC64_SEMAPHORE_HELPER_H
#define _SPARC64_SEMAPHORE_HELPER_H
/*
* SMP- and interrupt-safe semaphore helper functions, sparc64 version.
*
* (C) Copyright 1999 David S. Miller (davem@redhat.com)
* (C) Copyright 1999 Jakub Jelinek (jj@ultra.linux.cz)
*/
/*
 * wake_one_more() - record one more pending wakeup on the semaphore by
 * atomically incrementing sem->waking.
 *
 * The expansion deliberately has no trailing semicolon, so the macro
 * behaves like an ordinary function-call expression: the caller supplies
 * the ';', and constructs such as "if (x) wake_one_more(s); else ..."
 * parse correctly.  (With the semicolon baked in, the expansion became
 * two statements and broke if/else bodies.)
 */
#define wake_one_more(__sem) atomic_inc(&((__sem)->waking))
/*
 * waking_non_zero() - try to consume one pending wakeup.
 *
 * Atomically: if sem->waking > 0, decrement it and yield 1; otherwise
 * leave it untouched and yield 0.  Implemented as a load / compare-and-
 * swap retry loop: "cas" only stores the decremented value if ->waking
 * still equals the value loaded by "ldsw"; on a lost race the loop
 * restarts at 1:.  %g5/%g7 and the condition codes are scratch, as
 * declared in the clobber list.  "memory" keeps the compiler from
 * caching ->waking across the asm.
 */
#define waking_non_zero(__sem) \
({ int __ret; \
	__asm__ __volatile__( \
"1:	ldsw	[%1], %%g5\n\t" \
	"brlez,pt %%g5, 2f\n\t" \
	" mov	0, %0\n\t" \
	"sub	%%g5, 1, %%g7\n\t" \
	"cas	[%1], %%g5, %%g7\n\t" \
	"cmp	%%g5, %%g7\n\t" \
	"bne,pn	%%icc, 1b\n\t" \
	" mov	1, %0\n" \
"2:"	: "=&r" (__ret) \
	: "r" (&((__sem)->waking)) \
	: "g5", "g7", "cc", "memory"); \
	__ret; \
})
#define waking_non_zero_interruptible(__sem, __tsk) \
({ int __ret; \
__asm__ __volatile__( \
"1: ldsw [%1], %%g5\n\t" \
"brlez,pt %%g5, 2f\n\t" \
" mov 0, %0\n\t" \
"sub %%g5, 1, %%g7\n\t" \
"cas [%1], %%g5, %%g7\n\t" \
"cmp %%g5, %%g7\n\t" \
"bne,pn %%icc, 1b\n\t" \
" mov 1, %0\n" \
"2:" : "=&r" (__ret) \
: "r" (&((__sem)->waking)) \
: "g5", "g7", "cc", "memory"); \
if(__ret == 0 && signal_pending(__tsk)) { \
atomic_inc(&((__sem)->count)); \
__ret = -EINTR; \
} \
__ret; \
})
#define waking_non_zero_trylock(__sem) \
({ int __ret; \
__asm__ __volatile__( \
"1: ldsw [%1], %%g5\n\t" \
"brlez,pt %%g5, 2f\n\t" \
" mov 1, %0\n\t" \
"sub %%g5, 1, %%g7\n\t" \
"cas [%1], %%g5, %%g7\n\t" \
"cmp %%g5, %%g7\n\t" \
"bne,pn %%icc, 1b\n\t" \
" mov 0, %0\n" \
"2:" : "=&r" (__ret) \
: "r" (&((__sem)->waking)) \
: "g5", "g7", "cc", "memory"); \
if(__ret == 1) \
atomic_inc(&((__sem)->count)); \
__ret; \
})
#endif /* !(_SPARC64_SEMAPHORE_HELPER_H) */