1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
|
/*
* linux/include/asm-arm/proc-armo/locks.h
*
* Copyright (C) 2000 Russell King
*
* Interrupt safe locking assembler.
*/
#ifndef __ASM_PROC_LOCKS_H
#define __ASM_PROC_LOCKS_H
/*
 * __down_op(ptr, fail) - atomically decrement the semaphore count at *ptr.
 *
 * ARM26 interrupt-safe sequence: the PSR lives in the top/bottom bits of
 * the PC, so "mov r0, pc" captures the current PSR, "orr lr, r0,
 * #0x08000000" sets the I (IRQ-disable) bit, and "teqp lr, #0" writes it
 * back, disabling interrupts.  The original mode/interrupt bits are kept
 * in r0 (masked with 0x0c000003) and restored with the second teqp.
 * If the decremented count went negative, the N flag is folded into the
 * saved PSR (orrmi) so the condition survives the PSR restore, and the
 * "fail" routine is called with r0 = ptr to block the caller.
 * Clobbers r0, lr and the condition codes.
 */
#define __down_op(ptr,fail)			\
	({					\
	__asm__ __volatile__ (			\
	"@ atomic down operation\n"		\
"	mov	r0, pc\n"			\
"	orr	lr, r0, #0x08000000\n"		\
"	teqp	lr, #0\n"			\
"	ldr	lr, [%0]\n"			\
"	and	r0, r0, #0x0c000003\n"		\
"	subs	lr, lr, #1\n"			\
"	str	lr, [%0]\n"			\
"	orrmi	r0, r0, #0x80000000	@ set N\n" \
"	teqp	r0, #0\n"			\
"	movmi	r0, %0\n"			\
"	blmi	" SYMBOL_NAME_STR(fail)		\
	:					\
	: "r" (ptr)				\
	: "r0", "lr", "cc");			\
	})
/*
 * __down_op_ret(ptr, fail) - as __down_op(), but yields a result value.
 *
 * Same ARM26 IRQ-disable dance as __down_op (PSR bits carried in the PC,
 * toggled via teqp).  On contention (count went negative) the "fail"
 * routine is called with r0 = ptr and its return value in r0 becomes the
 * result; on the fast path r0 is forced to 0 (movpl).  The result is
 * copied into %0 and returned as the value of the statement expression.
 * Clobbers r0, lr and the condition codes.
 */
#define __down_op_ret(ptr,fail)			\
	({					\
	unsigned int result;			\
	__asm__ __volatile__ (			\
	"	@ down_op_ret\n"		\
"	mov	r0, pc\n"			\
"	orr	lr, r0, #0x08000000\n"		\
"	teqp	lr, #0\n"			\
"	ldr	lr, [%1]\n"			\
"	and	r0, r0, #0x0c000003\n"		\
"	subs	lr, lr, #1\n"			\
"	str	lr, [%1]\n"			\
"	orrmi	r0, r0, #0x80000000	@ set N\n" \
"	teqp	r0, #0\n"			\
"	movmi	r0, %1\n"			\
"	movpl	r0, #0\n"			\
"	blmi	" SYMBOL_NAME_STR(fail) "\n"	\
"	mov	%0, r0"				\
	: "=&r" (result)			\
	: "r" (ptr)				\
	: "r0", "lr", "cc");			\
	result;					\
	})
/*
 * __up_op(ptr, wake) - atomically increment the semaphore count at *ptr.
 *
 * Mirror image of __down_op: interrupts are disabled by setting the I bit
 * in the PSR (held in the PC on ARM26) via teqp, the count is incremented,
 * and the original PSR bits saved in r0 are restored.  If the incremented
 * count is still <= 0 (orrle: sleepers remain), the N flag is folded into
 * the saved PSR so the condition survives the restore, and the "wake"
 * routine is called with r0 = ptr to wake a waiter.
 * Clobbers r0, lr and the condition codes.
 */
#define __up_op(ptr,wake)			\
	({					\
	__asm__ __volatile__ (			\
	"@ up_op\n"				\
"	mov	r0, pc\n"			\
"	orr	lr, r0, #0x08000000\n"		\
"	teqp	lr, #0\n"			\
"	ldr	lr, [%0]\n"			\
"	and	r0, r0, #0x0c000003\n"		\
"	adds	lr, lr, #1\n"			\
"	str	lr, [%0]\n"			\
"	orrle	r0, r0, #0x80000000	@ set N\n" \
"	teqp	r0, #0\n"			\
"	movmi	r0, %0\n"			\
"	blmi	" SYMBOL_NAME_STR(wake)		\
	:					\
	: "r" (ptr)				\
	: "r0", "lr", "cc");			\
	})
#endif
|