summaryrefslogtreecommitdiffstats
path: root/include/asm-sparc64/atomic.h
blob: 12baf0222d97c6bc5d47a87d5a2f0debf606bd31 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
/* $Id: atomic.h,v 1.15 1997/07/03 09:18:09 davem Exp $
 * atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997 David S. Miller (davem@caip.rutgers.edu)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

/* Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
#define __atomic_fool_gcc(x) ((struct { int a[100]; } *)x)

/* Atomic counter type.  All read-modify-write operations on the
 * counter go through the cas-based helpers below; atomic_read()
 * and atomic_set() are plain (non-RMW) accesses.
 */
typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)	{ (i) }		/* static initializer: atomic_t a = ATOMIC_INIT(0); */

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = i)

/* Atomically add i to v->counter; no return value.
 *
 * cas retry loop: load the counter into %g1, form old+i in %g2,
 * then cas compares memory with %g1 and, on match, stores %g2 while
 * leaving the previous memory value in %g2.  "sub %g1,%g2" is zero
 * only if cas saw the value we loaded; otherwise another CPU raced
 * us and brnz (predicted not-taken) retries from the load.
 * NOTE(review): no membar is issued here — presumably callers that
 * need ordering add barriers themselves; confirm against users.
 */
extern __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__("
1:	lduw		[%1], %%g1
	add		%%g1, %0, %%g2
	cas		[%1], %%g1, %%g2
	sub		%%g1, %%g2, %%g1
	brnz,pn		%%g1, 1b
	 nop"
	: /* No outputs */
	: "HIr" (i), "r" (__atomic_fool_gcc(v))	/* %0 = i, %1 = counter address */
	: "g1", "g2");
}

/* Atomically subtract i from v->counter; no return value.
 * Same cas retry loop as atomic_add(), with "sub" forming the
 * candidate new value (old - i) in %g2 before the cas.
 */
extern __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__("
1:	lduw		[%1], %%g1
	sub		%%g1, %0, %%g2
	cas		[%1], %%g1, %%g2
	sub		%%g1, %%g2, %%g1
	brnz,pn		%%g1, 1b
	 nop"
	: /* No outputs */
	: "HIr" (i), "r" (__atomic_fool_gcc(v))	/* %0 = i, %1 = counter address */
	: "g1", "g2");
}

/* Same as above, but return the result value. */

/* Atomically add i to v->counter and return the NEW (post-add) value.
 * After a successful cas, %g2 holds the old counter; the delay-slot
 * "add %g2, i, %0" recomputes old + i into the output.  Despite the
 * local's name, "oldval" therefore ends up holding the new value.
 */
extern __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long oldval;
	__asm__ __volatile__("
1:	lduw		[%2], %%g1
	add		%%g1, %1, %%g2
	cas		[%2], %%g1, %%g2
	sub		%%g1, %%g2, %%g1
	brnz,pn		%%g1, 1b
	 add		%%g2, %1, %0"
	: "=&r" (oldval)			/* %0 = returned value (earlyclobber) */
	: "HIr" (i), "r" (__atomic_fool_gcc(v))	/* %1 = i, %2 = counter address */
	: "g1", "g2");
	return (int)oldval;
}

/* Atomically subtract i from v->counter and return the NEW value.
 * Mirror of atomic_add_return(): the delay-slot "sub %g2, i, %0"
 * recomputes old - i from the old value left in %g2 by the cas.
 */
extern __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long oldval;
	__asm__ __volatile__("
1:	lduw		[%2], %%g1
	sub		%%g1, %1, %%g2
	cas		[%2], %%g1, %%g2
	sub		%%g1, %%g2, %%g1
	brnz,pn		%%g1, 1b
	 sub		%%g2, %1, %0"
	: "=&r" (oldval)			/* %0 = returned value (earlyclobber) */
	: "HIr" (i), "r" (__atomic_fool_gcc(v))	/* %1 = i, %2 = counter address */
	: "g1", "g2");
	return (int)oldval;
}

/* Convenience wrappers built on the primitives above. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))	/* decrement, return new value */
#define atomic_inc_return(v) atomic_add_return(1,(v))	/* increment, return new value */

/* True iff the counter is zero after the operation. */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))	/* increment, no return value */
#define atomic_dec(v) atomic_sub(1,(v))	/* decrement, no return value */

#endif /* !(__ARCH_SPARC64_ATOMIC__) */