sparc: prepare lib/ for unification
arch/sparc64/lib/atomic.S
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */

#include <asm/asi.h>
#include <asm/backoff.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
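	/* Each routine below is an instance of the same compare-and-swap
	 * retry loop.  Roughly, in C-like pseudocode (an illustrative
	 * sketch, not the kernel's own source):
	 *
	 *	do {
	 *		old = *ptr;
	 *		new = old + inc;		(or old - dec)
	 *	} while (cas(ptr, old, new) != old);
	 *	return new;				(_ret variants only)
	 */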
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
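	/* BACKOFF_SETUP and BACKOFF_SPIN come from asm/backoff.h:
	 * roughly, SETUP initializes a backoff counter in %o2, and
	 * SPIN (at 2: below) waits out the current interval, grows
	 * it, and branches back to 1b to retry.  The exact expansion
	 * depends on the kernel configuration.
	 */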
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
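	/* cas compares the word at [%o1] with %g1 and, if they match,
	 * stores %g7; either way %g7 is left holding the value that
	 * was in memory.  The update succeeded iff %g7 still equals
	 * %g1 afterwards.
	 */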
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add, .-atomic_add

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub, .-atomic_sub

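	/* The _ret variants return the updated value.  Note the branch
	 * delay slot below: the add (or sub) executes whether or not
	 * the branch to 2f is taken.  On success it turns the old value
	 * that cas left in %g7 into the new value; on failure %g7 is
	 * reloaded at 1b anyway, so the clobber is harmless.
	 */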
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 add	%g7, %o0, %g7
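	/* atomic_t is a 32-bit int, so the result is returned
	 * sign-extended to 64 bits; sra by 0 does exactly that,
	 * where a plain mov would not.
	 */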
	sra	%g7, 0, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_add_ret, .-atomic_add_ret

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 2f
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic_sub_ret, .-atomic_sub_ret

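	/* The atomic64_* routines mirror the 32-bit ones, using ldx
	 * and casx on a 64-bit counter and testing the 64-bit
	 * condition codes (%xcc) instead of %icc.
	 */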
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 nop
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 add	%g7, %o0, %g7
	mov	%g7, %o0
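	/* A plain mov suffices here: the value is already 64 bits,
	 * so no sign extension is needed, unlike the sra in the
	 * 32-bit atomic_add_ret above.
	 */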
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 2f
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	retl
	 nop
2:	BACKOFF_SPIN(%o2, %o3, 1b)
	.size	atomic64_sub_ret, .-atomic64_sub_ret