/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_CUTILS_ATOMIC_ARM_H
#define ANDROID_CUTILS_ATOMIC_ARM_H

#include <stdint.h>
#include <machine/cpu-features.h>

/* Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point, but emits no machine code. */
extern inline void android_compiler_barrier(void)
{
    __asm__ __volatile__ ("" : : : "memory");
}

/* Full memory barrier.  On uniprocessor builds (ANDROID_SMP == 0) a
 * compiler barrier is sufficient.  On SMP we use the DMB instruction
 * where the toolchain advertises it, fall back to the ARMv6 CP15
 * data-memory-barrier operation, and as a last resort call the kernel's
 * user helper at the fixed address 0xffff0fa0. */
#if ANDROID_SMP == 0
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
#elif defined(__ARM_HAVE_DMB)
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("dmb" : : : "memory");
}
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mcr p15, 0, %0, c7, c10, 5"
                          : : "r" (0) : "memory");
}
#else
extern inline void android_memory_barrier(void)
{
    typedef void (kuser_memory_barrier)(void);
    (*(kuser_memory_barrier *)0xffff0fa0)();
}
#endif

/* Loads and stores with explicit barrier placement.  An acquire
 * operation keeps later accesses from being reordered before it; a
 * release operation keeps earlier accesses from being reordered after
 * it. */
extern inline int32_t android_atomic_acquire_load(volatile const int32_t *ptr)
{
    int32_t value = *ptr;
    android_memory_barrier();
    return value;
}

extern inline int32_t android_atomic_release_load(volatile const int32_t *ptr)
{
    android_memory_barrier();
    return *ptr;
}

extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;
    android_memory_barrier();
}

extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_memory_barrier();
    *ptr = value;
}

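/*
 * Illustrative usage sketch (not part of the original header): a
 * single-writer flag/payload hand-off built on the primitives above.
 * The names "publish" and "try_consume" and the globals are
 * hypothetical.
 */
#if 0   /* example only */
static int32_t g_payload;
static volatile int32_t g_ready;

static void publish(int32_t v)
{
    g_payload = v;
    /* Release store: the payload write is visible before the flag. */
    android_atomic_release_store(1, &g_ready);
}

static int try_consume(int32_t *out)
{
    /* Acquire load: if the flag is seen, the payload is seen too. */
    if (android_atomic_acquire_load(&g_ready) != 0) {
        *out = g_payload;
        return 1;
    }
    return 0;
}
#endif
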
/* Compare-and-swap: if *ptr == old_value, atomically store new_value.
 * Returns 0 on success, nonzero on failure.  Thumb builds use an
 * out-of-line implementation; the others are inlined. */
#if defined(__thumb__)
extern int android_atomic_cas(int32_t old_value, int32_t new_value,
                              volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "mov %1, #0\n"
                              "teq %0, %4\n"
                              "strexeq %1, %5, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m"(*ptr)
                              : "r" (ptr), "Ir" (old_value), "r" (new_value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev != old_value;
}
#else
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    /* Fall back to the kernel's cmpxchg user helper at 0xffff0fc0,
     * which returns 0 on success and nonzero on failure. */
    typedef int (kuser_cmpxchg)(int32_t, int32_t, volatile int32_t *);
    int32_t prev, status;
    prev = *ptr;
    do {
        status = (*(kuser_cmpxchg *)0xffff0fc0)(old_value, new_value, ptr);
        if (__builtin_expect(status == 0, 1))
            return 0;
        prev = *ptr;
    } while (prev == old_value);
    return 1;
}
#endif

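/*
 * Illustrative usage sketch (not part of the original header): the
 * canonical CAS retry loop, here computing an atomic maximum.
 * "atomic_max" is a hypothetical helper, not an android_atomic_* API.
 */
#if 0   /* example only */
static int32_t atomic_max(int32_t value, volatile int32_t *ptr)
{
    int32_t prev;
    do {
        prev = *ptr;
        if (prev >= value)
            return prev;   /* already at least 'value'; nothing to do */
        /* Retry if another thread changed *ptr between the read and
         * the CAS (android_atomic_cas() returns 0 on success). */
    } while (android_atomic_cas(prev, value, ptr) != 0);
    return prev;
}
#endif
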
/* Compare-and-swap with acquire/release barrier placement: the barrier
 * follows the CAS for acquire semantics and precedes it for release
 * semantics. */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int status = android_atomic_cas(old_value, new_value, ptr);
    android_memory_barrier();
    return status;
}

extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    android_memory_barrier();
    return android_atomic_cas(old_value, new_value, ptr);
}

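/*
 * Illustrative usage sketch (not part of the original header): a minimal
 * spinlock built from the acquire/release CAS and store primitives.
 * "spin_lock" and "spin_unlock" are hypothetical names.
 */
#if 0   /* example only */
static void spin_lock(volatile int32_t *lock)
{
    /* Acquire semantics: critical-section accesses cannot be reordered
     * before the lock is taken. */
    while (android_atomic_acquire_cas(0, 1, lock) != 0) {
        /* spin until the lock word transitions 0 -> 1 */
    }
}

static void spin_unlock(volatile int32_t *lock)
{
    /* Release semantics: critical-section writes become visible before
     * the lock word is cleared. */
    android_atomic_release_store(0, lock);
}
#endif
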
/* Atomically store new_value into *ptr and return the previous value,
 * with a barrier after the exchange. */
#if defined(__thumb__)
extern int32_t android_atomic_swap(int32_t new_value,
                                   volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    int32_t prev, status;
    do {
        __asm__ __volatile__ ("ldrex %0, [%3]\n"
                              "strex %1, %4, [%3]"
                              : "=&r" (prev), "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "r" (new_value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    android_memory_barrier();
    return prev;
}
#else
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    /* Legacy SWP instruction for cores without LDREX/STREX. */
    int32_t prev;
    __asm__ __volatile__ ("swp %0, %2, [%3]"
                          : "=&r" (prev), "+m" (*ptr)
                          : "r" (new_value), "r" (ptr)
                          : "cc");
    android_memory_barrier();
    return prev;
}
#endif

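/*
 * Illustrative usage sketch (not part of the original header): swap as a
 * test-and-set, claiming a single-use token.  "try_claim" is a
 * hypothetical name.
 */
#if 0   /* example only */
static int try_claim(volatile int32_t *token)
{
    /* Atomically take the token; exactly one caller sees the old
     * nonzero value. */
    return android_atomic_swap(0, token) != 0;
}
#endif
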
/* Atomically add 'increment' to *ptr and return the value *ptr held
 * before the addition (fetch-and-add). */
#if defined(__thumb__)
extern int32_t android_atomic_add(int32_t increment,
                                  volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "add %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (increment)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    /* Portable fallback: CAS retry loop. */
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev + increment, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

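/*
 * Illustrative usage sketch (not part of the original header): because
 * android_atomic_add() returns the value before the addition, unique-ID
 * generation is a one-liner.  "next_id" and the counter are
 * hypothetical.
 */
#if 0   /* example only */
static volatile int32_t g_id_counter;

static int32_t next_id(void)
{
    return android_atomic_add(1, &g_id_counter);
}
#endif
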
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}

extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}

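/*
 * Illustrative usage sketch (not part of the original header): reference
 * counting with the inc/dec helpers.  Both return the *previous* value,
 * so the last reference is dropped when android_atomic_dec() returns 1.
 * The "obj" type and function names are hypothetical.
 */
#if 0   /* example only */
struct obj {
    volatile int32_t refcount;   /* initialized to 1 by the creator */
};

static void obj_ref(struct obj *o)
{
    android_atomic_inc(&o->refcount);
}

static void obj_unref(struct obj *o, void (*destroy)(struct obj *))
{
    if (android_atomic_dec(&o->refcount) == 1)
        destroy(o);   /* we held the last reference */
}
#endif
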
/* Atomically AND 'value' into *ptr and return the previous contents. */
#if defined(__thumb__)
extern int32_t android_atomic_and(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "and %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_and(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev & value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

/* Atomically OR 'value' into *ptr and return the previous contents. */
#if defined(__thumb__)
extern int32_t android_atomic_or(int32_t value, volatile int32_t *ptr);
#elif defined(__ARM_HAVE_LDREX_STREX)
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, tmp, status;
    android_memory_barrier();
    do {
        __asm__ __volatile__ ("ldrex %0, [%4]\n"
                              "orr %1, %0, %5\n"
                              "strex %2, %1, [%4]"
                              : "=&r" (prev), "=&r" (tmp),
                                "=&r" (status), "+m" (*ptr)
                              : "r" (ptr), "Ir" (value)
                              : "cc");
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#else
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t prev, status;
    android_memory_barrier();
    do {
        prev = *ptr;
        status = android_atomic_cas(prev, prev | value, ptr);
    } while (__builtin_expect(status != 0, 0));
    return prev;
}
#endif

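/*
 * Illustrative usage sketch (not part of the original header): driving a
 * bit-flag word with the atomic AND/OR primitives.  FLAG_DIRTY and the
 * function names are hypothetical.
 */
#if 0   /* example only */
#define FLAG_DIRTY 0x1

static void mark_dirty(volatile int32_t *flags)
{
    android_atomic_or(FLAG_DIRTY, flags);
}

static int test_and_clear_dirty(volatile int32_t *flags)
{
    /* android_atomic_and() returns the previous value, so we can tell
     * whether the bit was set before we cleared it. */
    return (android_atomic_and(~FLAG_DIRTY, flags) & FLAG_DIRTY) != 0;
}
#endif
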
#endif /* ANDROID_CUTILS_ATOMIC_ARM_H */