/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ANDROID_CUTILS_ATOMIC_X86_H
18#define ANDROID_CUTILS_ATOMIC_X86_H
19
20#include <stdint.h>
21
/*
 * Compiler-only barrier: prevents the compiler from reordering memory
 * accesses across this point.  The empty asm template emits no
 * instructions; the "memory" clobber alone forces the ordering.
 */
extern inline void android_compiler_barrier(void) {
    __asm__ __volatile__ ("" ::: "memory");
}
26
#if ANDROID_SMP == 0
/*
 * Uniprocessor build: threads can only interleave, never run in parallel,
 * so suppressing compiler reordering is sufficient — no fence instruction
 * is needed.
 */
extern inline void android_memory_barrier(void)
{
    android_compiler_barrier();
}
#else
/*
 * SMP build: issue a full hardware fence (mfence) so that all prior loads
 * and stores are globally visible before any later ones; the "memory"
 * clobber additionally stops compiler reordering.
 */
extern inline void android_memory_barrier(void)
{
    __asm__ __volatile__ ("mfence" : : : "memory");
}
#endif
38
/*
 * Load *ptr with acquire semantics: no later memory access may be moved
 * before this load.  On x86 an ordinary load already provides this at the
 * hardware level, so only compiler reordering must be suppressed.
 */
extern inline int32_t android_atomic_acquire_load(volatile int32_t *ptr)
{
    int32_t result = *ptr;
    android_compiler_barrier();
    return result;
}
44
/*
 * Load *ptr after a full memory barrier, so the load cannot be satisfied
 * before all earlier accesses have completed.
 */
extern inline int32_t android_atomic_release_load(volatile int32_t *ptr)
{
    android_memory_barrier();
    int32_t result = *ptr;
    return result;
}
49
/*
 * Store value to *ptr, then issue a full barrier so the store is globally
 * visible before any later memory access.
 */
extern inline void android_atomic_acquire_store(int32_t value,
                                                volatile int32_t *ptr)
{
    *ptr = value;            /* write first ... */
    android_memory_barrier();/* ... then fence everything after it */
}
55
/*
 * Store value to *ptr with release semantics: no earlier access may be
 * moved after this store.  x86 does not reorder stores with earlier
 * stores, so a compiler barrier alone is enough.
 */
extern inline void android_atomic_release_store(int32_t value,
                                                volatile int32_t *ptr)
{
    android_compiler_barrier();
    *ptr = value;
}
61
/*
 * Atomic compare-and-swap: if *ptr == old_value, set *ptr to new_value.
 * Returns 0 on success, nonzero if *ptr did not equal old_value.
 * The "lock" prefix makes the cmpxchg atomic across CPUs and acts as a
 * barrier on x86; the "memory" clobber stops compiler reordering.
 */
extern inline int android_atomic_cas(int32_t old_value, int32_t new_value,
                                     volatile int32_t *ptr)
{
    int32_t prev;
    /* cmpxchgl compares EAX (tied to old_value via "0"/"=a") with *ptr;
     * on match it stores new_value, otherwise EAX receives *ptr. */
    __asm__ __volatile__ ("lock; cmpxchgl %1, %2"
                          : "=a" (prev)
                          : "q" (new_value), "m" (*ptr), "0" (old_value)
                          : "memory");
    return prev != old_value;
}
72
/*
 * CAS with acquire semantics.  On x86 loads are not reordered with other
 * loads, and the locked cmpxchg in android_atomic_cas already constrains
 * ordering, so no extra fence is required here.
 * Returns 0 on success, nonzero on failure.
 */
extern inline int android_atomic_acquire_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed = android_atomic_cas(old_value, new_value, ptr);
    return failed;
}
80
/*
 * CAS with release semantics.  On x86 stores are not reordered with other
 * stores, so the plain CAS already provides the required ordering.
 * Returns 0 on success, nonzero on failure.
 */
extern inline int android_atomic_release_cas(int32_t old_value,
                                             int32_t new_value,
                                             volatile int32_t *ptr)
{
    int failed = android_atomic_cas(old_value, new_value, ptr);
    return failed;
}
88
/*
 * Atomically exchange *ptr with new_value; returns the previous contents
 * of *ptr.  xchgl with a memory operand is implicitly locked on x86, so
 * no explicit "lock" prefix is needed.
 */
extern inline int32_t android_atomic_swap(int32_t new_value,
                                          volatile int32_t *ptr)
{
    /* new_value is both input and output ("0" ties them together). */
    __asm__ __volatile__ ("xchgl %1, %0"
                          : "=r" (new_value)
                          : "m" (*ptr), "0" (new_value)
                          : "memory");
    /* new_value now holds the old value of *ptr */
    return new_value;
}
99
/*
 * Atomically add increment to *ptr; returns the value *ptr held before
 * the addition.  "lock; xaddl" exchanges and adds in one atomic step.
 */
extern inline int32_t android_atomic_add(int32_t increment,
                                         volatile int32_t *ptr)
{
    /* "+r"/"+m": both operands are read-modify-write; xadd leaves the old
     * value of *ptr in the register operand. */
    __asm__ __volatile__ ("lock; xaddl %0, %1"
                          : "+r" (increment), "+m" (*ptr)
                          : : "memory");
    /* increment now holds the old value of *ptr */
    return increment;
}
109
/*
 * Atomically increment *addr by one; returns the pre-increment value.
 */
extern inline int32_t android_atomic_inc(volatile int32_t *addr)
{
    return android_atomic_add(1, addr);
}
113
/*
 * Atomically decrement *addr by one; returns the pre-decrement value.
 */
extern inline int32_t android_atomic_dec(volatile int32_t *addr)
{
    return android_atomic_add(-1, addr);
}
117
/*
 * Atomically AND value into *ptr via a CAS retry loop; returns the value
 * *ptr held before the operation.  The loop repeats only if another
 * thread changed *ptr between the read and the CAS (expected to be rare,
 * hence the __builtin_expect hint).
 */
extern inline int32_t android_atomic_and(int32_t value,
                                         volatile int32_t *ptr)
{
    int32_t observed;
    do {
        observed = *ptr;
    } while (__builtin_expect(
                 android_atomic_cas(observed, observed & value, ptr) != 0,
                 0));
    return observed;
}
128
/*
 * Atomically OR value into *ptr via a CAS retry loop; returns the value
 * *ptr held before the operation.  Retries only on CAS contention,
 * which is expected to be rare (__builtin_expect hint).
 */
extern inline int32_t android_atomic_or(int32_t value, volatile int32_t *ptr)
{
    int32_t observed;
    do {
        observed = *ptr;
    } while (__builtin_expect(
                 android_atomic_cas(observed, observed | value, ptr) != 0,
                 0));
    return observed;
}
138
139#endif /* ANDROID_CUTILS_ATOMIC_X86_H */