// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_MAC_H_
#define BASE_ATOMICOPS_INTERNALS_MAC_H_

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {

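// OSAtomicCompareAndSwap32() reports only success or failure, so on failure
// the CAS wrappers below re-read *ptr to recover the value that defeated the
// swap, retrying if another thread has since restored |old_value|.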
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

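// Exchange is emulated with a CAS loop: re-read the current value until the
// swap to |new_value| succeeds, then return the value that was replaced.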
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap32(old_value, new_value,
                                     const_cast<Atomic32*>(ptr)));
  return old_value;
}

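// OSAtomicAdd32() and OSAtomicAdd32Barrier() return the new (incremented)
// value, matching the contract of the *_AtomicIncrement operations.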
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  return OSAtomicAdd32(increment, const_cast<Atomic32*>(ptr));
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  return OSAtomicAdd32Barrier(increment, const_cast<Atomic32*>(ptr));
}

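// OSMemoryBarrier() is a full barrier, ordering both loads and stores.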
inline void MemoryBarrier() {
  OSMemoryBarrier();
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev_value;
  do {
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
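  // The lib kern interface does not distinguish between Acquire and Release
  // memory barriers, so the two CompareAndSwap variants are equivalent.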
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

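// The barrier follows the store, ordering the store before any later memory
// operations from this thread.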
inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

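// The barrier precedes the store, so all earlier memory operations complete
// before the new value becomes visible.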
inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

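// Acquire_Load performs the load before the barrier; Release_Load performs
// it after, mirroring the two store variants above.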
inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#ifdef __LP64__

// 64-bit implementation on 64-bit platform
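// These mirror the 32-bit operations above; the reinterpret_casts bridge
// Atomic64 to the int64_t type that the OSAtomic*64() primitives expect.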

inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64 *ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64 *ptr,
                                         Atomic64 new_value) {
  Atomic64 old_value;
  do {
    old_value = *ptr;
  } while (!OSAtomicCompareAndSwap64(old_value, new_value,
                                     reinterpret_cast<volatile int64_t*>(ptr)));
  return old_value;
}

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64 *ptr,
                                          Atomic64 increment) {
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64 *ptr,
                                        Atomic64 increment) {
  return OSAtomicAdd64Barrier(increment,
                              reinterpret_cast<volatile int64_t*>(ptr));
}

inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  Atomic64 prev_value;
  do {
    if (OSAtomicCompareAndSwap64Barrier(
            old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    prev_value = *ptr;
  } while (prev_value == old_value);
  return prev_value;
}

inline Atomic64 Release_CompareAndSwap(volatile Atomic64 *ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  // The lib kern interface does not distinguish between
  // Acquire and Release memory barriers; they are equivalent.
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}

inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}

inline void Acquire_Store(volatile Atomic64 *ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic64 *ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}

inline Atomic64 Acquire_Load(volatile const Atomic64 *ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic64 Release_Load(volatile const Atomic64 *ptr) {
  MemoryBarrier();
  return *ptr;
}

#endif  // defined(__LP64__)

}  // namespace base::subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_MAC_H_
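
// A minimal usage sketch (illustrative only, not part of this header):
// a shared counter bumped with a barrier and read with acquire semantics.
//
//   base::subtle::Atomic32 counter = 0;
//   base::subtle::Barrier_AtomicIncrement(&counter, 1);  // returns 1
//   base::subtle::Atomic32 value = base::subtle::Acquire_Load(&counter);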