// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_MAC_H_
#define BASE_ATOMICOPS_INTERNALS_MAC_H_

#include <libkern/OSAtomic.h>

namespace base {
namespace subtle {
14
// Atomically compares *ptr against |old_value| and, if they match, writes
// |new_value|.  Returns the value the operation observed in *ptr:
// |old_value| on success, or the differing current value on failure.
// No memory barrier is implied.
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  for (;;) {
    // OSAtomicCompareAndSwap32 returns true iff the swap was performed.
    if (OSAtomicCompareAndSwap32(old_value, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    // The swap failed.  If the current value differs from |old_value| we can
    // report it to the caller; otherwise another thread raced the value back
    // to |old_value| and we must retry.
    const Atomic32 current = *ptr;
    if (current != old_value)
      return current;
  }
}
28
// Atomically stores |new_value| into *ptr and returns the value that was
// previously stored there.  No memory barrier is implied.
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr,
                                         Atomic32 new_value) {
  for (;;) {
    // Snapshot the current value, then try to swap it out.  A successful
    // compare-and-swap proves |observed| is exactly what we displaced.
    const Atomic32 observed = *ptr;
    if (OSAtomicCompareAndSwap32(observed, new_value,
                                 const_cast<Atomic32*>(ptr))) {
      return observed;
    }
  }
}
38
// Atomically adds |increment| to *ptr and returns the resulting value.
// No memory barrier is implied.
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  // OSAtomicAdd32 takes a non-volatile pointer, hence the const_cast.
  Atomic32* target = const_cast<Atomic32*>(ptr);
  return OSAtomicAdd32(increment, target);
}
43
// Atomically adds |increment| to *ptr with a full memory barrier and returns
// the resulting value.
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  // OSAtomicAdd32Barrier takes a non-volatile pointer, hence the const_cast.
  Atomic32* target = const_cast<Atomic32*>(ptr);
  return OSAtomicAdd32Barrier(increment, target);
}
48
// Issues a full memory barrier via the libkern primitive.
inline void MemoryBarrier() {
  OSMemoryBarrier();
}
52
// Same contract as NoBarrier_CompareAndSwap, but uses the barrier flavor of
// the libkern primitive so the operation carries a memory barrier.
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  for (;;) {
    // The barrier variant returns true iff the swap was performed.
    if (OSAtomicCompareAndSwap32Barrier(old_value, new_value,
                                        const_cast<Atomic32*>(ptr))) {
      return old_value;
    }
    // Swap failed: report a differing current value, or retry if another
    // thread raced the value back to |old_value|.
    const Atomic32 current = *ptr;
    if (current != old_value)
      return current;
  }
}
66
// The lib kern interface does not distinguish between
// Acquire and Release memory barriers; they are equivalent,
// so the release flavor simply forwards to the acquire one.
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
72
// Plain store; no ordering guarantees beyond the volatile write itself.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}
76
// Store followed by a full barrier: the store completes before any
// subsequent memory operations.
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}
81
// Full barrier followed by the store: all prior memory operations complete
// before the new value is published.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}
86
// Plain load; no ordering guarantees beyond the volatile read itself.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}
90
// Load followed by a full barrier: subsequent memory operations cannot be
// reordered before this load.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}
96
// Full barrier followed by the load: prior memory operations complete before
// the value is read.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}
101
#ifdef __LP64__

// 64-bit implementation on 64-bit platform
// 64-bit counterpart of the Atomic32 overload: compare-and-swap with no
// implied memory barrier.  Returns |old_value| on success, or the differing
// current value on failure.
inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
                                         Atomic64 old_value,
                                         Atomic64 new_value) {
  for (;;) {
    // OSAtomicCompareAndSwap64 returns true iff the swap was performed.
    if (OSAtomicCompareAndSwap64(old_value, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    // Swap failed: report a differing current value, or retry if another
    // thread raced the value back to |old_value|.
    const Atomic64 current = *ptr;
    if (current != old_value)
      return current;
  }
}
119
// Atomically stores |new_value| into *ptr and returns the previous value.
// No memory barrier is implied.
inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
                                         Atomic64 new_value) {
  for (;;) {
    // Snapshot the current value; a successful compare-and-swap proves
    // |observed| is exactly the value we displaced.
    const Atomic64 observed = *ptr;
    if (OSAtomicCompareAndSwap64(observed, new_value,
                                 reinterpret_cast<volatile int64_t*>(ptr))) {
      return observed;
    }
  }
}
129
// Atomically adds |increment| to *ptr and returns the resulting value.
// No memory barrier is implied.
inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  // OSAtomicAdd64 is declared on int64_t, hence the reinterpret_cast.
  volatile int64_t* target = reinterpret_cast<volatile int64_t*>(ptr);
  return OSAtomicAdd64(increment, target);
}
134
// Atomically adds |increment| to *ptr with a full memory barrier and returns
// the resulting value.
inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                        Atomic64 increment) {
  // OSAtomicAdd64Barrier is declared on int64_t, hence the reinterpret_cast.
  volatile int64_t* target = reinterpret_cast<volatile int64_t*>(ptr);
  return OSAtomicAdd64Barrier(increment, target);
}
140
// Same contract as the 64-bit NoBarrier_CompareAndSwap, but uses the barrier
// flavor of the libkern primitive so the operation carries a memory barrier.
inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  for (;;) {
    // The barrier variant returns true iff the swap was performed.
    if (OSAtomicCompareAndSwap64Barrier(
            old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
    }
    // Swap failed: report a differing current value, or retry if another
    // thread raced the value back to |old_value|.
    const Atomic64 current = *ptr;
    if (current != old_value)
      return current;
  }
}
154
// The lib kern interface does not distinguish between
// Acquire and Release memory barriers; they are equivalent,
// so the release flavor simply forwards to the acquire one.
inline Atomic64 Release_CompareAndSwap(volatile Atomic64* ptr,
                                       Atomic64 old_value,
                                       Atomic64 new_value) {
  return Acquire_CompareAndSwap(ptr, old_value, new_value);
}
162
// Plain store; no ordering guarantees beyond the volatile write itself.
inline void NoBarrier_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
}
166
// Store followed by a full barrier: the store completes before any
// subsequent memory operations.
inline void Acquire_Store(volatile Atomic64* ptr, Atomic64 value) {
  *ptr = value;
  MemoryBarrier();
}
171
// Full barrier followed by the store: all prior memory operations complete
// before the new value is published.
inline void Release_Store(volatile Atomic64* ptr, Atomic64 value) {
  MemoryBarrier();
  *ptr = value;
}
176
// Plain load; no ordering guarantees beyond the volatile read itself.
inline Atomic64 NoBarrier_Load(volatile const Atomic64* ptr) {
  return *ptr;
}
180
// Load followed by a full barrier: subsequent memory operations cannot be
// reordered before this load.
inline Atomic64 Acquire_Load(volatile const Atomic64* ptr) {
  Atomic64 value = *ptr;
  MemoryBarrier();
  return value;
}
186
// Full barrier followed by the load: prior memory operations complete before
// the value is read.
inline Atomic64 Release_Load(volatile const Atomic64* ptr) {
  MemoryBarrier();
  return *ptr;
}
191
#endif  // defined(__LP64__)

}  // namespace base::subtle
}  // namespace base

#endif  // BASE_ATOMICOPS_INTERNALS_MAC_H_