@@ -136,7 +136,7 @@ inline Atomic64 NoBarrier_CompareAndSwap(volatile Atomic64* ptr,
   Atomic64 prev_value;
   do {
     if (OSAtomicCompareAndSwap64(old_value, new_value,
-                                 const_cast<Atomic64*>(ptr))) {
+                                 reinterpret_cast<volatile int64_t*>(ptr))) {
       return old_value;
     }
     prev_value = *ptr;
@@ -150,18 +150,19 @@ inline Atomic64 NoBarrier_AtomicExchange(volatile Atomic64* ptr,
   do {
     old_value = *ptr;
   } while (!OSAtomicCompareAndSwap64(old_value, new_value,
-                                     const_cast<Atomic64*>(ptr)));
+                                     reinterpret_cast<volatile int64_t*>(ptr)));
   return old_value;
 }
 
 inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                           Atomic64 increment) {
-  return OSAtomicAdd64(increment, const_cast<Atomic64*>(ptr));
+  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
 }
 
 inline Atomic64 Barrier_AtomicIncrement(volatile Atomic64* ptr,
                                         Atomic64 increment) {
-  return OSAtomicAdd64Barrier(increment, const_cast<Atomic64*>(ptr));
+  return OSAtomicAdd64Barrier(increment,
+                              reinterpret_cast<volatile int64_t*>(ptr));
 }
 
 inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
@@ -169,8 +170,8 @@ inline Atomic64 Acquire_CompareAndSwap(volatile Atomic64* ptr,
                                        Atomic64 new_value) {
   Atomic64 prev_value;
   do {
-    if (OSAtomicCompareAndSwap64Barrier(old_value, new_value,
-                                        const_cast<Atomic64*>(ptr))) {
+    if (OSAtomicCompareAndSwap64Barrier(
+            old_value, new_value, reinterpret_cast<volatile int64_t*>(ptr))) {
      return old_value;
     }
     prev_value = *ptr;
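
For reference, a minimal sketch of what one of the adjusted calls looks like outside the diff, assuming Atomic64 is a signed 64-bit integer typedef that is not necessarily the same type as int64_t (the intptr_t typedef below is only an illustrative assumption). The reinterpret_cast converts the pointer to the volatile int64_t* that OSAtomicAdd64 and OSAtomicCompareAndSwap64 in <libkern/OSAtomic.h> are declared with, and it preserves the volatile qualifier that the old const_cast stripped. These OSAtomic routines are deprecated on current macOS in favor of <stdatomic.h>, but they still compile and are kept here to mirror the diff.

#include <libkern/OSAtomic.h>
#include <cstdint>

// Assumption for illustration: Atomic64 is a 64-bit integral type that may
// differ from int64_t (e.g. intptr_t on LP64 macOS), so passing the pointer
// without the reinterpret_cast would not compile.
typedef intptr_t Atomic64;

inline Atomic64 NoBarrier_AtomicIncrement(volatile Atomic64* ptr,
                                          Atomic64 increment) {
  // OSAtomicAdd64 takes (int64_t amount, volatile int64_t* value) and
  // returns the value after the addition.
  return OSAtomicAdd64(increment, reinterpret_cast<volatile int64_t*>(ptr));
}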