// Copyright (c) 2010 JJDaNiMoTh <jjdanimoth@gmail.com>. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is an internal atomic implementation, use base/atomicops.h instead.

#ifndef BASE_ATOMICOPS_INTERNALS_PPC_GCC_H_
#define BASE_ATOMICOPS_INTERNALS_PPC_GCC_H_
// Compiler-only barrier: stops the compiler from reordering memory accesses
// across this point.  Emits no machine instruction.
#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

// Two-level stringification so macro arguments are fully expanded before
// being turned into an inline-asm string; a trailing space keeps adjacent
// tokens separated when strings are concatenated.
#define __stringify_in_c(...) #__VA_ARGS__
#define stringify_in_c(...) __stringify_in_c(__VA_ARGS__) " "

// Fixup-section machinery for the acquire barrier: emits an "isync" plus an
// entry in a dedicated section recording the barrier's address.
// NOTE(review): this appears to mirror the Linux kernel's lwsync fixup tables
// (arch/powerpc asm/synch.h) — confirm against the kernel source.
#define FTR_ENTRY_OFFSET .long
#define START_LWSYNC_SECTION(label) label##1:
#define MAKE_LWSYNC_SECTION_ENTRY(label, sect) \
  label##2: \
  .pushsection sect,"a"; \
  .align 2; \
  label##3: \
  FTR_ENTRY_OFFSET label##1b-label##3b; \
  .popsection;

// Acquire barrier body: "isync" bracketed by the fixup-section labels (97).
#define __PPC_ACQUIRE_BARRIER \
  START_LWSYNC_SECTION(97); \
  isync; \
  MAKE_LWSYNC_SECTION_ENTRY(97, __lwsync_fixup);
#define PPC_ACQUIRE_BARRIER "\n" stringify_in_c(__PPC_ACQUIRE_BARRIER)
// Release barrier: emitted verbatim as "LWSYNC".
// NOTE(review): the mnemonic is uppercase; the kernel defines LWSYNC as an
// assembler macro elsewhere — confirm the assembler resolves it here.
#define PPC_RELEASE_BARRIER stringify_in_c(LWSYNC) "\n"

namespace base {
namespace subtle {

// 32-bit low-level operations on any platform.

/* |
38 |
* Compare and exchange - if *ptr == old, set it to new, |
39 |
* and return the old value of *p. |
40 |
*/ |
41 |
inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32* ptr, |
42 |
Atomic32 old_value, |
43 |
Atomic32 new_value) { |
44 |
Atomic32 prev; |
45 |
|
46 |
__asm__ __volatile__ ( |
47 |
"1: lwarx %0,0,%2\n" |
48 |
"cmpw 0,%0,%3\n" |
49 |
"bne- 2f\n" |
50 |
"stwcx. %4,0,%2\n" |
51 |
"bne- 1b\n" |
52 |
"2:\n" |
53 |
: "=&r" (prev), "+m" (*ptr) |
54 |
: "r" (ptr), "r" (old_value), "r" (new_value) |
55 |
: "cc", "memory"); |
56 |
|
57 |
return prev; |
58 |
} |
59 |
|
60 |
/* |
61 |
* Atomic exchange |
62 |
* |
63 |
* Changes the memory location '*ptr' to be new_value and returns |
64 |
* the previous value stored there. |
65 |
*/ |
66 |
inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32* ptr, |
67 |
Atomic32 new_value) { |
68 |
Atomic32 prev; |
69 |
|
70 |
__asm__ __volatile__( |
71 |
"1: lwarx %0,0,%2 \n" |
72 |
" stwcx. %3,0,%2 \n\ |
73 |
bne- 1b" |
74 |
: "=&r" (prev), "+m" (*ptr) |
75 |
: "r" (ptr), "r" (new_value) |
76 |
: "cc", "memory"); |
77 |
|
78 |
return prev; |
79 |
} |
80 |
|
81 |
// Atomically add 'increment' to *ptr and return the NEW (incremented)
// value.  No memory barrier is implied.
inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32* ptr,
                                          Atomic32 increment) {
  Atomic32 temp;

  __asm__ __volatile__(
      "1: lwarx %0,0,%2\n"   // load-reserve *ptr
      "add %0,%1,%0\n"       // temp = increment + *ptr
      "stwcx. %0,0,%2\n"     // store back; fails if reservation lost
      "bne- 1b\n"            // retry on failure
      : "=&r" (temp)
      : "r" (increment), "r" (ptr)
      : "cc", "memory");     // "memory" clobber covers the *ptr update

  return temp;
}

// Atomically add 'increment' to *ptr and return the new value, with full
// barrier semantics: a release barrier (LWSYNC) before the update and an
// acquire barrier (isync + fixup entry) after it.
inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32* ptr,
                                        Atomic32 increment) {
  Atomic32 temp;

  __asm__ __volatile__(
      PPC_RELEASE_BARRIER    // release: order prior accesses before the RMW
      "1: lwarx %0,0,%2\n"   // load-reserve *ptr
      "add %0,%1,%0\n"       // temp = increment + *ptr
      "stwcx. %0,0,%2\n"     // store back; fails if reservation lost
      "bne- 1b"              // retry on failure
      PPC_ACQUIRE_BARRIER    // acquire: order the RMW before later accesses
      : "=&r" (temp)
      : "r" (increment), "r" (ptr)
      : "cc", "memory");

  return temp;
}

// Compare-and-swap with acquire semantics: no later load/store may be
// reordered before the swap.
// FIX(review): the original simply forwarded to NoBarrier_CompareAndSwap
// with no fence, which is insufficient on weakly-ordered PPC; add a full
// "sync" after the swap (conservative — an isync-based sequence would also
// suffice for acquire).
inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  Atomic32 prev = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
  __asm__ __volatile__("sync" : : : "memory");  // full hardware barrier
  return prev;
}

// Compare-and-swap with release semantics: all earlier loads/stores complete
// before the swap becomes visible.
// FIX(review): the original forwarded to NoBarrier_CompareAndSwap with no
// fence, which is insufficient on weakly-ordered PPC; issue a full "sync"
// before the swap (conservative — LWSYNC would also suffice for release).
inline Atomic32 Release_CompareAndSwap(volatile Atomic32* ptr,
                                       Atomic32 old_value,
                                       Atomic32 new_value) {
  __asm__ __volatile__("sync" : : : "memory");  // full hardware barrier
  return NoBarrier_CompareAndSwap(ptr, old_value, new_value);
}

// Plain store to *ptr with no ordering guarantees beyond the volatile access.
inline void NoBarrier_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
}

// Full hardware memory barrier ("sync"); the "memory" clobber also acts as
// a compiler barrier.
inline void MemoryBarrier() {
  __asm__ __volatile__("sync" : : : "memory");
}

// Store to *ptr, then issue a full barrier so the store completes before any
// subsequent memory operation.
inline void Acquire_Store(volatile Atomic32* ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

// Issue a full barrier, then store to *ptr, so all earlier memory operations
// complete before the store becomes visible.
inline void Release_Store(volatile Atomic32* ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

// Plain load from *ptr with no ordering guarantees beyond the volatile access.
inline Atomic32 NoBarrier_Load(volatile const Atomic32* ptr) {
  return *ptr;
}

// Load from *ptr, then issue a full barrier so the load completes before any
// subsequent memory operation.
inline Atomic32 Acquire_Load(volatile const Atomic32* ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

// Issue a full barrier, then load from *ptr, so all earlier memory operations
// complete before the load.
inline Atomic32 Release_Load(volatile const Atomic32* ptr) {
  MemoryBarrier();
  return *ptr;
}

} // namespace subtle
} // namespace base

#undef ATOMICOPS_COMPILER_BARRIER

#endif  // BASE_ATOMICOPS_INTERNALS_PPC_GCC_H_