Loading...
Searching...
No Matches
freelist.hpp
1#pragma once
2
3#include "../utility/os.hpp"
4#include <atomic>
5
6namespace tf {
7
12
// An intrusive lock-free (Treiber-style) stack.
// T is a node *pointer* type (e.g. Node*); MemberPtr names the link field
// inside the node through which the stack chains its elements.
template <typename T, auto MemberPtr>

  // NodeType is the pointee (e.g. Node from Node*)
  using NodeType = std::remove_pointer_t<T>;

  // Compile-time guard: the field selected by MemberPtr must itself have
  // type T (i.e. a Node* inside Node), otherwise _link() could not chain
  // nodes together.
  static_assert(
    std::is_same_v<T, std::remove_reference_t<
      decltype(std::declval<NodeType>().*MemberPtr)
    >>,
    "MemberPtr must point to a field of type T within the node"
  );

  // helper: access the intrusive link field of a node
  static T& _link(T node) { return node->*MemberPtr; }

  // Head pointer paired with a version tag; the tag is bumped on every
  // successful CAS (see push/pop) to defeat the ABA problem.
  // NOTE(review): correctness assumes std::atomic<TaggedPointer> maps to a
  // double-width CAS on the target platform — confirm it is lock-free there.
  struct TaggedPointer {
    T ptr;       // current top of stack (nullptr when empty)
    size_t tag;  // version counter, incremented on each update
  };

  // Cache-line aligned to avoid false sharing with adjacent members.
  alignas(TF_CACHELINE_SIZE) std::atomic<TaggedPointer> _head;
73
74 public:
75
86 _head.store({nullptr, 0}, std::memory_order_relaxed);
87 }
88
107 bool empty() const {
108 return _head.load(std::memory_order_relaxed).ptr == nullptr;
109 }
110
134 void push(T node) {
135
136 TaggedPointer curr = _head.load(std::memory_order_relaxed);
137
138 while(true) {
139 _link(node) = curr.ptr;
140 TaggedPointer next = {node, curr.tag + 1};
141
142 if(_head.compare_exchange_weak(curr, next,
143 std::memory_order_release,
144 std::memory_order_relaxed)) {
145 break;
146 }
147 // on failure curr is reloaded by compare_exchange_weak
148 }
149 }
150
179 T pop() {
180
181 TaggedPointer curr = _head.load(std::memory_order_acquire);
182
183 while(curr.ptr != nullptr) {
184 TaggedPointer next = {_link(curr.ptr), curr.tag + 1};
185
186 if(_head.compare_exchange_weak(curr, next,
187 std::memory_order_release,
188 std::memory_order_acquire)) {
189 // This can cause thread sanitizer to report error since this assignment
190 // has not guarantee to finish before the return while the other thread
191 // may call push that modify curr.ptr causing data race...
192 return curr.ptr;
193 }
194 // on failure curr is reloaded
195 }
196 return nullptr;
197 }
198};
199
200// ----------------------------------------------------------------------------
201// AtomicIntrusiveStack — 64-bit tagged-pointer alternative
202//
203// Uses the upper 16 bits of a 64-bit uintptr_t as a version tag, avoiding
204// the need for 128-bit CAS. Relies on x86-64 canonical address space where
205// only the lower 48 bits are used for actual addresses.
206//
207// Parameterized identically to the 128-bit version above.
208// ----------------------------------------------------------------------------
209
210/*
211template <typename T, auto MemberPtr>
212class AtomicIntrusiveStack {
213
214 using NodeType = std::remove_pointer_t<T>;
215
216 static_assert(
217 std::is_same_v<T, std::remove_reference_t<
218 decltype(std::declval<NodeType>().*MemberPtr)
219 >>,
220 "MemberPtr must point to a field of type T within the node"
221 );
222
223 static T& _link(T node) { return node->*MemberPtr; }
224
225 static constexpr uintptr_t ADDR_MASK = (uintptr_t{1} << 48) - 1;
226 static constexpr uintptr_t TAG_INC = (uintptr_t{1} << 48);
227
228 T _unpack(uintptr_t val) const {
229 return reinterpret_cast<T>(val & ADDR_MASK);
230 }
231
232 uintptr_t _pack(T ptr, uintptr_t old_val) const {
233 uintptr_t next_tag = (old_val & ~ADDR_MASK) + TAG_INC;
234 return next_tag | (reinterpret_cast<uintptr_t>(ptr) & ADDR_MASK);
235 }
236
237 alignas(TF_CACHELINE_SIZE) std::atomic<uintptr_t> _head;
238
239public:
240
241 AtomicIntrusiveStack() : _head(0) {}
242
243 bool empty() const {
244 return _unpack(_head.load(std::memory_order_relaxed)) == nullptr;
245 }
246
247 void push(T node) {
248 uintptr_t curr = _head.load(std::memory_order_relaxed);
249 while(true) {
250 _link(node) = _unpack(curr);
251 uintptr_t next = _pack(node, curr);
252 if(_head.compare_exchange_weak(curr, next,
253 std::memory_order_release,
254 std::memory_order_relaxed)) {
255 break;
256 }
257 }
258 }
259
260 template <typename I>
261 void bulk_push(I first, size_t N) {
262 if(N == 0) return;
263 for(size_t i = 0; i < N-1; ++i) {
264 _link(first[i]) = first[i+1];
265 }
266 _link(first[N-1]) = nullptr;
267 uintptr_t curr = _head.load(std::memory_order_relaxed);
268 while(true) {
269 _link(first[N-1]) = _unpack(curr);
270 uintptr_t next = _pack(first[0], curr);
271 if(_head.compare_exchange_weak(curr, next,
272 std::memory_order_release,
273 std::memory_order_relaxed)) {
274 break;
275 }
276 }
277 }
278
279 T pop() {
280 uintptr_t curr = _head.load(std::memory_order_acquire);
281 while(true) {
282 T curr_ptr = _unpack(curr);
283 if(curr_ptr == nullptr) return nullptr;
284 uintptr_t next = _pack(_link(curr_ptr), curr);
285 if(_head.compare_exchange_weak(curr, next,
286 std::memory_order_release,
287 std::memory_order_acquire)) {
288 _link(curr_ptr) = nullptr;
289 return curr_ptr;
290 }
291 }
292 }
293};
294*/
295
296} // end of namespace tf. ----------------------------------------------------
bool empty() const
queries whether the stack is empty at the time of this call
Definition freelist.hpp:107
AtomicIntrusiveStack()
constructs an empty stack
Definition freelist.hpp:85
void push(T node)
pushes a node onto the top of the stack
Definition freelist.hpp:134
T pop()
removes and returns the top node, or nullptr if the stack is empty
Definition freelist.hpp:179
taskflow namespace
Definition small_vector.hpp:20