memory_stack.hpp
// Copyright (C) 2015-2023 Jonathan Müller and foonathan/memory contributors
// SPDX-License-Identifier: Zlib

#ifndef WPI_MEMORY_MEMORY_STACK_HPP_INCLUDED
#define WPI_MEMORY_MEMORY_STACK_HPP_INCLUDED

/// \file
/// Class \ref wpi::memory::memory_stack and its \ref wpi::memory::allocator_traits specialization.

// Inform that wpi::memory::memory_stack::min_block_size API is available
#define WPI_MEMORY_MEMORY_STACK_HAS_MIN_BLOCK_SIZE

#include <cstdint>
#include <type_traits>

#include "detail/assert.hpp"
#include "detail/memory_stack.hpp"
#include "config.hpp"
#include "error.hpp"
#include "memory_arena.hpp"

namespace wpi
{
    namespace memory
    {
#if !defined(DOXYGEN)
        template <class Impl>
        class memory_stack;
#endif

        namespace detail
        {
            class stack_marker
            {
                std::size_t index;
                char*       top;
                const char* end;

                stack_marker(std::size_t i, const detail::fixed_memory_stack& s,
                             const char* e) noexcept
                : index(i), top(s.top()), end(e)
                {
                }

                friend bool operator==(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    if (lhs.index != rhs.index)
                        return false;
                    WPI_MEMORY_ASSERT_MSG(lhs.end == rhs.end, "you must not compare two "
                                                              "stack markers from different "
                                                              "stacks");
                    return lhs.top == rhs.top;
                }

                friend bool operator!=(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    return !(rhs == lhs);
                }

                friend bool operator<(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    if (lhs.index != rhs.index)
                        return lhs.index < rhs.index;
                    WPI_MEMORY_ASSERT_MSG(lhs.end == rhs.end, "you must not compare two "
                                                              "stack markers from different "
                                                              "stacks");
                    return lhs.top < rhs.top;
                }

                friend bool operator>(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    return rhs < lhs;
                }

                friend bool operator<=(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    return !(rhs < lhs);
                }

                friend bool operator>=(const stack_marker& lhs, const stack_marker& rhs) noexcept
                {
                    return !(lhs < rhs);
                }

                template <class Impl>
                friend class memory::memory_stack;
            };

            struct memory_stack_leak_handler
            {
                void operator()(std::ptrdiff_t amount);
            };
        } // namespace detail

        /// A stateful RawAllocator that provides stack-like (LIFO) allocations.
        /// It uses a \ref memory_arena with a given \c BlockOrRawAllocator, defaulting to \ref growing_block_allocator, to allocate huge blocks
        /// and saves a marker to the current top.
        /// Allocation simply moves this marker by the appropriate number of bytes and returns the pointer at the old marker position;
        /// deallocation is not directly supported, only setting the marker to a previously queried position.
        /// \ingroup memory_allocator
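        ///
        /// A minimal usage sketch (block and allocation sizes are illustrative):
        /// \code
        /// wpi::memory::memory_stack<> stack(4096);
        ///
        /// auto m = stack.top();               // remember the current top
        /// void* node = stack.allocate(64, 8); // moves the top by at least 64 bytes
        /// // ... use node ...
        /// stack.unwind(m);                    // releases node by resetting the top
        /// \endcode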
        template <class BlockOrRawAllocator = default_allocator>
        class memory_stack
        : WPI_EBO(detail::default_leak_checker<detail::memory_stack_leak_handler>)
        {
        public:
            using allocator_type = make_block_allocator_t<BlockOrRawAllocator>;

            /// \returns The minimum block size required for a stack containing the given amount of memory.
            /// If a stack is created with the result of `min_block_size(n)`, the resulting capacity will be exactly `n`.
            /// \requires `byte_size` must be a positive number.
            /// \note Due to debug fence sizes, the actual amount of usable memory can vary.
            /// However, this is impossible to compute without knowing the exact allocation pattern beforehand,
            /// so this is just a rough estimate.
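            ///
            /// For example (the byte count is illustrative):
            /// \code
            /// using stack_t = wpi::memory::memory_stack<>;
            /// stack_t stack(stack_t::min_block_size(4096)); // resulting capacity is exactly 4096
            /// \endcode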
            static constexpr std::size_t min_block_size(std::size_t byte_size) noexcept
            {
                return detail::memory_block_stack::implementation_offset() + byte_size;
            }

            /// \effects Creates it with a given initial block size and other constructor arguments for the BlockAllocator.
            /// It will allocate the first block and set the top to its beginning.
            /// \requires \c block_size must be at least \c min_block_size(1).
            template <typename... Args>
            explicit memory_stack(std::size_t block_size, Args&&... args)
            : arena_(block_size, detail::forward<Args>(args)...),
              stack_(arena_.allocate_block().memory)
            {
            }

            /// \effects Allocates a memory block of given size and alignment.
            /// It simply moves the top marker.
            /// If there is not enough space on the current memory block,
            /// a new one will be allocated by the BlockAllocator or taken from a cache
            /// and used for the allocation.
            /// \returns A node with given size and alignment.
            /// \throws Anything thrown by the BlockAllocator on growth
            /// or \ref bad_allocation_size if \c size is too big.
            /// \requires \c size and \c alignment must be valid.
            void* allocate(std::size_t size, std::size_t alignment)
            {
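                // Each allocation is bracketed by debug fences (zero-sized in release builds):
                // [fence | alignment padding | size bytes | fence]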
                auto fence  = detail::debug_fence_size;
                auto offset = detail::align_offset(stack_.top() + fence, alignment);

                if (!stack_.top()
                    || fence + offset + size + fence > std::size_t(block_end() - stack_.top()))
                {
                    // need to grow
                    auto block = arena_.allocate_block();
                    stack_     = detail::fixed_memory_stack(block.memory);

                    // new alignment required for over-aligned types
                    offset = detail::align_offset(stack_.top() + fence, alignment);

                    auto needed = fence + offset + size + fence;
                    detail::check_allocation_size<bad_allocation_size>(needed, block.size, info());
                }

                return stack_.allocate_unchecked(size, offset);
            }

            /// \effects Allocates a memory block of given size and alignment,
            /// similar to \ref allocate(),
            /// but it does not grow the arena if the current block is exhausted.
            /// \returns A node with given size and alignment,
            /// or `nullptr` if there wasn't enough memory available.
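            ///
            /// A sketch of the resulting nullptr check (`use()`/`handle_full()` are placeholders, sizes illustrative):
            /// \code
            /// if (void* node = stack.try_allocate(64, 8))
            ///     use(node);     // fit into the current block
            /// else
            ///     handle_full(); // allocate() would have grown the arena instead
            /// \endcode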
            void* try_allocate(std::size_t size, std::size_t alignment) noexcept
            {
                return stack_.allocate(block_end(), size, alignment);
            }

            /// The marker type that is used for unwinding.
            /// The exact type is implementation-defined;
            /// it is only required that it is efficiently copyable
            /// and has all the comparison operators defined for two markers on the same stack.
            /// Two markers are equal if they are copies or created from two `top()` calls without a call to `unwind()` or `allocate()` in between.
            /// A marker `a` is less than marker `b` if, after `a` was obtained, there were one or more calls to `allocate()` and no call to `unwind()`.
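            ///
            /// For example (sizes illustrative):
            /// \code
            /// auto a = stack.top();
            /// stack.allocate(16, 4);
            /// auto b = stack.top();
            /// // now a != b and a < b, because an allocation happened after a was obtained
            /// \endcode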
            using marker = WPI_IMPL_DEFINED(detail::stack_marker);

            /// \returns A marker to the current top of the stack.
            marker top() const noexcept
            {
                return {arena_.size() - 1, stack_, block_end()};
            }

            /// \effects Unwinds the stack to a certain marker position.
            /// This sets the top pointer of the stack to the position described by the marker
            /// and has the effect of deallocating all memory allocated since the marker was obtained.
            /// If any memory blocks are unused after the operation,
            /// they are not deallocated but put in a cache for later use;
            /// call \ref shrink_to_fit() to actually deallocate them.
            /// \requires The marker must point to memory that is still in use,
            /// i.e. it must have stayed below the top of the stack the whole time.
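            ///
            /// A typical pattern (sizes illustrative):
            /// \code
            /// auto m = stack.top();
            /// stack.allocate(1024, 8);
            /// stack.allocate(2048, 8);
            /// stack.unwind(m);       // both allocations are released, unused blocks stay cached
            /// stack.shrink_to_fit(); // optionally return cached blocks to the BlockAllocator
            /// \endcode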
            void unwind(marker m) noexcept
            {
                WPI_MEMORY_ASSERT(m <= top());
                detail::debug_check_pointer([&] { return m.index <= arena_.size() - 1; }, info(),
                                            m.top);

                if (std::size_t to_deallocate = (arena_.size() - 1) - m.index) // different index
                {
                    arena_.deallocate_block();
                    for (std::size_t i = 1; i != to_deallocate; ++i)
                        arena_.deallocate_block();

                    detail::debug_check_pointer(
                        [&]
                        {
                            auto cur = arena_.current_block();
                            return m.end == static_cast<char*>(cur.memory) + cur.size;
                        },
                        info(), m.top);

                    // mark memory from new top to end of the block as freed
                    detail::debug_fill_free(m.top, std::size_t(m.end - m.top), 0);
                    stack_ = detail::fixed_memory_stack(m.top);
                }
                else // same index
                {
                    detail::debug_check_pointer([&] { return stack_.top() >= m.top; }, info(),
                                                m.top);
                    stack_.unwind(m.top);
                }
            }

            /// \effects \ref unwind() does not actually deallocate any blocks on the BlockAllocator;
            /// unused memory is stored in a cache for later reuse.
            /// This function clears that cache.
            void shrink_to_fit() noexcept
            {
                arena_.shrink_to_fit();
            }

            /// \returns The amount of memory remaining in the current block.
            /// This is the number of bytes that are available for allocation
            /// before the cache or BlockAllocator needs to be used.
            std::size_t capacity_left() const noexcept
            {
                return std::size_t(block_end() - stack_.top());
            }

            /// \returns The size of the next memory block after the current block is exhausted and the arena grows.
            /// This function just forwards to the \ref memory_arena.
            /// \note All of it is available for the stack to use, but due to fences and alignment buffers,
            /// this may not be the exact amount of memory usable by the user.
            std::size_t next_capacity() const noexcept
            {
                return arena_.next_block_size();
            }

            /// \returns A reference to the BlockAllocator used for managing the arena.
            /// \requires It is undefined behavior to move this allocator out into another object.
            allocator_type& get_allocator() noexcept
            {
                return arena_.get_allocator();
            }

        private:
            allocator_info info() const noexcept
            {
                return {WPI_MEMORY_LOG_PREFIX "::memory_stack", this};
            }

            const char* block_end() const noexcept
            {
                auto block = arena_.current_block();
                return static_cast<const char*>(block.memory) + block.size;
            }

            memory_arena<allocator_type> arena_;
            detail::fixed_memory_stack   stack_;

            friend allocator_traits<memory_stack<BlockOrRawAllocator>>;
            friend composable_allocator_traits<memory_stack<BlockOrRawAllocator>>;
        };

        /// Simple utility that automatically unwinds a `Stack` to a previously saved location.
        /// A `Stack` is anything that provides a `marker` type, a `top()` function returning a `marker`,
        /// and an `unwind()` function to unwind to a `marker`,
        /// like a \ref wpi::memory::memory_stack.
        /// \ingroup memory_allocator
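        ///
        /// For example, as a scope guard for temporary allocations (sizes illustrative):
        /// \code
        /// wpi::memory::memory_stack<> stack(4096);
        /// {
        ///     wpi::memory::memory_stack_raii_unwind<> guard(stack);
        ///     stack.allocate(256, 8); // scratch memory
        /// } // the guard's destructor unwinds to the marker saved on construction
        /// \endcode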
        template <class Stack = memory_stack<>>
        class memory_stack_raii_unwind
        {
        public:
            using stack_type  = Stack;
            using marker_type = typename stack_type::marker;

            /// \effects Same as `memory_stack_raii_unwind(stack, stack.top())`.
            explicit memory_stack_raii_unwind(stack_type& stack) noexcept
            : memory_stack_raii_unwind(stack, stack.top())
            {
            }

            /// \effects Creates the unwinder by giving it the stack and the marker.
            /// \requires The stack must live longer than this object.
            memory_stack_raii_unwind(stack_type& stack, marker_type marker) noexcept
            : marker_(marker), stack_(&stack)
            {
            }

            /// \effects Move constructs the unwinder by taking the saved position from `other`.
            /// `other.will_unwind()` will return `false` afterwards.
            memory_stack_raii_unwind(memory_stack_raii_unwind&& other) noexcept
            : marker_(other.marker_), stack_(other.stack_)
            {
                other.stack_ = nullptr;
            }

            /// \effects Unwinds to the previously saved location,
            /// if there is any, by calling `unwind()`.
            ~memory_stack_raii_unwind() noexcept
            {
                if (stack_)
                    stack_->unwind(marker_);
            }

            /// \effects Move assigns the unwinder by taking the saved position from `other`.
            /// `other.will_unwind()` will return `false` afterwards.
            memory_stack_raii_unwind& operator=(memory_stack_raii_unwind&& other) noexcept
            {
                if (stack_)
                    stack_->unwind(marker_);

                marker_ = other.marker_;
                stack_  = other.stack_;

                other.stack_ = nullptr;

                return *this;
            }

            /// \effects Removes the location without unwinding it.
            /// `will_unwind()` will return `false`.
            void release() noexcept
            {
                stack_ = nullptr;
            }

            /// \effects Unwinds to the saved location explicitly.
            /// \requires `will_unwind()` must return `true`.
            void unwind() noexcept
            {
                WPI_MEMORY_ASSERT(will_unwind());
                stack_->unwind(marker_);
            }

            /// \returns Whether or not the unwinder will actually unwind.
            /// \note It will not unwind if it is in the moved-from state.
            bool will_unwind() const noexcept
            {
                return stack_ != nullptr;
            }

            /// \returns The saved marker, if there is any.
            /// \requires `will_unwind()` must return `true`.
            marker_type get_marker() const noexcept
            {
                WPI_MEMORY_ASSERT(will_unwind());
                return marker_;
            }

            /// \returns The stack it will unwind.
            /// \requires `will_unwind()` must return `true`.
            stack_type& get_stack() const noexcept
            {
                WPI_MEMORY_ASSERT(will_unwind());
                return *stack_;
            }

        private:
            marker_type marker_;
            stack_type* stack_;
        };

#if WPI_MEMORY_EXTERN_TEMPLATE
        extern template class memory_stack<>;
        extern template class memory_stack_raii_unwind<memory_stack<>>;
#endif

        /// Specialization of the \ref allocator_traits for \ref memory_stack classes.
        /// \note It is not allowed to mix calls through the specialization and through the member functions,
        /// i.e. \ref memory_stack::allocate() and this \c allocate_node().
        /// \ingroup memory_allocator
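        ///
        /// Generic code should therefore go through the traits exclusively, e.g. (sizes illustrative):
        /// \code
        /// using traits = wpi::memory::allocator_traits<wpi::memory::memory_stack<>>;
        /// void* node = traits::allocate_node(stack, 64, 8);
        /// traits::deallocate_node(stack, node, 64, 8); // bookkeeping only, memory is reclaimed by unwind()
        /// \endcode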
        template <class BlockAllocator>
        class allocator_traits<memory_stack<BlockAllocator>>
        {
        public:
            using allocator_type = memory_stack<BlockAllocator>;
            using is_stateful    = std::true_type;

            /// \returns The result of \ref memory_stack::allocate().
            static void* allocate_node(allocator_type& state, std::size_t size,
                                       std::size_t alignment)
            {
                auto mem = state.allocate(size, alignment);
                state.on_allocate(size);
                return mem;
            }

            /// \returns The result of \ref memory_stack::allocate().
            static void* allocate_array(allocator_type& state, std::size_t count, std::size_t size,
                                        std::size_t alignment)
            {
                return allocate_node(state, count * size, alignment);
            }

            /// @{
            /// \effects Does nothing besides bookkeeping for leak checking, if that is enabled.
            /// Actual deallocation can only be done via \ref memory_stack::unwind().
            static void deallocate_node(allocator_type& state, void*, std::size_t size,
                                        std::size_t) noexcept
            {
                state.on_deallocate(size);
            }

            static void deallocate_array(allocator_type& state, void* ptr, std::size_t count,
                                         std::size_t size, std::size_t alignment) noexcept
            {
                deallocate_node(state, ptr, count * size, alignment);
            }
            /// @}

            /// @{
            /// \returns The maximum size, which is \ref memory_stack::next_capacity().
            static std::size_t max_node_size(const allocator_type& state) noexcept
            {
                return state.next_capacity();
            }

            static std::size_t max_array_size(const allocator_type& state) noexcept
            {
                return state.next_capacity();
            }
            /// @}

            /// \returns The maximum possible value, since there is no alignment restriction
            /// (except indirectly through \ref memory_stack::next_capacity()).
            static std::size_t max_alignment(const allocator_type&) noexcept
            {
                return std::size_t(-1);
            }
        };

        /// Specialization of the \ref composable_allocator_traits for \ref memory_stack classes.
        /// \ingroup memory_allocator
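        ///
        /// For example (sizes illustrative):
        /// \code
        /// using ctraits = wpi::memory::composable_allocator_traits<wpi::memory::memory_stack<>>;
        /// void* node = ctraits::try_allocate_node(stack, 64, 8); // nullptr if the current block is full
        /// \endcode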
        template <class BlockAllocator>
        class composable_allocator_traits<memory_stack<BlockAllocator>>
        {
        public:
            using allocator_type = memory_stack<BlockAllocator>;

            /// \returns The result of \ref memory_stack::try_allocate().
            static void* try_allocate_node(allocator_type& state, std::size_t size,
                                           std::size_t alignment) noexcept
            {
                return state.try_allocate(size, alignment);
            }

            /// \returns The result of \ref memory_stack::try_allocate().
            static void* try_allocate_array(allocator_type& state, std::size_t count,
                                            std::size_t size, std::size_t alignment) noexcept
            {
                return state.try_allocate(count * size, alignment);
            }

            /// @{
            /// \effects Does nothing.
            /// \returns Whether the memory will be deallocated by \ref memory_stack::unwind().
            static bool try_deallocate_node(allocator_type& state, void* ptr, std::size_t,
                                            std::size_t) noexcept
            {
                return state.arena_.owns(ptr);
            }

            static bool try_deallocate_array(allocator_type& state, void* ptr, std::size_t count,
                                             std::size_t size, std::size_t alignment) noexcept
            {
                return try_deallocate_node(state, ptr, count * size, alignment);
            }
            /// @}
        };

#if WPI_MEMORY_EXTERN_TEMPLATE
        extern template class allocator_traits<memory_stack<>>;
        extern template class composable_allocator_traits<memory_stack<>>;
#endif
    } // namespace memory
} // namespace wpi

#endif // WPI_MEMORY_MEMORY_STACK_HPP_INCLUDED