WPILibC++ 2025.1.1
Loading...
Searching...
No Matches
memory_pool.hpp
Go to the documentation of this file.
1// Copyright (C) 2015-2023 Jonathan Müller and foonathan/memory contributors
2// SPDX-License-Identifier: Zlib
3
4#ifndef WPI_MEMORY_MEMORY_POOL_HPP_INCLUDED
5#define WPI_MEMORY_MEMORY_POOL_HPP_INCLUDED
6
7// Inform that wpi::memory::memory_pool::min_block_size API is available
8#define WPI_MEMORY_MEMORY_POOL_HAS_MIN_BLOCK_SIZE
9
10/// \file
11/// Class \ref wpi::memory::memory_pool and its \ref wpi::memory::allocator_traits specialization.
12
#include <type_traits>

#include "detail/align.hpp"
#include "detail/debug_helpers.hpp"
#include "detail/assert.hpp"
#include "config.hpp"
#include "error.hpp"
#include "memory_arena.hpp"
#include "memory_pool_type.hpp"
22
23namespace wpi
24{
25 namespace memory
26 {
27 namespace detail
28 {
30 {
31 void operator()(std::ptrdiff_t amount);
32 };
33 } // namespace detail
34
35 /// A stateful RawAllocator that manages nodes of fixed size.
36 /// It uses a \ref memory_arena with a given \c BlockOrRawAllocator defaulting to \ref growing_block_allocator,
37 /// subdivides them in small nodes of given size and puts them onto a free list.
38 /// Allocation and deallocation simply remove or add nodes from this list and are thus fast.
39 /// The way the list is maintained can be controlled via the \c PoolType
40 /// which is either \ref node_pool, \ref array_pool or \ref small_node_pool.<br>
41 /// This kind of allocator is ideal for fixed size allocations and deallocations in any order,
42 /// for example in a node based container like \c std::list.
43 /// It is not so good for different allocation sizes and has some drawbacks for arrays
44 /// as described in \ref memory_pool_type.hpp.
45 /// \ingroup memory_allocator
46 template <typename PoolType = node_pool, class BlockOrRawAllocator = default_allocator>
48 : WPI_EBO(detail::default_leak_checker<detail::memory_pool_leak_handler>)
49 {
50 using free_list = typename PoolType::type;
52
53 public:
55 using pool_type = PoolType;
56
57 static constexpr std::size_t min_node_size =
58 WPI_IMPL_DEFINED(free_list::min_element_size);
59
60 /// \returns The minimum block size required for certain number of node.
61 /// \requires \c node_size must be a valid node size
62 /// and \c number_of_nodes must be a non-zero value.
63 /// \note MSVC's implementation of \c std::list for example is never empty and always allocates proxy nodes.
64 /// To get enough memory for \c N elements of a list, \c number_of_nodes needs to include the proxy count in addition to \c N.
65 static constexpr std::size_t min_block_size(std::size_t node_size,
66 std::size_t number_of_nodes) noexcept
67 {
69 + free_list::min_block_size(node_size, number_of_nodes);
70 }
71
72 /// \effects Creates it by specifying the size each node will have,
73 /// the initial block size for the arena and other constructor arguments for the BlockAllocator.
74 /// If the \c node_size is less than the \c min_node_size, the \c min_node_size will be the actual node size.
75 /// It will allocate an initial memory block with given size from the BlockAllocator
76 /// and puts it onto the free list.
77 /// \requires \c node_size must be a valid node size
78 /// and \c block_size must be at least \c min_block_size(node_size, 1).
79 template <typename... Args>
80 memory_pool(std::size_t node_size, std::size_t block_size, Args&&... args)
81 : arena_(block_size, detail::forward<Args>(args)...), free_list_(node_size)
82 {
83 allocate_block();
84 }
85
86 /// \effects Destroys the \ref memory_pool by returning all memory blocks,
87 /// regardless of properly deallocated back to the BlockAllocator.
88 ~memory_pool() noexcept {}
89
90 /// @{
91 /// \effects Moving a \ref memory_pool object transfers ownership over the free list,
92 /// i.e. the moved from pool is completely empty and the new one has all its memory.
93 /// That means that it is not allowed to call \ref deallocate_node() on a moved-from allocator
94 /// even when passing it memory that was previously allocated by this object.
95 memory_pool(memory_pool&& other) noexcept
96 : leak_checker(detail::move(other)),
97 arena_(detail::move(other.arena_)),
98 free_list_(detail::move(other.free_list_))
99 {
100 }
101
103 {
105 arena_ = detail::move(other.arena_);
106 free_list_ = detail::move(other.free_list_);
107 return *this;
108 }
109 /// @}
110
111 /// \effects Allocates a single node by removing it from the free list.
112 /// If the free list is empty, a new memory block will be allocated from the arena and put onto it.
113 /// The new block size will be \ref next_capacity() big.
114 /// \returns A node of size \ref node_size() suitable aligned,
115 /// i.e. suitable for any type where <tt>sizeof(T) < node_size()</tt>.
116 /// \throws Anything thrown by the used BlockAllocator's allocation function if a growth is needed.
118 {
119 if (free_list_.empty())
120 allocate_block();
121 WPI_MEMORY_ASSERT(!free_list_.empty());
122 return free_list_.allocate();
123 }
124
125 /// \effects Allocates a single node similar to \ref allocate_node().
126 /// But if the free list is empty, a new block will *not* be allocated.
127 /// \returns A suitable aligned node of size \ref node_size() or `nullptr`.
128 void* try_allocate_node() noexcept
129 {
130 return free_list_.empty() ? nullptr : free_list_.allocate();
131 }
132
133 /// \effects Allocates an array of nodes by searching for \c n continuous nodes on the list and removing them.
134 /// Depending on the \c PoolType this can be a slow operation or not allowed at all.
135 /// This can sometimes lead to a growth, even if technically there is enough continuous memory on the free list.
136 /// \returns An array of \c n nodes of size \ref node_size() suitable aligned.
137 /// \throws Anything thrown by the used BlockAllocator's allocation function if a growth is needed,
138 /// or \ref bad_array_size if <tt>n * node_size()</tt> is too big.
139 /// \requires \c n must be valid array count.
140 void* allocate_array(std::size_t n)
141 {
143 n * node_size(), [&] { return pool_type::value ? next_capacity() : 0; },
144 info());
145 return allocate_array(n, node_size());
146 }
147
148 /// \effects Allocates an array of nodes similar to \ref allocate_array().
149 /// But it will never allocate a new memory block.
150 /// \returns An array of \c n nodes of size \ref node_size() suitable aligned
151 /// or `nullptr`.
152 void* try_allocate_array(std::size_t n) noexcept
153 {
154 return try_allocate_array(n, node_size());
155 }
156
157 /// \effects Deallocates a single node by putting it back onto the free list.
158 /// \requires \c ptr must be a result from a previous call to \ref allocate_node() on the same free list,
159 /// i.e. either this allocator object or a new object created by moving this to it.
160 void deallocate_node(void* ptr) noexcept
161 {
162 free_list_.deallocate(ptr);
163 }
164
165 /// \effects Deallocates a single node but it does not be a result of a previous call to \ref allocate_node().
166 /// \returns `true` if the node could be deallocated, `false` otherwise.
167 /// \note Some free list implementations can deallocate any memory,
168 /// doesn't matter where it is coming from.
169 bool try_deallocate_node(void* ptr) noexcept
170 {
171 if (!arena_.owns(ptr))
172 return false;
173 free_list_.deallocate(ptr);
174 return true;
175 }
176
177 /// \effects Deallocates an array by putting it back onto the free list.
178 /// \requires \c ptr must be a result from a previous call to \ref allocate_array() with the same \c n on the same free list,
179 /// i.e. either this allocator object or a new object created by moving this to it.
180 void deallocate_array(void* ptr, std::size_t n) noexcept
181 {
182 WPI_MEMORY_ASSERT_MSG(pool_type::value, "does not support array allocations");
183 free_list_.deallocate(ptr, n * node_size());
184 }
185
186 /// \effects Deallocates an array but it does not be a result of a previous call to \ref allocate_array().
187 /// \returns `true` if the node could be deallocated, `false` otherwise.
188 /// \note Some free list implementations can deallocate any memory,
189 /// doesn't matter where it is coming from.
190 bool try_deallocate_array(void* ptr, std::size_t n) noexcept
191 {
192 return try_deallocate_array(ptr, n, node_size());
193 }
194
195 /// \returns The size of each node in the pool,
196 /// this is either the same value as in the constructor or \c min_node_size if the value was too small.
197 std::size_t node_size() const noexcept
198 {
199 return free_list_.node_size();
200 }
201
202 /// \effects Returns the total amount of bytes remaining on the free list.
203 /// Divide it by \ref node_size() to get the number of nodes that can be allocated without growing the arena.
204 /// \note Array allocations may lead to a growth even if the capacity_left left is big enough.
205 std::size_t capacity_left() const noexcept
206 {
207 return free_list_.capacity() * node_size();
208 }
209
210 /// \returns The size of the next memory block after the free list gets empty and the arena grows.
211 /// \ref capacity_left() will increase by this amount.
212 /// \note Due to fence memory in debug mode this cannot be just divided by the \ref node_size() to get the number of nodes.
213 std::size_t next_capacity() const noexcept
214 {
215 return free_list_.usable_size(arena_.next_block_size());
216 }
217
218 /// \returns A reference to the BlockAllocator used for managing the arena.
219 /// \requires It is undefined behavior to move this allocator out into another object.
221 {
222 return arena_.get_allocator();
223 }
224
225 private:
226 allocator_info info() const noexcept
227 {
228 return {WPI_MEMORY_LOG_PREFIX "::memory_pool", this};
229 }
230
231 void allocate_block()
232 {
233 auto mem = arena_.allocate_block();
234 free_list_.insert(static_cast<char*>(mem.memory), mem.size);
235 }
236
237 void* allocate_array(std::size_t n, std::size_t node_size)
238 {
239 auto mem = free_list_.empty() ? nullptr : free_list_.allocate(n * node_size);
240 if (!mem)
241 {
242 allocate_block();
243 mem = free_list_.allocate(n * node_size);
244 if (!mem)
245 WPI_THROW(bad_array_size(info(), n * node_size, capacity_left()));
246 }
247 return mem;
248 }
249
250 void* try_allocate_array(std::size_t n, std::size_t node_size) noexcept
251 {
252 return !pool_type::value || free_list_.empty() ? nullptr :
253 free_list_.allocate(n * node_size);
254 }
255
256 bool try_deallocate_array(void* ptr, std::size_t n, std::size_t node_size) noexcept
257 {
258 if (!pool_type::value || !arena_.owns(ptr))
259 return false;
260 free_list_.deallocate(ptr, n * node_size);
261 return true;
262 }
263
264 memory_arena<allocator_type, false> arena_;
265 free_list free_list_;
266
267 friend allocator_traits<memory_pool<PoolType, BlockOrRawAllocator>>;
268 friend composable_allocator_traits<memory_pool<PoolType, BlockOrRawAllocator>>;
269 };
270
271#if WPI_MEMORY_EXTERN_TEMPLATE
272 extern template class memory_pool<node_pool>;
273 extern template class memory_pool<array_pool>;
274 extern template class memory_pool<small_node_pool>;
275#endif
276
277 template <class Type, class Alloc>
278 constexpr std::size_t memory_pool<Type, Alloc>::min_node_size;
279
280 /// Specialization of the \ref allocator_traits for \ref memory_pool classes.
281 /// \note It is not allowed to mix calls through the specialization and through the member functions,
282 /// i.e. \ref memory_pool::allocate_node() and this \c allocate_node().
283 /// \ingroup memory_allocator
284 template <typename PoolType, class ImplRawAllocator>
286 {
287 public:
289 using is_stateful = std::true_type;
290
291 /// \returns The result of \ref memory_pool::allocate_node().
292 /// \throws Anything thrown by the pool allocation function
293 /// or a \ref bad_allocation_size exception.
294 static void* allocate_node(allocator_type& state, std::size_t size,
295 std::size_t alignment)
296 {
297 detail::check_allocation_size<bad_node_size>(size, max_node_size(state),
298 state.info());
300 alignment, [&] { return max_alignment(state); }, state.info());
301 auto mem = state.allocate_node();
302 state.on_allocate(size);
303 return mem;
304 }
305
306 /// \effects Forwards to \ref memory_pool::allocate_array()
307 /// with the number of nodes adjusted to be the minimum,
308 /// i.e. when the \c size is less than the \ref memory_pool::node_size().
309 /// \returns A array with specified properties.
310 /// \requires The \ref memory_pool has to support array allocations.
311 /// \throws Anything thrown by the pool allocation function.
312 static void* allocate_array(allocator_type& state, std::size_t count, std::size_t size,
313 std::size_t alignment)
314 {
315 detail::check_allocation_size<bad_node_size>(size, max_node_size(state),
316 state.info());
318 alignment, [&] { return max_alignment(state); }, state.info());
319 detail::check_allocation_size<bad_array_size>(count * size, max_array_size(state),
320 state.info());
321 auto mem = state.allocate_array(count, size);
322 state.on_allocate(count * size);
323 return mem;
324 }
325
326 /// \effects Just forwards to \ref memory_pool::deallocate_node().
327 static void deallocate_node(allocator_type& state, void* node, std::size_t size,
328 std::size_t) noexcept
329 {
330 state.deallocate_node(node);
331 state.on_deallocate(size);
332 }
333
334 /// \effects Forwards to \ref memory_pool::deallocate_array() with the same size adjustment.
335 static void deallocate_array(allocator_type& state, void* array, std::size_t count,
336 std::size_t size, std::size_t) noexcept
337 {
338 state.free_list_.deallocate(array, count * size);
339 state.on_deallocate(count * size);
340 }
341
342 /// \returns The maximum size of each node which is \ref memory_pool::node_size().
343 static std::size_t max_node_size(const allocator_type& state) noexcept
344 {
345 return state.node_size();
346 }
347
348 /// \returns An upper bound on the maximum array size which is \ref memory_pool::next_capacity().
349 static std::size_t max_array_size(const allocator_type& state) noexcept
350 {
351 return state.next_capacity();
352 }
353
354 /// \returns The maximum alignment which is the next bigger power of two if less than \c alignof(std::max_align_t)
355 /// or the maximum alignment itself otherwise.
356 static std::size_t max_alignment(const allocator_type& state) noexcept
357 {
358 return state.free_list_.alignment();
359 }
360 };
361
362 /// Specialization of the \ref composable_allocator_traits for \ref memory_pool classes.
363 /// \ingroup memory_allocator
364 template <typename PoolType, class BlockOrRawAllocator>
365 class composable_allocator_traits<memory_pool<PoolType, BlockOrRawAllocator>>
366 {
368
369 public:
371
372 /// \returns The result of \ref memory_pool::try_allocate_node()
373 /// or `nullptr` if the allocation size was too big.
374 static void* try_allocate_node(allocator_type& state, std::size_t size,
375 std::size_t alignment) noexcept
376 {
377 if (size > traits::max_node_size(state) || alignment > traits::max_alignment(state))
378 return nullptr;
379 return state.try_allocate_node();
380 }
381
382 /// \effects Forwards to \ref memory_pool::try_allocate_array()
383 /// with the number of nodes adjusted to be the minimum,
384 /// if the \c size is less than the \ref memory_pool::node_size().
385 /// \returns A array with specified properties
386 /// or `nullptr` if it was unable to allocate.
387 static void* try_allocate_array(allocator_type& state, std::size_t count,
388 std::size_t size, std::size_t alignment) noexcept
389 {
390 if (size > traits::max_node_size(state)
391 || count * size > traits::max_array_size(state)
392 || alignment > traits::max_alignment(state))
393 return nullptr;
394 return state.try_allocate_array(count, size);
395 }
396
397 /// \effects Just forwards to \ref memory_pool::try_deallocate_node().
398 /// \returns Whether the deallocation was successful.
399 static bool try_deallocate_node(allocator_type& state, void* node, std::size_t size,
400 std::size_t alignment) noexcept
401 {
402 if (size > traits::max_node_size(state) || alignment > traits::max_alignment(state))
403 return false;
404 return state.try_deallocate_node(node);
405 }
406
407 /// \effects Forwards to \ref memory_pool::deallocate_array() with the same size adjustment.
408 /// \returns Whether the deallocation was successful.
409 static bool try_deallocate_array(allocator_type& state, void* array, std::size_t count,
410 std::size_t size, std::size_t alignment) noexcept
411 {
412 if (size > traits::max_node_size(state)
413 || count * size > traits::max_array_size(state)
414 || alignment > traits::max_alignment(state))
415 return false;
416 return state.try_deallocate_array(array, count, size);
417 }
418 };
419
420#if WPI_MEMORY_EXTERN_TEMPLATE
421 extern template class allocator_traits<memory_pool<node_pool>>;
422 extern template class allocator_traits<memory_pool<array_pool>>;
423 extern template class allocator_traits<memory_pool<small_node_pool>>;
424
425 extern template class composable_allocator_traits<memory_pool<node_pool>>;
426 extern template class composable_allocator_traits<memory_pool<array_pool>>;
427 extern template class composable_allocator_traits<memory_pool<small_node_pool>>;
428#endif
429 } // namespace memory
430} // namespace wpi
431
432#endif // WPI_MEMORY_MEMORY_POOL_HPP_INCLUDED
This class is a wrapper around std::array that does compile time size checking.
Definition array.h:26
static void deallocate_node(allocator_type &state, void *node, std::size_t size, std::size_t) noexcept
Definition memory_pool.hpp:327
static void * allocate_array(allocator_type &state, std::size_t count, std::size_t size, std::size_t alignment)
Definition memory_pool.hpp:312
static std::size_t max_alignment(const allocator_type &state) noexcept
Definition memory_pool.hpp:356
static std::size_t max_node_size(const allocator_type &state) noexcept
Definition memory_pool.hpp:343
static std::size_t max_array_size(const allocator_type &state) noexcept
Definition memory_pool.hpp:349
static void deallocate_array(allocator_type &state, void *array, std::size_t count, std::size_t size, std::size_t) noexcept
Definition memory_pool.hpp:335
static void * allocate_node(allocator_type &state, std::size_t size, std::size_t alignment)
Definition memory_pool.hpp:294
The default specialization of the allocator_traits for a RawAllocator.
Definition allocator_traits.hpp:292
static bool try_deallocate_node(allocator_type &state, void *node, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool.hpp:399
static void * try_allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool.hpp:374
static bool try_deallocate_array(allocator_type &state, void *array, std::size_t count, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool.hpp:409
static void * try_allocate_array(allocator_type &state, std::size_t count, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool.hpp:387
The default specialization of the composable_allocator_traits for a ComposableAllocator.
Definition allocator_traits.hpp:500
static constexpr std::size_t implementation_offset() noexcept
Definition memory_arena.hpp:127
Definition debug_helpers.hpp:102
no_leak_checker & operator=(no_leak_checker &&) noexcept
Definition debug_helpers.hpp:108
memory_block allocate_block()
Definition memory_arena.hpp:350
std::size_t next_block_size() const noexcept
Definition memory_arena.hpp:415
allocator_type & get_allocator() noexcept
Definition memory_arena.hpp:425
bool owns(const void *ptr) const noexcept
Definition memory_arena.hpp:379
A stateful RawAllocator that manages nodes of fixed size.
Definition memory_pool.hpp:49
std::size_t node_size() const noexcept
Definition memory_pool.hpp:197
std::size_t next_capacity() const noexcept
Definition memory_pool.hpp:213
~memory_pool() noexcept
Definition memory_pool.hpp:88
void * allocate_array(std::size_t n)
Definition memory_pool.hpp:140
void deallocate_node(void *ptr) noexcept
Definition memory_pool.hpp:160
PoolType pool_type
Definition memory_pool.hpp:55
void deallocate_array(void *ptr, std::size_t n) noexcept
Definition memory_pool.hpp:180
memory_pool(memory_pool &&other) noexcept
Definition memory_pool.hpp:95
memory_pool & operator=(memory_pool &&other) noexcept
Definition memory_pool.hpp:102
make_block_allocator_t< BlockOrRawAllocator > allocator_type
Definition memory_pool.hpp:54
static constexpr std::size_t min_block_size(std::size_t node_size, std::size_t number_of_nodes) noexcept
Definition memory_pool.hpp:65
bool try_deallocate_array(void *ptr, std::size_t n) noexcept
Definition memory_pool.hpp:190
void * allocate_node()
Definition memory_pool.hpp:117
allocator_type & get_allocator() noexcept
Definition memory_pool.hpp:220
void * try_allocate_node() noexcept
Definition memory_pool.hpp:128
bool try_deallocate_node(void *ptr) noexcept
Definition memory_pool.hpp:169
std::size_t capacity_left() const noexcept
Definition memory_pool.hpp:205
memory_pool(std::size_t node_size, std::size_t block_size, Args &&... args)
Definition memory_pool.hpp:80
static constexpr std::size_t min_node_size
Definition memory_pool.hpp:57
void * try_allocate_array(std::size_t n) noexcept
Definition memory_pool.hpp:152
Configuration macros.
#define WPI_MEMORY_LOG_PREFIX
Definition config.hpp:46
#define WPI_THROW(Ex)
Definition config.hpp:33
The exception classes.
auto ptr(T p) -> const void *
Converts p to const void* for pointer formatting.
Definition format.h:3821
implementation_defined make_block_allocator_t
Takes either a BlockAllocator or a RawAllocator.
Definition memory_arena.hpp:622
Class wpi::memory::memory_arena and related functionality regarding BlockAllocators.
The PoolType tag types.
detail namespace with internal helper functions
Definition input_adapters.h:32
void check_allocation_size(std::size_t passed, Func f, const allocator_info &info)
Definition error.hpp:264
std::remove_reference< T >::type && move(T &&arg) noexcept
Definition utility.hpp:25
Memory namespace.
Definition heap_allocator.hpp:20
Foonathan namespace.
Definition ntcore_cpp.h:26
Contains information about an allocator.
Definition error.hpp:23
void operator()(std::ptrdiff_t amount)
#define WPI_MEMORY_ASSERT(Expr)
Definition assert.hpp:46
#define WPI_MEMORY_ASSERT_MSG(Expr, Msg)
Definition assert.hpp:47