WPILibC++ 2025.1.1
Loading...
Searching...
No Matches
memory_pool_collection.hpp
Go to the documentation of this file.
1// Copyright (C) 2015-2023 Jonathan Müller and foonathan/memory contributors
2// SPDX-License-Identifier: Zlib
3
4#ifndef WPI_MEMORY_MEMORY_POOL_COLLECTION_HPP_INCLUDED
5#define WPI_MEMORY_MEMORY_POOL_COLLECTION_HPP_INCLUDED
6
7/// \file
8/// Class \ref wpi::memory::memory_pool_collection and related classes.
9
10#include <type_traits>
11
12#include "detail/align.hpp"
13#include "detail/assert.hpp"
16#include "config.hpp"
17#include "debugging.hpp"
18#include "error.hpp"
19#include "memory_arena.hpp"
20#include "memory_pool_type.hpp"
21
22namespace wpi
23{
24 namespace memory
25 {
26 namespace detail
27 {
29 {
30 void operator()(std::ptrdiff_t amount);
31 };
32 } // namespace detail
33
34 /// A \c BucketDistribution for \ref memory_pool_collection defining that there is a bucket, i.e. pool, for each size.
 35 /// That means that for each possible size up to an upper bound there will be a separate free list.
36 /// Allocating a node will not waste any memory.
37 /// \ingroup memory_allocator
42
43 /// A \c BucketDistribution for \ref memory_pool_collection defining that there is a bucket, i.e. pool, for each power of two.
44 /// That means for each power of two up to an upper bound there will be a separate free list.
45 /// Allocating a node will only waste half of the memory.
46 /// \ingroup memory_allocator
51
52 /// A stateful RawAllocator that behaves as a collection of multiple \ref memory_pool objects.
53 /// It maintains a list of multiple free lists, whose types are controlled via the \c PoolType tags defined in \ref memory_pool_type.hpp,
54 /// each of a different size as defined in the \c BucketDistribution (\ref identity_buckets or \ref log2_buckets).
55 /// Allocating a node of given size will use the appropriate free list.<br>
56 /// This allocator is ideal for node allocations in any order but with a predefined set of sizes,
57 /// not only one size like \ref memory_pool.
58 /// \ingroup memory_allocator
59 template <class PoolType, class BucketDistribution,
60 class BlockOrRawAllocator = default_allocator>
62 : WPI_EBO(detail::default_leak_checker<detail::memory_pool_collection_leak_handler>)
63 {
64 using free_list_array =
66 using leak_checker =
68
69 public:
71 using pool_type = PoolType;
72 using bucket_distribution = BucketDistribution;
73
74 /// \effects Creates it by giving it the maximum node size it should be able to allocate,
75 /// the size of the initial memory block and other constructor arguments for the BlockAllocator.
76 /// The \c BucketDistribution controls how many free lists are created,
77 /// but unlike in \ref memory_pool all free lists are initially empty and the first memory block queued.
78 /// \requires \c block_size must be non-zero and \c max_node_size must be a valid node size and smaller than \c block_size divided by the number of pools.
79 template <typename... Args>
80 memory_pool_collection(std::size_t max_node_size, std::size_t block_size,
81 Args&&... args)
82 : arena_(block_size, detail::forward<Args>(args)...),
83 stack_(allocate_block()),
84 pools_(stack_, block_end(), max_node_size)
85 {
87 }
88
 89 /// \effects Destroys the \ref memory_pool_collection by returning all memory blocks,
 90 /// regardless of whether they were properly deallocated, back to the BlockAllocator.
91 ~memory_pool_collection() noexcept = default;
92
93 /// @{
94 /// \effects Moving a \ref memory_pool_collection object transfers ownership over the free lists,
95 /// i.e. the moved from pool is completely empty and the new one has all its memory.
96 /// That means that it is not allowed to call \ref deallocate_node() on a moved-from allocator
97 /// even when passing it memory that was previously allocated by this object.
99 : leak_checker(detail::move(other)),
100 arena_(detail::move(other.arena_)),
101 stack_(detail::move(other.stack_)),
102 pools_(detail::move(other.pools_))
103 {
104 }
105
107 {
109 arena_ = detail::move(other.arena_);
110 stack_ = detail::move(other.stack_);
111 pools_ = detail::move(other.pools_);
112 return *this;
113 }
114 /// @}
115
116 /// \effects Allocates a node of given size.
117 /// It first finds the appropriate free list as defined in the \c BucketDistribution.
118 /// If it is empty, it will use an implementation defined amount of memory from the arena
119 /// and inserts it in it.
120 /// If the arena is empty too, it will request a new memory block from the BlockAllocator
121 /// of size \ref next_capacity() and puts part of it onto this free list.
122 /// Then it removes a node from it.
 123 /// \returns A node of given size suitably aligned,
124 /// i.e. suitable for any type where <tt>sizeof(T) < node_size</tt>.
125 /// \throws Anything thrown by the BlockAllocator if a growth is needed or a \ref bad_node_size exception if the node size is too big.
126 void* allocate_node(std::size_t node_size)
127 {
129 node_size, [&] { return max_node_size(); }, info());
130 auto& pool = pools_.get(node_size);
131 if (pool.empty())
132 {
133 auto block = reserve_memory(pool, def_capacity());
134 pool.insert(block.memory, block.size);
135 }
136
137 auto mem = pool.allocate();
139 return mem;
140 }
141
142 /// \effects Allocates a node of given size.
143 /// It is similar to \ref allocate_node() but will return `nullptr` on any failure,
144 /// instead of growing the arnea and possibly throwing.
145 /// \returns A node of given size suitable aligned
146 /// or `nullptr` in case of failure.
147 void* try_allocate_node(std::size_t node_size) noexcept
148 {
149 if (node_size > max_node_size())
150 return nullptr;
151 auto& pool = pools_.get(node_size);
152 if (pool.empty())
153 {
154 try_reserve_memory(pool, def_capacity());
155 return pool.empty() ? nullptr : pool.allocate();
156 }
157 else
158 return pool.allocate();
159 }
160
161 /// \effects Allocates an array of nodes by searching for \c n continuous nodes on the appropriate free list and removing them.
162 /// Depending on the \c PoolType this can be a slow operation or not allowed at all.
163 /// This can sometimes lead to a growth on the free list, even if technically there is enough continuous memory on the free list.
164 /// Otherwise has the same behavior as \ref allocate_node().
 165 /// \returns An array of \c n nodes of size \c node_size suitably aligned.
166 /// \throws Anything thrown by the used BlockAllocator's allocation function if a growth is needed,
167 /// or a \ref bad_allocation_size exception.
168 /// \requires \c count must be valid array count and
169 /// \c node_size must be valid node size.
170 void* allocate_array(std::size_t count, std::size_t node_size)
171 {
173 node_size, [&] { return max_node_size(); }, info());
174
175 auto& pool = pools_.get(node_size);
176
177 // try allocating if not empty
178 // for pools without array allocation support, allocate() will always return nullptr
179 auto mem = pool.empty() ? nullptr : pool.allocate(count * node_size);
180 if (!mem)
181 {
182 // reserve more memory
183 auto block = reserve_memory(pool, def_capacity());
184 pool.insert(block.memory, block.size);
185
186 mem = pool.allocate(count * node_size);
187 if (!mem)
188 {
189 // reserve more then the default capacity if that didn't work either
191 count * node_size,
192 [&] { return next_capacity() - pool.alignment() + 1; }, info());
193
194 block = reserve_memory(pool, count * node_size);
195 pool.insert(block.memory, block.size);
196
197 mem = pool.allocate(count * node_size);
199 }
200 }
201
202 return mem;
203 }
204
205 /// \effects Allocates a array of given size.
206 /// It is similar to \ref allocate_node() but will return `nullptr` on any failure,
207 /// instead of growing the arnea and possibly throwing.
208 /// \returns A array of given size suitable aligned
209 /// or `nullptr` in case of failure.
210 void* try_allocate_array(std::size_t count, std::size_t node_size) noexcept
211 {
212 if (!pool_type::value || node_size > max_node_size())
213 return nullptr;
214 auto& pool = pools_.get(node_size);
215 if (pool.empty())
216 {
217 try_reserve_memory(pool, def_capacity());
218 return pool.empty() ? nullptr : pool.allocate(count * node_size);
219 }
220 else
221 return pool.allocate(count * node_size);
222 }
223
224 /// \effects Deallocates a node by putting it back onto the appropriate free list.
225 /// \requires \c ptr must be a result from a previous call to \ref allocate_node() with the same size on the same free list,
226 /// i.e. either this allocator object or a new object created by moving this to it.
227 void deallocate_node(void* ptr, std::size_t node_size) noexcept
228 {
229 pools_.get(node_size).deallocate(ptr);
230 }
231
232 /// \effects Deallocates a node similar to \ref deallocate_node().
233 /// But it checks if it can deallocate this memory.
234 /// \returns `true` if the node could be deallocated,
235 /// `false` otherwise.
236 bool try_deallocate_node(void* ptr, std::size_t node_size) noexcept
237 {
238 if (node_size > max_node_size() || !arena_.owns(ptr))
239 return false;
240 pools_.get(node_size).deallocate(ptr);
241 return true;
242 }
243
244 /// \effects Deallocates an array by putting it back onto the free list.
245 /// \requires \c ptr must be a result from a previous call to \ref allocate_array() with the same sizes on the same free list,
246 /// i.e. either this allocator object or a new object created by moving this to it.
247 void deallocate_array(void* ptr, std::size_t count, std::size_t node_size) noexcept
248 {
249 pools_.get(node_size).deallocate(ptr, count * node_size);
250 }
251
252 /// \effects Deallocates a array similar to \ref deallocate_array().
253 /// But it checks if it can deallocate this memory.
254 /// \returns `true` if the array could be deallocated,
255 /// `false` otherwise.
256 bool try_deallocate_array(void* ptr, std::size_t count, std::size_t node_size) noexcept
257 {
258 if (!pool_type::value || node_size > max_node_size() || !arena_.owns(ptr))
259 return false;
260 pools_.get(node_size).deallocate(ptr, count * node_size);
261 return true;
262 }
263
264 /// \effects Inserts more memory on the free list for nodes of given size.
265 /// It will try to put \c capacity_left bytes from the arena onto the free list defined over the \c BucketDistribution,
266 /// if the arena is empty, a new memory block is requested from the BlockAllocator
267 /// and it will be used.
268 /// \throws Anything thrown by the BlockAllocator if a growth is needed.
269 /// \requires \c node_size must be valid node size less than or equal to \ref max_node_size(),
270 /// \c capacity_left must be less than \ref next_capacity().
271 void reserve(std::size_t node_size, std::size_t capacity)
272 {
273 WPI_MEMORY_ASSERT_MSG(node_size <= max_node_size(), "node_size too big");
274 auto& pool = pools_.get(node_size);
275 reserve_memory(pool, capacity);
276 }
277
278 /// \returns The maximum node size for which is a free list.
279 /// This is the value passed to it in the constructor.
280 std::size_t max_node_size() const noexcept
281 {
282 return pools_.max_node_size();
283 }
284
285 /// \returns The amount of nodes available in the free list for nodes of given size
286 /// as defined over the \c BucketDistribution.
287 /// This is the number of nodes that can be allocated without the free list requesting more memory from the arena.
288 /// \note Array allocations may lead to a growth even if the capacity_left is big enough.
289 std::size_t pool_capacity_left(std::size_t node_size) const noexcept
290 {
291 WPI_MEMORY_ASSERT_MSG(node_size <= max_node_size(), "node_size too big");
292 return pools_.get(node_size).capacity();
293 }
294
295 /// \returns The amount of memory available in the arena not inside the free lists.
296 /// This is the number of bytes that can be inserted into the free lists
297 /// without requesting more memory from the BlockAllocator.
298 /// \note Array allocations may lead to a growth even if the capacity is big enough.
299 std::size_t capacity_left() const noexcept
300 {
301 return std::size_t(block_end() - stack_.top());
302 }
303
304 /// \returns The size of the next memory block after \ref capacity_left() arena grows.
305 /// This is the amount of memory that can be distributed in the pools.
306 /// \note If the `PoolType` is \ref small_node_pool, the exact usable memory is lower than that.
307 std::size_t next_capacity() const noexcept
308 {
309 return arena_.next_block_size();
310 }
311
312 /// \returns A reference to the BlockAllocator used for managing the arena.
313 /// \requires It is undefined behavior to move this allocator out into another object.
315 {
316 return arena_.get_allocator();
317 }
318
319 private:
320 allocator_info info() const noexcept
321 {
322 return {WPI_MEMORY_LOG_PREFIX "::memory_pool_collection", this};
323 }
324
325 std::size_t def_capacity() const noexcept
326 {
327 return arena_.next_block_size() / pools_.size();
328 }
329
330 detail::fixed_memory_stack allocate_block()
331 {
332 return detail::fixed_memory_stack(arena_.allocate_block().memory);
333 }
334
335 const char* block_end() const noexcept
336 {
337 auto block = arena_.current_block();
338 return static_cast<const char*>(block.memory) + block.size;
339 }
340
341 bool insert_rest(typename pool_type::type& pool) noexcept
342 {
343 if (auto remaining = std::size_t(block_end() - stack_.top()))
344 {
345 auto offset = detail::align_offset(stack_.top(), detail::max_alignment);
346 if (offset < remaining)
347 {
349 pool.insert(stack_.top() + offset, remaining - offset);
350 return true;
351 }
352 }
353
354 return false;
355 }
356
357 void try_reserve_memory(typename pool_type::type& pool, std::size_t capacity) noexcept
358 {
359 auto mem = stack_.allocate(block_end(), capacity, detail::max_alignment);
360 if (!mem)
361 insert_rest(pool);
362 else
363 pool.insert(mem, capacity);
364 }
365
366 memory_block reserve_memory(typename pool_type::type& pool, std::size_t capacity)
367 {
368 auto mem = stack_.allocate(block_end(), capacity, detail::max_alignment);
369 if (!mem)
370 {
371 insert_rest(pool);
372 // get new block
373 stack_ = allocate_block();
374
375 // allocate ensuring alignment
376 mem = stack_.allocate(block_end(), capacity, detail::max_alignment);
378 }
379 return {mem, capacity};
380 }
381
382 memory_arena<allocator_type, false> arena_;
383 detail::fixed_memory_stack stack_;
384 free_list_array pools_;
385
386 friend allocator_traits<memory_pool_collection>;
387 };
388
389#if WPI_MEMORY_EXTERN_TEMPLATE
390 extern template class memory_pool_collection<node_pool, identity_buckets>;
391 extern template class memory_pool_collection<array_pool, identity_buckets>;
392 extern template class memory_pool_collection<small_node_pool, identity_buckets>;
393
394 extern template class memory_pool_collection<node_pool, log2_buckets>;
395 extern template class memory_pool_collection<array_pool, log2_buckets>;
396 extern template class memory_pool_collection<small_node_pool, log2_buckets>;
397#endif
398
399 /// An alias for \ref memory_pool_collection using the \ref identity_buckets policy
400 /// and a \c PoolType defaulting to \ref node_pool.
401 /// \ingroup memory_allocator
402 template <class PoolType = node_pool, class ImplAllocator = default_allocator>
403 WPI_ALIAS_TEMPLATE(bucket_allocator,
405
406 template <class Allocator>
407 class allocator_traits;
408
409 /// Specialization of the \ref allocator_traits for \ref memory_pool_collection classes.
410 /// \note It is not allowed to mix calls through the specialization and through the member functions,
411 /// i.e. \ref memory_pool_collection::allocate_node() and this \c allocate_node().
412 /// \ingroup memory_allocator
413 template <class Pool, class BucketDist, class RawAllocator>
415 {
416 public:
418 using is_stateful = std::true_type;
419
420 /// \returns The result of \ref memory_pool_collection::allocate_node().
421 /// \throws Anything thrown by the pool allocation function
422 /// or a \ref bad_allocation_size exception if \c size / \c alignment exceeds \ref max_node_size() / the suitable alignment value,
423 /// i.e. the node is over-aligned.
424 static void* allocate_node(allocator_type& state, std::size_t size,
425 std::size_t alignment)
426 {
427 // node already checked
429 alignment, [&] { return detail::alignment_for(size); }, state.info());
430 auto mem = state.allocate_node(size);
431 state.on_allocate(size);
432 return mem;
433 }
434
435 /// \returns The result of \ref memory_pool_collection::allocate_array().
436 /// \throws Anything thrown by the pool allocation function or a \ref bad_allocation_size exception.
437 /// \requires The \ref memory_pool_collection has to support array allocations.
438 static void* allocate_array(allocator_type& state, std::size_t count, std::size_t size,
439 std::size_t alignment)
440 {
441 // node and array already checked
443 alignment, [&] { return detail::alignment_for(size); }, state.info());
444 auto mem = state.allocate_array(count, size);
445 state.on_allocate(count * size);
446 return mem;
447 }
448
449 /// \effects Calls \ref memory_pool_collection::deallocate_node().
450 static void deallocate_node(allocator_type& state, void* node, std::size_t size,
451 std::size_t) noexcept
452 {
453 state.deallocate_node(node, size);
454 state.on_deallocate(size);
455 }
456
457 /// \effects Calls \ref memory_pool_collection::deallocate_array().
458 /// \requires The \ref memory_pool_collection has to support array allocations.
459 static void deallocate_array(allocator_type& state, void* array, std::size_t count,
460 std::size_t size, std::size_t) noexcept
461 {
462 state.deallocate_array(array, count, size);
463 state.on_deallocate(count * size);
464 }
465
466 /// \returns The maximum size of each node which is \ref memory_pool_collection::max_node_size().
467 static std::size_t max_node_size(const allocator_type& state) noexcept
468 {
469 return state.max_node_size();
470 }
471
472 /// \returns An upper bound on the maximum array size which is \ref memory_pool::next_capacity().
473 static std::size_t max_array_size(const allocator_type& state) noexcept
474 {
475 return state.next_capacity();
476 }
477
478 /// \returns Just \c alignof(std::max_align_t) since the actual maximum alignment depends on the node size,
479 /// the nodes must not be over-aligned.
480 static std::size_t max_alignment(const allocator_type&) noexcept
481 {
483 }
484 };
485
486 /// Specialization of the \ref composable_allocator_traits for \ref memory_pool_collection classes.
487 /// \ingroup memory_allocator
488 template <class Pool, class BucketDist, class RawAllocator>
489 class composable_allocator_traits<memory_pool_collection<Pool, BucketDist, RawAllocator>>
490 {
492
493 public:
495
496 /// \returns The result of \ref memory_pool_collection::try_allocate_node()
497 /// or `nullptr` if the allocation size was too big.
498 static void* try_allocate_node(allocator_type& state, std::size_t size,
499 std::size_t alignment) noexcept
500 {
501 if (alignment > traits::max_alignment(state))
502 return nullptr;
503 return state.try_allocate_node(size);
504 }
505
506 /// \returns The result of \ref memory_pool_collection::try_allocate_array()
507 /// or `nullptr` if the allocation size was too big.
508 static void* try_allocate_array(allocator_type& state, std::size_t count,
509 std::size_t size, std::size_t alignment) noexcept
510 {
511 if (count * size > traits::max_array_size(state)
512 || alignment > traits::max_alignment(state))
513 return nullptr;
514 return state.try_allocate_array(count, size);
515 }
516
517 /// \effects Just forwards to \ref memory_pool_collection::try_deallocate_node().
518 /// \returns Whether the deallocation was successful.
519 static bool try_deallocate_node(allocator_type& state, void* node, std::size_t size,
520 std::size_t alignment) noexcept
521 {
522 if (alignment > traits::max_alignment(state))
523 return false;
524 return state.try_deallocate_node(node, size);
525 }
526
527 /// \effects Forwards to \ref memory_pool_collection::deallocate_array().
528 /// \returns Whether the deallocation was successful.
529 static bool try_deallocate_array(allocator_type& state, void* array, std::size_t count,
530 std::size_t size, std::size_t alignment) noexcept
531 {
532 if (count * size > traits::max_array_size(state)
533 || alignment > traits::max_alignment(state))
534 return false;
535 return state.try_deallocate_array(array, count, size);
536 }
537 };
538
539#if WPI_MEMORY_EXTERN_TEMPLATE
540 extern template class allocator_traits<memory_pool_collection<node_pool, identity_buckets>>;
541 extern template class allocator_traits<
542 memory_pool_collection<array_pool, identity_buckets>>;
543 extern template class allocator_traits<
544 memory_pool_collection<small_node_pool, identity_buckets>>;
545
546 extern template class allocator_traits<memory_pool_collection<node_pool, log2_buckets>>;
547 extern template class allocator_traits<memory_pool_collection<array_pool, log2_buckets>>;
548 extern template class allocator_traits<
549 memory_pool_collection<small_node_pool, log2_buckets>>;
550
551 extern template class composable_allocator_traits<
552 memory_pool_collection<node_pool, identity_buckets>>;
553 extern template class composable_allocator_traits<
554 memory_pool_collection<array_pool, identity_buckets>>;
555 extern template class composable_allocator_traits<
556 memory_pool_collection<small_node_pool, identity_buckets>>;
557
558 extern template class composable_allocator_traits<
559 memory_pool_collection<node_pool, log2_buckets>>;
560 extern template class composable_allocator_traits<
561 memory_pool_collection<array_pool, log2_buckets>>;
562 extern template class composable_allocator_traits<
563 memory_pool_collection<small_node_pool, log2_buckets>>;
564#endif
565 } // namespace memory
566} // namespace wpi
567
568#endif // WPI_MEMORY_MEMORY_POOL_COLLECTION_HPP_INCLUDED
This class is a wrapper around std::array that does compile time size checking.
Definition array.h:26
Specialization of the allocator_traits for memory_pool_collection classes.
Definition memory_pool_collection.hpp:415
static std::size_t max_array_size(const allocator_type &state) noexcept
Definition memory_pool_collection.hpp:473
static void deallocate_node(allocator_type &state, void *node, std::size_t size, std::size_t) noexcept
Definition memory_pool_collection.hpp:450
static void * allocate_array(allocator_type &state, std::size_t count, std::size_t size, std::size_t alignment)
Definition memory_pool_collection.hpp:438
static void deallocate_array(allocator_type &state, void *array, std::size_t count, std::size_t size, std::size_t) noexcept
Definition memory_pool_collection.hpp:459
static std::size_t max_alignment(const allocator_type &) noexcept
Definition memory_pool_collection.hpp:480
static void * allocate_node(allocator_type &state, std::size_t size, std::size_t alignment)
Definition memory_pool_collection.hpp:424
static std::size_t max_node_size(const allocator_type &state) noexcept
Definition memory_pool_collection.hpp:467
The default specialization of the allocator_traits for a RawAllocator.
Definition allocator_traits.hpp:292
static void * try_allocate_array(allocator_type &state, std::size_t count, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool_collection.hpp:508
static bool try_deallocate_node(allocator_type &state, void *node, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool_collection.hpp:519
static void * try_allocate_node(allocator_type &state, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool_collection.hpp:498
static bool try_deallocate_array(allocator_type &state, void *array, std::size_t count, std::size_t size, std::size_t alignment) noexcept
Definition memory_pool_collection.hpp:529
The default specialization of the composable_allocator_traits for a ComposableAllocator.
Definition allocator_traits.hpp:500
void * allocate(const char *end, std::size_t size, std::size_t alignment, std::size_t fence_size=debug_fence_size) noexcept
Definition memory_stack.hpp:71
char * top() const noexcept
Definition memory_stack.hpp:107
std::size_t size() const noexcept
Definition free_list_array.hpp:77
std::size_t max_node_size() const noexcept
Definition free_list_array.hpp:83
FreeList & get(std::size_t node_size) const noexcept
Definition free_list_array.hpp:68
Definition debug_helpers.hpp:102
no_leak_checker & operator=(no_leak_checker &&) noexcept
Definition debug_helpers.hpp:108
memory_block allocate_block()
Definition memory_arena.hpp:350
memory_block current_block() const noexcept
Definition memory_arena.hpp:362
std::size_t next_block_size() const noexcept
Definition memory_arena.hpp:415
allocator_type & get_allocator() noexcept
Definition memory_arena.hpp:425
bool owns(const void *ptr) const noexcept
Definition memory_arena.hpp:379
A stateful RawAllocator that behaves as a collection of multiple memory_pool objects.
Definition memory_pool_collection.hpp:63
memory_pool_collection(std::size_t max_node_size, std::size_t block_size, Args &&... args)
Definition memory_pool_collection.hpp:80
std::size_t next_capacity() const noexcept
Definition memory_pool_collection.hpp:307
void deallocate_node(void *ptr, std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:227
BucketDistribution bucket_distribution
Definition memory_pool_collection.hpp:72
std::size_t capacity_left() const noexcept
Definition memory_pool_collection.hpp:299
make_block_allocator_t< BlockOrRawAllocator > allocator_type
Definition memory_pool_collection.hpp:70
allocator_type & get_allocator() noexcept
Definition memory_pool_collection.hpp:314
memory_pool_collection & operator=(memory_pool_collection &&other) noexcept
Definition memory_pool_collection.hpp:106
void reserve(std::size_t node_size, std::size_t capacity)
Definition memory_pool_collection.hpp:271
PoolType pool_type
Definition memory_pool_collection.hpp:71
void * allocate_node(std::size_t node_size)
Definition memory_pool_collection.hpp:126
void * allocate_array(std::size_t count, std::size_t node_size)
Definition memory_pool_collection.hpp:170
bool try_deallocate_array(void *ptr, std::size_t count, std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:256
void * try_allocate_array(std::size_t count, std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:210
void deallocate_array(void *ptr, std::size_t count, std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:247
~memory_pool_collection() noexcept=default
std::size_t pool_capacity_left(std::size_t node_size) const noexcept
Definition memory_pool_collection.hpp:289
bool try_deallocate_node(void *ptr, std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:236
void * try_allocate_node(std::size_t node_size) noexcept
Definition memory_pool_collection.hpp:147
std::size_t max_node_size() const noexcept
Definition memory_pool_collection.hpp:280
Configuration macros.
#define WPI_MEMORY_LOG_PREFIX
Definition config.hpp:46
#define WPI_ALIAS_TEMPLATE(Name,...)
Definition config.hpp:73
The exception classes.
auto ptr(T p) -> const void *
Converts p to const void* for pointer formatting.
Definition format.h:3821
implementation_defined default_allocator
The default RawAllocator that will be used as BlockAllocator in memory arenas.
Definition default_allocator.hpp:32
implementation_defined make_block_allocator_t
Takes either a BlockAllocator or a RawAllocator.
Definition memory_arena.hpp:622
@ alignment_memory
Marks buffer memory used to ensure proper alignment.
Debugging facilities.
Class wpi::memory::memory_arena and related functionality regarding BlockAllocators.
The PoolType tag types.
detail namespace with internal helper functions
Definition input_adapters.h:32
std::size_t align_offset(std::uintptr_t address, std::size_t alignment) noexcept
Definition align.hpp:26
void debug_fill(void *, std::size_t, debug_magic) noexcept
Definition debug_helpers.hpp:45
constexpr std::size_t max_alignment
Definition align.hpp:42
void check_allocation_size(std::size_t passed, Func f, const allocator_info &info)
Definition error.hpp:264
std::remove_reference< T >::type && move(T &&arg) noexcept
Definition utility.hpp:25
std::size_t alignment_for(std::size_t size) noexcept
Memory namespace.
Definition heap_allocator.hpp:20
Foonathan namespace.
Definition ntcore_cpp.h:26
Contains information about an allocator.
Definition error.hpp:23
Definition free_list_array.hpp:102
Definition free_list_array.hpp:117
Definition memory_pool_collection.hpp:29
A BucketDistribution for memory_pool_collection defining that there is a bucket, i....
Definition memory_pool_collection.hpp:39
A BucketDistribution for memory_pool_collection defining that there is a bucket, i....
Definition memory_pool_collection.hpp:48
std::size_t size
The size of the memory block (might be 0).
Definition memory_arena.hpp:30
void * memory
The address of the memory block (might be nullptr).
Definition memory_arena.hpp:29
#define WPI_MEMORY_ASSERT(Expr)
Definition assert.hpp:46
#define WPI_MEMORY_ASSERT_MSG(Expr, Msg)
Definition assert.hpp:47