19#include "pw_allocator/allocator.h"
20#include "pw_allocator/block/basic.h"
21#include "pw_allocator/block/iterable.h"
22#include "pw_allocator/block/poisonable.h"
23#include "pw_allocator/block/result.h"
24#include "pw_allocator/block/with_layout.h"
25#include "pw_allocator/capability.h"
26#include "pw_allocator/config.h"
27#include "pw_allocator/fragmentation.h"
28#include "pw_allocator/hardening.h"
29#include "pw_assert/assert.h"
30#include "pw_bytes/span.h"
31#include "pw_result/result.h"
32#include "pw_status/status.h"
34namespace pw::allocator {
// NOTE(review): this region is a garbled extraction of a Doxygen source
// listing; the integers fused onto many lines are the original header's
// line numbers, not code. Restore from the real pw_allocator header.
55 template <
typename BlockType>
// Forward-declaration fragment: template <typename, size_t>.
81template <
typename,
size_t>
// BlockAllocator<BlockType_> class fragment.
97template <
typename BlockType_>
103 using BlockType = BlockType_;
// Range iterates this allocator's chain of blocks (see blocks() below).
104 using Range =
typename BlockType::Range;
107 Base::GetCapabilities<BlockType>();
// Returns a Range of blocks tracking this allocator's memory, from first_.
113 Range
blocks()
const {
return Range(first_); }
// Attempts to resize the allocation at `ptr` in place to `new_size`.
156 bool DoResize(
void* ptr,
size_t new_size)
override;
187 template <
typename Ptr>
201 template <
typename,
size_t>
248 static bool PrevIsFree(
const BlockType* block) {
249 auto* prev = block->Prev();
250 return prev !=
nullptr && prev->IsFree();
254 static bool NextIsFree(
const BlockType* block) {
255 auto* next = block->Next();
256 return next !=
nullptr && next->IsFree();
// NOTE(review): member declarations from a garbled Doxygen listing; the
// fused integers are original source line numbers.
// Adjusts last_ when the block chain changes (defined later in the file).
261 void UpdateLast(BlockType* block);
// Total bytes spanned by all blocks; accumulated from OuterSize() in Init().
264 size_t capacity_ = 0;
// Bytes currently allocated; updated in DoAllocate/DeallocateBlock/DoResize.
265 size_t allocated_ = 0;
// First and last blocks of the managed region.
266 BlockType* first_ =
nullptr;
267 BlockType* last_ =
nullptr;
// Frees since the last poisoning pass; compared against kPoisonInterval.
268 uint16_t unpoisoned_ = 0;
// Builds the capability set common to all block allocators, adding
// kImplementsGetRequestedLayout only when BlockType records request layouts.
// NOTE(review): the `else` branch and closing braces are missing from this
// extraction — restore from the original header before compiling.
277template <
typename BlockType>
278constexpr Capabilities GenericBlockAllocator::GetCapabilities() {
279 Capabilities common = kImplementsGetUsableLayout |
280 kImplementsGetAllocatedLayout | kImplementsGetCapacity |
281 kImplementsRecognizes;
282 if constexpr (has_layout_v<BlockType>) {
283 return common | kImplementsGetRequestedLayout;
// Destructor: with robust hardening checks enabled, walks all blocks and
// crashes if any is still allocated — i.e. leak detection at teardown.
// NOTE(review): closing braces are missing from this extraction.
291template <
typename BlockType>
292BlockAllocator<BlockType>::~BlockAllocator() {
293 if constexpr (Hardening::kIncludesRobustChecks) {
294 for (
auto* block : blocks()) {
295 if (!block->IsFree()) {
296 CrashOnAllocated(block);
// Init(ByteSpan) fragment: asserts that constructing the initial block from
// the given region succeeded. Most of the body is missing from this
// extraction — TODO confirm against the original header.
302template <
typename BlockType>
305 PW_ASSERT(result.
ok());
// Init(BlockType*) fragment: with robust checks, `begin` must be non-null
// and must be the first block (no Prev()). Sums every block's OuterSize()
// into capacity_ and hands free blocks to RecycleBlock() so derived
// allocators can start tracking them. Some lines are missing from this
// extraction.
309template <
typename BlockType>
311 if constexpr (Hardening::kIncludesRobustChecks) {
312 PW_ASSERT(begin !=
nullptr);
313 PW_ASSERT(begin->Prev() ==
nullptr);
316 for (
auto* block : blocks()) {
318 capacity_ += block->OuterSize();
319 if (block->IsFree()) {
320 RecycleBlock(*block);
// DoAllocate fragment: selects a block via ChooseBlock(layout), accounts
// for its OuterSize() in allocated_, recycles neighbor blocks produced by
// splitting, and returns the block's usable-space pointer.
// NOTE(review): early returns, `break`s, and braces are missing from this
// extraction — restore from the original header.
325template <
typename BlockType>
327 if (capacity_ == 0) {
332 if constexpr (Hardening::kIncludesDebugChecks) {
333 PW_ASSERT(last_->Next() ==
nullptr);
335 auto result = ChooseBlock(layout);
340 BlockType* block = result.block();
341 allocated_ += block->OuterSize();
// Account for how choosing this block affected its previous neighbor.
342 switch (result.prev()) {
343 case BlockResultPrev::kSplitNew:
345 RecycleBlock(*(block->Prev()));
347 case BlockResultPrev::kResizedLarger:
// The preceding allocation grew by result.size() bytes.
349 allocated_ += result.size();
351 case BlockResultPrev::kUnchanged:
352 case BlockResultPrev::kResizedSmaller:
355 if (result.next() == BlockResultNext::kSplitNew) {
356 RecycleBlock(*(block->Next()));
360 if constexpr (Hardening::kIncludesDebugChecks) {
361 PW_ASSERT(block <= last_);
364 return block->UsableSpace();
// DoDeallocate fragment: maps `ptr` back to its block, crashes on double
// free when basic hardening checks are enabled, then hands the block to the
// virtual DeallocateBlock hook.
367template <
typename BlockType>
369 BlockType* block = FromUsableSpace(ptr);
370 if (block->IsFree()) {
371 if constexpr (Hardening::kIncludesBasicChecks) {
372 CrashOnDoubleFree(block);
377 DeallocateBlock(std::move(block));
// DeallocateBlock fragment: detects free neighbors (the ReserveBlock calls
// inside these `if`s are missing from this extraction), frees the block via
// BlockType::Free so neighbors can merge, adjusts allocated_ — including
// deducting free_result.size() when merging shrank the previous allocated
// block — optionally poisons on a kPoisonInterval cadence, and finally
// recycles the resulting block.
380template <
typename BlockType>
383 if (
auto* prev = block->Prev(); prev !=
nullptr && prev->IsFree()) {
386 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
391 allocated_ -= block->OuterSize();
392 auto free_result = BlockType::Free(std::move(block));
393 block = free_result.block();
396 if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
398 allocated_ -= free_result.size();
401 if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
403 if (unpoisoned_ >= kPoisonInterval) {
409 RecycleBlock(*block);
// DoResize fragment: if the next block is free it may be absorbed to grow in
// place; attempts Resize(new_size), then re-accounts allocated_ by the
// outer-size delta and re-handles whatever free next neighbor remains.
// NOTE(review): the ReserveBlock/RecycleBlock calls inside the `if`s and the
// return statements are missing from this extraction.
412template <
typename BlockType>
414 BlockType* block = FromUsableSpace(ptr);
417 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
421 size_t old_size = block->OuterSize();
422 if (!block->Resize(new_size).ok()) {
425 allocated_ -= old_size;
426 allocated_ += block->OuterSize();
429 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
// DoGetInfo fragment: answers layout/capacity queries about `ptr`. Rejects
// pointers outside [first_->UsableSpace(), last_->UsableSpace()], invalid
// blocks, and free blocks, then returns a Layout keyed by info_type.
// NOTE(review): the `switch` header, the status-returning lines, and the
// closing braces are missing from this extraction.
436template <
typename BlockType>
438 const void* ptr)
const {
440 if (info_type == InfoType::kCapacity) {
444 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
447 const auto* block = BlockType::FromUsableSpace(ptr);
448 if (!block->IsValid()) {
451 if (block->IsFree()) {
// Requested layout is only available when BlockType records layouts.
454 if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
455 if (info_type == InfoType::kRequestedLayoutOf) {
456 return block->RequestedLayout();
460 case InfoType::kUsableLayoutOf:
461 return Layout(block->InnerSize(), BlockType::kAlignment);
462 case InfoType::kAllocatedLayoutOf:
463 return Layout(block->OuterSize(), BlockType::kAlignment);
464 case InfoType::kRecognizes:
466 case InfoType::kCapacity:
467 case InfoType::kRequestedLayoutOf:
// DoMeasureFragmentation fragment: folds each free block's inner size,
// scaled down by BlockType::kAlignment, into a Fragmentation accumulator
// and returns it. The accumulator's declaration is missing from this
// extraction.
473template <
typename BlockType>
477 for (
auto block : blocks()) {
478 if (block->IsFree()) {
479 fragmentation.
AddFragment(block->InnerSize() / BlockType::kAlignment);
482 return fragmentation;
// FromUsableSpace fragment: converts a pointer previously returned to a
// caller back into its BlockType*, preserving constness via
// internal::copy_const_ptr_t. Under basic hardening checks it crashes when
// the pointer is outside the managed range or the block fails invariant
// checks. Return statements and braces are missing from this extraction.
485template <
typename BlockType>
486template <
typename Ptr>
487internal::copy_const_ptr_t<Ptr, BlockType*>
489 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
490 if constexpr (Hardening::kIncludesBasicChecks) {
491 CrashOnOutOfRange(ptr);
495 auto* block = BlockType::FromUsableSpace(ptr);
496 if (!block->IsValid()) {
497 if constexpr (Hardening::kIncludesBasicChecks) {
498 block->CheckInvariants();
// UpdateLast fragment: inspects `block`'s successor to decide which block
// becomes the new last_ (the block itself when it has no Next(), otherwise
// presumably its successor — branch bodies are missing from this
// extraction; confirm against the original header).
505template <
typename BlockType>
507 BlockType* next = block->Next();
508 if (next ==
nullptr) {
510 }
else if (next->Next() ==
nullptr) {
Definition: allocator.h:42
constexpr bool ok() const
Definition: result.h:451
static constexpr Status DataLoss()
Definition: status.h:316
static constexpr Status Unimplemented()
Definition: status.h:280
static constexpr Status FailedPrecondition()
Definition: status.h:243
static constexpr Status NotFound()
Definition: status.h:190
Definition: block_allocator.h:98
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:326
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:113
size_t DoGetAllocated() const override
Definition: block_allocator.h:159
virtual size_t DoGetMaxAllocatable()=0
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:488
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
std::optional< Fragmentation > DoMeasureFragmentation() const override
Returns fragmentation information for the allocator's memory region.
Definition: block_allocator.h:474
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:223
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:368
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:381
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:413
void Init(ByteSpan region)
Definition: block_allocator.h:303
void Init(BlockType *begin)
Definition: block_allocator.h:310
virtual void Flush()
Definition: block_allocator.h:245
size_t GetMaxAllocatable()
Definition: block_allocator.h:144
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:231
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:437
Definition: capability.h:65
Definition: detailed_block.h:88
Definition: block_allocator.h:46
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Next
Definition: result.h:45
Prev
Definition: result.h:36
Definition: block_allocator.h:82
#define PW_ALLOCATOR_BLOCK_POISON_INTERVAL
Definition: config.h:30
Definition: fragmentation.h:47
void AddFragment(size_t inner_size)
Includes a region of free memory in the fragmentation calculation.