18#include "pw_allocator/allocator.h"
19#include "pw_allocator/block/basic.h"
20#include "pw_allocator/block/iterable.h"
21#include "pw_allocator/block/poisonable.h"
22#include "pw_allocator/block/result.h"
23#include "pw_allocator/block/with_layout.h"
24#include "pw_allocator/capability.h"
25#include "pw_allocator/config.h"
26#include "pw_allocator/fragmentation.h"
27#include "pw_allocator/hardening.h"
28#include "pw_assert/assert.h"
29#include "pw_bytes/span.h"
30#include "pw_result/result.h"
31#include "pw_status/status.h"
namespace pw::allocator {
namespace internal {

/// Block-independent base class of `BlockAllocator`.
class GenericBlockAllocator : public Allocator {
 protected:
  constexpr explicit GenericBlockAllocator(Capabilities capabilities)
      : Allocator(capabilities) {}

  /// Returns the capabilities of a block allocator for a given block type.
  template <typename BlockType>
  static constexpr Capabilities GetCapabilities() {
    Capabilities common = kImplementsGetUsableLayout |
                          kImplementsGetAllocatedLayout |
                          kImplementsGetCapacity | kImplementsRecognizes;
    if constexpr (has_layout_v<BlockType>) {
      return common | kImplementsGetRequestedLayout;
    } else {
      return common;
    }
  }

  /// Crashes with an informational message that a block was still allocated
  /// when its allocator was destroyed.
  static void CrashOnAllocated(const void* allocated);

  /// Crashes with an informational message that a pointer is outside the
  /// allocator's memory region.
  static void CrashOnOutOfRange(const void* freed);

  /// Crashes with an informational message that a given block was freed
  /// twice.
  static void CrashOnDoubleFree(const void* freed);
};

}  // namespace internal
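// NOTE: Illustrative commentary, not part of the original header. The
// capability set is computed at compile time from the block type: only blocks
// that track their original allocation request (`has_layout_v<BlockType>`)
// let the allocator advertise `kImplementsGetRequestedLayout`. A hypothetical
// check might read:
//
//   static_assert(BlockAllocator<MyLayoutAwareBlock>::kCapabilities.has(
//       kImplementsGetRequestedLayout));
//
// where `MyLayoutAwareBlock` stands in for a layout-aware block type such as
// a `DetailedBlock`.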
// Forward declaration for friending (the declared name is elided in this
// excerpt; `BlockAllocatorTest` is assumed here to match the friend
// declaration below).
template <typename, size_t>
class BlockAllocatorTest;
/// A memory allocator that uses a list of blocks.
///
/// This class does not implement `ChooseBlock` and cannot be used directly.
/// Instead, use one of its specializations, each of which encodes a policy
/// for selecting a free block.
template <typename BlockType_>
class BlockAllocator : public internal::GenericBlockAllocator {
 private:
  using Base = internal::GenericBlockAllocator;

 public:
  using BlockType = BlockType_;
  using Range = typename BlockType::Range;

  static constexpr Capabilities kCapabilities =
      Base::GetCapabilities<BlockType>();
  static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;

  ~BlockAllocator() override;

  /// Returns a `Range` of blocks tracking the memory of this allocator.
  Range blocks() const;

  /// Returns the largest single allocation that can currently succeed.
  size_t GetMaxAllocatable() { return DoGetMaxAllocatable(); }

  /// Returns fragmentation information for the block allocator's memory
  /// region.
  Fragmentation MeasureFragmentation() const;

  /// Sets the blocks used by this allocator, either by creating them from a
  /// raw memory region or by taking the first block of an existing sequence.
  void Init(ByteSpan region);
  void Init(BlockType* begin);
 protected:
  constexpr BlockAllocator() : Base(kCapabilities) {}

  /// Returns the block associated with a pointer to its usable space,
  /// crashing or returning null on invalid pointers depending on the
  /// hardening level.
  template <typename Ptr>
  internal::copy_const_ptr_t<Ptr, BlockType*> FromUsableSpace(Ptr ptr) const;

  /// Frees the given block and merges it with its free neighbors.
  virtual void DeallocateBlock(BlockType*&& block);
 private:
  using BlockResultPrev = internal::GenericBlockResult::Prev;
  using BlockResultNext = internal::GenericBlockResult::Next;
  // Allow the test harness (forward-declared above) to access private state.
  template <typename, size_t>
  friend class BlockAllocatorTest;
  void* DoAllocate(Layout layout) override;
  void DoDeallocate(void* ptr) override;
  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }
  bool DoResize(void* ptr, size_t new_size) override;
  size_t DoGetAllocated() const override { return allocated_; }
  Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;

  /// Selects a free block to allocate from; implements the derived
  /// allocator's block-selection policy.
  virtual BlockResult<BlockType> ChooseBlock(Layout layout) = 0;

  /// Returns the largest single allocation that can currently succeed.
  virtual size_t DoGetMaxAllocatable() = 0;

  /// Informs the derived allocator that a block is about to be allocated or
  /// merged and should be removed from any free-block bookkeeping.
  virtual void ReserveBlock(BlockType&) {}

  /// Informs the derived allocator that a block has become free and may be
  /// added to its free-block bookkeeping.
  virtual void RecycleBlock(BlockType&) {}

  /// Flushes any deferred bookkeeping.
  virtual void Flush() {}
  /// Returns whether the given block's previous neighbor exists and is free.
  static bool PrevIsFree(const BlockType* block) {
    auto* prev = block->Prev();
    return prev != nullptr && prev->IsFree();
  }

  /// Returns whether the given block's next neighbor exists and is free.
  static bool NextIsFree(const BlockType* block) {
    auto* next = block->Next();
    return next != nullptr && next->IsFree();
  }
  /// Ensures the pointer to the last block is correct after the given block
  /// is modified.
  void UpdateLast(BlockType* block);

  size_t capacity_ = 0;
  size_t allocated_ = 0;
  BlockType* first_ = nullptr;
  BlockType* last_ = nullptr;
  uint16_t unpoisoned_ = 0;
};
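// --------------------------------------------------------------------------
// Illustrative sketch, not part of the original header: a minimal derived
// allocator showing the `ChooseBlock` contract. The `SimpleFirstFitAllocator`
// name is hypothetical, and the exact `BlockType::AllocFirst` and
// `BlockResult` signatures are assumptions based on the block mixins included
// above. A production allocator would also override `ReserveBlock` and
// `RecycleBlock` to maintain a free list instead of rescanning all blocks.
//
//   template <typename BlockType>
//   class SimpleFirstFitAllocator : public BlockAllocator<BlockType> {
//    private:
//     // Claims the first free block that can satisfy the requested layout.
//     BlockResult<BlockType> ChooseBlock(Layout layout) override {
//       for (auto* block : this->blocks()) {
//         if (!block->IsFree()) {
//           continue;
//         }
//         if (auto result = BlockType::AllocFirst(std::move(block), layout);
//             result.ok()) {
//           return result;
//         }
//       }
//       return BlockResult<BlockType>(nullptr, Status::NotFound());
//     }
//
//     // Reports the inner size of the largest free block.
//     size_t DoGetMaxAllocatable() override {
//       size_t max_allocatable = 0;
//       for (auto* block : this->blocks()) {
//         if (block->IsFree() && block->InnerSize() > max_allocatable) {
//           max_allocatable = block->InnerSize();
//         }
//       }
//       return max_allocatable;
//     }
//   };
// --------------------------------------------------------------------------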
template <typename BlockType>
BlockAllocator<BlockType>::~BlockAllocator() {
  if constexpr (Hardening::kIncludesRobustChecks) {
    // Crash if any block is still allocated when the allocator is destroyed.
    for (auto* block : blocks()) {
      if (!block->IsFree()) {
        CrashOnAllocated(block);
      }
    }
  }
}
template <typename BlockType>
typename BlockAllocator<BlockType>::Range BlockAllocator<BlockType>::blocks()
    const {
  return Range(first_);
}
template <typename BlockType>
void BlockAllocator<BlockType>::Init(ByteSpan region) {
  // Create a block spanning the given region and take ownership of it.
  Init(*BlockType::Init(region));
}
template <typename BlockType>
void BlockAllocator<BlockType>::Init(BlockType* begin) {
  if constexpr (Hardening::kIncludesRobustChecks) {
    PW_ASSERT(begin != nullptr);
    PW_ASSERT(begin->Prev() == nullptr);
  }
  first_ = begin;
  // Walk the block list to find the last block, total up the capacity, and
  // hand free blocks to the derived allocator.
  for (auto* block : blocks()) {
    last_ = block;
    capacity_ += block->OuterSize();
    if (block->IsFree()) {
      RecycleBlock(*block);
    }
  }
}
template <typename BlockType>
void* BlockAllocator<BlockType>::DoAllocate(Layout layout) {
  if (capacity_ == 0) {
    // Not initialized.
    return nullptr;
  }
  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(last_->Next() == nullptr);
  }
  auto result = ChooseBlock(layout);
  if (!result.ok()) {
    // No block was found that can satisfy this request.
    return nullptr;
  }
  BlockType* block = result.block();
  allocated_ += block->OuterSize();
  switch (result.prev()) {
    case BlockResultPrev::kSplitNew:
      // A new free block was split off before the allocated one.
      RecycleBlock(*(block->Prev()));
      break;
    case BlockResultPrev::kResizedLarger:
      // Bytes were shifted to the previous, allocated block.
      allocated_ += result.size();
      break;
    case BlockResultPrev::kUnchanged:
    case BlockResultPrev::kResizedSmaller:
      break;
  }
  if (result.next() == BlockResultNext::kSplitNew) {
    // A new free block was split off after the allocated one.
    RecycleBlock(*(block->Next()));
  }
  UpdateLast(block);
  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(block <= last_);
  }
  return block->UsableSpace();
}
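// Note: `allocated_` tracks outer sizes, so it includes block overhead as
// well as usable space. Splits create new free blocks that are handed to
// `RecycleBlock`, while a `kResizedLarger` result means bytes moved into the
// previous, allocated block and must be charged to `allocated_` as well.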
template <typename BlockType>
void BlockAllocator<BlockType>::DoDeallocate(void* ptr) {
  BlockType* block = FromUsableSpace(ptr);
  if (block->IsFree()) {
    // Either crash on the double-free or ignore it, depending on hardening.
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnDoubleFree(block);
    }
    return;
  }
  DeallocateBlock(std::move(block));
}
template <typename BlockType>
void BlockAllocator<BlockType>::DeallocateBlock(BlockType*&& block) {
  // Free neighbors will be merged into this block; let the derived allocator
  // remove them from its bookkeeping first.
  if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
    ReserveBlock(*prev);
  }
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  // Free the block and merge it with its neighbors, if possible.
  allocated_ -= block->OuterSize();
  auto free_result = BlockType::Free(std::move(block));
  block = free_result.block();
  UpdateLast(block);

  if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
    // Bytes were reclaimed from the end of the previous, allocated block.
    allocated_ -= free_result.size();
  }

  if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
    // Periodically poison freed blocks to help detect use-after-free.
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      unpoisoned_ = 0;
      block->Poison();
    }
  }

  RecycleBlock(*block);
}
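// Note: poisoning fills a freed block's usable space with a known pattern
// that can be checked later to detect writes to freed memory. Poisoning only
// every `kPoisonInterval`-th free (configured via
// `PW_ALLOCATOR_BLOCK_POISON_INTERVAL`) trades detection coverage for lower
// deallocation cost; for example, an interval of 4 poisons roughly a quarter
// of freed blocks.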
template <typename BlockType>
bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
  BlockType* block = FromUsableSpace(ptr);

  // A free next block may be merged into this one when resizing; let the
  // derived allocator remove it from its bookkeeping first.
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  size_t old_size = block->OuterSize();
  if (!block->Resize(new_size).ok()) {
    return false;
  }
  allocated_ -= old_size;
  allocated_ += block->OuterSize();
  UpdateLast(block);

  // If a free block remains or was created after this one, hand it back to
  // the derived allocator.
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    RecycleBlock(*next);
  }

  return true;
}
template <typename BlockType>
Result<Layout> BlockAllocator<BlockType>::DoGetInfo(InfoType info_type,
                                                    const void* ptr) const {
  // Handle queries that do not concern a specific pointer first.
  if (info_type == InfoType::kCapacity) {
    return Layout(capacity_);
  }
  // Get the block for the given pointer, and check that it is valid and in
  // use.
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    return Status::NotFound();
  }
  const auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    return Status::DataLoss();
  }
  if (block->IsFree()) {
    return Status::FailedPrecondition();
  }
  if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
    if (info_type == InfoType::kRequestedLayoutOf) {
      return block->RequestedLayout();
    }
  }
  switch (info_type) {
    case InfoType::kUsableLayoutOf:
      return Layout(block->InnerSize(), BlockType::kAlignment);
    case InfoType::kAllocatedLayoutOf:
      return Layout(block->OuterSize(), BlockType::kAlignment);
    case InfoType::kRecognizes:
      return Layout();
    case InfoType::kCapacity:
    case InfoType::kRequestedLayoutOf:
    default:
      return Status::Unimplemented();
  }
}
template <typename BlockType>
Fragmentation BlockAllocator<BlockType>::MeasureFragmentation() const {
  Fragmentation fragmentation;
  for (auto block : blocks()) {
    if (block->IsFree()) {
      fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
    }
  }
  return fragmentation;
}
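// Note: fragments are recorded in units of the block alignment, not bytes. A
// caller might periodically sample `MeasureFragmentation()` and log or alert
// on the result; see pw_allocator/fragmentation.h for how the fragmentation
// metric is derived from the collected fragment sums.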
template <typename BlockType>
template <typename Ptr>
internal::copy_const_ptr_t<Ptr, BlockType*>
BlockAllocator<BlockType>::FromUsableSpace(Ptr ptr) const {
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnOutOfRange(ptr);
    }
    return nullptr;
  }
  auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      // Crashes with details about how the block is invalid.
      block->CheckInvariants();
    }
    return nullptr;
  }
  return block;
}
template <typename BlockType>
void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
  BlockType* next = block->Next();
  if (next == nullptr) {
    last_ = block;
  } else if (next->Next() == nullptr) {
    last_ = next;
  }
}

}  // namespace pw::allocator
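// --------------------------------------------------------------------------
// Illustrative usage sketch, assuming a concrete derived allocator such as
// the hypothetical `SimpleFirstFitAllocator` above and a block type
// (`MyBlockType` is a placeholder) built from the mixins included here:
//
//   std::array<std::byte, 1024> buffer;
//   SimpleFirstFitAllocator<MyBlockType> allocator;
//   allocator.Init(buffer);
//
//   void* ptr = allocator.Allocate(Layout::Of<uint32_t>());
//   PW_ASSERT(ptr != nullptr);
//   allocator.Deallocate(ptr);
//
// `Allocate` and `Deallocate` are the public pw::Allocator entry points that
// dispatch to the `DoAllocate`/`DoDeallocate` overrides defined above.
// --------------------------------------------------------------------------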