19#include "pw_allocator/allocator.h"
20#include "pw_allocator/block/basic.h"
21#include "pw_allocator/block/iterable.h"
22#include "pw_allocator/block/poisonable.h"
23#include "pw_allocator/block/result.h"
24#include "pw_allocator/block/with_layout.h"
25#include "pw_allocator/capability.h"
26#include "pw_allocator/config.h"
27#include "pw_allocator/fragmentation.h"
28#include "pw_allocator/hardening.h"
29#include "pw_assert/assert.h"
30#include "pw_bytes/span.h"
31#include "pw_result/result.h"
32#include "pw_status/status.h"
34namespace pw::allocator {
// NOTE(review): this file is an extraction-garbled source listing — the
// original line numbers are fused into the text ("55", "58", ...) and many
// lines are missing. The fragment below appears to build a Capabilities
// constant for a BlockType, adding kImplementsGetRequestedLayout when the
// block type tracks requested layouts (has_layout_v) — TODO: recover the
// original source before making code changes here.
55 template <
typename BlockType>
58 kImplementsGetAllocatedLayout |
59 kImplementsGetCapacity | kImplementsRecognizes;
60 if constexpr (has_layout_v<BlockType>) {
61 return common | kImplementsGetRequestedLayout;
// NOTE(review): fragment of a `template <typename, size_t>` declaration; the
// declared name is not visible in this listing — confirm against upstream.
90template <
typename,
size_t>
// NOTE(review): interior fragments of the `BlockAllocator<BlockType_>` class
// declaration (extraction-garbled; class header/footer and most member
// declarations are missing).
106template <
typename BlockType_>
// Type aliases re-exporting the block type and its iterable range.
112 using BlockType = BlockType_;
113 using Range =
typename BlockType::Range;
// Capability set derived from the block type (see fragment near original
// line 55).
116 Base::GetCapabilities<BlockType>();
186 template <
typename Ptr>
// Aliases for the prev/next adjustment tags reported by block operations.
196 using BlockResultPrev = internal::GenericBlockResult::Prev;
197 using BlockResultNext = internal::GenericBlockResult::Next;
200 template <
typename,
size_t>
// Allocator interface override: attempt to resize an allocation in place.
213 bool DoResize(
void* ptr,
size_t new_size)
override;
// True if the block's predecessor exists and is free.
265 static bool PrevIsFree(
const BlockType* block) {
266 auto* prev = block->Prev();
267 return prev !=
nullptr && prev->IsFree();
// True if the block's successor exists and is free.
271 static bool NextIsFree(
const BlockType* block) {
272 auto* next = block->Next();
273 return next !=
nullptr && next->IsFree();
// Keeps `last_` current after operations that may change the tail block.
278 void UpdateLast(BlockType* block);
// Bookkeeping: total region size, bytes currently allocated, list bounds,
// and a counter of frees since the last poisoning pass.
281 size_t capacity_ = 0;
282 size_t allocated_ = 0;
283 BlockType* first_ =
nullptr;
284 BlockType* last_ =
nullptr;
285 uint16_t unpoisoned_ = 0;
292template <
typename BlockType>
293BlockAllocator<BlockType>::~BlockAllocator() {
294 if constexpr (Hardening::kIncludesRobustChecks) {
295 for (
auto* block : blocks()) {
296 if (!block->IsFree()) {
297 CrashOnAllocated(block);
303template <
typename BlockType>
306 return Range(first_);
309template <
typename BlockType>
312 PW_ASSERT(result.
ok());
316template <
typename BlockType>
318 if constexpr (Hardening::kIncludesRobustChecks) {
319 PW_ASSERT(begin !=
nullptr);
320 PW_ASSERT(begin->Prev() ==
nullptr);
323 for (
auto* block : blocks()) {
325 capacity_ += block->OuterSize();
326 if (block->IsFree()) {
327 RecycleBlock(*block);
332template <
typename BlockType>
334 if (capacity_ == 0) {
339 if constexpr (Hardening::kIncludesDebugChecks) {
340 PW_ASSERT(last_->Next() ==
nullptr);
342 auto result = ChooseBlock(layout);
347 BlockType* block = result.block();
348 allocated_ += block->OuterSize();
349 switch (result.prev()) {
350 case BlockResultPrev::kSplitNew:
352 RecycleBlock(*(block->Prev()));
354 case BlockResultPrev::kResizedLarger:
356 allocated_ += result.size();
358 case BlockResultPrev::kUnchanged:
359 case BlockResultPrev::kResizedSmaller:
362 if (result.next() == BlockResultNext::kSplitNew) {
363 RecycleBlock(*(block->Next()));
367 if constexpr (Hardening::kIncludesDebugChecks) {
368 PW_ASSERT(block <= last_);
371 return block->UsableSpace();
374template <
typename BlockType>
376 BlockType* block = FromUsableSpace(ptr);
377 if (block->IsFree()) {
378 if constexpr (Hardening::kIncludesBasicChecks) {
379 CrashOnDoubleFree(block);
384 DeallocateBlock(std::move(block));
387template <
typename BlockType>
390 if (
auto* prev = block->Prev(); prev !=
nullptr && prev->IsFree()) {
393 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
398 allocated_ -= block->OuterSize();
399 auto free_result = BlockType::Free(std::move(block));
400 block = free_result.block();
403 if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
405 allocated_ -= free_result.size();
408 if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
410 if (unpoisoned_ >= kPoisonInterval) {
416 RecycleBlock(*block);
419template <
typename BlockType>
421 BlockType* block = FromUsableSpace(ptr);
424 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
428 size_t old_size = block->OuterSize();
429 if (!block->Resize(new_size).ok()) {
432 allocated_ -= old_size;
433 allocated_ += block->OuterSize();
436 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
443template <
typename BlockType>
445 const void* ptr)
const {
447 if (info_type == InfoType::kCapacity) {
451 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
454 const auto* block = BlockType::FromUsableSpace(ptr);
455 if (!block->IsValid()) {
458 if (block->IsFree()) {
461 if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
462 if (info_type == InfoType::kRequestedLayoutOf) {
463 return block->RequestedLayout();
467 case InfoType::kUsableLayoutOf:
468 return Layout(block->InnerSize(), BlockType::kAlignment);
469 case InfoType::kAllocatedLayoutOf:
470 return Layout(block->OuterSize(), BlockType::kAlignment);
471 case InfoType::kRecognizes:
473 case InfoType::kCapacity:
474 case InfoType::kRequestedLayoutOf:
480template <
typename BlockType>
483 for (
auto block : blocks()) {
484 if (block->IsFree()) {
485 fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
488 return fragmentation;
491template <
typename BlockType>
492template <
typename Ptr>
493internal::copy_const_ptr_t<Ptr, BlockType*>
495 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
496 if constexpr (Hardening::kIncludesBasicChecks) {
497 CrashOnOutOfRange(ptr);
501 auto* block = BlockType::FromUsableSpace(ptr);
502 if (!block->IsValid()) {
503 if constexpr (Hardening::kIncludesBasicChecks) {
504 block->CheckInvariants();
511template <
typename BlockType>
513 BlockType* next = block->Next();
514 if (next ==
nullptr) {
516 }
else if (next->Next() ==
nullptr) {
Definition: allocator.h:45
constexpr Allocator()=default
TODO(b/326509341): Remove when downstream consumers migrate.
constexpr bool ok() const
Definition: result.h:451
static constexpr Status DataLoss()
Definition: status.h:316
static constexpr Status Unimplemented()
Definition: status.h:280
static constexpr Status FailedPrecondition()
Definition: status.h:243
static constexpr Status NotFound()
Definition: status.h:190
Definition: block_allocator.h:107
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:333
size_t DoGetAllocated() const override
Definition: block_allocator.h:216
std::optional< Fragmentation > DoMeasureFragmentation() const override
Definition: block_allocator.h:162
Fragmentation MeasureFragmentation() const
Returns fragmentation information for the block allocator's memory region.
Definition: block_allocator.h:481
virtual size_t DoGetMaxAllocatable()=0
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:494
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:240
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:375
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:388
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:420
void Init(ByteSpan region)
Definition: block_allocator.h:310
void DoDeallocate(void *ptr, Layout) override
Definition: block_allocator.h:210
void Init(BlockType *begin)
Definition: block_allocator.h:317
virtual void Flush()
Definition: block_allocator.h:262
size_t GetMaxAllocatable()
Definition: block_allocator.h:153
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:248
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:304
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:444
Definition: capability.h:65
Definition: detailed_block.h:88
Definition: block_allocator.h:46
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Definition: block_allocator.h:91
#define PW_ALLOCATOR_BLOCK_POISON_INTERVAL
Definition: config.h:30
Definition: fragmentation.h:46