// Standard headers used directly by this file (size_t, uint16_t, std::move).
#include <cstddef>
#include <cstdint>
#include <utility>

#include "pw_allocator/allocator.h"
#include "pw_allocator/block/basic.h"
#include "pw_allocator/block/iterable.h"
#include "pw_allocator/block/poisonable.h"
#include "pw_allocator/block/result.h"
#include "pw_allocator/block/with_layout.h"
#include "pw_allocator/capability.h"
#include "pw_allocator/config.h"
#include "pw_allocator/fragmentation.h"
#include "pw_allocator/hardening.h"
#include "pw_assert/assert.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"
33namespace pw::allocator {
54 template <
typename BlockType>
57 kImplementsGetAllocatedLayout |
58 kImplementsGetCapacity | kImplementsRecognizes;
59 if constexpr (has_layout_v<BlockType>) {
60 return common | kImplementsGetRequestedLayout;
// NOTE(review): extraction fragment — header of a `template <typename,
// size_t>` declaration whose declared name was lost (original line ~90).
// Presumably a forward declaration of a buffer-backed allocator variant;
// TODO: confirm against the upstream header before relying on this.
89template <
typename,
size_t>
// NOTE(review): extraction fragment of the `BlockAllocator` class
// declaration — the class head, access specifiers, and many member
// declarations between the surviving lines were lost. Only comments are
// added here; the code tokens are untouched.
103template <
typename BlockType_>
// Convenience aliases for the block type and its iterable block range.
109 using BlockType = BlockType_;
110 using Range =
typename BlockType::Range;
// Capabilities advertised by this allocator, derived from the block type.
113 Base::GetCapabilities<BlockType>();
// Every `kPoisonInterval`-th freed block is poisoned (when the block type
// supports poisoning); the interval is configured at build time.
114 static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;
// NOTE(review): template header for the `FromUsableSpace<Ptr>` member whose
// declaration line was lost in extraction.
162 template <
typename Ptr>
// Shorthand for the previous/next-block outcome enums reported by a
// generic block result.
172 using BlockResultPrev = internal::GenericBlockResult::Prev;
173 using BlockResultNext = internal::GenericBlockResult::Next;
// NOTE(review): presumably a friend declaration matching the
// `template <typename, size_t>` forward declaration near line 89; the
// declared name was lost — confirm against upstream.
176 template <
typename,
size_t>
// Attempts to resize the allocation at `ptr` in place to `new_size` bytes.
189 bool DoResize(
void* ptr,
size_t new_size)
override;
// Returns layout information (capacity, usable/allocated/requested layout)
// for `ptr`, or a status describing why it is unavailable.
195 Result<Layout>
DoGetInfo(InfoType info_type,
const void* ptr)
const override;
238 static bool PrevIsFree(
const BlockType* block) {
239 auto* prev = block->Prev();
240 return prev !=
nullptr && prev->IsFree();
244 static bool NextIsFree(
const BlockType* block) {
245 auto* next = block->Next();
246 return next !=
nullptr && next->IsFree();
// Moves `last_` forward when `block` or its successor became the final block.
251 void UpdateLast(BlockType* block);
// Total outer bytes spanned by all blocks; accumulated during `Init`.
254 size_t capacity_ = 0;
// Outer bytes currently held by allocated (non-free) blocks.
255 size_t allocated_ = 0;
// Head and tail of the doubly linked block sequence.
256 BlockType* first_ =
nullptr;
257 BlockType* last_ =
nullptr;
// Blocks freed since the last poisoning; compared against `kPoisonInterval`.
258 uint16_t unpoisoned_ = 0;
263template <
typename BlockType>
264BlockAllocator<BlockType>::~BlockAllocator() {
265 if constexpr (Hardening::kIncludesRobustChecks) {
266 for (
auto* block : blocks()) {
267 if (!block->IsFree()) {
268 CrashOnAllocated(block);
274template <
typename BlockType>
277 return Range(first_);
280template <
typename BlockType>
282 Result<BlockType*> result = BlockType::Init(region);
286template <
typename BlockType>
288 if constexpr (Hardening::kIncludesRobustChecks) {
289 PW_ASSERT(begin !=
nullptr);
290 PW_ASSERT(begin->Prev() ==
nullptr);
293 for (
auto* block : blocks()) {
295 capacity_ += block->OuterSize();
296 if (block->IsFree()) {
297 RecycleBlock(*block);
302template <
typename BlockType>
304 if (capacity_ == 0) {
309 if constexpr (Hardening::kIncludesDebugChecks) {
310 PW_ASSERT(last_->Next() ==
nullptr);
312 auto result = ChooseBlock(layout);
317 BlockType* block = result.block();
318 allocated_ += block->OuterSize();
319 switch (result.prev()) {
320 case BlockResultPrev::kSplitNew:
322 RecycleBlock(*(block->Prev()));
324 case BlockResultPrev::kResizedLarger:
326 allocated_ += result.size();
328 case BlockResultPrev::kUnchanged:
329 case BlockResultPrev::kResizedSmaller:
332 if (result.next() == BlockResultNext::kSplitNew) {
333 RecycleBlock(*(block->Next()));
337 if constexpr (Hardening::kIncludesDebugChecks) {
338 PW_ASSERT(block <= last_);
341 return block->UsableSpace();
344template <
typename BlockType>
346 BlockType* block = FromUsableSpace(ptr);
347 if (block->IsFree()) {
348 if constexpr (Hardening::kIncludesBasicChecks) {
349 CrashOnDoubleFree(block);
354 DeallocateBlock(std::move(block));
357template <
typename BlockType>
360 if (
auto* prev = block->Prev(); prev !=
nullptr && prev->IsFree()) {
363 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
368 allocated_ -= block->OuterSize();
369 auto free_result = BlockType::Free(std::move(block));
370 block = free_result.block();
373 if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
375 allocated_ -= free_result.size();
378 if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
380 if (unpoisoned_ >= kPoisonInterval) {
386 RecycleBlock(*block);
389template <
typename BlockType>
391 BlockType* block = FromUsableSpace(ptr);
394 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
398 size_t old_size = block->OuterSize();
399 if (!block->Resize(new_size).ok()) {
402 allocated_ -= old_size;
403 allocated_ += block->OuterSize();
406 if (
auto* next = block->Next(); next !=
nullptr && next->IsFree()) {
413template <
typename BlockType>
415 const void* ptr)
const {
417 if (info_type == InfoType::kCapacity) {
421 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
422 return Status::NotFound();
424 const auto* block = BlockType::FromUsableSpace(ptr);
425 if (!block->IsValid()) {
426 return Status::DataLoss();
428 if (block->IsFree()) {
429 return Status::FailedPrecondition();
431 if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
432 if (info_type == InfoType::kRequestedLayoutOf) {
433 return block->RequestedLayout();
437 case InfoType::kUsableLayoutOf:
438 return Layout(block->InnerSize(), BlockType::kAlignment);
439 case InfoType::kAllocatedLayoutOf:
440 return Layout(block->OuterSize(), BlockType::kAlignment);
441 case InfoType::kRecognizes:
443 case InfoType::kCapacity:
444 case InfoType::kRequestedLayoutOf:
446 return Status::Unimplemented();
450template <
typename BlockType>
453 for (
auto block : blocks()) {
454 if (block->IsFree()) {
455 fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
458 return fragmentation;
461template <
typename BlockType>
462template <
typename Ptr>
463internal::copy_const_ptr_t<Ptr, BlockType*>
465 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
466 if constexpr (Hardening::kIncludesBasicChecks) {
467 CrashOnOutOfRange(ptr);
471 auto* block = BlockType::FromUsableSpace(ptr);
472 if (!block->IsValid()) {
473 if constexpr (Hardening::kIncludesBasicChecks) {
474 block->CheckInvariants();
481template <
typename BlockType>
483 BlockType* next = block->Next();
484 if (next ==
nullptr) {
486 }
else if (next->Next() ==
nullptr) {
Definition: allocator.h:34
constexpr Allocator()=default
TODO(b/326509341): Remove when downstream consumers migrate.
Definition: block_allocator.h:104
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:303
size_t DoGetAllocated() const override
Definition: block_allocator.h:192
Fragmentation MeasureFragmentation() const
Returns fragmentation information for the block allocator's memory region.
Definition: block_allocator.h:451
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:464
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:213
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:345
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:358
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:390
void Init(ByteSpan region)
Definition: block_allocator.h:281
void DoDeallocate(void *ptr, Layout) override
Definition: block_allocator.h:186
void Init(BlockType *begin)
Definition: block_allocator.h:287
virtual void Flush()
Definition: block_allocator.h:235
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:221
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:275
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:414
Definition: capability.h:62
Definition: detailed_block.h:86
Definition: block_allocator.h:45
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Definition: block_allocator.h:90
Definition: fragmentation.h:44