C/C++ API Reference
Loading...
Searching...
No Matches
block_allocator.h
1// Copyright 2024 The Pigweed Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License"); you may not
4// use this file except in compliance with the License. You may obtain a copy of
5// the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12// License for the specific language governing permissions and limitations under
13// the License.
14#pragma once
15
16#include <cstddef>
17
18#include "pw_allocator/allocator.h"
19#include "pw_allocator/block/basic.h"
20#include "pw_allocator/block/iterable.h"
21#include "pw_allocator/block/poisonable.h"
22#include "pw_allocator/block/result.h"
23#include "pw_allocator/block/with_layout.h"
24#include "pw_allocator/capability.h"
25#include "pw_allocator/config.h"
26#include "pw_allocator/fragmentation.h"
27#include "pw_allocator/hardening.h"
28#include "pw_assert/assert.h"
29#include "pw_bytes/span.h"
30#include "pw_result/result.h"
31#include "pw_status/status.h"
32
33namespace pw::allocator {
34namespace internal {
35
46 public:
47 // Not copyable or movable.
49 GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
51 GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;
52
53 protected:
54 template <typename BlockType>
55 static constexpr Capabilities GetCapabilities() {
56 Capabilities common = kImplementsGetUsableLayout |
57 kImplementsGetAllocatedLayout |
58 kImplementsGetCapacity | kImplementsRecognizes;
59 if constexpr (has_layout_v<BlockType>) {
60 return common | kImplementsGetRequestedLayout;
61 } else {
62 return common;
63 }
64 }
65
66 constexpr explicit GenericBlockAllocator(Capabilities capabilities)
67 : Allocator(capabilities) {}
68
74 [[noreturn]] static void CrashOnAllocated(const void* allocated);
75
78 [[noreturn]] static void CrashOnOutOfRange(const void* freed);
79
81 [[noreturn]] static void CrashOnDoubleFree(const void* freed);
82};
83
84} // namespace internal
85
86namespace test {
87
88// Forward declaration for friending.
89template <typename, size_t>
91
92} // namespace test
93
95
105template <typename BlockType_>
107 private:
109
110 public:
111 using BlockType = BlockType_;
112 using Range = typename BlockType::Range;
113
114 static constexpr Capabilities kCapabilities =
115 Base::GetCapabilities<BlockType>();
116 static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;
117
118 ~BlockAllocator() override;
119
121 Range blocks() const;
122
130 void Init(ByteSpan region);
131
153
156
157 protected:
158 constexpr explicit BlockAllocator() : Base(kCapabilities) {}
159
167 void Init(BlockType* begin);
168
186 template <typename Ptr>
187 internal::copy_const_ptr_t<Ptr, BlockType*> FromUsableSpace(Ptr ptr) const;
188
193 virtual void DeallocateBlock(BlockType*&& block);
194
195 private:
196 using BlockResultPrev = internal::GenericBlockResult::Prev;
197 using BlockResultNext = internal::GenericBlockResult::Next;
198
199 // Let unit tests call internal methods in order to "preallocate" blocks.
200 template <typename, size_t>
201 friend class test::BlockAllocatorTest;
202
204 void* DoAllocate(Layout layout) override;
205
207 void DoDeallocate(void* ptr) override;
208
210 void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }
211
213 bool DoResize(void* ptr, size_t new_size) override;
214
216 size_t DoGetAllocated() const override { return allocated_; }
217
219 Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;
220
222 virtual size_t DoGetMaxAllocatable() = 0;
223
233
240 virtual void ReserveBlock(BlockType&) {}
241
248 virtual void RecycleBlock(BlockType&) {}
249
262 virtual void Flush() {}
263
265 static bool PrevIsFree(const BlockType* block) {
266 auto* prev = block->Prev();
267 return prev != nullptr && prev->IsFree();
268 }
269
271 static bool NextIsFree(const BlockType* block) {
272 auto* next = block->Next();
273 return next != nullptr && next->IsFree();
274 }
275
278 void UpdateLast(BlockType* block);
279
280 // Represents the range of blocks for this allocator.
281 size_t capacity_ = 0;
282 size_t allocated_ = 0;
283 BlockType* first_ = nullptr;
284 BlockType* last_ = nullptr;
285 uint16_t unpoisoned_ = 0;
286};
287
289
290// Template method implementations
291
292template <typename BlockType>
293BlockAllocator<BlockType>::~BlockAllocator() {
294 if constexpr (Hardening::kIncludesRobustChecks) {
295 for (auto* block : blocks()) {
296 if (!block->IsFree()) {
297 CrashOnAllocated(block);
298 }
299 }
300 }
301}
302
template <typename BlockType>
typename BlockAllocator<BlockType>::Range BlockAllocator<BlockType>::blocks()
    const {
  // Range over every block managed by this allocator, rooted at `first_`.
  // Note: `first_` is null until `Init` has been called.
  return Range(first_);
}
308
309template <typename BlockType>
311 Result<BlockType*> result = BlockType::Init(region);
312 Init(*result);
313}
314
315template <typename BlockType>
316void BlockAllocator<BlockType>::Init(BlockType* begin) {
317 if constexpr (Hardening::kIncludesRobustChecks) {
318 PW_ASSERT(begin != nullptr);
319 PW_ASSERT(begin->Prev() == nullptr);
320 }
321 first_ = begin;
322 for (auto* block : blocks()) {
323 last_ = block;
324 capacity_ += block->OuterSize();
325 if (block->IsFree()) {
326 RecycleBlock(*block);
327 }
328 }
329}
330
331template <typename BlockType>
333 if (capacity_ == 0) {
334 // Not initialized.
335 return nullptr;
336 }
337
338 if constexpr (Hardening::kIncludesDebugChecks) {
339 PW_ASSERT(last_->Next() == nullptr);
340 }
341 auto result = ChooseBlock(layout);
342 if (!result.ok()) {
343 // No valid block for request.
344 return nullptr;
345 }
346 BlockType* block = result.block();
347 allocated_ += block->OuterSize();
348 switch (result.prev()) {
349 case BlockResultPrev::kSplitNew:
350 // New free blocks may be created when allocating.
351 RecycleBlock(*(block->Prev()));
352 break;
353 case BlockResultPrev::kResizedLarger:
354 // Extra bytes may be appended to the previous block.
355 allocated_ += result.size();
356 break;
357 case BlockResultPrev::kUnchanged:
358 case BlockResultPrev::kResizedSmaller:
359 break;
360 }
361 if (result.next() == BlockResultNext::kSplitNew) {
362 RecycleBlock(*(block->Next()));
363 }
364
365 UpdateLast(block);
366 if constexpr (Hardening::kIncludesDebugChecks) {
367 PW_ASSERT(block <= last_);
368 }
369
370 return block->UsableSpace();
371}
372
373template <typename BlockType>
375 BlockType* block = FromUsableSpace(ptr);
376 if (block->IsFree()) {
377 if constexpr (Hardening::kIncludesBasicChecks) {
378 CrashOnDoubleFree(block);
379 } else {
380 return;
381 }
382 }
383 DeallocateBlock(std::move(block));
384}
385
386template <typename BlockType>
388 // Neighboring blocks may be merged when freeing.
389 if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
390 ReserveBlock(*prev);
391 }
392 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
393 ReserveBlock(*next);
394 }
395
396 // Free the block and merge it with its neighbors, if possible.
397 allocated_ -= block->OuterSize();
398 auto free_result = BlockType::Free(std::move(block));
399 block = free_result.block();
400 UpdateLast(block);
401
402 if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
403 // Bytes were reclaimed from the previous block.
404 allocated_ -= free_result.size();
405 }
406
407 if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
408 ++unpoisoned_;
409 if (unpoisoned_ >= kPoisonInterval) {
410 block->Poison();
411 unpoisoned_ = 0;
412 }
413 }
414
415 RecycleBlock(*block);
416}
417
418template <typename BlockType>
419bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
420 BlockType* block = FromUsableSpace(ptr);
421
422 // Neighboring blocks may be merged when resizing.
423 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
424 ReserveBlock(*next);
425 }
426
427 size_t old_size = block->OuterSize();
428 if (!block->Resize(new_size).ok()) {
429 return false;
430 }
431 allocated_ -= old_size;
432 allocated_ += block->OuterSize();
433 UpdateLast(block);
434
435 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
436 RecycleBlock(*next);
437 }
438
439 return true;
440}
441
442template <typename BlockType>
444 const void* ptr) const {
445 // Handle types not related to a block first.
446 if (info_type == InfoType::kCapacity) {
447 return Layout(capacity_);
448 }
449 // Get a block from the given pointer.
450 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
451 return Status::NotFound();
452 }
453 const auto* block = BlockType::FromUsableSpace(ptr);
454 if (!block->IsValid()) {
455 return Status::DataLoss();
456 }
457 if (block->IsFree()) {
458 return Status::FailedPrecondition();
459 }
460 if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
461 if (info_type == InfoType::kRequestedLayoutOf) {
462 return block->RequestedLayout();
463 }
464 }
465 switch (info_type) {
466 case InfoType::kUsableLayoutOf:
467 return Layout(block->InnerSize(), BlockType::kAlignment);
468 case InfoType::kAllocatedLayoutOf:
469 return Layout(block->OuterSize(), BlockType::kAlignment);
470 case InfoType::kRecognizes:
471 return Layout();
472 case InfoType::kCapacity:
473 case InfoType::kRequestedLayoutOf:
474 default:
475 return Status::Unimplemented();
476 }
477}
478
479template <typename BlockType>
481 Fragmentation fragmentation;
482 for (auto block : blocks()) {
483 if (block->IsFree()) {
484 fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
485 }
486 }
487 return fragmentation;
488}
489
490template <typename BlockType>
491template <typename Ptr>
492internal::copy_const_ptr_t<Ptr, BlockType*>
494 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
495 if constexpr (Hardening::kIncludesBasicChecks) {
496 CrashOnOutOfRange(ptr);
497 }
498 return nullptr;
499 }
500 auto* block = BlockType::FromUsableSpace(ptr);
501 if (!block->IsValid()) {
502 if constexpr (Hardening::kIncludesBasicChecks) {
503 block->CheckInvariants();
504 }
505 return nullptr;
506 }
507 return block;
508}
509
510template <typename BlockType>
511void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
512 BlockType* next = block->Next();
513 if (next == nullptr) {
514 last_ = block;
515 } else if (next->Next() == nullptr) {
516 last_ = next;
517 }
518}
519
520} // namespace pw::allocator
Definition: allocator.h:36
constexpr Allocator()=default
TODO(b/326509341): Remove when downstream consumers migrate.
Definition: poll.h:25
Definition: block_allocator.h:106
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:332
size_t DoGetAllocated() const override
Definition: block_allocator.h:216
Fragmentation MeasureFragmentation() const
Returns fragmentation information for the block allocator's memory region.
Definition: block_allocator.h:480
virtual size_t DoGetMaxAllocatable()=0
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:493
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:240
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:374
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:387
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:419
void Init(ByteSpan region)
Definition: block_allocator.h:310
void DoDeallocate(void *ptr, Layout) override
Definition: block_allocator.h:210
void Init(BlockType *begin)
Definition: block_allocator.h:316
virtual void Flush()
Definition: block_allocator.h:262
size_t GetMaxAllocatable()
Definition: block_allocator.h:152
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:248
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:304
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:443
Definition: result.h:116
Definition: capability.h:64
Definition: detailed_block.h:88
Definition: layout.h:58
Definition: block_allocator.h:45
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Definition: block_allocator.h:90
#define PW_ALLOCATOR_BLOCK_POISON_INTERVAL
Definition: config.h:30
Definition: fragmentation.h:46