C/C++ API Reference
Loading...
Searching...
No Matches
block_allocator.h
1// Copyright 2024 The Pigweed Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License"); you may not
4// use this file except in compliance with the License. You may obtain a copy of
5// the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12// License for the specific language governing permissions and limitations under
13// the License.
14#pragma once
15
16#include <cstddef>
17#include <optional>
18
19#include "pw_allocator/allocator.h"
20#include "pw_allocator/block/basic.h"
21#include "pw_allocator/block/iterable.h"
22#include "pw_allocator/block/poisonable.h"
23#include "pw_allocator/block/result.h"
24#include "pw_allocator/block/with_layout.h"
25#include "pw_allocator/capability.h"
26#include "pw_allocator/config.h"
27#include "pw_allocator/fragmentation.h"
28#include "pw_allocator/hardening.h"
29#include "pw_assert/assert.h"
30#include "pw_bytes/span.h"
31#include "pw_result/result.h"
32#include "pw_status/status.h"
33
34namespace pw::allocator {
35namespace internal {
36
47 public:
48 // Not copyable or movable.
50 GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
52 GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;
53
54 protected:
55 template <typename BlockType>
56 static constexpr Capabilities GetCapabilities() {
57 Capabilities common = kImplementsGetUsableLayout |
58 kImplementsGetAllocatedLayout |
59 kImplementsGetCapacity | kImplementsRecognizes;
60 if constexpr (has_layout_v<BlockType>) {
61 return common | kImplementsGetRequestedLayout;
62 } else {
63 return common;
64 }
65 }
66
67 constexpr explicit GenericBlockAllocator(Capabilities capabilities)
68 : pw::Allocator(capabilities) {}
69
75 [[noreturn]] static void CrashOnAllocated(const void* allocated);
76
79 [[noreturn]] static void CrashOnOutOfRange(const void* freed);
80
82 [[noreturn]] static void CrashOnDoubleFree(const void* freed);
83};
84
85} // namespace internal
86
87namespace test {
88
89// Forward declaration for friending.
90template <typename, size_t>
92
93} // namespace test
94
96
106template <typename BlockType_>
108 private:
110
111 public:
112 using BlockType = BlockType_;
113 using Range = typename BlockType::Range;
114
115 static constexpr Capabilities kCapabilities =
116 Base::GetCapabilities<BlockType>();
117 static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;
118
119 ~BlockAllocator() override;
120
122 Range blocks() const;
123
131 void Init(ByteSpan region);
132
154
157
158 protected:
159 constexpr explicit BlockAllocator() : Base(kCapabilities) {}
160
162 std::optional<Fragmentation> DoMeasureFragmentation() const override {
163 return MeasureFragmentation();
164 }
165
173 void Init(BlockType* begin);
174
186 template <typename Ptr>
187 internal::copy_const_ptr_t<Ptr, BlockType*> FromUsableSpace(Ptr ptr) const;
188
193 virtual void DeallocateBlock(BlockType*&& block);
194
195 private:
196 using BlockResultPrev = internal::GenericBlockResult::Prev;
197 using BlockResultNext = internal::GenericBlockResult::Next;
198
 199 // Let unit tests call internal methods in order to "preallocate" blocks.
200 template <typename, size_t>
201 friend class test::BlockAllocatorTest;
202
204 void* DoAllocate(Layout layout) override;
205
207 void DoDeallocate(void* ptr) override;
208
210 bool DoResize(void* ptr, size_t new_size) override;
211
213 size_t DoGetAllocated() const override { return allocated_; }
214
216 Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;
217
219 virtual size_t DoGetMaxAllocatable() = 0;
220
230
237 virtual void ReserveBlock(BlockType&) {}
238
245 virtual void RecycleBlock(BlockType&) {}
246
259 virtual void Flush() {}
260
262 static bool PrevIsFree(const BlockType* block) {
263 auto* prev = block->Prev();
264 return prev != nullptr && prev->IsFree();
265 }
266
268 static bool NextIsFree(const BlockType* block) {
269 auto* next = block->Next();
270 return next != nullptr && next->IsFree();
271 }
272
275 void UpdateLast(BlockType* block);
276
277 // Represents the range of blocks for this allocator.
278 size_t capacity_ = 0;
279 size_t allocated_ = 0;
280 BlockType* first_ = nullptr;
281 BlockType* last_ = nullptr;
282 uint16_t unpoisoned_ = 0;
283};
284
286
287// Template method implementations
288
289template <typename BlockType>
290BlockAllocator<BlockType>::~BlockAllocator() {
291 if constexpr (Hardening::kIncludesRobustChecks) {
292 for (auto* block : blocks()) {
293 if (!block->IsFree()) {
294 CrashOnAllocated(block);
295 }
296 }
297 }
298}
299
300template <typename BlockType>
301typename BlockAllocator<BlockType>::Range BlockAllocator<BlockType>::blocks()
302 const {
303 return Range(first_);
304}
305
306template <typename BlockType>
308 Result<BlockType*> result = BlockType::Init(region);
309 PW_ASSERT(result.ok());
310 Init(*result);
311}
312
313template <typename BlockType>
314void BlockAllocator<BlockType>::Init(BlockType* begin) {
315 if constexpr (Hardening::kIncludesRobustChecks) {
316 PW_ASSERT(begin != nullptr);
317 PW_ASSERT(begin->Prev() == nullptr);
318 }
319 first_ = begin;
320 for (auto* block : blocks()) {
321 last_ = block;
322 capacity_ += block->OuterSize();
323 if (block->IsFree()) {
324 RecycleBlock(*block);
325 }
326 }
327}
328
329template <typename BlockType>
331 if (capacity_ == 0) {
332 // Not initialized.
333 return nullptr;
334 }
335
336 if constexpr (Hardening::kIncludesDebugChecks) {
337 PW_ASSERT(last_->Next() == nullptr);
338 }
339 auto result = ChooseBlock(layout);
340 if (!result.ok()) {
341 // No valid block for request.
342 return nullptr;
343 }
344 BlockType* block = result.block();
345 allocated_ += block->OuterSize();
346 switch (result.prev()) {
347 case BlockResultPrev::kSplitNew:
348 // New free blocks may be created when allocating.
349 RecycleBlock(*(block->Prev()));
350 break;
351 case BlockResultPrev::kResizedLarger:
352 // Extra bytes may be appended to the previous block.
353 allocated_ += result.size();
354 break;
355 case BlockResultPrev::kUnchanged:
356 case BlockResultPrev::kResizedSmaller:
357 break;
358 }
359 if (result.next() == BlockResultNext::kSplitNew) {
360 RecycleBlock(*(block->Next()));
361 }
362
363 UpdateLast(block);
364 if constexpr (Hardening::kIncludesDebugChecks) {
365 PW_ASSERT(block <= last_);
366 }
367
368 return block->UsableSpace();
369}
370
371template <typename BlockType>
373 BlockType* block = FromUsableSpace(ptr);
374 if (block->IsFree()) {
375 if constexpr (Hardening::kIncludesBasicChecks) {
376 CrashOnDoubleFree(block);
377 } else {
378 return;
379 }
380 }
381 DeallocateBlock(std::move(block));
382}
383
384template <typename BlockType>
386 // Neighboring blocks may be merged when freeing.
387 if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
388 ReserveBlock(*prev);
389 }
390 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
391 ReserveBlock(*next);
392 }
393
394 // Free the block and merge it with its neighbors, if possible.
395 allocated_ -= block->OuterSize();
396 auto free_result = BlockType::Free(std::move(block));
397 block = free_result.block();
398 UpdateLast(block);
399
400 if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
401 // Bytes were reclaimed from the previous block.
402 allocated_ -= free_result.size();
403 }
404
405 if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
406 ++unpoisoned_;
407 if (unpoisoned_ >= kPoisonInterval) {
408 block->Poison();
409 unpoisoned_ = 0;
410 }
411 }
412
413 RecycleBlock(*block);
414}
415
416template <typename BlockType>
417bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
418 BlockType* block = FromUsableSpace(ptr);
419
420 // Neighboring blocks may be merged when resizing.
421 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
422 ReserveBlock(*next);
423 }
424
425 size_t old_size = block->OuterSize();
426 if (!block->Resize(new_size).ok()) {
427 return false;
428 }
429 allocated_ -= old_size;
430 allocated_ += block->OuterSize();
431 UpdateLast(block);
432
433 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
434 RecycleBlock(*next);
435 }
436
437 return true;
438}
439
440template <typename BlockType>
442 const void* ptr) const {
443 // Handle types not related to a block first.
444 if (info_type == InfoType::kCapacity) {
445 return Layout(capacity_);
446 }
447 // Get a block from the given pointer.
448 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
449 return Status::NotFound();
450 }
451 const auto* block = BlockType::FromUsableSpace(ptr);
452 if (!block->IsValid()) {
453 return Status::DataLoss();
454 }
455 if (block->IsFree()) {
457 }
458 if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
459 if (info_type == InfoType::kRequestedLayoutOf) {
460 return block->RequestedLayout();
461 }
462 }
463 switch (info_type) {
464 case InfoType::kUsableLayoutOf:
465 return Layout(block->InnerSize(), BlockType::kAlignment);
466 case InfoType::kAllocatedLayoutOf:
467 return Layout(block->OuterSize(), BlockType::kAlignment);
468 case InfoType::kRecognizes:
469 return Layout();
470 case InfoType::kCapacity:
471 case InfoType::kRequestedLayoutOf:
472 default:
473 return Status::Unimplemented();
474 }
475}
476
477template <typename BlockType>
479 Fragmentation fragmentation;
480 for (auto block : blocks()) {
481 if (block->IsFree()) {
482 fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
483 }
484 }
485 return fragmentation;
486}
487
488template <typename BlockType>
489template <typename Ptr>
490internal::copy_const_ptr_t<Ptr, BlockType*>
492 if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
493 if constexpr (Hardening::kIncludesBasicChecks) {
494 CrashOnOutOfRange(ptr);
495 }
496 return nullptr;
497 }
498 auto* block = BlockType::FromUsableSpace(ptr);
499 if (!block->IsValid()) {
500 if constexpr (Hardening::kIncludesBasicChecks) {
501 block->CheckInvariants();
502 }
503 return nullptr;
504 }
505 return block;
506}
507
508template <typename BlockType>
509void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
510 BlockType* next = block->Next();
511 if (next == nullptr) {
512 last_ = block;
513 } else if (next->Next() == nullptr) {
514 last_ = next;
515 }
516}
517
518} // namespace pw::allocator
Definition: allocator.h:42
Definition: result.h:145
constexpr bool ok() const
Definition: result.h:451
static constexpr Status DataLoss()
Definition: status.h:316
static constexpr Status Unimplemented()
Definition: status.h:280
static constexpr Status FailedPrecondition()
Definition: status.h:243
static constexpr Status NotFound()
Definition: status.h:190
Definition: block_allocator.h:107
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:330
size_t DoGetAllocated() const override
Definition: block_allocator.h:213
std::optional< Fragmentation > DoMeasureFragmentation() const override
Definition: block_allocator.h:162
Fragmentation MeasureFragmentation() const
Returns fragmentation information for the block allocator's memory region.
Definition: block_allocator.h:478
virtual size_t DoGetMaxAllocatable()=0
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:491
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:237
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:372
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:385
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:417
void Init(ByteSpan region)
Definition: block_allocator.h:307
void Init(BlockType *begin)
Definition: block_allocator.h:314
virtual void Flush()
Definition: block_allocator.h:259
size_t GetMaxAllocatable()
Definition: block_allocator.h:153
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:245
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:301
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:441
Definition: result.h:116
Definition: capability.h:65
Definition: detailed_block.h:88
Definition: layout.h:58
Definition: block_allocator.h:46
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Definition: block_allocator.h:91
#define PW_ALLOCATOR_BLOCK_POISON_INTERVAL
Definition: config.h:30
Definition: fragmentation.h:46