C/C++ API Reference
Loading...
Searching...
No Matches
block_allocator.h
1// Copyright 2024 The Pigweed Authors
2//
3// Licensed under the Apache License, Version 2.0 (the "License"); you may not
4// use this file except in compliance with the License. You may obtain a copy of
5// the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
11// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
12// License for the specific language governing permissions and limitations under
13// the License.
14#pragma once
15
16#include <cstddef>
17#include <optional>
18
19#include "pw_allocator/allocator.h"
20#include "pw_allocator/block/basic.h"
21#include "pw_allocator/block/iterable.h"
22#include "pw_allocator/block/poisonable.h"
23#include "pw_allocator/block/result.h"
24#include "pw_allocator/block/with_layout.h"
25#include "pw_allocator/capability.h"
26#include "pw_allocator/config.h"
27#include "pw_allocator/fragmentation.h"
28#include "pw_allocator/hardening.h"
29#include "pw_assert/assert.h"
30#include "pw_bytes/span.h"
31#include "pw_result/result.h"
32#include "pw_status/status.h"
33
34namespace pw::allocator {
35namespace internal {
36
 public:
  // Not copyable or movable.
  // NOTE(review): the class declaration line and the deleted copy/move
  // *constructors* appear to have been dropped by the doc extraction; only
  // the deleted assignment operators survived. Restore from upstream.
  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;

 protected:
  /// Returns the set of capabilities shared by all block allocators,
  /// additionally including `kImplementsGetRequestedLayout` when `BlockType`
  /// records the requested layout (see the definition below).
  template <typename BlockType>
  static constexpr Capabilities GetCapabilities();

  /// Constructs the base allocator with the given capability set.
  constexpr explicit GenericBlockAllocator(Capabilities capabilities)
      : pw::Allocator(capabilities) {}

  /// Crashes with an informational message that a given block is still in use,
  /// i.e. was never deallocated. Called from the destructor in hardened
  /// builds.
  [[noreturn]] static void CrashOnAllocated(const void* allocated);

  /// Crashes with an informational message that a given pointer does not fall
  /// within this allocator's memory region.
  [[noreturn]] static void CrashOnOutOfRange(const void* freed);

  /// Crashes with an informational message that a given block was freed twice.
  [[noreturn]] static void CrashOnDoubleFree(const void* freed);
};
75
76} // namespace internal
77
78namespace test {
79
80// Forward declaration for friending.
81template <typename, size_t>
83
84} // namespace test
85
87
template <typename BlockType_>
// NOTE(review): the extraction dropped the class declaration line here
// (upstream: `class BlockAllocator : public internal::GenericBlockAllocator`)
// as well as the private `using Base = ...;` alias referenced below.
 private:

 public:
  using BlockType = BlockType_;
  using Range = typename BlockType::Range;

  /// Capabilities reported by this allocator, derived from `BlockType`.
  static constexpr Capabilities kCapabilities =
      Base::GetCapabilities<BlockType>();
  /// Poison every Nth freed block; 0 disables poisoning (see config.h).
  static constexpr size_t kPoisonInterval = PW_ALLOCATOR_BLOCK_POISON_INTERVAL;

  ~BlockAllocator() override;

  /// Returns a `Range` of blocks tracking the memory of this allocator.
  Range blocks() const { return Range(first_); }

  /// Sets the memory region to be used by this allocator by creating a single
  /// block spanning `region` and delegating to `Init(BlockType*)`.
  void Init(ByteSpan region);

  // NOTE(review): a public `GetMaxAllocatable()` wrapper was dropped by the
  // extraction here (the symbol index below still references it).

 protected:
  constexpr explicit BlockAllocator() : Base(kCapabilities) {}

  /// @copydoc Allocator::Allocate
  void* DoAllocate(Layout layout) override;

  /// @copydoc Allocator::Deallocate
  void DoDeallocate(void* ptr) override;

  /// @copydoc Allocator::Resize
  bool DoResize(void* ptr, size_t new_size) override;

  /// Returns the total outer size of all currently allocated blocks.
  size_t DoGetAllocated() const override { return allocated_; }

  /// Returns fragmentation information for the allocator's memory region.
  std::optional<Fragmentation> DoMeasureFragmentation() const override;

  /// @copydoc Allocator::GetInfo
  Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;

  /// Sets the blocks to be used by this allocator, recording the first and
  /// last blocks, total capacity, and recycling any free blocks.
  void Init(BlockType* begin);

  /// Returns the block associated with a pointer to its usable space, or
  /// nullptr (after optionally crashing) if the pointer is out of range or
  /// the block is invalid. Const-ness of the result follows `Ptr`.
  template <typename Ptr>
  internal::copy_const_ptr_t<Ptr, BlockType*> FromUsableSpace(Ptr ptr) const;

  /// Frees the given block, merging with free neighbors, updating accounting,
  /// and periodically poisoning freed blocks.
  virtual void DeallocateBlock(BlockType*&& block);

 private:

  // Let unit tests call internal methods in order to "preallocate" blocks.
  template <typename, size_t>
  friend class test::BlockAllocatorTest;

  /// Derived classes report the largest allocatable size.
  virtual size_t DoGetMaxAllocatable() = 0;

  // NOTE(review): the pure-virtual `ChooseBlock(Layout)` declaration was
  // dropped by the extraction here; `DoAllocate` below calls it.

  /// Called before a free block is modified or consumed; derived allocators
  /// remove it from their bookkeeping. Default is a no-op.
  virtual void ReserveBlock(BlockType&) {}

  /// Called when a block becomes free; derived allocators add it to their
  /// bookkeeping. Default is a no-op.
  virtual void RecycleBlock(BlockType&) {}

  /// Hook for derived allocators to apply deferred work. Default is a no-op.
  virtual void Flush() {}

  /// Returns whether the previous block exists and is free.
  static bool PrevIsFree(const BlockType* block) {
    auto* prev = block->Prev();
    return prev != nullptr && prev->IsFree();
  }

  /// Returns whether the next block exists and is free.
  static bool NextIsFree(const BlockType* block) {
    auto* next = block->Next();
    return next != nullptr && next->IsFree();
  }

  /// Keeps `last_` pointing at the final block after a split or merge.
  void UpdateLast(BlockType* block);

  // Accounting and the range of blocks managed by this allocator.
  size_t capacity_ = 0;          // Sum of all blocks' outer sizes.
  size_t allocated_ = 0;         // Outer bytes currently allocated.
  BlockType* first_ = nullptr;   // First block in the range.
  BlockType* last_ = nullptr;    // Last block in the range.
  uint16_t unpoisoned_ = 0;      // Frees since the last poisoned block.
};
270
272
273// Template method implementations
274
275namespace internal {
276
277template <typename BlockType>
278constexpr Capabilities GenericBlockAllocator::GetCapabilities() {
279 Capabilities common = kImplementsGetUsableLayout |
280 kImplementsGetAllocatedLayout | kImplementsGetCapacity |
281 kImplementsRecognizes;
282 if constexpr (has_layout_v<BlockType>) {
283 return common | kImplementsGetRequestedLayout;
284 } else {
285 return common;
286 }
287}
288
289} // namespace internal
290
291template <typename BlockType>
292BlockAllocator<BlockType>::~BlockAllocator() {
293 if constexpr (Hardening::kIncludesRobustChecks) {
294 for (auto* block : blocks()) {
295 if (!block->IsFree()) {
296 CrashOnAllocated(block);
297 }
298 }
299 }
300}
301
template <typename BlockType>
// NOTE(review): the signature line was dropped by the extraction; per the
// symbol index this is `void BlockAllocator<BlockType>::Init(ByteSpan region) {`.
  // Create a single block spanning the entire region; creation must succeed.
  Result<BlockType*> result = BlockType::Init(region);
  PW_ASSERT(result.ok());
  // Delegate to the block-pointer overload to record the range and capacity.
  Init(*result);
}
308
309template <typename BlockType>
310void BlockAllocator<BlockType>::Init(BlockType* begin) {
311 if constexpr (Hardening::kIncludesRobustChecks) {
312 PW_ASSERT(begin != nullptr);
313 PW_ASSERT(begin->Prev() == nullptr);
314 }
315 first_ = begin;
316 for (auto* block : blocks()) {
317 last_ = block;
318 capacity_ += block->OuterSize();
319 if (block->IsFree()) {
320 RecycleBlock(*block);
321 }
322 }
323}
324
template <typename BlockType>
// NOTE(review): the signature line was dropped by the extraction; per the
// symbol index this is
// `void* BlockAllocator<BlockType>::DoAllocate(Layout layout) {`.
  if (capacity_ == 0) {
    // Not initialized.
    return nullptr;
  }

  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(last_->Next() == nullptr);
  }
  // `ChooseBlock` is the derived allocator's selection strategy; it returns
  // an allocated block or a failed result.
  auto result = ChooseBlock(layout);
  if (!result.ok()) {
    // No valid block for request.
    return nullptr;
  }
  BlockType* block = result.block();
  allocated_ += block->OuterSize();
  // Account for side effects the allocation had on the previous block.
  switch (result.prev()) {
    case BlockResultPrev::kSplitNew:
      // New free blocks may be created when allocating.
      RecycleBlock(*(block->Prev()));
      break;
    case BlockResultPrev::kResizedLarger:
      // Extra bytes may be appended to the previous block.
      allocated_ += result.size();
      break;
    case BlockResultPrev::kUnchanged:
    case BlockResultPrev::kResizedSmaller:
      break;
  }
  // A trailing remainder becomes a new free block.
  if (result.next() == BlockResultNext::kSplitNew) {
    RecycleBlock(*(block->Next()));
  }

  UpdateLast(block);
  if constexpr (Hardening::kIncludesDebugChecks) {
    PW_ASSERT(block <= last_);
  }

  return block->UsableSpace();
}
366
template <typename BlockType>
// NOTE(review): the signature line was dropped by the extraction; per the
// symbol index this is
// `void BlockAllocator<BlockType>::DoDeallocate(void* ptr) {`.
  BlockType* block = FromUsableSpace(ptr);
  // NOTE(review): when basic hardening checks are disabled, `FromUsableSpace`
  // returns nullptr instead of crashing, and `block->IsFree()` below would
  // dereference null — confirm against upstream whether a null check belongs
  // here.
  if (block->IsFree()) {
    // Freeing an already-free block is a double free: crash in hardened
    // builds, otherwise ignore the request.
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnDoubleFree(block);
    } else {
      return;
    }
  }
  DeallocateBlock(std::move(block));
}
379
template <typename BlockType>
// NOTE(review): the signature line was dropped by the extraction; per the
// symbol index this is
// `void BlockAllocator<BlockType>::DeallocateBlock(BlockType*&& block) {`.
  // Neighboring blocks may be merged when freeing, so withdraw any free
  // neighbors from the derived allocator's bookkeeping first.
  if (auto* prev = block->Prev(); prev != nullptr && prev->IsFree()) {
    ReserveBlock(*prev);
  }
  if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
    ReserveBlock(*next);
  }

  // Free the block and merge it with its neighbors, if possible.
  allocated_ -= block->OuterSize();
  auto free_result = BlockType::Free(std::move(block));
  block = free_result.block();
  UpdateLast(block);

  if (free_result.prev() == BlockResultPrev::kResizedSmaller) {
    // Bytes were reclaimed from the previous block.
    allocated_ -= free_result.size();
  }

  // Poison every `kPoisonInterval`-th freed block (presumably to detect
  // modification after free — see PW_ALLOCATOR_BLOCK_POISON_INTERVAL).
  if constexpr (is_poisonable_v<BlockType> && kPoisonInterval != 0) {
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      block->Poison();
      unpoisoned_ = 0;
    }
  }

  // Return the (possibly merged) free block to the derived allocator.
  RecycleBlock(*block);
}
411
412template <typename BlockType>
413bool BlockAllocator<BlockType>::DoResize(void* ptr, size_t new_size) {
414 BlockType* block = FromUsableSpace(ptr);
415
416 // Neighboring blocks may be merged when resizing.
417 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
418 ReserveBlock(*next);
419 }
420
421 size_t old_size = block->OuterSize();
422 if (!block->Resize(new_size).ok()) {
423 return false;
424 }
425 allocated_ -= old_size;
426 allocated_ += block->OuterSize();
427 UpdateLast(block);
428
429 if (auto* next = block->Next(); next != nullptr && next->IsFree()) {
430 RecycleBlock(*next);
431 }
432
433 return true;
434}
435
template <typename BlockType>
// NOTE(review): the first half of the signature was dropped by the
// extraction; per the symbol index this is
// `Result<Layout> BlockAllocator<BlockType>::DoGetInfo(InfoType info_type,`.
                                             const void* ptr) const {
  // Handle types not related to a block first.
  if (info_type == InfoType::kCapacity) {
    return Layout(capacity_);
  }
  // Get a block from the given pointer.
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    return Status::NotFound();
  }
  const auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    return Status::DataLoss();
  }
  if (block->IsFree()) {
    // NOTE(review): the statement in this branch was dropped by the
    // extraction; upstream returns `Status::FailedPrecondition()` here (the
    // symbol index below still references that status).
  }
  // Blocks that record requested layouts can report them directly.
  if constexpr (kCapabilities.has(kImplementsGetRequestedLayout)) {
    if (info_type == InfoType::kRequestedLayoutOf) {
      return block->RequestedLayout();
    }
  }
  // Map the remaining info types onto the block's measured sizes.
  switch (info_type) {
    case InfoType::kUsableLayoutOf:
      return Layout(block->InnerSize(), BlockType::kAlignment);
    case InfoType::kAllocatedLayoutOf:
      return Layout(block->OuterSize(), BlockType::kAlignment);
    case InfoType::kRecognizes:
      return Layout();
    case InfoType::kCapacity:
    case InfoType::kRequestedLayoutOf:
    default:
      return Status::Unimplemented();
  }
}
472
template <typename BlockType>
// NOTE(review): the declarator was dropped by the extraction; per the symbol
// index this is
// `std::optional<Fragmentation> BlockAllocator<BlockType>::DoMeasureFragmentation()`
    const {
  Fragmentation fragmentation;
  // Fold each free block's inner size (in alignment units) into the metric.
  for (auto block : blocks()) {
    if (block->IsFree()) {
      fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
    }
  }
  return fragmentation;
}
484
template <typename BlockType>
template <typename Ptr>
internal::copy_const_ptr_t<Ptr, BlockType*>
// NOTE(review): the declarator line was dropped by the extraction; per the
// symbol index this is
// `BlockAllocator<BlockType>::FromUsableSpace(Ptr ptr) const {`.
  // Reject pointers outside the usable span covered by this allocator's
  // first and last blocks.
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      CrashOnOutOfRange(ptr);
    }
    return nullptr;
  }
  auto* block = BlockType::FromUsableSpace(ptr);
  if (!block->IsValid()) {
    if constexpr (Hardening::kIncludesBasicChecks) {
      // Presumably crashes/asserts identifying the violated invariant —
      // confirm against BasicBlock::CheckInvariants.
      block->CheckInvariants();
    }
    return nullptr;
  }
  return block;
}
504
505template <typename BlockType>
506void BlockAllocator<BlockType>::UpdateLast(BlockType* block) {
507 BlockType* next = block->Next();
508 if (next == nullptr) {
509 last_ = block;
510 } else if (next->Next() == nullptr) {
511 last_ = next;
512 }
513}
514
515} // namespace pw::allocator
Definition: allocator.h:42
Definition: result.h:145
constexpr bool ok() const
Definition: result.h:451
static constexpr Status DataLoss()
Definition: status.h:316
static constexpr Status Unimplemented()
Definition: status.h:280
static constexpr Status FailedPrecondition()
Definition: status.h:243
static constexpr Status NotFound()
Definition: status.h:190
Definition: block_allocator.h:98
void * DoAllocate(Layout layout) override
Definition: block_allocator.h:326
Range blocks() const
Returns a Range of blocks tracking the memory of this allocator.
Definition: block_allocator.h:113
size_t DoGetAllocated() const override
Definition: block_allocator.h:159
virtual size_t DoGetMaxAllocatable()=0
internal::copy_const_ptr_t< Ptr, BlockType * > FromUsableSpace(Ptr ptr) const
Definition: block_allocator.h:488
virtual BlockResult< BlockType > ChooseBlock(Layout layout)=0
std::optional< Fragmentation > DoMeasureFragmentation() const override
Returns fragmentation information for the allocator's memory region.
Definition: block_allocator.h:474
virtual void ReserveBlock(BlockType &)
Definition: block_allocator.h:223
void DoDeallocate(void *ptr) override
Definition: block_allocator.h:368
virtual void DeallocateBlock(BlockType *&&block)
Definition: block_allocator.h:381
bool DoResize(void *ptr, size_t new_size) override
Definition: block_allocator.h:413
void Init(ByteSpan region)
Definition: block_allocator.h:303
void Init(BlockType *begin)
Definition: block_allocator.h:310
virtual void Flush()
Definition: block_allocator.h:245
size_t GetMaxAllocatable()
Definition: block_allocator.h:144
virtual void RecycleBlock(BlockType &)
Definition: block_allocator.h:231
Result< Layout > DoGetInfo(InfoType info_type, const void *ptr) const override
Definition: block_allocator.h:437
Definition: result.h:106
Definition: capability.h:65
Definition: detailed_block.h:88
Definition: layout.h:64
Definition: block_allocator.h:46
static void CrashOnOutOfRange(const void *freed)
static void CrashOnDoubleFree(const void *freed)
Crashes with an informational message that a given block was freed twice.
static void CrashOnAllocated(const void *allocated)
Definition: block_allocator.h:82
#define PW_ALLOCATOR_BLOCK_POISON_INTERVAL
Definition: config.h:30
Definition: fragmentation.h:47
void AddFragment(size_t inner_size)
Includes a region of free memory in the fragmentation calculation.