block_allocator.h
// Copyright 2024 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once

#include <cstddef>
#include <cstdint>
#include <type_traits>

#include "pw_allocator/allocator.h"
#include "pw_allocator/block.h"
#include "pw_allocator/capability.h"
#include "pw_allocator/fragmentation.h"
#include "pw_assert/assert.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"

namespace pw::allocator {
namespace internal {

/// Block-independent base class of ``BlockAllocator``.
///
/// This class contains static methods which do not depend on the template
/// parameters of ``BlockAllocator``, which allows them to be defined in a
/// separate source file. It should not be used directly; use
/// ``BlockAllocator`` or one of its specializations instead.
class GenericBlockAllocator : public Allocator {
 public:
  static constexpr Capabilities kCapabilities =
      kImplementsGetRequestedLayout | kImplementsGetUsableLayout |
      kImplementsGetAllocatedLayout | kImplementsGetCapacity |
      kImplementsRecognizes;

  // Not copyable or movable.
  GenericBlockAllocator(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator& operator=(const GenericBlockAllocator&) = delete;
  GenericBlockAllocator(GenericBlockAllocator&&) = delete;
  GenericBlockAllocator& operator=(GenericBlockAllocator&&) = delete;

 protected:
  constexpr GenericBlockAllocator() : Allocator(kCapabilities) {}

  /// Crashes with an informational message that a given block is allocated.
  static void CrashOnAllocated(void* allocated);

  /// Crashes with an informational message that a given pointer does not
  /// belong to this allocator.
  static void CrashOnInvalidFree(void* freed);

  /// Crashes with an informational message that a given block was freed twice.
  static void CrashOnDoubleFree(void* freed);
};

}  // namespace internal

/// A memory allocator that uses a list of blocks.
///
/// This class does not implement `ChooseBlock` and cannot be used directly.
/// Instead, use one of its specializations.
template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
class BlockAllocator : public internal::GenericBlockAllocator {
 public:
  using BlockType = Block<OffsetType, kPoisonInterval, kAlign>;
  using Range = typename BlockType::Range;

  /// Constexpr constructor. Callers must explicitly call `Init`.
  constexpr BlockAllocator() : internal::GenericBlockAllocator() {}

  /// Non-constexpr constructor that automatically calls `Init` on the given
  /// memory region.
  explicit BlockAllocator(ByteSpan region) : BlockAllocator() { Init(region); }

  ~BlockAllocator() override { Reset(); }

  /// Returns a ``Range`` of blocks tracking the memory of this allocator.
  Range blocks() const;

  /// Sets the memory region to be used by this allocator. The region must be
  /// valid as an argument to `BlockType::Init`.
  void Init(ByteSpan region);

  /// Sets the blocks to be used by this allocator, starting from `begin`.
  void Init(BlockType* begin) { return Init(begin, nullptr); }

  /// Sets the blocks to be used by this allocator, from `begin` to `end`. If
  /// `end` is null, the sequence ends at the last block reachable from
  /// `begin`.
  virtual void Init(BlockType* begin, BlockType* end);

  /// Returns fragmentation information for the block allocator's memory
  /// region.
  Fragmentation MeasureFragmentation() const;

  /// Resets the allocator to an uninitialized state. At the time of the call,
  /// there MUST NOT be any outstanding allocated blocks from this allocator.
  void Reset();

 protected:
  using ReverseRange = typename BlockType::ReverseRange;

  /// Returns a ``ReverseRange`` of blocks tracking the memory of this
  /// allocator.
  ReverseRange rblocks();

  /// Returns the block associated with a pointer to its usable space, or an
  /// error if the pointer does not fall within this allocator's memory
  /// region.
  template <typename PtrType,
            typename BlockPtrType = std::conditional_t<
                std::is_const_v<std::remove_pointer_t<PtrType>>,
                const BlockType*,
                BlockType*>>
  Result<BlockPtrType> FromUsableSpace(PtrType ptr) const;

  /// Indicates that a block will no longer be free. Does nothing by default;
  /// derived classes may override it to do additional bookkeeping.
  virtual void ReserveBlock(BlockType*) {}

  /// Indicates that a block is now free. Does nothing by default; derived
  /// classes may override it to do additional bookkeeping.
  virtual void RecycleBlock(BlockType*) {}

 private:
  /// Virtual `Allocate` implementation; see `Allocator`.
  void* DoAllocate(Layout layout) override;

  /// Virtual `Deallocate` implementation; see `Allocator`.
  void DoDeallocate(void* ptr) override;

  /// Virtual `Deallocate` implementation that ignores the layout hint.
  void DoDeallocate(void* ptr, Layout) override { DoDeallocate(ptr); }

  /// Virtual `Resize` implementation; see `Allocator`.
  bool DoResize(void* ptr, size_t new_size) override;

  /// Virtual layout-info query implementation; see `Allocator`.
  Result<Layout> DoGetInfo(InfoType info_type, const void* ptr) const override;

  /// Selects a free block to allocate from. This method represents the
  /// allocator-specific strategy of choosing the best block for a request.
  /// If the returned block is null, the allocation fails.
  virtual BlockType* ChooseBlock(Layout layout) = 0;

  /// Returns whether the previous block exists and is free.
  bool PrevIsFree(const BlockType* block) const {
    return block->Prev() != nullptr && !block->Prev()->Used();
  }

  /// Returns whether the next block exists and is free.
  bool NextIsFree(const BlockType* block) const {
    return !block->Last() && !block->Next()->Used();
  }

  /// Ensures the pointer to the last block is correct after the given block
  /// is split or merged.
  void UpdateLast(BlockType* block);

  // Represents the range of blocks for this allocator.
  size_t capacity_ = 0;
  BlockType* first_ = nullptr;
  BlockType* last_ = nullptr;
  uint16_t unpoisoned_ = 0;
};
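
// Example (illustrative sketch, not part of this header): a concrete
// allocator derives from `BlockAllocator` and implements `ChooseBlock` to
// pick a free block for each request. The class name below is hypothetical,
// and the use of `BlockType::AllocFirst` to carve the chosen block is an
// assumption about the Block API; see Pigweed's shipped specializations
// (first-fit, last-fit, best-fit, etc.) for the supported strategies.
//
//   template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
//   class ExampleFirstFitAllocator
//       : public BlockAllocator<OffsetType, kPoisonInterval, kAlign> {
//    private:
//     using Base = BlockAllocator<OffsetType, kPoisonInterval, kAlign>;
//     using BlockType = typename Base::BlockType;
//
//     BlockType* ChooseBlock(Layout layout) override {
//       // Return the first block that can be split to satisfy the layout.
//       for (auto* block : Base::blocks()) {
//         if (BlockType::AllocFirst(block, layout.size(), layout.alignment())
//                 .ok()) {
//           return block;
//         }
//       }
//       return nullptr;
//     }
//   };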

// Template method implementations

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Range
BlockAllocator<OffsetType, kPoisonInterval, kAlign>::blocks() const {
  return Range(first_);
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
typename BlockAllocator<OffsetType, kPoisonInterval, kAlign>::ReverseRange
BlockAllocator<OffsetType, kPoisonInterval, kAlign>::rblocks() {
  while (last_ != nullptr && !last_->Last()) {
    last_ = last_->Next();
  }
  return ReverseRange(last_);
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
    ByteSpan region) {
  Result<BlockType*> result = BlockType::Init(region);
  Init(*result, nullptr);
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Init(
    BlockType* begin, BlockType* end) {
  PW_ASSERT(begin != nullptr);
  if (end == nullptr) {
    end = begin;
    while (!end->Last()) {
      end = end->Next();
    }
  } else {
    PW_ASSERT(begin < end);
    end->MarkLast();
  }
  first_ = begin;
  last_ = end;
  for (const auto& block : blocks()) {
    capacity_ += block->OuterSize();
  }
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::Reset() {
  for (auto* block : blocks()) {
    if (block->Used()) {
      CrashOnAllocated(block);
    }
  }
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void* BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoAllocate(
    Layout layout) {
  BlockType* block = ChooseBlock(layout);
  if (block == nullptr) {
    return nullptr;
  }
  UpdateLast(block);
  return block->UsableSpace();
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoDeallocate(
    void* ptr) {
  auto result = FromUsableSpace(ptr);
  if (!result.ok()) {
    CrashOnInvalidFree(ptr);
  }
  BlockType* block = *result;
  if (!block->Used()) {
    CrashOnDoubleFree(block);
  }

  // Neighboring blocks may be merged when freeing.
  if (PrevIsFree(block)) {
    ReserveBlock(block->Prev());
  }
  if (NextIsFree(block)) {
    ReserveBlock(block->Next());
  }

  // Free the block and merge it with its neighbors, if possible.
  BlockType::Free(block);
  UpdateLast(block);

  if constexpr (kPoisonInterval != 0) {
    ++unpoisoned_;
    if (unpoisoned_ >= kPoisonInterval) {
      block->Poison();
      unpoisoned_ = 0;
    }
  }

  RecycleBlock(block);
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
bool BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoResize(
    void* ptr, size_t new_size) {
  auto result = FromUsableSpace(ptr);
  if (!result.ok()) {
    return false;
  }
  BlockType* block = *result;

  // Neighboring blocks may be merged when resizing.
  if (NextIsFree(block)) {
    ReserveBlock(block->Next());
  }

  if (!BlockType::Resize(block, new_size).ok()) {
    return false;
  }
  UpdateLast(block);

  if (NextIsFree(block)) {
    RecycleBlock(block->Next());
  }

  return true;
}
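
// Resizing example (illustrative, not part of this header): callers go
// through the public `Allocator::Resize` API, which lands in `DoResize`
// above. The call succeeds only if the block can shrink, or can grow into an
// adjacent free block; otherwise the original allocation is left untouched
// and the caller may fall back to allocate-copy-free. The `allocator` object
// below is assumed to be an initialized BlockAllocator specialization.
//
//   void* ptr = allocator.Allocate(pw::allocator::Layout(64, 4));
//   if (ptr != nullptr && !allocator.Resize(ptr, 128)) {
//     // Could not resize in place; `ptr` still refers to 64 usable bytes.
//   }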

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
Result<Layout> BlockAllocator<OffsetType, kPoisonInterval, kAlign>::DoGetInfo(
    InfoType info_type, const void* ptr) const {
  // Handle types not related to a block first.
  if (info_type == InfoType::kCapacity) {
    return Layout(capacity_, kAlign);
  }
  // Get a block from the given pointer.
  auto result = FromUsableSpace(ptr);
  if (!result.ok()) {
    return Status::NotFound();
  }
  const BlockType* block = result.value();
  if (!block->Used()) {
    return Status::FailedPrecondition();
  }
  switch (info_type) {
    case InfoType::kRequestedLayoutOf:
      return Layout(block->RequestedSize(), block->Alignment());
    case InfoType::kUsableLayoutOf:
      return Layout(block->InnerSize(), block->Alignment());
    case InfoType::kAllocatedLayoutOf:
      return Layout(block->OuterSize(), block->Alignment());
    case InfoType::kRecognizes:
      return Layout();
    case InfoType::kCapacity:
    default:
      return Status::Unimplemented();
  }
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
Fragmentation
BlockAllocator<OffsetType, kPoisonInterval, kAlign>::MeasureFragmentation()
    const {
  Fragmentation fragmentation;
  for (auto block : blocks()) {
    if (!block->Used()) {
      fragmentation.AddFragment(block->InnerSize() / BlockType::kAlignment);
    }
  }
  return fragmentation;
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
template <typename PtrType, typename BlockPtrType>
Result<BlockPtrType>
BlockAllocator<OffsetType, kPoisonInterval, kAlign>::FromUsableSpace(
    PtrType ptr) const {
  if (ptr < first_->UsableSpace() || last_->UsableSpace() < ptr) {
    return Status::OutOfRange();
  }
  BlockPtrType block = BlockType::FromUsableSpace(ptr);
  block->CrashIfInvalid();
  return block;
}

template <typename OffsetType, uint16_t kPoisonInterval, uint16_t kAlign>
void BlockAllocator<OffsetType, kPoisonInterval, kAlign>::UpdateLast(
    BlockType* block) {
  if (block->Last()) {
    last_ = block;
  } else if (block->Next()->Last()) {
    last_ = block->Next();
  }
}

}  // namespace pw::allocator
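
Usage sketch (not part of the header above): a block allocator is typically
constructed over a caller-provided byte buffer and then used through the
generic `pw::allocator::Allocator` interface. The concrete specialization,
header path, template argument, and sizes below are assumptions for
illustration; any subclass that implements `ChooseBlock` is used the same way.

#include <array>
#include <cstddef>
#include <cstdint>

#include "pw_allocator/first_fit_block_allocator.h"  // assumed specialization

namespace {

// Backing storage; any suitably sized byte region works.
std::array<std::byte, 0x1000> buffer;

// A concrete BlockAllocator specialization, initialized over the buffer.
pw::allocator::FirstFitBlockAllocator<uint16_t> allocator(buffer);

}  // namespace

void UseAllocator() {
  // Allocate and release memory through the generic Allocator interface.
  constexpr pw::allocator::Layout kLayout(64, alignof(std::max_align_t));
  void* ptr = allocator.Allocate(kLayout);
  if (ptr != nullptr) {
    allocator.Deallocate(ptr);
  }
}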