22#include "pw_allocator/allocator.h"
23#include "pw_allocator/buffer.h"
24#include "pw_bytes/alignment.h"
25#include "pw_bytes/span.h"
26#include "pw_result/result.h"
27#include "pw_status/status.h"
28#include "pw_status/try.h"
30namespace pw::allocator {

// Out-of-line crash hooks invoked when block validation fails. Each takes
// the address of the bad block; the "mismatched" variants also take the
// conflicting neighbor link so the crash message can show both values.
// NOTE(review): call sites below qualify these as internal::Crash* — the
// `namespace internal {` line appears to be missing from this extraction;
// confirm against the original header.
43void CrashMisaligned(uintptr_t addr);
44void CrashNextMismatched(uintptr_t addr, uintptr_t next_prev);
45void CrashPrevMismatched(uintptr_t addr, uintptr_t prev_next);
46void CrashPoisonCorrupted(uintptr_t addr);
// A doubly-linked memory block header for a free-list style allocator.
// Blocks store their neighbor links as offsets scaled by kAlignment, so
// OffsetType bounds the maximum region size this block type can manage.
// NOTE(review): the `class Block {` line and several member lines are
// missing from this extraction; code below is preserved byte-for-byte.
105template <
typename OffsetType = uintptr_t,
106 size_t kAlign =
alignof(OffsetType),
107 bool kCanPoison =
false>
110 using offset_type = OffsetType;
// Offsets are stored unscaled-unsigned; a signed type would halve range.
111 static_assert(std::is_unsigned_v<offset_type>,
112 "offset type must be unsigned");
// Effective alignment: caller-requested, but never below what the offset
// field itself requires.
114 static constexpr size_t kAlignment = std::max(kAlign,
alignof(offset_type));
// Size of this header, rounded up so usable space stays aligned.
115 static constexpr size_t kBlockOverhead =
AlignUp(
sizeof(
Block), kAlignment);
// Blocks live inside the managed region and are never copied.
119 Block& operator=(
const Block& other) =
delete;
// Creates the first (single, free) block spanning a memory region.
137 static Result<Block*> Init(ByteSpan region);
// Recovers the Block header from a pointer previously returned by
// UsableSpace(); const-ness of the input propagates to the result.
146 template <
int&... DeducedTypesOnly,
148 bool is_const_ptr = std::is_const_v<std::remove_pointer_t<PtrType>>,
150 std::conditional_t<is_const_ptr, const std::byte*, std::byte*>,
152 std::conditional_t<is_const_ptr, const Block*, Block*>>
157 auto* bytes =
reinterpret_cast<BytesPtr
>(usable_space);
// std::launder is required: the header was placement-new'd here.
158 return std::launder(
reinterpret_cast<BlockPtr
>(bytes - kBlockOverhead));
// Usable bytes in this block, i.e. everything after the header.
165 size_t InnerSize()
const {
return OuterSize() - kBlockOverhead; }
// Pointer to the usable (caller-visible) space, just past the header.
173 return std::launder(
reinterpret_cast<std::byte*
>(
this) + kBlockOverhead);
175 const std::byte* UsableSpace()
const {
176 return std::launder(
reinterpret_cast<const std::byte*
>(
this) +
// Allocation entry points; `block` is passed by reference because these
// may replace it with a split/shifted block. Definitions appear below.
205 static Status AllocFirst(Block*& block, Layout layout);
229 StatusWithSize CanAllocLast(Layout layout)
const;
257 static Status AllocLast(Block*& block, Layout layout);
265 static void Free(Block*& block);
289 static Status Resize(Block*& block,
size_t new_inner_size);
// Null-safe neighbor accessors.
312 return block ==
nullptr ? nullptr : block->
Next();
321 return block ==
nullptr ? nullptr : block->
Prev();
// Alignment recorded at allocation time; free blocks report 1.
325 size_t Alignment()
const {
return Used() ? info_.alignment : 1; }
330 bool Used()
const {
return info_.used; }
338 bool Last()
const {
return info_.last; }
// Fills free-block payload with kPoisonByte (only when kCanPoison).
362 void Poison(
bool should_poison =
true);
370 bool IsValid()
const {
return CheckStatus() == internal::kValid; }
375 void CrashIfInvalid()
const;
// Sentinel written into poisoned free blocks and verified on reuse.
378 static constexpr uint8_t kPoisonByte = 0xf7;
// Reinterprets a block's storage as raw bytes / a byte span as a block.
381 static ByteSpan AsBytes(
Block*&& block);
384 static Block* AsBlock(
size_t prev_outer_size, ByteSpan bytes);
386 Block(
size_t prev_outer_size,
size_t outer_size);
393 internal::BlockStatus CheckStatus()
const;
// Internal helpers for alignment padding, splitting, and coalescing.
409 static void ShiftBlock(Block*& block,
size_t pad_size);
421 static Block* Split(Block*& block,
size_t new_inner_size);
430 static void MergeNext(Block*& block);
434 bool CheckPoison()
const;
// Neighbor offsets, stored divided by kAlignment to extend range.
438 offset_type prev_ = 0;
443 offset_type next_ = 0;
// Bit-packed status flags; alignment is capped at 13 bits.
457 uint16_t poisoned : 1;
459 uint16_t alignment : 13;
// Bytes of slack between requested size and aligned inner size.
464 uint16_t padding_ = 0;
// Forward iterator: advances via the next-offset chain.
477 block_ = Block::NextBlock(block_);
480 bool operator!=(
const Iterator& other) {
return block_ != other.block_; }
481 Block* operator*() {
return block_; }
// Reverse iterator: walks the prev-offset chain.
495 block_ = Block::PrevBlock(block_);
499 return block_ != other.block_;
501 Block* operator*() {
return block_; }
// Range helpers for range-for over a block chain (end is exclusive,
// hence the ->Next()/->Prev() on the inclusive bound).
516 explicit Range(
Block* begin) : begin_(begin), end_(nullptr) {}
520 : begin_(begin_inclusive), end_(end_inclusive->Next()) {}
522 Iterator& begin() {
return begin_; }
544 : begin_(rbegin_inclusive), end_(rend_inclusive->Prev()) {}
// Packed so the header is exactly kBlockOverhead before rounding;
// aligned so UsableSpace() lands on a kAlign boundary.
553} __attribute__((packed, aligned(kAlign)));
// Creates the first block for a memory region: aligns the region, verifies
// it can hold at least one header, verifies offsets are representable in
// OffsetType, then constructs a single free block spanning it.
// NOTE(review): the signature line, the alignment call producing `result`,
// and the tail of the function are missing from this extraction.
557template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
558Result<Block<OffsetType, kAlign, kCanPoison>*>
562 return result.status();
564 region = result.value();
// Too small to hold even an empty block's header.
565 if (region.size() < kBlockOverhead) {
566 return Status::ResourceExhausted();
// Offsets are stored as (bytes / kAlignment); reject regions whose scaled
// size would overflow OffsetType.
568 if (std::numeric_limits<OffsetType>::max() < region.size() / kAlignment) {
569 return Status::OutOfRange();
// prev_outer_size == 0 marks this as the first block in the region.
571 Block* block = AsBlock(0, region);
// Allocates `layout` from `block` using a first-fit policy: computes the
// padding needed to align the usable space, shifts/splits accordingly, and
// marks the block used. `block` may be replaced by the resulting block.
// NOTE(review): the signature line, several `if` condition lines, and the
// final return are missing from this extraction.
576template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
579 if (block ==
nullptr || layout.size() == 0) {
580 return Status::InvalidArgument();
// Presumably reached when the block is already in use — the condition
// line is missing; confirm against the original.
584 return Status::FailedPrecondition();
// Never align below the block's own minimum alignment.
589 size_t alignment = std::max(layout.alignment(), kAlignment);
590 auto addr =
reinterpret_cast<uintptr_t
>(block->
UsableSpace());
// Bytes needed to bring the usable space up to `alignment`.
591 size_t pad_size =
AlignUp(addr, alignment) - addr;
// Preserve poison state across the shift/split below.
592 bool should_poison = block->info_.poisoned;
595 }
else if (pad_size > kBlockOverhead) {
597 }
// First block in the region: padding cannot be pushed into a previous
// block, so grow the pad to make room for a full leading block header.
else if (prev ==
nullptr) {
599 pad_size +=
AlignUp(kBlockOverhead, alignment);
603 size_t inner_size =
AlignUp(layout.size(), kAlignment);
604 if (block->
InnerSize() < pad_size + inner_size) {
605 return Status::OutOfRange();
607 ShiftBlock(block, pad_size);
// Split off a trailing free block if there is room for one.
611 if (inner_size + kBlockOverhead <= block->InnerSize()) {
612 Block* trailing = Split(block, inner_size);
613 trailing->
Poison(should_poison);
// Record alignment and the slack between aligned and requested size so
// RequestedSize() can be reconstructed later.
617 block->info_.alignment = alignment;
618 block->padding_ = block->
InnerSize() - layout.size();
// Checks whether `layout` can be allocated from the tail of this block,
// returning the leading pad size on success. Non-mutating.
// NOTE(review): the signature line and two `if` condition lines (the
// FailedPrecondition and ResourceExhausted guards) are missing here.
622template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
626 return StatusWithSize::FailedPrecondition();
631 if (InnerSize() < layout.size()) {
632 return StatusWithSize::OutOfRange();
634 auto addr =
reinterpret_cast<uintptr_t
>(UsableSpace());
635 size_t alignment = std::max(layout.alignment(), kAlignment);
// Highest aligned address at which the allocation still fits.
636 uintptr_t next =
AlignDown(addr + (InnerSize() - layout.size()), alignment);
640 return StatusWithSize::ResourceExhausted();
// Size of the leading gap the caller would shift past.
642 return StatusWithSize(next - addr);
// Allocates `layout` from the tail of `block` (best for high alignment):
// validates, shifts the block forward past the pad, and marks it used.
// NOTE(review): the signature line and the CanAllocLast call that computes
// `pad_size` are missing from this extraction.
645template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
648 if (block ==
nullptr || layout.size() == 0) {
649 return Status::InvalidArgument();
653 ShiftBlock(block, pad_size);
// Unlike AllocFirst, the requested alignment is stored directly.
656 block->info_.alignment = layout.alignment();
657 block->padding_ = block->
InnerSize() - layout.size();
// Moves the start of `block` forward by `pad_size` bytes. Small pads are
// absorbed into the previous block's padding; larger pads become a new
// leading free block via Split.
// NOTE(review): the signature line and parts of the branch bodies are
// missing from this extraction.
661template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
669 Block* prev = block->
Prev();
// No previous block to absorb a sub-header pad: widen the pad so it can
// hold a full leading block instead.
670 if (prev ==
nullptr && pad_size <= kBlockOverhead) {
671 pad_size += kBlockOverhead;
674 bool should_poison = block->info_.poisoned;
675 if (pad_size <= kBlockOverhead) {
// Pad fits in the previous block's slack; grow it and re-derive this
// block from the (now larger) previous block.
678 prev->padding_ += pad_size;
679 block = prev->
Next();
681 }
else if (kBlockOverhead < pad_size) {
// Pad is big enough to stand alone as a free block.
683 Block* leading = block;
684 block = Split(leading, pad_size - kBlockOverhead);
685 leading->Poison(should_poison);
// Frees `block`, coalescing it with a free previous neighbor (and,
// presumably, a free next neighbor — those lines are missing here).
// NOTE(review): the signature, MarkFree call, and MergeNext calls are
// missing from this extraction.
689template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
691 if (block ==
nullptr) {
// Coalesce backwards: if the previous block is free, merge into it.
696 if (prev !=
nullptr && !prev->
Used()) {
// Resizes a used block in place: shrinking splits off a trailing free
// block; growing merges with a free next neighbor when possible.
// NOTE(review): the signature line, the MergeNext/grow path, and the
// alignment snapshot preceding line `747` are missing from this extraction.
703template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
705 size_t new_inner_size) {
706 if (block ==
nullptr) {
707 return Status::InvalidArgument();
// Only used blocks may be resized.
709 if (!block->
Used()) {
710 return Status::FailedPrecondition();
// Keep the caller's exact request so padding_ can be recomputed.
712 size_t requested_inner_size = new_inner_size;
713 size_t old_inner_size = block->
InnerSize();
715 uint16_t padding = block->padding_;
716 new_inner_size =
AlignUp(new_inner_size, kAlignment);
// Already the right (aligned) size — nothing to do.
718 if (old_inner_size == new_inner_size) {
729 if (block->
InnerSize() < new_inner_size) {
731 status = Status::OutOfRange();
733 }
else if (new_inner_size + kBlockOverhead <= block->InnerSize()) {
// Enough surplus for a standalone trailing block: split it off and
// restore its poison state.
735 bool should_poison = block->info_.poisoned;
736 Block* trailing = Split(block, new_inner_size);
737 trailing->
Poison(should_poison);
741 padding = block->
InnerSize() - requested_inner_size;
742 }
else if (block->
InnerSize() != old_inner_size) {
// Couldn't grow enough: roll back to the original size.
744 Split(block, old_inner_size);
747 block->info_.alignment = alignment;
748 block->padding_ = padding;
// Splits `block` so its inner size becomes `new_inner_size`, creating a
// second block from the remainder; `block` is rebound to the first piece
// and the new second block is returned (return line missing here).
// NOTE(review): the signature line and the is_last/MarkLast handling
// around line `765` are missing from this extraction.
752template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
755 size_t new_inner_size) {
// Offsets are stored scaled by kAlignment; recover byte counts.
756 size_t prev_outer_size = block->prev_ * kAlignment;
757 size_t outer_size1 = new_inner_size + kBlockOverhead;
758 bool is_last = block->
Last();
// Reconstitute the storage as bytes, then carve two new headers into it.
759 ByteSpan bytes = AsBytes(std::move(block));
760 Block* block1 = AsBlock(prev_outer_size, bytes.subspan(0, outer_size1));
761 Block* block2 = AsBlock(outer_size1, bytes.subspan(outer_size1));
// Fix the back-link of the block following the split pair.
765 block2->
Next()->prev_ = block2->next_;
767 block = std::move(block1);
// Coalesces `block` with its next neighbor when both are free, rebuilding
// a single header over the combined storage.
// NOTE(review): the null/Last guard before line `776`, the early-return
// body at `777`, and the is_last/MarkLast handling are missing here.
771template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
772void Block<OffsetType, kAlign, kCanPoison>::MergeNext(Block*& block) {
776 Block* next = block->Next();
// Only two *free* blocks may be merged.
777 if (block->Used() || next->Used()) {
780 size_t prev_outer_size = block->prev_ * kAlignment;
781 bool is_last = next->Last();
// End both blocks' lifetimes and view their combined storage as bytes.
782 ByteSpan prev_bytes = AsBytes(std::move(block));
783 ByteSpan next_bytes = AsBytes(std::move(next));
784 size_t outer_size = prev_bytes.size() + next_bytes.size();
// Placement-new a byte array to (re)start an object lifetime over the
// merged storage before constructing the new header.
785 std::byte* merged = ::new (prev_bytes.data()) std::
byte[outer_size];
786 block = AsBlock(prev_outer_size, ByteSpan(merged, outer_size));
// Fix the back-link of the block after the merged region.
790 block->Next()->prev_ = block->next_;
// Returns the next block in the chain, or null for the last block (the
// null result is encoded by computing address 0).
// NOTE(review): the `::Next() const {` signature tail is missing here.
794template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
795Block<OffsetType, kAlign, kCanPoison>*
797 uintptr_t addr = Last() ? 0 :
reinterpret_cast<uintptr_t
>(
this) + OuterSize();
// launder: the neighbor header was placement-new'd at this address.
799 return std::launder(
reinterpret_cast<Block*
>(addr));
// Returns the previous block, located prev_ * kAlignment bytes before
// this header; presumably null when prev_ == 0 (the ternary's condition
// and first operand are missing from this extraction — confirm).
802template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
807 :
reinterpret_cast<uintptr_t
>(
this) - (prev_ * kAlignment);
809 return std::launder(
reinterpret_cast<Block*
>(addr));
// Constructor: records neighbor offsets scaled down by kAlignment and
// initializes the alignment field to the block's minimum.
// NOTE(review): the signature line and the used/last flag initialization
// are missing from this extraction.
814template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
817 prev_ = prev_outer_size / kAlignment;
818 next_ = outer_size / kAlignment;
822 info_.alignment = kAlignment;
// Ends the block's lifetime and returns its storage as a byte span.
// The placement-new of a std::byte array starts an object lifetime in the
// storage so it can legally be re-carved into new Block headers.
825template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
826ByteSpan Block<OffsetType, kAlign, kCanPoison>::AsBytes(Block*&& block) {
827 size_t block_size = block->OuterSize();
828 std::byte* bytes = ::new (std::move(block)) std::byte[block_size];
829 return {bytes, block_size};
// Constructs a Block header in-place at the start of `bytes` via
// placement new; the span's size becomes the block's outer size.
// NOTE(review): the `ByteSpan bytes)` parameter line and opening brace are
// missing from this extraction.
832template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
833Block<OffsetType, kAlign, kCanPoison>*
834Block<OffsetType, kAlign, kCanPoison>::AsBlock(
size_t prev_outer_size,
836 return ::new (bytes.data()) Block(prev_outer_size, bytes.size());
// Fills a free block's payload with kPoisonByte so later corruption can
// be detected by CheckPoison(). Compile-time no-op unless kCanPoison.
839template <typename OffsetType,
size_t kAlign,
bool kCanPoison>
840void Block<OffsetType, kAlign, kCanPoison>::Poison(
bool should_poison) {
841 if constexpr (kCanPoison) {
// Only free blocks are poisoned; used payloads belong to the caller.
842 if (!Used() && should_poison) {
843 std::memset(UsableSpace(), kPoisonByte, InnerSize());
844 info_.poisoned =
true;
// Verifies an un-poisoned or poisoned free block's payload: returns true
// when not poisoned (nothing to check — body line missing here), otherwise
// true only if every payload byte still equals kPoisonByte.
849template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
851 if constexpr (kCanPoison) {
852 if (!info_.poisoned) {
855 const std::byte* begin = UsableSpace();
856 return std::all_of(begin, begin + InnerSize(), [](std::byte b) {
857 return static_cast<uint8_t
>(b) == kPoisonByte;
// Classifies this block's integrity: checks header alignment, then that
// the next/prev links are mutually consistent, then (for free blocks)
// that any poison pattern is intact.
864template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
865internal::BlockStatus Block<OffsetType, kAlign, kCanPoison>::CheckStatus()
// Header must sit on a kAlignment boundary.
867 if (
reinterpret_cast<uintptr_t
>(
this) % kAlignment != 0) {
868 return internal::kMisaligned;
// Next block must lie after this one and point back to it.
870 if (!Last() && (
this >= Next() ||
this != Next()->Prev())) {
871 return internal::kNextMismatched;
// Previous block (if any) must lie before this one and point forward.
873 if (Prev() && (
this <= Prev() ||
this != Prev()->Next())) {
874 return internal::kPrevMismatched;
// Free blocks must still hold their poison pattern (if poisoned).
876 if (!Used() && !CheckPoison()) {
877 return internal::kPoisonCorrupted;
879 return internal::kValid;
// Dispatches to the matching crash hook when CheckStatus() reports a
// problem; valid blocks fall through (break lines missing here).
// NOTE(review): the signature line and per-case `break`s are missing from
// this extraction.
882template <
typename OffsetType,
size_t kAlign,
bool kCanPoison>
884 uintptr_t addr =
reinterpret_cast<uintptr_t
>(
this);
885 switch (CheckStatus()) {
886 case internal::kValid:
888 case internal::kMisaligned:
889 internal::CrashMisaligned(addr);
891 case internal::kNextMismatched:
// Report both this block's address and the inconsistent back-link.
892 internal::CrashNextMismatched(
893 addr,
reinterpret_cast<uintptr_t
>(Next()->Prev()));
895 case internal::kPrevMismatched:
896 internal::CrashPrevMismatched(
897 addr,
reinterpret_cast<uintptr_t
>(Prev()->Next()));
899 case internal::kPoisonCorrupted:
900 internal::CrashPoisonCorrupted(addr);
constexpr bool ok() const
Definition: status.h:156
constexpr void IgnoreError() const
Definition: status.h:221
Range(Block *begin_inclusive, Block *end_inclusive)
Constructs a range of blocks from begin to end, inclusively.
Definition: block.h:519
Range(Block *begin)
Constructs a range including begin and all valid following blocks.
Definition: block.h:516
ReverseRange(Block *rbegin)
Constructs a range including rbegin and all valid preceding blocks.
Definition: block.h:540
ReverseRange(Block *rbegin_inclusive, Block *rend_inclusive)
Constructs a range of blocks from rbegin to rend, inclusively.
Definition: block.h:543
bool IsValid() const
Checks if a block is valid.
Definition: block.h:370
StatusWithSize CanAllocLast(Layout layout) const
Definition: block.h:623
size_t OuterSize() const
Definition: block.h:162
std::byte * UsableSpace()
Definition: block.h:172
static void Free(Block *&block)
Definition: block.h:690
bool Used() const
Definition: block.h:330
static Status Resize(Block *&block, size_t new_inner_size)
Definition: block.h:704
static Block * PrevBlock(const Block *block)
Definition: block.h:320
size_t Alignment() const
Returns the current alignment of a block.
Definition: block.h:325
void MarkLast()
Marks this block as the last one in the chain.
Definition: block.h:347
void MarkFree()
Marks this block as free.
Definition: block.h:344
void MarkUsed()
Marks this block as in use.
Definition: block.h:341
static Status AllocLast(Block *&block, Layout layout)
Definition: block.h:646
static Block * NextBlock(const Block *block)
Definition: block.h:311
void ClearLast()
Clears the last bit from this block.
Definition: block.h:350
void Poison(bool should_poison=true)
Definition: block.h:840
Block * Prev() const
Definition: block.h:804
static Result< Block * > Init(ByteSpan region)
Creates the first block for a given memory region.
Definition: block.h:559
size_t RequestedSize() const
Definition: block.h:169
bool Last() const
Definition: block.h:338
Block * Next() const
Definition: block.h:796
void CrashIfInvalid() const
Crashes with an informational message if a block is invalid.
Definition: block.h:883
static Status AllocFirst(Block *&block, Layout layout)
Definition: block.h:577
static BlockPtr FromUsableSpace(PtrType usable_space)
Definition: block.h:153
size_t InnerSize() const
Definition: block.h:165
ByteSpan GetAlignedSubspan(ByteSpan bytes, size_t alignment)
constexpr size_t AlignDown(size_t value, size_t alignment)
Returns the value rounded down to the nearest multiple of alignment.
Definition: alignment.h:25
constexpr size_t AlignUp(size_t value, size_t alignment)
Returns the value rounded up to the nearest multiple of alignment.
Definition: alignment.h:38
constexpr Status OkStatus()
Definition: status.h:233