block.h
// Copyright 2020 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <limits>
#include <new>
#include <type_traits>
#include <utility>

#include "pw_allocator/allocator.h"
#include "pw_allocator/buffer.h"
#include "pw_bytes/alignment.h"
#include "pw_bytes/span.h"
#include "pw_result/result.h"
#include "pw_status/status.h"
#include "pw_status/status_with_size.h"
#include "pw_status/try.h"

namespace pw::allocator {
namespace internal {

// Types of corrupted blocks, and functions to crash with an error message
// corresponding to each type. These functions are implemented independent of
// any template parameters to allow them to use `PW_CHECK`.
enum BlockStatus {
  kValid,
  kMisaligned,
  kPrevMismatched,
  kNextMismatched,
  kPoisonCorrupted,
};
void CrashMisaligned(uintptr_t addr);
void CrashNextMismatched(uintptr_t addr, uintptr_t next_prev);
void CrashPrevMismatched(uintptr_t addr, uintptr_t prev_next);
void CrashPoisonCorrupted(uintptr_t addr);

}  // namespace internal

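/// Memory region with links to adjacent blocks.
///
/// A block stores the offsets to its previous and next neighbors, in units of
/// `kAlignment`, instead of storing its size directly; see `OuterSize()`. A
/// minimal usage sketch, assuming a caller-provided buffer (the buffer name
/// and size below are illustrative, not part of this API):
///
/// @code{.cpp}
///   std::array<std::byte, 1024> buffer;
///   Result<Block<>*> result = Block<>::Init(ByteSpan(buffer));
///   if (result.ok()) {
///     Block<>* block = result.value();
///     // `block` spans the aligned portion of `buffer` and is marked as the
///     // last (and only) block in the region.
///   }
/// @endcode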
template <typename OffsetType = uintptr_t,
          size_t kAlign = alignof(OffsetType),
          bool kCanPoison = false>
class Block {
 public:
  using offset_type = OffsetType;
  static_assert(std::is_unsigned_v<offset_type>,
                "offset type must be unsigned");

  static constexpr size_t kAlignment = std::max(kAlign, alignof(offset_type));
  static constexpr size_t kBlockOverhead = AlignUp(sizeof(Block), kAlignment);

  // No copy or move.
  Block(const Block& other) = delete;
  Block& operator=(const Block& other) = delete;

  /// Creates the first block for a given memory region.
  static Result<Block*> Init(ByteSpan region);

  /// Returns the block associated with a pointer to its usable space.
  template <int&... DeducedTypesOnly,
            typename PtrType,
            bool is_const_ptr = std::is_const_v<std::remove_pointer_t<PtrType>>,
            typename BytesPtr =
                std::conditional_t<is_const_ptr, const std::byte*, std::byte*>,
            typename BlockPtr =
                std::conditional_t<is_const_ptr, const Block*, Block*>>
  static BlockPtr FromUsableSpace(PtrType usable_space) {
    // Perform memory laundering to prevent the compiler from tracing the
    // memory used to store the block and to avoid optimizations that may be
    // invalidated by the use of placement-new to create blocks in `Init` and
    // `Split`.
    auto* bytes = reinterpret_cast<BytesPtr>(usable_space);
    return std::launder(reinterpret_cast<BlockPtr>(bytes - kBlockOverhead));
  }
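
  // For any block `b`, `FromUsableSpace(b->UsableSpace())` yields `b` again.
  // A hedged round-trip sketch (caller-side; names are illustrative):
  //
  //   std::byte* data = block->UsableSpace();
  //   auto* same = Block<>::FromUsableSpace(data);  // `same` == `block`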

  size_t OuterSize() const { return next_ * kAlignment; }

  size_t InnerSize() const { return OuterSize() - kBlockOverhead; }

  size_t RequestedSize() const { return InnerSize() - padding_; }

  std::byte* UsableSpace() {
    return std::launder(reinterpret_cast<std::byte*>(this) + kBlockOverhead);
  }
  const std::byte* UsableSpace() const {
    return std::launder(reinterpret_cast<const std::byte*>(this) +
                        kBlockOverhead);
  }

  static Status AllocFirst(Block*& block, Layout layout);

  StatusWithSize CanAllocLast(Layout layout) const;

  static Status AllocLast(Block*& block, Layout layout);

  static void Free(Block*& block);

  static Status Resize(Block*& block, size_t new_inner_size);

  Block* Next() const;

  static Block* NextBlock(const Block* block) {
    return block == nullptr ? nullptr : block->Next();
  }

  Block* Prev() const;

  static Block* PrevBlock(const Block* block) {
    return block == nullptr ? nullptr : block->Prev();
  }

  /// Returns the current alignment of a block.
  size_t Alignment() const { return Used() ? info_.alignment : 1; }

  bool Used() const { return info_.used; }

  bool Last() const { return info_.last; }

  /// Marks this block as in use.
  void MarkUsed() { info_.used = 1; }

  /// Marks this block as free.
  void MarkFree() { info_.used = 0; }

  /// Marks this block as the last one in the chain.
  void MarkLast() { info_.last = 1; }

  /// Clears the last bit from this block.
  void ClearLast() { info_.last = 0; }

  void Poison(bool should_poison = true);

  /// Checks if a block is valid.
  bool IsValid() const { return CheckStatus() == internal::kValid; }

  /// Crashes with an informational message if a block is invalid.
  void CrashIfInvalid() const;

 private:
  static constexpr uint8_t kPoisonByte = 0xf7;

  static ByteSpan AsBytes(Block*&& block);

  static Block* AsBlock(size_t prev_outer_size, ByteSpan bytes);

  Block(size_t prev_outer_size, size_t outer_size);

  internal::BlockStatus CheckStatus() const;

  static void ShiftBlock(Block*& block, size_t pad_size);

  static Block* Split(Block*& block, size_t new_inner_size);

  static void MergeNext(Block*& block);

  bool CheckPoison() const;

  offset_type prev_ = 0;

  offset_type next_ = 0;

  struct {
    uint16_t used : 1;
    uint16_t poisoned : 1;
    uint16_t last : 1;
    uint16_t alignment : 13;
  } info_;

  uint16_t padding_ = 0;

 public:
  // Associated types.

  /// Iterates forward through a list of blocks.
  class Iterator final {
   public:
    Iterator(Block* block) : block_(block) {}
    Iterator& operator++() {
      block_ = Block::NextBlock(block_);
      return *this;
    }
    bool operator!=(const Iterator& other) { return block_ != other.block_; }
    Block* operator*() { return block_; }

   private:
    Block* block_;
  };

  /// Iterates backward through a list of blocks.
  class ReverseIterator final {
   public:
    ReverseIterator(Block* block) : block_(block) {}
    ReverseIterator& operator++() {
      block_ = Block::PrevBlock(block_);
      return *this;
    }
    bool operator!=(const ReverseIterator& other) {
      return block_ != other.block_;
    }
    Block* operator*() { return block_; }

   private:
    Block* block_;
  };

  /// A forward range of blocks.
  class Range final {
   public:
    /// Constructs a range including `begin` and all valid following blocks.
    explicit Range(Block* begin) : begin_(begin), end_(nullptr) {}

    /// Constructs a range of blocks from `begin` to `end`, inclusively.
    Range(Block* begin_inclusive, Block* end_inclusive)
        : begin_(begin_inclusive), end_(end_inclusive->Next()) {}

    Iterator& begin() { return begin_; }
    Iterator& end() { return end_; }

   private:
    Iterator begin_;
    Iterator end_;
  };

  /// A reverse range of blocks.
  class ReverseRange final {
   public:
    /// Constructs a range including `rbegin` and all valid preceding blocks.
    explicit ReverseRange(Block* rbegin) : begin_(rbegin), end_(nullptr) {}

    /// Constructs a range of blocks from `rbegin` to `rend`, inclusively.
    ReverseRange(Block* rbegin_inclusive, Block* rend_inclusive)
        : begin_(rbegin_inclusive), end_(rend_inclusive->Prev()) {}

    ReverseIterator& begin() { return begin_; }
    ReverseIterator& end() { return end_; }

   private:
    ReverseIterator begin_;
    ReverseIterator end_;
  };
} __attribute__((packed, aligned(kAlign)));
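
// A hedged sketch of iterating a region's blocks with `Range` (assumes
// `first` came from a successful `Block<>::Init` call):
//
//   for (Block<>* block : Block<>::Range(first)) {
//     // Visits `first` and every following block through the last one.
//   }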

// Public template method implementations.

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Result<Block<OffsetType, kAlign, kCanPoison>*>
Block<OffsetType, kAlign, kCanPoison>::Init(ByteSpan region) {
  Result<ByteSpan> result = GetAlignedSubspan(region, kAlignment);
  if (!result.ok()) {
    return result.status();
  }
  region = result.value();
  if (region.size() < kBlockOverhead) {
    return Status::ResourceExhausted();
  }
  if (std::numeric_limits<OffsetType>::max() < region.size() / kAlignment) {
    return Status::OutOfRange();
  }
  Block* block = AsBlock(0, region);
  block->MarkLast();
  return block;
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Status Block<OffsetType, kAlign, kCanPoison>::AllocFirst(Block*& block,
                                                         Layout layout) {
  if (block == nullptr || layout.size() == 0) {
    return Status::InvalidArgument();
  }
  block->CrashIfInvalid();
  if (block->Used()) {
    return Status::FailedPrecondition();
  }
  Block* prev = block->Prev();

  // Check if padding will be needed at the front to align the usable space.
  size_t alignment = std::max(layout.alignment(), kAlignment);
  auto addr = reinterpret_cast<uintptr_t>(block->UsableSpace());
  size_t pad_size = AlignUp(addr, alignment) - addr;
  bool should_poison = block->info_.poisoned;
  if (pad_size == 0) {
    // No padding needed.
  } else if (pad_size > kBlockOverhead) {
    // Enough padding is needed that it can be split off as a new free block.
  } else if (prev == nullptr) {
    // First block; increase padding to at least the minimum block size.
    pad_size += AlignUp(kBlockOverhead, alignment);
  }

  // Make sure everything fits.
  size_t inner_size = AlignUp(layout.size(), kAlignment);
  if (block->InnerSize() < pad_size + inner_size) {
    return Status::OutOfRange();
  }
  ShiftBlock(block, pad_size);

  // If the block is large enough to have a trailing block, split it to get the
  // requested usable space.
  if (inner_size + kBlockOverhead <= block->InnerSize()) {
    Block* trailing = Split(block, inner_size);
    trailing->Poison(should_poison);
  }

  block->MarkUsed();
  block->info_.alignment = alignment;
  block->padding_ = block->InnerSize() - layout.size();
  return OkStatus();
}
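
// A hedged usage sketch for `AllocFirst` (illustrative; assumes `block` came
// from `Init`, and that `Layout` is constructed from a size and an alignment):
//
//   Block<>* block = Block<>::Init(ByteSpan(buffer)).value();
//   if (Block<>::AllocFirst(block, Layout(64, 8)).ok()) {
//     void* ptr = block->UsableSpace();  // 64 usable bytes, 8-byte aligned.
//   }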

template <typename OffsetType, size_t kAlign, bool kCanPoison>
StatusWithSize Block<OffsetType, kAlign, kCanPoison>::CanAllocLast(
    Layout layout) const {
  if (Used()) {
    return StatusWithSize::FailedPrecondition();
  }
  CrashIfInvalid();
  // Find the last address that is aligned and is followed by enough space for
  // block overhead and the requested size.
  if (InnerSize() < layout.size()) {
    return StatusWithSize::OutOfRange();
  }
  auto addr = reinterpret_cast<uintptr_t>(UsableSpace());
  size_t alignment = std::max(layout.alignment(), kAlignment);
  uintptr_t next = AlignDown(addr + (InnerSize() - layout.size()), alignment);

  if (next < addr) {
    // Requested size does not fit.
    return StatusWithSize::ResourceExhausted();
  }
  return StatusWithSize(next - addr);
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Status Block<OffsetType, kAlign, kCanPoison>::AllocLast(Block*& block,
                                                        Layout layout) {
  if (block == nullptr || layout.size() == 0) {
    return Status::InvalidArgument();
  }
  size_t pad_size = 0;
  PW_TRY_ASSIGN(pad_size, block->CanAllocLast(layout));
  ShiftBlock(block, pad_size);

  block->MarkUsed();
  block->info_.alignment = layout.alignment();
  block->padding_ = block->InnerSize() - layout.size();
  return OkStatus();
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
void Block<OffsetType, kAlign, kCanPoison>::ShiftBlock(Block*& block,
                                                       size_t pad_size) {
  if (pad_size == 0) {
    return;
  }

  // Check if this is the first block.
  Block* prev = block->Prev();
  if (prev == nullptr && pad_size <= kBlockOverhead) {
    pad_size += kBlockOverhead;
  }

  bool should_poison = block->info_.poisoned;
  if (pad_size <= kBlockOverhead) {
    // The small amount of padding can be appended to the previous block.
    Block::Resize(prev, prev->InnerSize() + pad_size).IgnoreError();
    prev->padding_ += pad_size;
    block = prev->Next();

  } else if (kBlockOverhead < pad_size) {
    // Split the large padding off the front.
    Block* leading = block;
    block = Split(leading, pad_size - kBlockOverhead);
    leading->Poison(should_poison);
  }
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
void Block<OffsetType, kAlign, kCanPoison>::Free(Block*& block) {
  if (block == nullptr) {
    return;
  }
  block->MarkFree();
  Block* prev = block->Prev();
  if (prev != nullptr && !prev->Used()) {
    MergeNext(prev);
    block = prev;
  }
  MergeNext(block);
}
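
// Freeing coalesces a block with its free neighbors, so `block` may refer to
// a larger block afterwards; a hedged sketch (illustrative):
//
//   Block<>::Free(block);
//   // `block->Used()` is now false, and `block` may have absorbed the
//   // adjacent free blocks on either side.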

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Status Block<OffsetType, kAlign, kCanPoison>::Resize(Block*& block,
                                                     size_t new_inner_size) {
  if (block == nullptr) {
    return Status::InvalidArgument();
  }
  if (!block->Used()) {
    return Status::FailedPrecondition();
  }
  size_t requested_inner_size = new_inner_size;
  size_t old_inner_size = block->InnerSize();
  size_t alignment = block->Alignment();
  uint16_t padding = block->padding_;
  new_inner_size = AlignUp(new_inner_size, kAlignment);

  if (old_inner_size == new_inner_size) {
    return OkStatus();
  }

  // Treat the block as free and try to combine it with the next block. At most
  // one free block is expected to follow this block.
  block->MarkFree();
  MergeNext(block);

  Status status = OkStatus();

  if (block->InnerSize() < new_inner_size) {
    // The merged block is too small for the resized block.
    status = Status::OutOfRange();

  } else if (new_inner_size + kBlockOverhead <= block->InnerSize()) {
    // There is enough room after the resized block for another trailing block.
    bool should_poison = block->info_.poisoned;
    Block* trailing = Split(block, new_inner_size);
    trailing->Poison(should_poison);
  }

  if (status.ok()) {
    padding = block->InnerSize() - requested_inner_size;
  } else if (block->InnerSize() != old_inner_size) {
    // Restore the original block on failure.
    Split(block, old_inner_size);
  }
  block->MarkUsed();
  block->info_.alignment = alignment;
  block->padding_ = padding;
  return status;
}
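
// A hedged sketch of growing an allocation in place with `Resize` (assumes
// `block` is in use and enough free space follows it):
//
//   if (Block<>::Resize(block, block->InnerSize() + 16).ok()) {
//     // The usable space grew by at least 16 bytes without moving the block.
//   }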

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Block<OffsetType, kAlign, kCanPoison>*
Block<OffsetType, kAlign, kCanPoison>::Split(Block*& block,
                                             size_t new_inner_size) {
  size_t prev_outer_size = block->prev_ * kAlignment;
  size_t outer_size1 = new_inner_size + kBlockOverhead;
  bool is_last = block->Last();
  ByteSpan bytes = AsBytes(std::move(block));
  Block* block1 = AsBlock(prev_outer_size, bytes.subspan(0, outer_size1));
  Block* block2 = AsBlock(outer_size1, bytes.subspan(outer_size1));
  if (is_last) {
    block2->MarkLast();
  } else {
    block2->Next()->prev_ = block2->next_;
  }
  block = std::move(block1);
  return block2;
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
void Block<OffsetType, kAlign, kCanPoison>::MergeNext(Block*& block) {
  if (block->Last()) {
    return;
  }
  Block* next = block->Next();
  if (block->Used() || next->Used()) {
    return;
  }
  size_t prev_outer_size = block->prev_ * kAlignment;
  bool is_last = next->Last();
  ByteSpan prev_bytes = AsBytes(std::move(block));
  ByteSpan next_bytes = AsBytes(std::move(next));
  size_t outer_size = prev_bytes.size() + next_bytes.size();
  std::byte* merged = ::new (prev_bytes.data()) std::byte[outer_size];
  block = AsBlock(prev_outer_size, ByteSpan(merged, outer_size));
  if (is_last) {
    block->MarkLast();
  } else {
    block->Next()->prev_ = block->next_;
  }
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Block<OffsetType, kAlign, kCanPoison>*
Block<OffsetType, kAlign, kCanPoison>::Next() const {
  uintptr_t addr = Last() ? 0 : reinterpret_cast<uintptr_t>(this) + OuterSize();
  // See the note in `FromUsableSpace` about memory laundering.
  return std::launder(reinterpret_cast<Block*>(addr));
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Block<OffsetType, kAlign, kCanPoison>*
Block<OffsetType, kAlign, kCanPoison>::Prev() const {
  uintptr_t addr =
      (prev_ == 0) ? 0
                   : reinterpret_cast<uintptr_t>(this) - (prev_ * kAlignment);
  // See the note in `FromUsableSpace` about memory laundering.
  return std::launder(reinterpret_cast<Block*>(addr));
}

// Private template method implementations.

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Block<OffsetType, kAlign, kCanPoison>::Block(size_t prev_outer_size,
                                             size_t outer_size) {
  prev_ = prev_outer_size / kAlignment;
  next_ = outer_size / kAlignment;
  info_.used = 0;
  info_.poisoned = 0;
  info_.last = 0;
  info_.alignment = kAlignment;
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
ByteSpan Block<OffsetType, kAlign, kCanPoison>::AsBytes(Block*&& block) {
  size_t block_size = block->OuterSize();
  std::byte* bytes = ::new (std::move(block)) std::byte[block_size];
  return {bytes, block_size};
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
Block<OffsetType, kAlign, kCanPoison>*
Block<OffsetType, kAlign, kCanPoison>::AsBlock(size_t prev_outer_size,
                                               ByteSpan bytes) {
  return ::new (bytes.data()) Block(prev_outer_size, bytes.size());
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
void Block<OffsetType, kAlign, kCanPoison>::Poison(bool should_poison) {
  if constexpr (kCanPoison) {
    if (!Used() && should_poison) {
      std::memset(UsableSpace(), kPoisonByte, InnerSize());
      info_.poisoned = true;
    }
  }
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
bool Block<OffsetType, kAlign, kCanPoison>::CheckPoison() const {
  if constexpr (kCanPoison) {
    if (!info_.poisoned) {
      return true;
    }
    const std::byte* begin = UsableSpace();
    return std::all_of(begin, begin + InnerSize(), [](std::byte b) {
      return static_cast<uint8_t>(b) == kPoisonByte;
    });
  } else {
    return true;
  }
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
internal::BlockStatus Block<OffsetType, kAlign, kCanPoison>::CheckStatus()
    const {
  if (reinterpret_cast<uintptr_t>(this) % kAlignment != 0) {
    return internal::kMisaligned;
  }
  if (!Last() && (this >= Next() || this != Next()->Prev())) {
    return internal::kNextMismatched;
  }
  if (Prev() && (this <= Prev() || this != Prev()->Next())) {
    return internal::kPrevMismatched;
  }
  if (!Used() && !CheckPoison()) {
    return internal::kPoisonCorrupted;
  }
  return internal::kValid;
}

template <typename OffsetType, size_t kAlign, bool kCanPoison>
void Block<OffsetType, kAlign, kCanPoison>::CrashIfInvalid() const {
  uintptr_t addr = reinterpret_cast<uintptr_t>(this);
  switch (CheckStatus()) {
    case internal::kValid:
      break;
    case internal::kMisaligned:
      internal::CrashMisaligned(addr);
      break;
    case internal::kNextMismatched:
      internal::CrashNextMismatched(
          addr, reinterpret_cast<uintptr_t>(Next()->Prev()));
      break;
    case internal::kPrevMismatched:
      internal::CrashPrevMismatched(
          addr, reinterpret_cast<uintptr_t>(Prev()->Next()));
      break;
    case internal::kPoisonCorrupted:
      internal::CrashPoisonCorrupted(addr);
      break;
  }
}

}  // namespace pw::allocator