Diffstat (limited to 'pw_allocator/split_free_list_allocator_test.cc')
 pw_allocator/split_free_list_allocator_test.cc | 446
1 file changed, 273 insertions(+), 173 deletions(-)
diff --git a/pw_allocator/split_free_list_allocator_test.cc b/pw_allocator/split_free_list_allocator_test.cc
index 4551ec98e..c519849d6 100644
--- a/pw_allocator/split_free_list_allocator_test.cc
+++ b/pw_allocator/split_free_list_allocator_test.cc
@@ -15,305 +15,405 @@
#include "pw_allocator/split_free_list_allocator.h"
#include "gtest/gtest.h"
+#include "pw_allocator/allocator_testing.h"
+#include "pw_allocator/block.h"
#include "pw_bytes/alignment.h"
+#include "pw_bytes/span.h"
#include "pw_containers/vector.h"
namespace pw::allocator {
namespace {
-// Test fixture.
+// Test fixtures.
+
+// Size of the memory region to use in the tests below.
+static constexpr size_t kCapacity = 256;
+
+// Minimum size of a "large" allocation; allocations smaller than this size
+// are considered "small".
+static constexpr size_t kThreshold = 64;
+
+// A `SplitFreeListAllocator` that is automatically initialized on
+// construction.
+using BlockType = Block<uint16_t, kCapacity>;
+class SplitFreeListAllocatorWithBuffer
+ : public test::
+ WithBuffer<SplitFreeListAllocator<BlockType>, kCapacity, BlockType> {
+ public:
+ SplitFreeListAllocatorWithBuffer() {
+ EXPECT_EQ((*this)->Init(ByteSpan(this->data(), this->size()), kThreshold),
+ OkStatus());
+ }
+};
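The `WithBuffer` helper from `pw_allocator/allocator_testing.h` pairs an object with its backing storage. Its definition is not part of this change, but a minimal sketch of the shape these tests rely on (an illustrative assumption, not the real implementation) would be:

    #include <array>
    #include <cstddef>

    template <typename T, size_t kBufferSize, typename AlignType = std::byte>
    class WithBufferSketch {
     public:
      // Raw access to the backing storage, e.g. `allocator_.data()`.
      std::byte* data() { return buffer_.data(); }
      size_t size() const { return buffer_.size(); }

      // Forwards to the wrapped object, so `(*this)->Init(...)` above and
      // `allocator_->Allocate(...)` below reach the allocator itself.
      T* operator->() { return &obj_; }

     private:
      alignas(AlignType) std::array<std::byte, kBufferSize> buffer_;
      T obj_;
    };

This split is why the tests below mix `allocator_.data()`/`allocator_.size()` (the raw buffer) with `allocator_->...` (the allocator).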
-struct SplitFreeListAllocatorTest : ::testing::Test {
- alignas(16) std::array<std::byte, 256> buffer;
- SplitFreeListAllocator allocator;
+// Test case fixture that allows individual tests to cache allocations and
+// release them automatically on tear-down.
+class SplitFreeListAllocatorTest : public ::testing::Test {
+ protected:
+ static constexpr size_t kMaxSize = kCapacity - BlockType::kBlockOverhead;
+ static constexpr size_t kNumPtrs = 16;
void SetUp() override {
- allocator.Initialize(buffer.data(), buffer.size(), 64);
+ for (size_t i = 0; i < kNumPtrs; ++i) {
+ ptrs_[i] = nullptr;
+ }
+ }
+
+ // This method simply ensures the memory is usable by writing to it.
+ void UseMemory(void* ptr, size_t size) { memset(ptr, 0x5a, size); }
+
+ void TearDown() override {
+ for (size_t i = 0; i < kNumPtrs; ++i) {
+ if (ptrs_[i] != nullptr) {
+ // `SplitFreeListAllocator::Deallocate` doesn't actually use the layout,
+ // as the information it needs is encoded in the blocks.
+ allocator_->Deallocate(ptrs_[i], Layout::Of<void*>());
+ }
+ }
}
+
+ SplitFreeListAllocatorWithBuffer allocator_;
+
+ // Tests can store allocations in this array to have them automatically
+ // freed in `TearDown`, including on ASSERT failure. If pointers are manually
+ // deallocated, they should be set to null in the array.
+ void* ptrs_[kNumPtrs];
};
// Unit tests.
-TEST_F(SplitFreeListAllocatorTest, InitializeUnaligned) {
+TEST_F(SplitFreeListAllocatorTest, InitUnaligned) {
// The test fixture uses aligned memory to make it easier to reason about
- // allocations, but that isn't strictly required. Simply verify that a call to
- // `Initialize` with unaligned memory does not crash.
- alignas(16) std::array<std::byte, 256> buf;
- SplitFreeListAllocator unaligned;
- unaligned.Initialize(buf.data() + 1, buf.size() - 1, 64);
+ // allocations, but that isn't strictly required.
+ SplitFreeListAllocator<Block<>> unaligned;
+ ByteSpan bytes(allocator_.data(), allocator_.size());
+ EXPECT_EQ(unaligned.Init(bytes.subspan(1), kThreshold), OkStatus());
}
-TEST_F(SplitFreeListAllocatorTest, AllocateLargeDeallocate) {
- constexpr Layout layout = Layout::Of<std::byte[64]>();
- void* ptr = allocator.Allocate(layout);
- // Returned pointer should be from the beginning.
- EXPECT_EQ(ptr, buffer.data());
- allocator.Deallocate(ptr, layout);
+TEST_F(SplitFreeListAllocatorTest, AllocateLarge) {
+ constexpr Layout layout = Layout::Of<std::byte[kThreshold]>();
+ ptrs_[0] = allocator_->Allocate(layout);
+ ASSERT_NE(ptrs_[0], nullptr);
+ EXPECT_GE(ptrs_[0], allocator_.data());
+ EXPECT_LT(ptrs_[0], allocator_.data() + allocator_.size());
+ UseMemory(ptrs_[0], layout.size());
}
-TEST_F(SplitFreeListAllocatorTest, AllocateSmallDeallocate) {
+TEST_F(SplitFreeListAllocatorTest, AllocateSmall) {
// Returned pointer should not be from the beginning, but should still be in
// range. Exact pointer depends on allocator's minimum allocation size.
constexpr Layout layout = Layout::Of<uint8_t>();
- void* ptr = allocator.Allocate(layout);
- EXPECT_GT(ptr, buffer.data());
- EXPECT_LT(ptr, buffer.data() + buffer.size());
- allocator.Deallocate(ptr, layout);
+ ptrs_[0] = allocator_->Allocate(layout);
+ ASSERT_NE(ptrs_[0], nullptr);
+ EXPECT_GT(ptrs_[0], allocator_.data());
+ EXPECT_LT(ptrs_[0], allocator_.data() + allocator_.size());
+ UseMemory(ptrs_[0], layout.size());
}
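Taken together, `AllocateLarge` and `AllocateSmall` pin down the split strategy: requests of at least `kThreshold` bytes are served from the front of the region, smaller requests from the back. A hypothetical extra test (not part of this change) checking the relative placement directly might read:

    TEST_F(SplitFreeListAllocatorTest, LargeAllocationsSitBelowSmallOnes) {
      ptrs_[0] = allocator_->Allocate(Layout(kThreshold, 1));
      ptrs_[1] = allocator_->Allocate(Layout(1, 1));
      ASSERT_NE(ptrs_[0], nullptr);
      ASSERT_NE(ptrs_[1], nullptr);
      // Assumed invariant of the split free list: large requests come from
      // the low end of the buffer, small requests from the high end.
      EXPECT_LT(ptrs_[0], ptrs_[1]);
    }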
TEST_F(SplitFreeListAllocatorTest, AllocateTooLarge) {
- void* ptr = allocator.Allocate(Layout::Of<std::byte[512]>());
- EXPECT_EQ(ptr, nullptr);
+ ptrs_[0] = allocator_->Allocate(Layout::Of<std::byte[kCapacity * 2]>());
+ EXPECT_EQ(ptrs_[0], nullptr);
+}
+
+TEST_F(SplitFreeListAllocatorTest, AllocateLargeAlignment) {
+ constexpr size_t kSize = sizeof(uint32_t);
+ constexpr size_t kAlignment = 64;
+ ptrs_[0] = allocator_->Allocate(Layout(kSize, kAlignment));
+ ASSERT_NE(ptrs_[0], nullptr);
+ EXPECT_EQ(reinterpret_cast<uintptr_t>(ptrs_[0]) % kAlignment, 0U);
+ UseMemory(ptrs_[0], kSize);
+
+ ptrs_[1] = allocator_->Allocate(Layout(kSize, kAlignment));
+ ASSERT_NE(ptrs_[1], nullptr);
+ EXPECT_EQ(reinterpret_cast<uintptr_t>(ptrs_[1]) % kAlignment, 0U);
+ UseMemory(ptrs_[1], kSize);
+}
+
+TEST_F(SplitFreeListAllocatorTest, AllocateFromUnaligned) {
+ SplitFreeListAllocator<Block<>> unaligned;
+ ByteSpan bytes(allocator_.data(), allocator_.size());
+ ASSERT_EQ(unaligned.Init(bytes.subspan(1), kThreshold), OkStatus());
+
+ constexpr Layout layout = Layout::Of<std::byte[kThreshold + 8]>();
+ void* ptr = unaligned.Allocate(layout);
+ ASSERT_NE(ptr, nullptr);
+ UseMemory(ptr, layout.size());
+ unaligned.Deallocate(ptr, layout);
+}
+
+TEST_F(SplitFreeListAllocatorTest, AllocateAlignmentFailure) {
+ // Determine the total number of available bytes.
+ auto base = reinterpret_cast<uintptr_t>(allocator_.data());
+ uintptr_t addr = AlignUp(base, BlockType::kAlignment);
+ size_t outer_size = allocator_.size() - (addr - base);
+
+ // The first block is large...
+ addr += BlockType::kBlockOverhead + kThreshold;
+
+ // The next block is not aligned...
+ constexpr size_t kAlignment = 128;
+ uintptr_t next = AlignUp(addr + BlockType::kBlockOverhead, kAlignment / 4);
+ if (next % kAlignment == 0) {
+ next += kAlignment / 4;
+ }
+
+ // And the last block consumes the remaining space.
+ size_t inner_size1 = next - addr;
+ size_t inner_size2 = kThreshold * 2;
+ size_t inner_size3 =
+ outer_size - (BlockType::kBlockOverhead * 3 + inner_size1 + inner_size2);
+
+ // Allocate all the blocks.
+ ptrs_[0] = allocator_->Allocate(Layout(inner_size1, 1));
+ ASSERT_NE(ptrs_[0], nullptr);
+
+ ptrs_[1] = allocator_->Allocate(Layout(inner_size2, 1));
+ ASSERT_NE(ptrs_[1], nullptr);
+
+ ptrs_[2] = allocator_->Allocate(Layout(inner_size3, 1));
+ ASSERT_NE(ptrs_[2], nullptr);
+
+ // If done correctly, the second block's usable space should be unaligned.
+ EXPECT_NE(reinterpret_cast<uintptr_t>(ptrs_[1]) % kAlignment, 0U);
+
+ // Free the second region. This leaves an unaligned region available.
+ allocator_->Deallocate(ptrs_[1], Layout(inner_size2, 1));
+ ptrs_[1] = nullptr;
+
+ // The allocator should be unable to create an aligned region of the
+ // requested size.
+ ptrs_[3] = allocator_->Allocate(Layout(inner_size2, kAlignment));
+ EXPECT_EQ(ptrs_[3], nullptr);
}
-TEST_F(SplitFreeListAllocatorTest, AllocateAllDeallocateShuffled) {
+TEST_F(SplitFreeListAllocatorTest, DeallocateNull) {
+ constexpr Layout layout = Layout::Of<uint8_t>();
+ allocator_->Deallocate(nullptr, layout);
+}
+
+TEST_F(SplitFreeListAllocatorTest, DeallocateShuffled) {
constexpr Layout layout = Layout::Of<std::byte[32]>();
- Vector<void*, 256> ptrs;
// Allocate until the pool is exhausted.
- while (true) {
- void* ptr = allocator.Allocate(layout);
- if (ptr == nullptr) {
+ for (size_t i = 0; i < kNumPtrs; ++i) {
+ ptrs_[i] = allocator_->Allocate(layout);
+ if (ptrs_[i] == nullptr) {
break;
}
- ptrs.push_back(ptr);
}
// Mix up the order of allocations.
- for (size_t i = 0; i < ptrs.size(); ++i) {
- if (i % 2 == 0 && i + 1 < ptrs.size()) {
- std::swap(ptrs[i], ptrs[i + 1]);
+ for (size_t i = 0; i < kNumPtrs; ++i) {
+ if (i % 2 == 0 && i + 1 < kNumPtrs) {
+ std::swap(ptrs_[i], ptrs_[i + 1]);
}
- if (i % 3 == 0 && i + 2 < ptrs.size()) {
- std::swap(ptrs[i], ptrs[i + 2]);
+ if (i % 3 == 0 && i + 2 < kNumPtrs) {
+ std::swap(ptrs_[i], ptrs_[i + 2]);
}
}
// Deallocate everything.
- for (void* ptr : ptrs) {
- allocator.Deallocate(ptr, layout);
+ for (size_t i = 0; i < kNumPtrs; ++i) {
+ allocator_->Deallocate(ptrs_[i], layout);
+ ptrs_[i] = nullptr;
}
}
-TEST_F(SplitFreeListAllocatorTest, AllocateDeallocateLargeAlignment) {
- void* ptr1 = allocator.AllocateUnchecked(sizeof(uint32_t), 64);
- void* ptr2 = allocator.AllocateUnchecked(sizeof(uint32_t), 64);
- EXPECT_EQ(reinterpret_cast<uintptr_t>(ptr1) % 64, 0U);
- EXPECT_EQ(reinterpret_cast<uintptr_t>(ptr2) % 64, 0U);
- allocator.DeallocateUnchecked(ptr1, sizeof(uint32_t), 64);
- allocator.DeallocateUnchecked(ptr2, sizeof(uint32_t), 64);
-}
-
-TEST_F(SplitFreeListAllocatorTest, AllocateFromUnaligned) {
- alignas(16) std::array<std::byte, 256> buf;
- SplitFreeListAllocator unaligned;
- unaligned.Initialize(buf.data() + 1, buf.size() - 1, 64);
-
- EXPECT_EQ(unaligned.addr() % sizeof(SplitFreeListAllocator::FreeBlock), 0U);
- EXPECT_EQ(unaligned.size() % sizeof(SplitFreeListAllocator::FreeBlock), 0U);
-
- constexpr Layout layout = Layout::Of<std::byte[72]>();
- EXPECT_EQ(layout.size(), 72U);
- EXPECT_EQ(layout.alignment(), 1U);
-
- void* ptr = unaligned.Allocate(layout);
- unaligned.Deallocate(ptr, layout);
-}
+TEST_F(SplitFreeListAllocatorTest, IterateOverBlocks) {
+ constexpr Layout layout1 = Layout::Of<std::byte[32]>();
+ constexpr Layout layout2 = Layout::Of<std::byte[16]>();
+
+ // Allocate eight blocks of alternating sizes. After this, there will also
+ // be a ninth, unallocated block of the remaining memory.
+ for (size_t i = 0; i < 4; ++i) {
+ ptrs_[i] = allocator_->Allocate(layout1);
+ ASSERT_NE(ptrs_[i], nullptr);
+ ptrs_[i + 4] = allocator_->Allocate(layout2);
+ ASSERT_NE(ptrs_[i + 4], nullptr);
+ }
-TEST_F(SplitFreeListAllocatorTest, AllocateAlignmentFailure) {
- // Find a valid address aligned to 128 bytes.
- auto base = reinterpret_cast<uintptr_t>(buffer.data());
- auto aligned = AlignUp(base + 16, 128);
-
- // Now allocate up to 3 regions:
- // * from the beginning to 16 bytes before the alignment boundary
- // * the next 128 bytes
- // * whatever is left
- size_t size1 = aligned - base - 16;
- void* ptr1 = allocator.AllocateUnchecked(size1, 1);
-
- size_t size2 = 128;
- void* ptr2 = allocator.AllocateUnchecked(size2, 1);
-
- size_t size3 = 128 - size1;
- void* ptr3 = allocator.AllocateUnchecked(size3, 1);
-
- // Now free the second region. This leaves a 128-byte region available, but it
- // is not aligned to a 128 byte boundary.
- allocator.DeallocateUnchecked(ptr2, size2, 1);
-
- // The allocator should be unable to create an aligned region of the given
- // size.
- void* ptr = allocator.AllocateUnchecked(128, 128);
- EXPECT_EQ(ptr, nullptr);
-
- if (ptr1 != nullptr) {
- allocator.DeallocateUnchecked(ptr1, size1, 1);
+ // Deallocate every other block. After this there will be four more
+ // unallocated blocks, for a total of five.
+ for (size_t i = 0; i < 4; ++i) {
+ allocator_->Deallocate(ptrs_[i], layout1);
}
- allocator.DeallocateUnchecked(ptr3, size3, 1);
-}
-TEST_F(SplitFreeListAllocatorTest, DeallocateNull) {
- constexpr Layout layout = Layout::Of<uint8_t>();
- allocator.Deallocate(nullptr, layout);
+ // Count the blocks. The unallocated ones vary in size, but each allocated
+ // one should hold at least the smaller layout's size.
+ size_t free_count = 0;
+ size_t used_count = 0;
+ for (auto* block : allocator_->blocks()) {
+ if (block->Used()) {
+ EXPECT_GE(block->InnerSize(), layout2.size());
+ ++used_count;
+ } else {
+ ++free_count;
+ }
+ }
+ EXPECT_EQ(used_count, 4U);
+ EXPECT_EQ(free_count, 5U);
}
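The `blocks()` range iterated above visits every block in the region, used or free, which makes small introspection helpers easy to write. For instance, a hypothetical helper (using only the `Used()` and `InnerSize()` accessors exercised above) that totals the free payload could look like:

    size_t CountFreeBytes(SplitFreeListAllocator<BlockType>& allocator) {
      size_t free_bytes = 0;
      for (auto* block : allocator.blocks()) {
        if (!block->Used()) {
          free_bytes += block->InnerSize();
        }
      }
      return free_bytes;
    }

Note this sums usable (inner) bytes only; each block additionally carries `BlockType::kBlockOverhead` bytes of metadata.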
TEST_F(SplitFreeListAllocatorTest, QueryLargeValid) {
- constexpr Layout layout = Layout::Of<std::byte[128]>();
- void* ptr = allocator.Allocate(layout);
- EXPECT_EQ(allocator.Query(ptr, layout), OkStatus());
- allocator.Deallocate(ptr, layout);
+ constexpr Layout layout = Layout::Of<std::byte[kThreshold * 2]>();
+ ptrs_[0] = allocator_->Allocate(layout);
+ EXPECT_EQ(allocator_->Query(ptrs_[0], layout), OkStatus());
}
TEST_F(SplitFreeListAllocatorTest, QuerySmallValid) {
constexpr Layout layout = Layout::Of<uint8_t>();
- void* ptr = allocator.Allocate(layout);
- EXPECT_EQ(allocator.Query(ptr, layout), OkStatus());
- allocator.Deallocate(ptr, layout);
+ ptrs_[0] = allocator_->Allocate(layout);
+ EXPECT_EQ(allocator_->Query(ptrs_[0], layout), OkStatus());
}
TEST_F(SplitFreeListAllocatorTest, QueryInvalidPtr) {
constexpr Layout layout = Layout::Of<SplitFreeListAllocatorTest>();
- EXPECT_EQ(allocator.Query(this, layout), Status::OutOfRange());
-}
-
-TEST_F(SplitFreeListAllocatorTest, QueryInvalidSize) {
- constexpr Layout layout = Layout::Of<uint8_t>();
- void* ptr = allocator.Allocate(layout);
- EXPECT_EQ(allocator.QueryUnchecked(ptr, 0, layout.alignment()),
- Status::OutOfRange());
- allocator.Deallocate(ptr, layout);
+ EXPECT_EQ(allocator_->Query(this, layout), Status::OutOfRange());
}
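The three `Query` tests above treat it as an ownership check: `OkStatus()` for a pointer this allocator handed out, `Status::OutOfRange()` otherwise. Assuming `SplitFreeListAllocator` implements the generic `Allocator` interface, a hypothetical guard built on it could read:

    // Returns true if `ptr` appears to belong to `allocator`.
    bool OwnedBy(Allocator& allocator, void* ptr, Layout layout) {
      return allocator.Query(ptr, layout).ok();
    }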
TEST_F(SplitFreeListAllocatorTest, ResizeNull) {
constexpr Layout old_layout = Layout::Of<uint8_t>();
size_t new_size = 1;
- EXPECT_FALSE(allocator.Resize(nullptr, old_layout, new_size));
+ EXPECT_FALSE(allocator_->Resize(nullptr, old_layout, new_size));
}
TEST_F(SplitFreeListAllocatorTest, ResizeSame) {
constexpr Layout old_layout = Layout::Of<uint32_t>();
- void* ptr = allocator.Allocate(old_layout);
- EXPECT_NE(ptr, nullptr);
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
+
constexpr Layout new_layout = Layout::Of<uint32_t>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ EXPECT_TRUE(allocator_->Resize(ptrs_[0], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[0], nullptr);
+ UseMemory(ptrs_[0], new_layout.size());
}
TEST_F(SplitFreeListAllocatorTest, ResizeLargeSmaller) {
- constexpr Layout old_layout = Layout::Of<std::byte[240]>();
- void* ptr = allocator.Allocate(old_layout);
+ constexpr Layout old_layout = Layout::Of<std::byte[kMaxSize]>();
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
// Shrinking always succeeds.
- constexpr Layout new_layout = Layout::Of<std::byte[80]>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ constexpr Layout new_layout = Layout::Of<std::byte[kThreshold]>();
+ EXPECT_TRUE(allocator_->Resize(ptrs_[0], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[0], nullptr);
+ UseMemory(ptrs_[0], new_layout.size());
}
TEST_F(SplitFreeListAllocatorTest, ResizeLargeLarger) {
- constexpr Layout old_layout = Layout::Of<std::byte[80]>();
- void* ptr = allocator.Allocate(old_layout);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold]>();
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
// Nothing is allocated after ptr, so `Resize` should succeed.
- constexpr Layout new_layout = Layout::Of<std::byte[240]>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ constexpr Layout new_layout = Layout::Of<std::byte[kMaxSize]>();
+ EXPECT_TRUE(allocator_->Resize(ptrs_[0], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[0], nullptr);
+ UseMemory(ptrs_[0], new_layout.size());
}
TEST_F(SplitFreeListAllocatorTest, ResizeLargeLargerFailure) {
- constexpr Layout old_layout = Layout::Of<std::byte[80]>();
- void* ptr1 = allocator.Allocate(old_layout);
- void* ptr2 = allocator.Allocate(old_layout);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold]>();
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
+
+ ptrs_[1] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[1], nullptr);
// Memory after ptr is already allocated, so `Resize` should fail.
- size_t new_size = 240;
- EXPECT_FALSE(allocator.Resize(ptr1, old_layout, new_size));
- allocator.Deallocate(ptr1, old_layout);
- allocator.Deallocate(ptr2, old_layout);
+ EXPECT_FALSE(allocator_->Resize(ptrs_[0], old_layout, kMaxSize));
}
TEST_F(SplitFreeListAllocatorTest, ResizeLargeSmallerAcrossThreshold) {
- constexpr Layout old_layout = Layout::Of<std::byte[80]>();
- void* ptr = allocator.Allocate(old_layout);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold]>();
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
// Shrinking succeeds, and the pointer is unchanged even though it is now
// below the threshold.
- constexpr Layout new_layout = Layout::Of<std::byte[16]>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ constexpr Layout new_layout = Layout::Of<std::byte[kThreshold / 4]>();
+ EXPECT_TRUE(allocator_->Resize(ptrs_[0], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[0], nullptr);
+ UseMemory(ptrs_[0], new_layout.size());
}
TEST_F(SplitFreeListAllocatorTest, ResizeSmallSmaller) {
constexpr Layout old_layout = Layout::Of<uint32_t>();
- void* ptr = allocator.Allocate(old_layout);
+ ptrs_[0] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[0], nullptr);
// Shrinking always succeeds.
constexpr Layout new_layout = Layout::Of<uint8_t>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ EXPECT_TRUE(allocator_->Resize(ptrs_[0], old_layout, new_layout.size()));
}
TEST_F(SplitFreeListAllocatorTest, ResizeSmallLarger) {
// First, allocate a trailing block.
- constexpr Layout layout1 = Layout::Of<std::byte[16]>();
- void* ptr1 = allocator.Allocate(layout1);
- EXPECT_NE(ptr1, nullptr);
+ constexpr Layout layout1 = Layout::Of<std::byte[kThreshold / 4]>();
+ ptrs_[0] = allocator_->Allocate(layout1);
+ ASSERT_NE(ptrs_[0], nullptr);
// Next allocate the memory to be resized.
- constexpr Layout old_layout = Layout::Of<std::byte[16]>();
- void* ptr = allocator.Allocate(old_layout);
- EXPECT_NE(ptr, nullptr);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold / 4]>();
+ ptrs_[1] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[1], nullptr);
// Now free the trailing block.
- allocator.Deallocate(ptr1, layout1);
+ allocator_->Deallocate(ptrs_[0], layout1);
+ ptrs_[0] = nullptr;
// And finally, resize. Since the memory after the block is available and big
// enough, `Resize` should succeed.
- constexpr Layout new_layout = Layout::Of<std::byte[24]>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ constexpr Layout new_layout = Layout::Of<std::byte[kThreshold / 2]>();
+ EXPECT_TRUE(allocator_->Resize(ptrs_[1], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[1], nullptr);
+ UseMemory(ptrs_[1], new_layout.size());
}
TEST_F(SplitFreeListAllocatorTest, ResizeSmallLargerFailure) {
// First, allocate a trailing block.
- constexpr Layout layout1 = Layout::Of<std::byte[8]>();
- void* ptr1 = allocator.Allocate(layout1);
- EXPECT_NE(ptr1, nullptr);
+ constexpr Layout layout1 = Layout::Of<std::byte[kThreshold / 4]>();
+ ptrs_[0] = allocator_->Allocate(layout1);
+ ASSERT_NE(ptrs_[0], nullptr);
// Next allocate the memory to be resized.
- constexpr Layout old_layout = Layout::Of<std::byte[16]>();
- void* ptr = allocator.Allocate(old_layout);
- EXPECT_NE(ptr, nullptr);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold / 4]>();
+ ptrs_[1] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[1], nullptr);
// Now free the trailing block.
- allocator.Deallocate(ptr1, layout1);
+ allocator_->Deallocate(ptrs_[0], layout1);
+ ptrs_[0] = nullptr;
// And finally, resize. Since the memory after the block is available but not
// big enough, `Resize` should fail.
size_t new_size = 48;
- EXPECT_FALSE(allocator.Resize(ptr, old_layout, new_size));
- allocator.Deallocate(ptr, old_layout);
+ EXPECT_FALSE(allocator_->Resize(ptrs_[1], old_layout, new_size));
}
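The grow-in-place behavior these `Resize` tests verify is exactly what a realloc-style wrapper would lean on: try `Resize` first, and fall back to allocate-copy-free only when the trailing block cannot absorb the growth. A hypothetical sketch over the same interface (not part of this test or of `pw_allocator`):

    #include <algorithm>
    #include <cstring>

    void* ReallocSketch(Allocator& allocator, void* ptr, Layout old_layout,
                        size_t new_size) {
      if (allocator.Resize(ptr, old_layout, new_size)) {
        return ptr;  // Resized in place; pointer unchanged.
      }
      // In-place resize failed: move the data to a fresh allocation.
      void* new_ptr =
          allocator.Allocate(Layout(new_size, old_layout.alignment()));
      if (new_ptr == nullptr) {
        return nullptr;
      }
      std::memcpy(new_ptr, ptr, std::min(old_layout.size(), new_size));
      allocator.Deallocate(ptr, old_layout);
      return new_ptr;
    }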
TEST_F(SplitFreeListAllocatorTest, ResizeSmallLargerAcrossThreshold) {
// First, allocate several trailing blocks.
- constexpr Layout layout1 = Layout::Of<std::byte[48]>();
- void* ptr1 = allocator.Allocate(layout1);
- EXPECT_NE(ptr1, nullptr);
- void* ptr2 = allocator.Allocate(layout1);
- EXPECT_NE(ptr2, nullptr);
+ constexpr Layout layout1 = Layout::Of<std::byte[kThreshold / 2]>();
+ ptrs_[0] = allocator_->Allocate(layout1);
+ ASSERT_NE(ptrs_[0], nullptr);
+
+ ptrs_[1] = allocator_->Allocate(layout1);
+ ASSERT_NE(ptrs_[1], nullptr);
// Next allocate the memory to be resized.
- constexpr Layout old_layout = Layout::Of<std::byte[16]>();
- void* ptr = allocator.Allocate(old_layout);
- EXPECT_NE(ptr, nullptr);
+ constexpr Layout old_layout = Layout::Of<std::byte[kThreshold / 4]>();
+ ptrs_[2] = allocator_->Allocate(old_layout);
+ ASSERT_NE(ptrs_[2], nullptr);
// Now free the trailing blocks.
- allocator.Deallocate(ptr1, layout1);
- allocator.Deallocate(ptr2, layout1);
+ allocator_->Deallocate(ptrs_[0], layout1);
+ ptrs_[0] = nullptr;
+ allocator_->Deallocate(ptrs_[1], layout1);
+ ptrs_[1] = nullptr;
// Growing succeeds, and the pointer is unchanged even though it is now
// above the threshold.
- constexpr Layout new_layout = Layout::Of<std::byte[96]>();
- EXPECT_TRUE(allocator.Resize(ptr, old_layout, new_layout.size()));
- allocator.Deallocate(ptr, new_layout);
+ constexpr Layout new_layout = Layout::Of<std::byte[kThreshold]>();
+ EXPECT_TRUE(allocator_->Resize(ptrs_[2], old_layout, new_layout.size()));
+ ASSERT_NE(ptrs_[2], nullptr);
+ UseMemory(ptrs_[2], new_layout.size());
}
} // namespace