
@porglezomp
Created November 16, 2018 04:03

An Allocator

This is a working memory allocator for C++! It was written as test code for an operating systems class project: we were implementing a virtual memory system, and I built an allocator on top of it. To work, it needs access to a vm_map call that returns a fresh 4KB page, and consecutive calls need to return consecutive addresses (at least most of the time, or else we can't allocate large blocks).
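If you want to poke at it outside the project, vm_map is easy to fake. The sketch below is only an assumption about the interface (a void *vm_map(const char *filename, size_t block) signature and a 4KB VM_PAGESIZE, matching how the allocator calls it); it hands out consecutive pages from a static arena:

#include <cstddef>
// Hypothetical stand-in for the project's vm_map/vm_app.h, for testing only.
static const size_t VM_PAGESIZE = 4096;
void *vm_map(const char * /*filename*/, size_t block) {
    // One static arena; consecutive block indices map to consecutive pages,
    // which is what the allocator relies on to merge pages into larger chunks.
    constexpr size_t NUM_PAGES = 1024;
    alignas(std::max_align_t) static char arena[VM_PAGESIZE * NUM_PAGES];
    if (block >= NUM_PAGES) { return nullptr; }  // out of fake memory
    return arena + block * VM_PAGESIZE;
}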

Allocation does a linear, best-fit search through a single free list kept in address order; freed blocks are reinserted in place and merged with an adjacent free chunk when possible.
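For a sense of the interface, usage looks roughly like this (the file name is just a placeholder; the operator new/delete overloads at the bottom of the file instead route every C++ allocation through one global instance):

Allocator heap("heap.swap");                    // hypothetical backing file name
int *xs = (int *)heap.alloc(16 * sizeof(int));  // best-fit search, splits a free chunk
heap.free(xs);                                  // reinserted in address order and coalesced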

#pragma once
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <functional>
#include <iostream>
#include <memory>
#include <new>
#include "vm_app.h"
#define ALLOC_TRACE 0
#define ALLOC_INVARIANT 0
#if ALLOC_TRACE
#define TRACE(x) std::cout << x << std::endl
#define TRACE_VAR(x) std::cout << #x " " << x << std::endl
#else
#define TRACE(x)
#define TRACE_VAR(x)
#endif
class Allocator {
    struct FreeChunk;
    struct AllocatedChunk {
        size_t size = 0;
        std::max_align_t allocation[1];
        AllocatedChunk(size_t size) : size(size) {}
        FreeChunk *to_free();
    };
    struct FreeChunk {
        size_t size = 0;
        FreeChunk *prev = nullptr, *next = nullptr;
        FreeChunk(size_t size) : size(size) {}
        void *end_ptr(size_t block_size) const;
        bool merge();
        FreeChunk *split(size_t block_size);
        static FreeChunk *from_block(void *block);
        AllocatedChunk *to_allocated();
        bool before(const FreeChunk *chunk) const;
        bool fits(size_t size) const;
    };
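    // HEADER_SIZE is the number of bytes of metadata in front of the user
    // data; MIN_BLOCK_SIZE keeps every allocation large enough that, once
    // freed, the block can hold a FreeChunk's link pointers in place of the
    // payload.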
    static constexpr size_t HEADER_SIZE =
        offsetof(AllocatedChunk, allocation);
    static constexpr size_t MIN_BLOCK_SIZE =
        (sizeof(AllocatedChunk) - sizeof(std::max_align_t) > sizeof(FreeChunk) ?
         sizeof(AllocatedChunk) - sizeof(std::max_align_t) : sizeof(FreeChunk)) - HEADER_SIZE;
    const char *fname = nullptr;
    size_t block_index = 0;
    FreeChunk *first_free = nullptr;
    FreeChunk *last_free = nullptr;
    void invariants() const;
    void insert(FreeChunk *after, FreeChunk *chunk);
public:
    Allocator(const char *fname) : fname(fname) { }
    void reserve(size_t);
    void *alloc(size_t size);
    void free(void *ptr);
    friend std::ostream &operator<<(std::ostream&, const FreeChunk&);
};
std::ostream &operator<<(std::ostream &out, const Allocator::FreeChunk &chunk) {
    return out << "FreeChunk {"
               << " this = " << &chunk
               << ", size = " << chunk.size
               << ", next = " << chunk.next
               << ", prev = " << chunk.prev
               << " }";
}
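// A single global instance backs the operator new/delete overloads, so every
// C++ allocation in the program goes through this allocator.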
static Allocator GLOBAL_ALLOCATOR(nullptr);
void *operator new(size_t size) {
    return GLOBAL_ALLOCATOR.alloc(size);
}
void operator delete(void *ptr) noexcept {
    GLOBAL_ALLOCATOR.free(ptr);
}
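// invariants() walks the free list and checks that it is doubly linked
// consistently, kept in ascending address order, and that first_free and
// last_free point at the two ends; it compiles to nothing unless
// ALLOC_INVARIANT is set.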
#if ALLOC_INVARIANT == 0
void Allocator::invariants() const { }
#else
void Allocator::invariants() const {
#if ALLOC_TRACE == 2
    {
        FreeChunk *chunk = first_free;
        TRACE("invariants:");
        if (first_free) { TRACE("first " << *first_free); }
        else { TRACE("no first"); }
        while (chunk) {
            TRACE(*chunk);
            chunk = chunk->next;
        }
        if (last_free) { TRACE(" last " << *last_free); }
        else { TRACE("no last"); }
    }
#endif
    if (first_free) {
        assert(last_free);
        assert(not last_free->before(first_free));
        assert(not last_free->next);
        assert(not first_free->prev);
    }
    else {
        assert(not last_free);
    }
    FreeChunk *chunk = first_free;
    while (chunk) {
        if (chunk->next) {
            assert(chunk->before(chunk->next));
            assert(chunk->next->prev == chunk);
        }
        else {
            assert(chunk == last_free);
        }
        if (chunk->prev) {
            assert(chunk->prev->next == chunk);
        }
        else {
            assert(chunk == first_free);
        }
        chunk = chunk->next;
    }
}
#endif
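// Link `chunk` into the free list immediately after `after` (nullptr means
// the front of the list), attempt to merge it with the chunk before it (or,
// when inserting at the front, with the one after it), and update
// first_free/last_free if the chunk ends up at either end.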
void Allocator::insert(FreeChunk *after, FreeChunk *chunk) {
    TRACE("insert(" << after << ", " << chunk << ")");
    invariants();
    assert(chunk);
    assert(not chunk->next);
    assert(not chunk->prev);
    if (after == nullptr) {
        // Insert at the front of the list
        chunk->next = first_free;
        if (first_free) {
            first_free->prev = chunk;
        }
    }
    else {
        assert(after->before(chunk));
        FreeChunk *next = after->next;
        after->next = chunk;
        if (next) { next->prev = chunk; }
        chunk->prev = after;
        chunk->next = next;
    }
    if (after) {
        if (after->merge()) {
            chunk = after;
        }
    }
    else {
        chunk->merge();
    }
    if (not chunk->prev) {
        first_free = chunk;
    }
    if (not chunk->next) {
        last_free = chunk;
    }
    TRACE("inserted");
    invariants();
}
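// Map fresh pages with vm_map until the last free chunk is at least `size`
// bytes. Because consecutive calls usually return consecutive addresses, the
// new pages merge into the tail chunk and can satisfy multi-page requests.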
void Allocator::reserve(size_t size) {
    TRACE("reserve(" << size << ")");
    invariants();
    while (not last_free or last_free->size < size) {
        void *new_block = vm_map(fname, block_index++);
        insert(last_free, FreeChunk::from_block(new_block));
    }
    TRACE("reserved");
    invariants();
}
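// Best-fit allocation: scan the whole free list for the smallest chunk that
// can hold `size`, map more pages if nothing fits, split off the request, and
// return a pointer to the payload just past the header.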
void *Allocator::alloc(size_t size) {
    TRACE("alloc(" << size << ")...");
    invariants();
    // We need to allocate minimum sizes for metadata purposes
    if (size < MIN_BLOCK_SIZE) {
        size = MIN_BLOCK_SIZE;
    }
    // Best-fit allocation
    FreeChunk *best_fit = nullptr;
    {
        FreeChunk *chunk = first_free;
        while (chunk) {
            if (chunk->fits(size)) {
                // If it fits
                if (best_fit == nullptr or chunk->size < best_fit->size) {
                    // And is a *better* fit
                    best_fit = chunk;
                }
            }
            // If we've found a perfect fit, we can stop here
            if (best_fit and best_fit->size == size) { break; }
            chunk = chunk->next;
        }
    }
    if (not best_fit) {
        reserve(size);
        best_fit = last_free;
    }
    FreeChunk *free_chunk = best_fit->split(size);
    if (best_fit == first_free) {
        first_free = free_chunk->next;
    }
    if (best_fit == last_free) {
        last_free = free_chunk->next;
    }
    AllocatedChunk *allocation = free_chunk->to_allocated();
    TRACE("alloc(" << size << ") = " << &allocation->allocation);
    invariants();
    return &allocation->allocation;
}
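// Recover the chunk header from the user pointer, convert it back into a
// FreeChunk, and reinsert it at its address-ordered position in the free
// list so insert() can coalesce it with a neighbor.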
void Allocator::free(void *ptr) {
    TRACE("free(" << ptr << ")");
    invariants();
    if (ptr == nullptr) { return; }
    AllocatedChunk *alloc = (AllocatedChunk*)(((char*)ptr) - HEADER_SIZE);
    FreeChunk *freed = alloc->to_free();
    FreeChunk *chunk = first_free;
    // want to find the last chunk that comes before it
    if (not first_free or freed->before(first_free)) {
        // (also handles an empty free list: insert at the front)
        chunk = nullptr;
    }
    else if (last_free and last_free->before(freed)) {
        chunk = last_free;
    }
    else {
        assert(chunk->next);
        while (chunk->next->before(freed)) {
            chunk = chunk->next;
            assert(chunk);
            assert(chunk->next);
        }
    }
    insert(chunk, freed);
    TRACE("freed");
    invariants();
}
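// Address one past the end of a block whose payload is `block_size` bytes,
// i.e. where the next chunk would start if the two are physically adjacent.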
void *Allocator::FreeChunk::end_ptr(size_t block_size) const {
    return (void*)(((char*)this) + HEADER_SIZE + block_size);
}
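// If this chunk ends exactly where its list successor begins, absorb the
// successor (its payload plus its header) and unlink it. Returns true if a
// merge happened.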
bool Allocator::FreeChunk::merge() {
    TRACE("merge " << *this);
    if (end_ptr(size) == (void*)next) {
        // Safe to merge
        size += next->size + HEADER_SIZE;
        // std::cout << "merging: " << std::endl;
        // std::cout << " - " << *this << std::endl;
        // std::cout << " - " << *next << std::endl;
        next = next->next;
        if (next) {
            next->prev = this;
        }
        return true;
    }
    else {
        // Can't merge
        return false;
    }
}
Allocator::FreeChunk *Allocator::FreeChunk::split(size_t block_size) {
    TRACE("split " << *this << " " << block_size);
    assert(block_size <= size);
    void *next_pointer = end_ptr(block_size);
    size_t used_size = ((char*)next_pointer - (char*)this) - HEADER_SIZE;
    assert(used_size == block_size);
    size_t remaining_size = size - used_size;
    next_pointer = std::align(alignof(Allocator::AllocatedChunk),
                              0, next_pointer, remaining_size);
    assert(next_pointer);
    used_size = ((char*)next_pointer - (char*)this) - HEADER_SIZE;
    assert(used_size <= size);  // <=: an exact fit uses the whole chunk
    if (size - used_size < sizeof(FreeChunk)) {
        // If there's not enough space to fit a FreeChunk, then don't split
        // anything off, just use the whole block.
        if (next) { next->prev = prev; }
        if (prev) { prev->next = next; }
        // Set the `next` pointer to `prev` so that the caller can find
        // the new end of the list if it needs to.
        if (not next) { next = prev; }
        prev = nullptr;
        return this;
    }
    size = used_size;
    FreeChunk *next_chunk =
        new (next_pointer) FreeChunk(remaining_size - HEADER_SIZE);
    if (next) { next->prev = next_chunk; }
    if (prev) { prev->next = next_chunk; }
    next_chunk->next = next;
    next_chunk->prev = prev;
    next = next_chunk;
    prev = nullptr;
    return this;
}
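// Turn a freshly mapped page into a page-sized FreeChunk, throwing
// std::bad_alloc if vm_map returned no page.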
Allocator::FreeChunk *Allocator::FreeChunk::from_block(void *block) {
    if (not block) { throw std::bad_alloc(); }
    FreeChunk *chunk = new (block) FreeChunk(VM_PAGESIZE - HEADER_SIZE);
    return chunk;
}
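// Both chunk types share the leading `size` field, so converting between them
// is just a placement-new of the other header over the same memory.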
Allocator::FreeChunk *Allocator::AllocatedChunk::to_free() {
    return new (this) FreeChunk(size);
}
Allocator::AllocatedChunk *Allocator::FreeChunk::to_allocated() {
    return new (this) AllocatedChunk(size);
}
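// std::less is used instead of a raw `<` because it guarantees a total order
// even for pointers into different mapped pages, which built-in pointer
// comparison does not.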
bool Allocator::FreeChunk::before(const Allocator::FreeChunk *chunk) const {
    return std::less<const FreeChunk*>()(this, chunk);
}
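// Mirror of the size/alignment computation in split(): a chunk fits if the
// request, plus any padding needed to keep a following chunk aligned, still
// fits strictly inside this chunk.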
bool Allocator::FreeChunk::fits(size_t block_size) const {
    if (size < block_size) { return false; }
    void *next_pointer = end_ptr(block_size);
    size_t used_size = ((char*)next_pointer - (char*)this) - HEADER_SIZE;
    assert(used_size == block_size);
    size_t remaining_size = size - used_size;
    next_pointer = std::align(alignof(Allocator::AllocatedChunk),
                              0, next_pointer, remaining_size);
    assert(next_pointer);
    used_size = ((char*)next_pointer - (char*)this) - HEADER_SIZE;
    return used_size < size;
}