Simple allocator wrapper that adds ASan-poisoned header and footer buffers to each allocation.
// ----------------------------------------------------------------------------
// c++ -std=c++11 -Wall -Wextra -Wshadow -Wunused -pedantic -fsanitize=address -fno-omit-frame-pointer asan_guarded_allocator.cpp
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <limits>
#include <unordered_map>
// ----------------------------------------------------------------------------
#if defined(__clang__) && defined(__has_feature) | |
#if __has_feature(address_sanitizer) | |
#include <sanitizer/asan_interface.h> | |
#define WITH_ASAN 1 | |
#endif // address_sanitizer | |
#endif // __clang__ && __has_feature | |
#if !defined(WITH_ASAN) | |
#define ASAN_POISON_MEMORY_REGION(addr, size) /* nothing */ | |
#define ASAN_UNPOISON_MEMORY_REGION(addr, size) /* nothing */ | |
#endif // WITH_ASAN | |
// ----------------------------------------------------------------------------
// Adds a header and footer block around each allocation, each as
// big as the block requested by the user. The header and footer
// are kept ASan-poisoned, acting as guard regions around the
// addressable block, so small out-of-bounds accesses trip ASan.

// Fallback no-op macros so this class also compiles in translation
// units that did not pull in <sanitizer/asan_interface.h> (e.g. tests).
#if !defined(ASAN_POISON_MEMORY_REGION)
    #define ASAN_POISON_MEMORY_REGION(addr, size)   /* nothing */
#endif
#if !defined(ASAN_UNPOISON_MEMORY_REGION)
    #define ASAN_UNPOISON_MEMORY_REGION(addr, size) /* nothing */
#endif

class AsanGuardedAllocator final
{
public:
    // Alignment (in bytes) guaranteed for pointers returned by Allocate().
    static constexpr std::size_t kAlign = 16;

    // Allocates 'bytes' usable bytes surrounded by poisoned guard regions.
    // Returns nullptr if the request is too large to size safely or the
    // underlying std::malloc fails (same failure contract as malloc).
    void * Allocate(const std::size_t bytes)
    {
        // Guard against arithmetic overflow of (bytes * 3) + kAlign below,
        // which would silently produce an undersized allocation.
        if (bytes > (std::numeric_limits<std::size_t>::max() - kAlign) / 3)
        {
            return nullptr;
        }
        // Allocate a header and footer of the same size as the requested block.
        const std::size_t realSize = AlignSize((bytes * 3) + kAlign);
        void * block = std::malloc(realSize);
        if (block == nullptr)
        {
            return nullptr;
        }
        // Poison the whole range; the user portion is unpoisoned just below.
        ASAN_POISON_MEMORY_REGION(block, realSize);
        AllocRecord ar;
        ar.headerStart = block;
        ar.userStart   = AdjustUserPtr(block, bytes); // Will unpoison the user portion
        ar.userSize    = bytes;
        ar.realSize    = realSize;
        const auto result = m_allocs.emplace(ar.userStart, ar);
        assert(result.second == true); // userStart must be unique among live allocations
        (void)result; // Silence unused-variable warning in NDEBUG builds
        return ar.userStart;
    }

    // Releases a pointer previously returned by Allocate().
    // Unknown pointers (including nullptr) are silently ignored.
    void Free(const void * ptr)
    {
        auto iter = m_allocs.find(ptr);
        if (iter == m_allocs.end())
        {
            return;
        }
        void * block = iter->second.headerStart;
        // Unpoison the whole range before returning it to malloc, otherwise
        // the allocator could later hand poisoned memory to someone else.
        ASAN_UNPOISON_MEMORY_REGION(block, iter->second.realSize);
        std::free(block);
        m_allocs.erase(iter);
    }

    // Returns the user-requested size of a live allocation,
    // or 0 if 'ptr' is not (or no longer) owned by this allocator.
    std::size_t GetSize(const void * ptr) const
    {
        const auto iter = m_allocs.find(ptr);
        return (iter != m_allocs.end() ? iter->second.userSize : 0);
    }

    // True if 'ptr' is a live allocation owned by this allocator.
    bool IsValidPtr(const void * ptr) const
    {
        return (m_allocs.find(ptr) != m_allocs.end());
    }

private:
    // Rounds 'bytes' up to the next multiple of kAlign (kAlign is a power of two).
    static std::size_t AlignSize(const std::size_t bytes)
    {
        const std::size_t alignedSize = (bytes + kAlign - 1) & ~(kAlign - 1);
        assert((alignedSize % kAlign) == 0);
        return alignedSize;
    }

    // Rounds 'ptr' up to the next kAlign-aligned address.
    static void * AlignPtr(const void * ptr)
    {
        const std::uintptr_t alignedPtr =
            (std::uintptr_t(ptr) + kAlign - 1) & ~std::uintptr_t(kAlign - 1);
        void * userPtr = reinterpret_cast<void *>(alignedPtr);
        assert((std::uintptr_t(userPtr) % kAlign) == 0);
        return userPtr;
    }

    // Computes the aligned user pointer sitting 'bytes' past the header
    // start and unpoisons the user-addressable range.
    static void * AdjustUserPtr(void * headerStart, const std::size_t bytes)
    {
        void * userPtr = AlignPtr(static_cast<std::uint8_t *>(headerStart) + bytes);
        ASAN_UNPOISON_MEMORY_REGION(userPtr, bytes);
        return userPtr;
    }

    // Book-keeping for one live allocation, keyed by userStart in m_allocs.
    struct AllocRecord
    {
        void *      headerStart; // Start of the malloc'd block (poisoned header)
        void *      userStart;   // Aligned pointer handed out to the user
        std::size_t userSize;    // Size requested by the user
        std::size_t realSize;    // Total size passed to std::malloc
    };

    std::unordered_map<const void *, AllocRecord> m_allocs;
};
// ----------------------------------------------------------------------------
int main() | |
{ | |
AsanGuardedAllocator alloc; | |
char * p = static_cast<char *>(alloc.Allocate(128)); | |
for (int i = 0; i < 128; ++i) | |
{ | |
p[i] = char(i); | |
} | |
alloc.Free(p); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment