Blog post for Memoization part 1
auto foo(int n, std::string s)
{
    // ... calculate something expensive (like the answer to everything)
    return 42;
}
auto fooM(int n, std::string s)
{
    // what goes in here?
}
auto fooM = memoize(foo);
auto resultForBla = fooM(14, "bla");
auto resultForBlubb = fooM(14, "blubb");
// Or, with a cache bounded to three entries:
auto fooM = memoizeWithLRU<3>(foo);
auto fooM(int n, std::string s)
{
    // TODO
}
// Maps foo's argument tuple to its cached result.
using OrderedMap = std::map<std::tuple<int, std::string>, int>;
// Tag type selecting the unbounded map-based cache.
struct MapCache
{
};
// Tag type selecting a bounded cache that evicts old entries once CacheCapacity is reached.
template <auto CacheCapacity>
struct LRUCache
{
};
// Storage for the bounded cache: the map holds the cached entries, while the
// queue remembers insertion order so the oldest entry can be evicted.
template <typename Container, auto CacheCapacity>
struct LRUCacheImpl
{
    Container cacheMap{};
    std::queue<typename Container::const_iterator> cacheQueue{};
};
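The memoize function further down refers to a create_cache_t trait that turns a cache tag plus the callable's function signature into a concrete cache type. That trait is not shown in this part; the following is only a sketch of how it could look, assuming the key is the decayed argument tuple and the mapped type is the decayed result:

template <typename CacheTag, typename Function>
struct create_cache;

template <typename Result, typename... Args>
struct create_cache<MapCache, Result(Args...)>
{
    // Unbounded cache: an ordered map from the argument tuple to the result.
    using type = std::map<std::tuple<std::decay_t<Args>...>, std::decay_t<Result>>;
};

template <auto CacheCapacity, typename Result, typename... Args>
struct create_cache<LRUCache<CacheCapacity>, Result(Args...)>
{
    // Bounded cache: the same map wrapped together with the eviction queue.
    using type = LRUCacheImpl<std::map<std::tuple<std::decay_t<Args>...>, std::decay_t<Result>>, CacheCapacity>;
};

template <typename CacheTag, typename Function>
using create_cache_t = typename create_cache<CacheTag, Function>::type;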
// Stores a key/value pair in a plain map-like cache and returns an iterator to it.
template <typename Cache, typename Key, typename Value>
auto emplaceInCache(Cache &cache, const Key &key, Value &&value)
{
    return cache.try_emplace(key, std::forward<Value>(value)).first;
}
// Overload for the bounded cache: once the capacity is reached, the oldest entry
// is evicted before the new one is inserted and enqueued.
template <typename T, auto CacheCapacity, typename Key, typename Value>
auto emplaceInCache(LRUCacheImpl<T, CacheCapacity> &lru, const Key &key, Value &&value)
{
    if (lru.cacheQueue.size() == CacheCapacity)
    {
        lru.cacheMap.erase(lru.cacheQueue.front());
        lru.cacheQueue.pop();
    }
    auto iterator{emplaceInCache(lru.cacheMap, key, std::forward<Value>(value))};
    return lru.cacheQueue.emplace(iterator);
}
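A small usage sketch of the two overloads (values made up for illustration): with a capacity of two, inserting a third entry evicts the oldest one first.

LRUCacheImpl<OrderedMap, 2> lru{};
emplaceInCache(lru, std::tuple{14, std::string{"bla"}}, 42);
emplaceInCache(lru, std::tuple{14, std::string{"blubb"}}, 42);
emplaceInCache(lru, std::tuple{15, std::string{"bla"}}, 43); // evicts the entry inserted first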
// Looks a key up; returns an optional holding the iterator on a hit, std::nullopt on a miss.
template <typename Cache, typename Key>
auto findInCache(const Cache &cache, const Key &key)
{
    auto iterator{cache.find(key)};
    return iterator == cache.end() ? std::nullopt : std::optional{iterator};
}
// Overload for the bounded cache: the lookup happens in its underlying map.
template <typename T, auto CacheCapacity, typename Key>
auto findInCache(const LRUCacheImpl<T, CacheCapacity> &lru, const Key &key)
{
    return findInCache(lru.cacheMap, key);
}
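Because findInCache wraps the iterator in a std::optional, a hit needs two dereferences: one for the optional and one for the iterator. A short usage sketch with made-up values:

OrderedMap cache{};
emplaceInCache(cache, std::tuple{14, std::string{"bla"}}, 42);
if (auto hit{findInCache(cache, std::tuple{14, std::string{"bla"}})})
{
    auto cachedResult{(*hit)->second}; // 42
}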
// Builds the memoizing wrapper: it owns the callable and the cache, looks the
// argument tuple up first, and only invokes the wrapped callable on a cache miss.
template <typename CacheReturn, typename Callable, typename Cache>
auto createMemoizer(Callable callable, Cache cache)
{
    return [callable{std::move(callable)}, cache{std::move(cache)}](auto &&... args) mutable -> CacheReturn {
        auto arguments{std::forward_as_tuple(std::forward<decltype(args)>(args)...)};
        if (auto maybeValue{findInCache(cache, arguments)})
        {
            return (*maybeValue)->second;
        }
        return applyFunction(callable, arguments, cache)->second;
    };
}
// Invokes the callable with the argument tuple and stores the result in the cache.
template <typename Callable, typename Arguments, typename Cache>
auto applyFunction(Callable &callable, const Arguments &arguments, Cache &cache)
{
    auto result{std::apply(callable, arguments)};
    return emplaceInCache(cache, arguments, std::move(result));
}
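The pieces can already be wired up by hand with the OrderedMap cache from above; the names expensiveLambda/memoizedLambda are only illustrative, and the explicit const int & is the type the wrapper hands back out of the cache (memoize further down derives it automatically):

auto expensiveLambda = [](int n, std::string s) { /* ... */ return 42; };
auto memoizedLambda{createMemoizer<const int &>(expensiveLambda, OrderedMap{})};
auto resultA = memoizedLambda(14, "bla"); // computed and stored
auto resultB = memoizedLambda(14, "bla"); // served from the cache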
// Examples of callables that cannot (or should not) be memoized.
// Returns void, so there is nothing to cache.
auto first = [](int){};
// Takes no arguments, so there is nothing to key the cache on.
auto second = []{ return 42; };
// Returns a non-const reference (here even a dangling one to a local).
auto third = [](int) -> int& {
    auto x{42};
    return x;
};
// The result is not copy constructible, so it cannot be handed back out of the cache.
auto fourth = [](int){ return std::make_unique<int>(42); };
// Mutates outside state: cached calls would silently skip the increment.
auto fifth = [&someGlobalInt](int x){
    ++someGlobalInt;
    return x+42;
};
// Compile-time requirements on the callable, checked inside memoize below:
using ResultType = callable_result_t<Callable>;
static_assert(!std::is_void_v<ResultType>, "Callable with void return type cannot be memoized!");
static_assert(
    !(std::is_reference_v<ResultType> && !std::is_const_v<std::remove_reference_t<ResultType>>),
    "Callable with non-const reference return type cannot be memoized!");
static_assert(std::is_copy_constructible_v<ResultType>, "Callable return type must be copy constructible!");
static_assert(
    std::tuple_size_v<callable_arguments_t<Callable>> > 0, "Callable with no arguments cannot be memoized!");
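The callable_result_t, callable_arguments_t, and callable_to_function_t traits used by these checks (and by memoize below) are not part of this snippet. The following is only a possible sketch, assuming free functions, function pointers, and non-generic lambdas with a const call operator are the supported callables:

// Needs <tuple> and <type_traits>.
template <typename Result, typename... Args>
struct callable_traits_base
{
    using result_type = Result;
    using arguments_type = std::tuple<Args...>;
    using function_type = Result(Args...);
};

// Functors and non-generic lambdas: inspect their call operator.
template <typename Callable>
struct callable_traits : callable_traits<decltype(&Callable::operator())>
{
};

// Free functions and function pointers.
template <typename Result, typename... Args>
struct callable_traits<Result (*)(Args...)> : callable_traits_base<Result, Args...>
{
};

// Pointers to const member functions, i.e. the call operator of a lambda.
template <typename Class, typename Result, typename... Args>
struct callable_traits<Result (Class::*)(Args...) const> : callable_traits_base<Result, Args...>
{
};

template <typename Callable>
using callable_result_t = typename callable_traits<std::decay_t<Callable>>::result_type;

template <typename Callable>
using callable_arguments_t = typename callable_traits<std::decay_t<Callable>>::arguments_type;

template <typename Callable>
using callable_to_function_t = typename callable_traits<std::decay_t<Callable>>::function_type;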
// By default the memoizer hands the cached value out as a const reference into the cache.
template <typename CacheTag, typename Result>
struct result_adaption : std::add_lvalue_reference<std::add_const_t<Result>>
{
};
// The bounded cache may evict entries, so it hands the value out by copy instead.
template <auto CacheCapacity, typename Result>
struct result_adaption<LRUCache<CacheCapacity>, Result> : std::remove_cv<std::remove_reference_t<Result>>
{
};
template <typename CacheTag, typename Result>
using result_adaption_t = typename result_adaption<CacheTag, Result>::type;
using AdaptedResultType = result_adaption_t<CacheTag, ResultType>;
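For an int-returning callable this adaption means the map-backed memoizer returns a const reference into the cache, while the bounded one (whose entries can disappear) returns a copy; two static_asserts make that concrete:

static_assert(std::is_same_v<result_adaption_t<MapCache, int>, const int &>);
static_assert(std::is_same_v<result_adaption_t<LRUCache<3>, int>, int>);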
// Creates a memoizing wrapper around the callable; CacheTag selects the cache policy.
template <typename CacheTag = MapCache, typename Callable>
[[nodiscard]] auto memoize(Callable &&callable) noexcept
{
    using ResultType = callable_result_t<Callable>;
    static_assert(!std::is_void_v<ResultType>, "Callable with void return type cannot be memoized!");
    static_assert(
        !(std::is_reference_v<ResultType> && !std::is_const_v<std::remove_reference_t<ResultType>>),
        "Callable with non-const reference return type cannot be memoized!");
    static_assert(std::is_copy_constructible_v<ResultType>, "Callable return type must be copy constructible!");
    static_assert(
        std::tuple_size_v<callable_arguments_t<Callable>> > 0, "Callable with no arguments cannot be memoized!");
    using AdaptedResultType = result_adaption_t<CacheTag, ResultType>;
    using CacheType = create_cache_t<CacheTag, callable_to_function_t<Callable>>;
    return createMemoizer<AdaptedResultType>(std::forward<Callable>(callable), CacheType{});
}
// Convenience overload: memoizes with a bounded cache of the given capacity.
template <auto CacheCapacity, typename Callable>
[[nodiscard]] auto memoizeWithLRU(Callable &&callable) noexcept
{
    return memoize<LRUCache<CacheCapacity>>(std::forward<Callable>(callable));
}
memoize(foo); // result is discarded, so [[nodiscard]] triggers a compiler warning
int foo(int, std::string);
int foo(int n, std::string s)
{
    // ... calculate something expensive (like the answer to everything)
    return 42;
}
namespace
{
auto fooM = memoizeWithLRU<1>([](int n, std::string s){
    // ... calculate something expensive (like the answer to everything)
    return 42;
});
}
int foo(int n, std::string s)
{
    return fooM(n, s);
}