Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
// A "semi-lazy" coroutine task: the body runs eagerly (initial_suspend is
// suspend_never) until it either co_returns a value or awaits a lazy<> sub-
// task. Awaiting the sub-task is deferred: it is recorded in `defer` and only
// performed when someone awaits the semi_lazy itself.
template <typename T>
struct semi_lazy {
    struct promise_type {
        // Result slot; engaged once the body co_returns.
        std::optional<T> val;
        // Deferred continuation: set by await_transform when the body awaits
        // a lazy<>, invoked from operator co_await's await_suspend.
        std::function<void()> defer;

        // Run the body eagerly up to the first lazy<> await (or completion).
        suspend_never initial_suspend() { return {}; }
        // FIX: C++20 requires final_suspend() to be noexcept. Suspending at
        // the end keeps the frame alive so the awaiter can read `val`;
        // the frame is destroyed by ~semi_lazy.
        suspend_always final_suspend() noexcept { return {}; }

        void return_value(T v) {
            val.emplace(std::move(v));
        }

        semi_lazy get_return_object() {
            return semi_lazy(this);
        }

        // NOTE(review): exceptions from the body are silently swallowed; in
        // that case `val` stays empty and await_resume() dereferences an
        // empty optional (UB). A real implementation should capture an
        // exception_ptr here and rethrow in await_resume.
        void unhandled_exception() {
            //throw;
        }

        // Intercept `co_await lazy<U>` inside the body: instead of awaiting
        // immediately, stash the await in `defer` and suspend.
        template <typename U>
        auto await_transform(lazy<U> sub) {
            struct SubSub {
                promise_type* p;
                lazy<U> sub;
                // Built lazily inside `defer`; its type is whatever
                // lazy<U>'s own operator co_await yields.
                std::optional<decltype(sub.operator co_await())> awaitable;

                bool await_ready() { return false; }
                void await_suspend(coroutine_handle<> h) {
                    // `this` stays valid across the suspension: the SubSub
                    // object lives in the coroutine frame until the whole
                    // co_await expression completes.
                    p->defer = [this, h] {
                        awaitable.emplace(sub.operator co_await());
                        awaitable->await_suspend(h);
                    };
                }
                U await_resume() {
                    return awaitable->await_resume();
                }
                // `defer` holds a pointer to this object; forbid moving it.
                void operator=(SubSub&&) = delete;
            };
            return SubSub{this, std::move(sub)};
        }
    };

    // Awaiting a semi_lazy: if the body already finished (e.g. cache hit),
    // we are ready immediately; otherwise run the deferred sub-await.
    auto operator co_await() {
        struct Awaitable {
            promise_type* p;
            bool await_ready() {
                return bool(p->val);
            }
            auto await_suspend(coroutine_handle<> h) {
                p->defer();
                // TODO this assumes task<> completes synchronously when
                // awaited. Good enough for prototype, but would need to
                // actually stash the handles for a real impl.
                coroutine_handle<promise_type>::from_promise(*p).resume();
                return false;  // value is ready now; don't actually suspend
            }
            T&& await_resume() {
                return std::move(*p->val);
            }
        };
        return Awaitable{p};
    }

    // Move-only owner of the coroutine frame.
    semi_lazy(semi_lazy&& source) : p(std::exchange(source.p, nullptr)) {}
    promise_type* p;
    semi_lazy(promise_type* p) : p(p) {}
    ~semi_lazy() {
        if (p) coroutine_handle<promise_type>::from_promise(*p).destroy();
    }
};
// Demo API for the cached-fetch example below; placeholder value types.
struct Key{};
struct Result{};
// Synchronous cache probe; empty optional on miss.
std::optional<Result> cache_lookup(Key);
// Lazy sub-task that fetches the value for a key — presumably also populates
// the cache as the name suggests; definition not visible here, confirm.
lazy<Result> fetch_to_cache(Key);
// Resolve `k` from the cache when possible; otherwise go through the
// lazy fetch sub-task (whose await is deferred by semi_lazy).
semi_lazy<Result> cached_fetch(Key k) {
    auto hit = cache_lookup(k);
    if (!hit) {
        co_return co_await fetch_to_cache(k);
    }
    co_return *hit;
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment