Skip to content

Instantly share code, notes, and snippets.

@jord-goldberg
Created April 14, 2021 17:18
Show Gist options
  • Save jord-goldberg/9a1d2de601f4a08304bad40ed62dcdfd to your computer and use it in GitHub Desktop.
Save jord-goldberg/9a1d2de601f4a08304bad40ed62dcdfd to your computer and use it in GitHub Desktop.
import set from "lodash/set";
import { createAction, createSlice } from "@reduxjs/toolkit";
import { all, fork } from "redux-saga/effects";
// True when `saga` is usable by createFeature: either a watcher generator
// itself, or a { watcherEffect, worker } descriptor with a worker function.
function isValidFeatureSaga(saga) {
  if (typeof saga === "function") return true;
  return typeof saga?.worker === "function";
}
// Builds the namespaced redux action type, e.g. "users/fetchCollection".
const getType = (featureName, actionKey) => [featureName, actionKey].join("/");
/**
* Wraps @reduxjs/toolkit `createSlice` function
* (https://redux-toolkit.js.org/api/createSlice), but also incorporates
* redux-saga. Sagas can listen to actions and dispatch new actions in
* response. They can be added similarly to how prepare is added to customize
* action payloads:
*
* @example
*
* features: {
* fetchCollection: {
* prepare(query) {
* if (query instanceof Query) {
* return { payload: query, meta: "Example meta" };
* }
* throw Error("Query object is required to fetch collection.");
* },
* reducer(state, action) {
* state.query = action.payload;
* state.isLoading = true;
* state.error = null;
* },
* saga: {
* watcherEffect: takeLatest,
* worker: function*(actionCreators, action) {
* const { fetchCollectionSuccess, fetchCollectionError } = actionCreators;
* try {
* const collection = yield call(api, action.payload);
* yield put(fetchCollectionSuccess(collection));
* } catch (err) {
* yield put(fetchCollectionError(err));
* }
* }
* }
* }
* }
*/
export function createFeature({
  name,
  initialState,
  features,
  extraFeatures = {}
}) {
  /*
   * REDUCERS
   *
   * We remove features from `createSlice` that have no reducer.
   * Those features need to call `createAction` separately from `createSlice`
   */
  const actionKeys = Object.keys(features);
  const reducersMap = {};
  const sagaActionsMap = {};
  actionKeys.forEach(actionKey => {
    const maybeEnhancedReducer = features[actionKey];
    if (isValidFeatureSaga(maybeEnhancedReducer.saga)) {
      const { prepare = payload => ({ payload }) } = maybeEnhancedReducer;
      // combined with promiseMiddleware in src/store.js, this makes saga actions
      // return a Promise that can optionally be resolved within the saga.
      // By convention, feature sagas resolve the promise with a success/error
      // action that can be inspected for a `payload` or `error` property.
      // Inspiration from: https://redux-toolkit.js.org/api/createAsyncThunk#handling-thunk-results
      maybeEnhancedReducer.prepare = (...args) => {
        const action = prepare(...args);
        // keep an explicit meta.promise if the original prepare already set one
        return set(action, "meta.promise", action.meta?.promise ?? true);
      };
    }
    const isOrHasReducer =
      typeof maybeEnhancedReducer === "function" ||
      "reducer" in maybeEnhancedReducer;
    if (isOrHasReducer) {
      reducersMap[actionKey] = maybeEnhancedReducer;
    } else {
      // saga-only feature: createSlice won't generate an action creator for
      // it, so create one here and merge it into slice.actions further down
      const type = getType(name, actionKey);
      sagaActionsMap[actionKey] =
        "prepare" in maybeEnhancedReducer
          ? createAction(type, maybeEnhancedReducer.prepare)
          : createAction(type);
    }
  });
  /*
   * EXTRA REDUCERS
   *
   * the `extraFeatures` property is an object of
   * maybeEnhancedReducers so we must check to see if they're a reducer
   * function or an object with reducer/saga properties
   */
  const extraActionKeys = Object.keys(extraFeatures);
  const extraReducerMap = extraActionKeys.reduce((map, actionKey) => {
    const maybeEnhancedReducer = extraFeatures[actionKey];
    const isOrHasReducer =
      typeof maybeEnhancedReducer === "function" ||
      "reducer" in maybeEnhancedReducer;
    if (isOrHasReducer) {
      map[actionKey] =
        typeof maybeEnhancedReducer === "function"
          ? maybeEnhancedReducer
          : maybeEnhancedReducer.reducer;
    }
    return map;
  }, {});
  const slice = createSlice({
    name,
    initialState,
    reducers: reducersMap,
    extraReducers: extraReducerMap
  });
  // expose the standalone (saga-only) action creators alongside the ones
  // generated by createSlice
  Object.assign(slice.actions, sagaActionsMap);
  /*
   * SAGAS
   */
  const { actions } = slice;
  // Each feature saga becomes a watcher generator:
  //  - { watcherEffect, worker }  -> yields watcherEffect(type, worker, actions)
  //  - plain generator function   -> yields saga(actions, type)
  let sagaMap = Object.keys(features).reduce((map, action) => {
    const saga = features[action].saga;
    if (isValidFeatureSaga(saga)) {
      map[action] = function*() {
        const effect = saga.worker ? saga.watcherEffect : saga;
        const args = saga.worker
          ? [actions[action].type, saga.worker, actions]
          : [actions, actions[action].type];
        yield effect(...args);
      };
    }
    return map;
  }, {});
  // NOTE(review): for extraFeatures the map key itself is used as the action
  // type — this assumes extraFeatures keys are fully-qualified types
  // (e.g. "otherFeature/someAction"); confirm with callers.
  sagaMap = extraActionKeys.reduce((map, actionType) => {
    const saga = extraFeatures[actionType].saga;
    if (isValidFeatureSaga(saga)) {
      map[actionType] = function*() {
        const effect = saga.worker ? saga.watcherEffect : saga;
        const args = saga.worker
          ? [actionType, saga.worker, actions]
          : [actions, actionType];
        yield effect(...args);
      };
    }
    return map;
  }, sagaMap);
  slice.caseSagas = sagaMap;
  // root saga for this feature: runs every watcher concurrently
  slice.saga = function* saga() {
    yield all(Object.values(sagaMap).map(saga => fork(saga)));
  };
  /*
   * SELECTORS
   */
  slice.selectors = {};
  return slice;
}
import set from "lodash/set";
import { channel } from "redux-saga";
import {
call,
delay,
fork,
put,
select,
take,
takeLatest
} from "redux-saga/effects";
import { errorReducer, getFeatureName, batchActions } from "./index";
/**
 * Feature factory for deleting a single resource by id.
 *
 * Deletes are funneled through a pool of `maxConcurrentDeletes` worker
 * "threads"; ids already queued or in flight are de-duplicated via `taskMap`,
 * and rapid successes are debounced (30ms) into a single batched dispatch.
 *
 * Fix: the previous implementation unshifted the `redistributePages` action
 * into the SAME array it later indexed by success position, so whenever
 * `pages != null` every caller's dispatch promise was resolved with the
 * wrong (off-by-one) action. Success actions are now kept in their own
 * array so promise resolution stays index-aligned.
 */
export const getDeleteSingleFeature = ({ api, maxConcurrentDeletes = 10 }) => ({
  deleteSingle: {
    // `clearData` rides on meta so other reducers/sagas can react to it
    prepare: (id, clearData = false) => ({
      payload: id,
      meta: { clearData }
    }),
    reducer(state, action) {
      const id = action.payload;
      // lodash `set` creates the data[id] entry when it doesn't exist yet
      set(state.data, `${id}.isLoading`, true);
      state.data[id].error = null;
    },
    saga: function*(actionCreators, actionType) {
      const {
        deleteSingleSuccess,
        deleteSingleError,
        redistributePages
      } = actionCreators;
      // id -> true while a DELETE for that id is queued or in flight
      const taskMap = {};
      const workerChannel = yield call(channel);
      const successChannel = yield call(channel);
      // create worker 'threads' (i.e. N max concurrent DELETE requests)
      for (let i = 0; i < maxConcurrentDeletes; i++) {
        yield fork(function* deleteSingleWorker() {
          while (true) {
            const action = yield take(workerChannel);
            try {
              yield call(api, action.payload);
              yield put(successChannel, action);
            } catch (err) {
              const path = `data.${action.payload}`;
              const resultAction = deleteSingleError(err, { path });
              yield put(resultAction);
              // resolve the dispatch promise with the error action so the
              // caller can inspect `error` (see promiseMiddleware)
              action.meta.promise.resolve(resultAction);
              delete taskMap[action.payload];
            }
          }
        });
      }
      const buffer = [];
      yield takeLatest(successChannel, function*(action) {
        // we collect successes and wait 30ms before proceeding. since we use `takeLatest`,
        // if another deleteSuccess comes quickly after, this task is cancelled and
        // a new one is run; no dispatch to store will happen until 30ms has passed
        buffer.push(action);
        yield delay(30);
        const successes = buffer.splice(0, buffer.length);
        const successActions = [];
        const ids = [];
        successes.forEach((success, i) => {
          delete taskMap[success.payload];
          successActions[i] = deleteSingleSuccess(success.payload);
          ids[i] = success.payload;
        });
        const featureName = getFeatureName(actionType);
        const { pages } = yield select(state => state[featureName]);
        // Prepend the page redistribution without disturbing the index
        // alignment between `successes` and `successActions`.
        const batched =
          pages != null
            ? [redistributePages({ filterIds: ids }), ...successActions]
            : successActions;
        yield put(batchActions(batched));
        successes.forEach((success, i) =>
          success.meta.promise.resolve(successActions[i])
        );
      });
      while (true) {
        const action = yield take(actionType);
        // ignore duplicate deletes for an id that is already queued/in flight
        if (taskMap[action.payload] == null) {
          taskMap[action.payload] = true;
          yield put(workerChannel, action);
        }
      }
    }
  },
  deleteSingleSuccess(state, action) {
    // remove the deleted resource from the normalized data map
    delete state.data[action.payload];
  },
  deleteSingleError: errorReducer
});
import set from "lodash/set";
import { createAction } from "@reduxjs/toolkit";
import { call, fork, take } from "redux-saga/effects";
// Shared { prepare, reducer } pair for *Error actions.
// `prepare` stores the Error on `action.error` and optionally records, via
// `meta.path`, one or more state paths where loading/error flags should land.
// `reducer` tags the error with the action type and writes
// `isLoading = false` / `error` either at each given path or at the root.
export const errorReducer = {
  prepare(error, { path } = {}) {
    const action = { error, payload: undefined };
    if (path) action.meta = { path };
    return action;
  },
  reducer(state, action) {
    const { error, meta, type } = action;
    // tag the error with its action type for easier debugging upstream
    if (error) error.type = type;
    const path = meta?.path;
    if (!path) {
      state.isLoading = false;
      state.error = error;
      return;
    }
    const paths = Array.isArray(path) ? path : [path];
    for (const p of paths) {
      set(state, `${p}.isLoading`, false);
      set(state, `${p}.error`, error);
    }
  }
};
// Action creator for dispatching several actions in one render pass; the
// `meta.batch` flag is what batchActionsMiddleware keys on.
export const batchActions = createAction("batchActions", actions => {
  return {
    payload: actions,
    meta: { batch: true }
  };
});
// Derives the owning feature/slice name from an action object or a raw
// action-type string, e.g. "users/fetchCollection" -> "users".
export const getFeatureName = actionOrType => {
  const type = actionOrType.type ?? actionOrType;
  const [featureName] = type.split("/");
  return featureName;
};
// Fresh base state for a collection feature; a new object graph is returned
// on every call so slices never share state.
export const getInitialBaseState = () => {
  return {
    data: {}, // id -> { resource: Object, isLoading: boolean, error: Error }
    query: undefined, // latest query
    pages: {}, // pageNumber -> id[]
    total: undefined,
    isLoading: false,
    error: undefined
  };
};
/**
 * Like redux-saga's takeLeading, but keyed per `action.payload.id`
 * (falling back to the payload itself): while a task for an id is running,
 * further actions for that id are ignored; different ids run concurrently.
 *
 * Fix: the task-completion promise previously only had a fulfillment
 * handler, so a task that errored left a stale map entry and surfaced as an
 * unhandled promise rejection. Cleanup now runs on both outcomes.
 */
export function takeLeadingPerId(actionType, ...args) {
  return fork(function*() {
    const idTaskMap = {};
    while (true) {
      const action = yield take(actionType);
      const id = action.payload.id ?? action.payload;
      const runningTask = idTaskMap[id];
      if (!runningTask || !runningTask.isRunning()) {
        const task = yield fork(...args, action);
        idTaskMap[id] = task;
        const cleanup = () => delete idTaskMap[id];
        // settle handler for BOTH fulfillment and rejection — prevents
        // unhandled rejections and stale entries when the worker throws
        task.toPromise().then(cleanup, cleanup);
      }
    }
  });
}
import { Query } from "some-api-service";
// Default page size, taken from the API service's Query defaults.
// NOTE(review): assumes Query.DEFAULT_PARAMS.pagination.limit exists at
// module load — confirm against "some-api-service".
const DEFAULT_LIMIT = Query.DEFAULT_PARAMS.pagination.limit;
/**
 * Re-chunks a `pageNumber -> id[]` map. Options:
 *  - unshiftIds: prepended onto the special page 0 (outside normal paging)
 *  - concatIds:  appended after the last existing page, spilling as needed
 *  - filterIds:  removed everywhere, with later ids shifted up to fill gaps
 *  - oldLimit:   previous page size, used to recompute absolute positions
 * Mutates `pages` in place for unshift/concat (matching the reducer's
 * immer-draft usage) and returns the resulting page map.
 */
const redistributePages = (pages, limit, opts = {}) => {
  if (limit <= 0) throw Error("limit must be greater than 0");
  const {
    oldLimit = limit,
    unshiftIds = [],
    concatIds = [],
    filterIds = []
  } = opts;
  // Append `ids` to `target[pageNum]`, spilling whatever exceeds `limit`
  // onto subsequent pages (iterative form of the original recursion).
  const placeIds = (target, ids, pageNum) => {
    let remaining = ids;
    let current = pageNum;
    do {
      const page = target[current] ?? [];
      const room = Math.max(limit - page.length, 0);
      target[current] = page.concat(remaining.slice(0, room));
      remaining = remaining.slice(room);
      current += 1;
    } while (remaining.length > 0);
  };
  if (unshiftIds.length) {
    // page 0 holds prepended ids and is exempt from the limit
    pages[0] = unshiftIds.concat(pages[0] ?? []);
  }
  if (concatIds.length) {
    // integer-like keys enumerate in ascending order, so pop() = last page
    const lastPageKey = Object.keys(pages).pop();
    placeIds(pages, concatIds, parseInt(lastPageKey, 10) || 1);
  }
  if (filterIds.length === 0 && oldLimit === limit) {
    return { ...pages };
  }
  // Rebuild the map: drop filtered ids and re-chunk to the new limit while
  // keeping each surviving id's absolute position stable.
  const rebuilt = {};
  const excluded = new Set(filterIds);
  let drift = 0; // running delta between old and new absolute offsets
  for (const oldPageNum of Object.keys(pages)) {
    const keptIds = filterIds.length
      ? pages[oldPageNum].filter(id => !excluded.has(id))
      : pages[oldPageNum];
    if (oldPageNum === "0") {
      rebuilt[0] = keptIds;
      continue;
    }
    const absStartIndex = (parseInt(oldPageNum, 10) - 1) * oldLimit + 1;
    const newPageNum = Math.ceil((absStartIndex + drift) / limit);
    placeIds(rebuilt, keptIds, newPageNum);
    drift += keptIds.length - oldLimit;
  }
  return rebuilt;
};
// Immer-style reducer body: applies redistributePages to `state.pages`,
// keeps `state.query.pagination.limit` and `state.total` consistent with
// the ids that were added/removed.
export const redistributePagesReducer = (state, options) => {
  const currentLimit = state.query?.pagination.limit ?? DEFAULT_LIMIT;
  const {
    limit = currentLimit,
    unshiftIds = [],
    concatIds = [],
    filterIds = []
  } = options;
  state.pages = redistributePages(state.pages, limit, {
    oldLimit: currentLimit,
    unshiftIds,
    concatIds,
    filterIds
  });
  // page size changed: clone the query and record the new limit on the clone
  if (state.query && limit !== state.query.pagination.limit) {
    const nextQuery = new Query(state.query);
    nextQuery.pagination.limit = limit;
    state.query = nextQuery;
  }
  // keep `total` in sync with the net number of ids added/removed
  const netChange = unshiftIds.length + concatIds.length - filterIds.length;
  if (netChange && Number.isInteger(state.total)) {
    state.total += netChange;
  }
};
// Feature fragment exposing a `redistributePages` action whose payload is
// the options object forwarded to redistributePagesReducer.
export const getPaginationFeature = () => ({
  redistributePages: (state, action) => {
    redistributePagesReducer(state, action.payload);
  }
});
import { combineReducers } from "redux";
import { batch } from "react-redux";
import { configureStore } from "@reduxjs/toolkit";
import { createLogger } from "redux-logger";
import createSagaMiddleware from "redux-saga";
import { all, fork } from "redux-saga/effects";
import authFeature from "./features/auth";
import settingsFeature from "./features/settings";
// True outside production builds; gates redux-logger and Redux DevTools below.
const isDev = process.env.NODE_ENV !== "production";
// Redux middleware: an action tagged `meta.batch` carries an array of actions
// in its payload; they are all dispatched inside one react-redux `batch()` so
// connected components re-render once. The batch action itself is NOT passed
// down the chain — the array of dispatch results is returned instead.
const batchActionsMiddleware = store => next => action => {
  if (!action?.meta?.batch) {
    return next(action);
  }
  let results;
  batch(() => {
    results = action.payload.map(inner => store.dispatch(inner));
  });
  return results;
};
// Redux middleware: when an action carries `meta.promise`, dispatching it
// returns a Promise and the resolver is stashed on `action.meta.promise` so
// a saga can settle it later (see createFeature).
// The promises generated by dispatches can only resolve (no rejections)
// so that catching errors at the dispatch call-site can be ignored
const promiseMiddleware = store => next => action => {
  if (!action?.meta?.promise) {
    return next(action);
  }
  return new Promise(resolve => {
    action.meta.promise = { resolve };
    next(action);
  });
};
// Static reducers for features that are always present; code-split features
// are added later via store.injectFeature.
const rootReducer = {
  [authFeature.name]: authFeature.reducer,
  [settingsFeature.name]: settingsFeature.reducer
};
// NOTE(review): only the auth feature's saga is started here; if
// settingsFeature also defines a saga it is never run — confirm intentional.
function* rootSaga() {
  yield fork(authFeature.saga);
}
/**
 * Builds a configured redux store. When `name` is given, `reducer` is
 * mounted under that key; otherwise `reducer` is treated as an existing
 * reducer map (shallow-copied so injections don't mutate the caller's
 * object). The returned store gains `injectFeature(feature)` for lazily
 * registering a feature's reducer and saga after creation.
 */
export const createStore = ({ name, reducer, saga, preloadedState }) => {
  const reducerMap = name ? { [name]: reducer } : { ...reducer };
  const sagaMiddleware = createSagaMiddleware();
  const middleware = [
    batchActionsMiddleware,
    promiseMiddleware,
    sagaMiddleware
  ];
  // redux-logger only in development, always last in the chain
  if (isDev) middleware.push(createLogger({ collapsed: true }));
  const store = configureStore({
    reducer: reducerMap,
    middleware,
    preloadedState,
    devTools: isDev
  });
  sagaMiddleware.run(saga);
  store.injectFeature = feature => {
    // idempotent: a feature is only registered the first time it is seen
    if (reducerMap[feature.name]) return;
    reducerMap[feature.name] = feature.reducer;
    store.replaceReducer(combineReducers(reducerMap));
    sagaMiddleware.run(feature.saga);
  };
  return store;
};
// Application singleton store: mounts the static reducers and starts the
// root saga. `injectFeature` is re-exported for code-split features.
const store = createStore({
  reducer: rootReducer,
  saga: rootSaga
});
export default store;
export const injectFeature = store.injectFeature;
import set from "lodash/set";
import { call, put, select, take, race } from "redux-saga/effects";
import {
errorReducer,
getFeatureName,
batchActions,
takeLeadingPerId
} from "./index";
import { ScimPatch } from "some-api-service";
/**
 * Feature factory for updating a single resource by id. Updates for the same
 * id are serialized through takeLeadingPerId; each worker maintains a stack
 * of pending requests and only settles loading/error state once ALL of them
 * have resolved, keeping the most recently completed value.
 */
export const getUpdateSingleFeature = ({ api, scimPatch = false }) => ({
  updateSingle: {
    // meta carries per-call options (e.g. { scimPatch }) through to the saga
    prepare: (resource, options = {}) => ({
      payload: resource,
      meta: options
    }),
    reducer(state, action) {
      const { id } = action.payload;
      // lodash `set` creates the data[id] entry when it doesn't exist yet
      set(state.data, `${id}.isLoading`, true);
      state.data[id].error = null;
    },
    saga: {
      // Allows a stream of updates per uniquely id'd `action.payload`.
      // If responses from previous updates are still pending and new
      // actions come in, loading and error states will only be
      // resolved when all update requests are settled
      watcherEffect: takeLeadingPerId,
      worker: function*(actionCreators, action) {
        const {
          updateSingleLocally,
          updateSingleSuccess,
          updateSingleError
        } = actionCreators;
        const { id } = action.payload;
        const featureName = getFeatureName(action);
        let { resource } = yield select(state => state[featureName].data[id]);
        // keep track of relative updatedAt time to sort update stack
        // priority - most recently updated is ultimately kept
        // (stack[0] is the pre-update snapshot so there is always a fallback)
        const stack = [{ resource, updatedAt: performance.now() }];
        function* addToUpdateStack(currentAction) {
          let { payload: nextResource, meta: options } = currentAction;
          // per-action option overrides the feature-level scimPatch default
          if (options.scimPatch ?? scimPatch) {
            const currentResource = yield select(
              state => state[featureName].data[id].resource
            );
            nextResource = new ScimPatch(nextResource, currentResource);
          }
          // optimistic update: reflect the change locally right away
          yield put(updateSingleLocally(currentAction.payload));
          const update = api(id, nextResource).then(response => ({
            resource: response,
            updatedAt: performance.now()
          }));
          stack.push(update);
          // wait for every pending request to settle, unless another update
          // for this same id arrives first — then recurse with that action
          const [responses, nextAction] = yield race([
            call(() => Promise.allSettled(stack)),
            take(
              nextAction =>
                nextAction.type === action.type &&
                nextAction.payload.id === action.payload.id
            )
          ]);
          if (nextAction) {
            yield call(addToUpdateStack, nextAction);
            return;
          }
          // if the final response has an error, consider the update an error
          const { reason: err } = responses[responses.length - 1];
          // most recently completed value wins; the filter always keeps at
          // least stack[0], which allSettled reports as fulfilled (it is a
          // plain object, not a promise), so `.pop()` cannot be undefined
          const { value } = responses
            .filter(response => response.value != null)
            .sort((a, b) => a.value.updatedAt - b.value.updatedAt)
            .pop();
          let resultAction;
          if (err) {
            // roll the local copy back to the last good value and record the error
            resultAction = updateSingleError(err, { path: `data.${id}` });
            const actions = [updateSingleLocally(value.resource), resultAction];
            yield put(batchActions(actions));
          } else {
            resultAction = updateSingleSuccess(value.resource);
            yield put(resultAction);
          }
          // settle the dispatch promise (see promiseMiddleware) with the outcome
          action.meta.promise.resolve(resultAction);
        }
        yield call(addToUpdateStack, action);
      }
    }
  },
  updateSingleLocally(state, action) {
    const { id } = action.payload;
    state.data[id].resource = action.payload;
  },
  updateSingleSuccess(state, action) {
    const { id } = action.payload;
    set(state.data, `${id}.isLoading`, false);
    state.data[id].resource = action.payload;
  },
  updateSingleError: errorReducer
});
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment