/* A DECORATOR TO NOTIFY ABOUT UNUSUALLY LONG ASYNC CALLS */
/* Let your users and your error-reporting know when a network call takes unusually long */
// HOW TO USE:
/*
const makeCall = observe(.... my function that makes an async call, e.g. a network request ...., opts, takesLonger, takesMuchLonger, takesWayTooLong)
// example: const observedFetch = observe(fetch, {id: 'my-api-calls'}, () => console.log('This is taking a little longer than usual'), () => console.log('This is taking longer than usual'), () => console.log('This is taking much longer than usual'));
// Then proceed as usual
makeCall(....); // or observedFetch('https://example.com').then(i => i.text()).then(i => console.log(i));
// in the "takes longer" callbacks you can react however you want, e.g. you could hook in a notification system!
// unusually long calls are detected based on confidence intervals
// the first opts.min=5 calls are used to gather statistics before detection starts
// later calls are added to the sample pool until a size of opts.max=200 is reached
// samples survive page reloads via localStorage, but you can configure this with your own load, store and modified methods,
// e.g. you could use sessionStorage or even load/save the statistics to your server!
// the callbacks are called with an object containing the duration, the start and end timestamps, the level and the calculated confidence interval
*/
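/*
EXAMPLE: CUSTOM PERSISTENCE
// A minimal sketch of the load/store/modified hooks - the id and option values below are
// illustrative, not part of this gist. It keeps the sample pool in sessionStorage instead of localStorage:
const sessionOpts = {
  id: 'my-api-calls',
  load: id => { try { return JSON.parse(sessionStorage.getItem(id)) || []; } catch (e) { return []; } },
  store: (id, data) => sessionStorage.setItem(id, JSON.stringify(data)),
  modified: () => {}, // sessionStorage is per-tab, so there is no cross-tab change to listen for
};
const observedFetch = observe(fetch, sessionOpts, () => console.log('a little slow'), () => console.warn('slow'), () => console.error('very slow'));
*/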
export function getNormals(x) { // confidence intervals - 111K ops/s ± 5.4% (97.8 % slower)
  const sort = x.sort((a, b) => a - b); // sorts in place - mutates the input (!)
  const pe = (sort[Math.floor(x.length / 2)] + sort[Math.ceil(x.length / 2)]) / 2; // point estimate (approximate median)
  const tmp = Math.sqrt(x.map(i => (i - pe) * (i - pe) / (x.length - 1)).reduce((p, i) => p + i, 0)) / Math.sqrt(x.length); // standard error
  return {
    info: [pe - 1.96 * tmp, pe + 1.96 * tmp], // 95%
    warn: [pe - 2.57 * tmp, pe + 2.57 * tmp], // 99%
    error: [pe - 3.29 * tmp, pe + 3.29 * tmp], // 99.9%
  };
}
export function fastGetNormals(x) { // confidence intervals - 5.1M ops/s ± 1.87%
  const pe = x.reduce((p, c) => p + c, 0) / x.length; // point estimate (mean)
  const qq = x.reduce((p, i) => p + ((i - pe) * (i - pe) / (x.length - 1)), 0); // sample variance
  const tmp = Math.sqrt(qq) / Math.sqrt(x.length); // standard error
  return {
    info: [pe - 1.96 * tmp, pe + 1.96 * tmp], // 95%
    warn: [pe - 2.57 * tmp, pe + 2.57 * tmp], // 99%
    error: [pe - 3.29 * tmp, pe + 3.29 * tmp], // 99.9%
  };
}
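/*
EXAMPLE (illustrative): both estimators take an array of durations in ms and return
{info, warn, error} interval pairs; observe() below only uses the upper bound of each pair.
const durations = [120, 130, 125, 140, 135, 128, 132];
const { info, warn, error } = fastGetNormals(durations); // error[1] is the 99.9% upper bound
// a call whose duration exceeds error[1] triggers the onError callback
*/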
const noop = () => null;
const decorate = (onFn, now, level, interval) => () => onFn({ duration: Date.now() - now, start: now, end: Date.now(), level, interval });
const defaultLoad = id => { const raw = localStorage.getItem(id); try { return JSON.parse(raw); } catch (e) { return raw; } };
const defaultStore = (id, data) => localStorage.setItem(id, JSON.stringify(data));
const defaultModify = (id, cb) => addEventListener("storage", (evt) => {
  if (evt.key === id) {
    try { cb(JSON.parse(evt.newValue)); } catch (e) { cb(evt.newValue); } // keep the sample pool in sync across tabs
  }
});
function uuidv4() {
  return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c =>
    (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
  );
}
function createObserver(fn, ops, onInfo = noop, onWarn = noop, onError = noop) {
  const internalEvents = new EventTarget();
  const emit = (evt) => internalEvents.dispatchEvent(new CustomEvent(evt.level, { detail: evt })); // pass the data as `detail`
  const d = onFn => evt => { emit(evt); return onFn(evt); };
  const newFn = observe(fn, ops, d(onInfo), d(onWarn), d(onError));
  const handler1 = {
    apply: function(target, thisArg, argumentsList) {
      return newFn.apply(thisArg, argumentsList);
    },
    get: function(target, prop, receiver) {
      const def = Reflect.get(...arguments);
      if (internalEvents[prop]) {
        return new Proxy(internalEvents[prop], {
          // addEventListener() removeEventListener() dispatchEvent()
          apply: function(target, thisArg, argumentsList) {
            if (def) def.apply(thisArg, argumentsList);
            return target.apply(internalEvents, argumentsList); // must be invoked with the EventTarget as `this`
          }
        });
      }
      return def;
    }
  };
  return new Proxy(fn, handler1);
}
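/*
EXAMPLE (illustrative): createObserver is not exported here, but if you export it, the returned
proxy behaves like the wrapped function while also forwarding the EventTarget methods, so you
can subscribe to 'info' / 'warn' / 'error' events instead of (or in addition to) the callbacks:
const observedFetch = createObserver(fetch, { id: 'my-api-calls' });
observedFetch.addEventListener('warn', evt => console.warn('slow call', evt.detail));
observedFetch('https://example.com').then(r => r.text());
*/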
export default function observe(fn, { getNorm, id = uuidv4(), store = defaultStore, load = defaultLoad, modified = defaultModify, min = 5, max = 200 } = {}, onInfo = noop, onWarn = noop, onError = noop) {
  let samples = load(id) || [];
  let info, warn, error;
  const inited = (async () => {
    samples = await samples; // load can be async (and so can store)
    if (samples.length >= min) { // init if sufficient data was loaded
      const rates = (getNorm || getNormals)(samples);
      info = rates.info;
      warn = rates.warn;
      error = rates.error;
    }
    modified(id, data => (samples = data)); // keep samples in sync when another tab updates them
  })();
  return async (...args) => {
    await inited;
    const now = Date.now();
    const timers = error ? [ // only schedule notifications once enough samples have been gathered
      setTimeout(decorate(onError, now, 'error', error), error[1]), // we don't care about too-short requests, so only the upper bound matters
      setTimeout(decorate(onWarn, now, 'warn', warn), warn[1]),
      setTimeout(decorate(onInfo, now, 'info', info), info[1]),
    ] : [];
    let r;
    try {
      r = await fn(...args);
    } finally {
      timers.forEach(t => clearTimeout(t));
    }
    // in case of an error the awaited call throws, so we only clear the timers and never recalculate below
    const diff = Date.now() - now;
    if (samples.length <= max) {
      samples.push(diff);
      store(id, samples);
    }
    if (samples.length >= min) {
      const getNorms = getNorm || (samples.length > 20000 ? fastGetNormals : getNormals); // for big sample pools (only reachable if opts.max is raised well above the default) choose the faster algorithm so the calculation doesn't add a noticeable delay
      const rates = getNorms(samples);
      info = rates.info;
      warn = rates.warn;
      error = rates.error;
    }
    return r;
  };
}
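/*
EXAMPLE: SERVER-SIDE STATISTICS
// A hedged sketch of the async load/store hooks - the endpoint '/api/latency-samples' is hypothetical.
// load and store may return promises, so the sample pool can live on your server instead of the browser:
const serverOpts = {
  id: 'my-api-calls',
  load: async id => { try { return await (await fetch(`/api/latency-samples/${id}`)).json(); } catch (e) { return []; } },
  store: (id, data) => fetch(`/api/latency-samples/${id}`, { method: 'PUT', body: JSON.stringify(data) }),
  modified: () => {}, // no cross-tab sync needed when the server is the source of truth
};
const observedFetch = observe(fetch, serverOpts, () => console.log('a little slow'), () => console.warn('slow'), () => console.error('very slow'));
*/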