Skip to content

Instantly share code, notes, and snippets.

@DashBarkHuss
Last active August 11, 2022 18:28
Show Gist options
  • Save DashBarkHuss/9c74cdf35c60daef6f9e25de5af9e4fc to your computer and use it in GitHub Desktop.
Save DashBarkHuss/9c74cdf35c60daef6f9e25de5af9e4fc to your computer and use it in GitHub Desktop.
Background worker in Node.js with a React frontend, using polling to track job progress.
// run your react frontend on port 3000
import React, { useState, useEffect, useRef } from "react";
// Declarative interval hook: invokes `callback` every `delay` ms, or not at
// all while `delay` is null (Dan Abramov's setInterval-as-a-hook pattern).
function useInterval(callback, delay) {
  // Always hold the freshest callback so the running interval sees current
  // state/props without being torn down and recreated.
  const savedCallback = useRef();

  useEffect(() => {
    savedCallback.current = callback;
  }, [callback]);

  useEffect(() => {
    // A null delay pauses the interval entirely.
    if (delay === null) return;
    const id = setInterval(() => savedCallback.current(), delay);
    return () => clearInterval(id);
  }, [delay]);
}
export default function App() {
// Store for all of the jobs in progress
const [jobs, setJobs] = useState([]);
const [poll, setPoll] = useState(null);
useInterval(updateJobs, poll);
// Kick off a new job by POST-ing to the server
async function addJob() {
let res = await fetch("http://localhost:4000/job/", { method: "POST" });
let job = await res.json();
const jobsCopy = jobs;
jobsCopy.push({ id: job.id, state: "queued" });
setJobs(jobsCopy);
if (!poll) setPoll(200);
render();
}
// Fetch updates for each job
async function updateJobs() {
jobs.forEach(async (job) => {
let res = await fetch(`http://localhost:4000/job/${job.id}`);
let result = await res.json();
const jobExistsInJobsList = !!jobs.filter((j) => j.id === job.id);
if (jobExistsInJobsList) {
const jobsCopy = jobs;
const i = jobsCopy.findIndex((j) => j.id === result.id);
jobsCopy[i] = result;
setJobs(jobsCopy);
const noActiveOrQueuedJobs =
poll &&
!jobsCopy.find((j) => j.state === "active" || j.state === "queued");
if (noActiveOrQueuedJobs) {
setPoll(null);
}
}
render();
});
}
// Delete all stored jobs
function clear() {
setJobs([]);
}
// Update the UI colors
function render() {
const jobsCopy = jobs.map((job) => renderJob(job));
setJobs(jobsCopy);
}
// Renders the HTML for each job object
function renderJob(job) {
let progress = job.progress || 0;
let color = "blue";
if (job.state === "completed") {
color = "purple";
progress = 100;
} else if (job.state === "failed") {
color = "red";
progress = 100;
}
return { id: job.id, state: job.state, progress, color };
}
return (
<div>
<button onClick={addJob}>add job</button>
<button onClick={clear}>clear</button>
{jobs.map((job, i) => (
<div key={i} style={{ color: job.color }}>
<div>id: {job.id}</div>
<div>progress: {job.progress}</div>
</div>
))}
</div>
);
}
// Background worker process. Requires a running Redis instance.
// install redis and start redis- tutorial: https://www.youtube.com/watch?v=lgWjGkdrExA
const throng = require('throng');
const Queue = require('bull');
// Connect to a local redis instance locally, and the Heroku-provided URL in production
const REDIS_URL = process.env.REDIS_URL || 'redis://127.0.0.1:6379';
// Spin up multiple processes to handle jobs to take advantage of more CPU cores
// See: https://devcenter.heroku.com/articles/node-concurrency for more info
// NOTE(review): WEB_CONCURRENCY arrives as a string from the environment;
// throng accepts either — confirm if this value is ever used arithmetically.
const workers = process.env.WEB_CONCURRENCY || 2;
// The maximum number of jobs each worker should process at once. This will need
// to be tuned for your application. If each job is mostly waiting on network
// responses it can be much higher. If each job is CPU-intensive, it might need
// to be much lower.
const maxJobsPerWorker = 1;
// Promise-based delay: resolves (with undefined) after `ms` milliseconds.
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
// Worker entry point — run once in each clustered process by throng.
// Connects to the shared "work" queue and registers the job processor.
function start() {
  // Connect to the named work queue
  const workQueue = new Queue('work', REDIS_URL);

  workQueue.process(maxJobsPerWorker, async (job) => {
    // Demo processor: does no real work, just reports progress slowly.
    // Replace this with your own job logic.

    // throw an error 20% of the time
    if (Math.random() < 0.2) {
      console.log('a job failed');
      throw new Error('This job failed!');
    }

    // Tick progress from 0 to 100, pausing 50ms between steps.
    let pct = 0;
    while (pct < 100) {
      await sleep(50);
      pct += 1;
      job.progress(pct);
    }

    // A job can return values that will be stored in Redis as JSON.
    // This return value is unused in this demo application.
    return { value: pct };
  });
}
// Initialize the clustered worker processes: throng forks `workers`
// child processes and runs start() in each of them.
// See: https://devcenter.heroku.com/articles/node-concurrency for more info
throng({ workers, start });
// HTTP API server. run this on port 4000
const express = require('express');
const Queue = require('bull');
const cors = require('cors');
// Serve on PORT on Heroku and on localhost:5000 locally
const PORT = process.env.PORT || '5000';
// Connect to a local redis instance locally, and the Heroku-provided URL in production
const REDIS_URL = process.env.REDIS_URL || 'redis://127.0.0.1:6379';
const app = express();
// Allow the React dev server (port 3000) to call this API cross-origin.
app.use(
cors({
origin: 'http://localhost:3000',
})
);
// Create / Connect to a named work queue
// (the same 'work' queue the background worker processes).
const workQueue = new Queue('work', REDIS_URL);
// Kick off a new job by adding it to the work queue.
// Responds with { id } of the enqueued job.
app.post('/job', async (req, res) => {
  // This would be where you could pass arguments to the job
  // Ex: workQueue.add({ url: 'https://www.heroku.com' })
  // Docs: https://github.com/OptimalBits/bull/blob/develop/REFERENCE.md#queueadd
  try {
    const job = await workQueue.add();
    res.json({ id: job.id });
  } catch (err) {
    // BUG FIX: Express 4 does not catch rejections from async handlers;
    // a failed add() would otherwise leave the request hanging and raise
    // an unhandled rejection.
    console.error('Failed to enqueue job', err);
    res.status(500).json({ error: 'Failed to enqueue job' });
  }
});
// Allows the client to query the state of a background job.
// Responds with { id, state, progress, reason } or 404 if unknown.
app.get('/job/:id', async (req, res) => {
  const { id } = req.params;
  try {
    const job = await workQueue.getJob(id);
    // BUG FIX: tolerate both null and undefined for a missing job
    // (the original only matched `=== null`).
    if (job == null) {
      res.status(404).end();
      return;
    }
    const state = await job.getState();
    // Bull stores the latest reported progress on a private field.
    // eslint-disable-next-line no-underscore-dangle
    const progress = job._progress;
    const reason = job.failedReason;
    res.json({ id, state, progress, reason });
  } catch (err) {
    // BUG FIX: async handler errors are not caught by Express 4; without
    // this the request would hang on a Redis/queue failure.
    console.error(`Failed to look up job ${id}`, err);
    res.status(500).end();
  }
});
// You can listen to global events to get notified when jobs are processed.
// 'global:' events fire for jobs handled by any worker process, not only
// jobs created through this queue instance.
workQueue.on('global:completed', (jobId, result) => {
  // `result` is the JSON-serialized return value of the job processor.
  console.log(`Job completed with result ${result}`);
});
// Start the HTTP API.
app.listen(PORT, () => console.log('Server started!'));
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment