@jonchurch · Last active May 16, 2023
[WIP] Axios Request Queuing for Spacetraders.io rate limit .... uh, limiting
// axiosRequestQueue.ts
import axios, { AxiosInstance, AxiosRequestConfig } from 'axios';

interface Job {
  id: string;
  createdAt: string;
  status: 'queued' | 'processing' | 'completed' | 'failed';
  priority?: number;
  retryCount?: number;
  data?: any;
  execute: () => void;
}

type ConstructArgs = {
  maxRequestsPerSecond: number;
  burstRequests: number;
  burstTime: number;
  instance: AxiosInstance;
}
class RequestQueue {
  private instance: AxiosInstance;
  private queue: Job[];
  private maxRequestsPerSecond: number;
  private burstRequests: number;
  private burstTime: number;
  private requestsMade: number;
  private burstRequestsMade: number;
  private sending: boolean;
  private burstTimer: NodeJS.Timeout | null;
  private rateTimer: NodeJS.Timeout | null;
  private requestLogs: { time: string }[];
  private start: number | null;

  constructor({ maxRequestsPerSecond, burstRequests, burstTime, instance }: ConstructArgs) {
    this.queue = [];
    this.maxRequestsPerSecond = maxRequestsPerSecond;
    this.burstRequests = burstRequests;
    this.burstTime = burstTime;
    this.requestsMade = 0;
    this.burstRequestsMade = 0;
    this.sending = false;
    this.burstTimer = null;
    this.rateTimer = null;
    this.requestLogs = [];
    this.start = null;
    this.instance = instance ?? axios.create();
    this.addRequestInterceptor();
  }
  private async processQueue() {
    // Guard: only run while a drain loop is active
    if (!this.sending) {
      return;
    }
    if (this.queue.length === 0) {
      this.sending = false;
      return;
    }
    const canProcessBurstRequest = this.burstRequestsMade < this.burstRequests;
    const canProcessNonBurstRequest = this.requestsMade < this.maxRequestsPerSecond;
    if (canProcessBurstRequest || canProcessNonBurstRequest) {
      // console.log(`requestsMade: ${this.requestsMade} burstRequestsMade: ${this.burstRequestsMade}`);
      const { execute } = this.queue.shift() as Job;
      try {
        if (this.queue.length === 0) {
          // await execute()
          if (this.start) {
            // dang, I don't actually have the promise for the request,
            // so this won't be accurate: I can't tell when the request is actually done
            console.log(`Queue drained after ${Date.now() - this.start}ms`);
          }
        }
        execute();
        this.handleRequestSent(); // count the request after releasing the job
      } catch (error) {
        // we retry if we hit an error
        // (not sure yet what axios does when we throw inside the interceptor)
        console.log(error);
      }
    }
    // If there are more requests and we haven't exhausted the burst limit, process the next request immediately
    if (canProcessBurstRequest && this.queue.length > 0) {
      this.processQueue();
    } else {
      setTimeout(() => {
        this.processQueue();
      }, this.getRequestDelay());
    }
  }
  private getRequestDelay() {
    const delayBetweenRequests = 1000 / this.maxRequestsPerSecond;
    const burstWindowReset = this.burstTime * 1000;
    const rateLimitReset = 1000;
    if (!this.rateTimer) {
      this.rateTimer = setTimeout(() => {
        this.requestsMade = 0;
        // a single pending reset timer for the per-second counter should suffice
        this.rateTimer = null;
      }, rateLimitReset);
    }
    if (!this.burstTimer) {
      // does it matter that these resets are on fixed windows?
      // I could use the reset time from the response header, but
      // I still don't know how bursts are tracked; is the window rolling? I doubt it
      this.burstTimer = setTimeout(() => {
        this.burstRequestsMade = 0;
        this.burstTimer = null;
      }, burstWindowReset);
    }
    return delayBetweenRequests;
  }
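  // Worked example (my own illustration, using the numbers from the usage script below):
  // with maxRequestsPerSecond = 4 the delay between paced requests is 1000 / 4 = 250ms,
  // the per-second counter resets every 1000ms, and with burstTime = 10 the
  // burst counter resets every 10 * 1000 = 10000ms.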
  private handleRequestSent() {
    const timestamp = new Date();
    this.requestLogs.push({ time: timestamp.toISOString() });
    if (this.burstRequestsMade < this.burstRequests) {
      this.burstRequestsMade++;
    } else {
      this.requestsMade++;
    }
  }

  public exportRequestLogs() {
    return this.requestLogs;
  }
  // should also add a response interceptor to update
  // the queue information based on returned rate limit data
  // (see the sketch after this file)
  private addRequestInterceptor() {
    this.instance.interceptors.request.use(async (config: AxiosRequestConfig) => {
      return new Promise<AxiosRequestConfig>((resolve) => {
        const executeJob = () => {
          // resolving releases the request; processQueue counts it via handleRequestSent
          resolve(config);
        };
        const newJob: Job = {
          id: this.generateUniqueId(),
          createdAt: new Date().toISOString(),
          status: 'queued',
          execute: executeJob,
        };
        this.queue.push(newJob);
        if (!this.sending) {
          console.log('starting');
          this.start = Date.now();
          this.sending = true;
          this.processQueue();
        }
      });
    });
  }
  private generateUniqueId(): string {
    return (
      Date.now().toString(36) + Math.random().toString(36).slice(2, 7)
    ).toUpperCase();
  }

  getInstance(): AxiosInstance {
    return this.instance;
  }
}

export default RequestQueue;
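The TODO above mentions a response interceptor that feeds the server's rate limit data back into the queue. Below is a minimal sketch of what that could look like, assuming the x-ratelimit-remaining and x-ratelimit-reset headers referenced in the usage script's commented-out logging; the file name, function name, and log-only behavior are my own placeholders, not part of the gist.

// responseInterceptorSketch.ts (hypothetical, not in the original gist)
import { AxiosInstance } from 'axios';

// Attach a response interceptor that reads the API's rate limit headers.
// How RequestQueue should react to them is still an open question in the
// TODO above, so this only reports what the server says.
export function addRateLimitResponseInterceptor(instance: AxiosInstance) {
  instance.interceptors.response.use((response) => {
    const remaining = response.headers['x-ratelimit-remaining'];
    const reset = response.headers['x-ratelimit-reset'];
    if (remaining !== undefined && reset !== undefined) {
      const msUntilReset = new Date(reset).getTime() - Date.now();
      console.log(`rate limit: ${remaining} remaining, resets in ~${msUntilReset}ms`);
    }
    return response;
  });
  return instance;
}

A next step could be passing a callback into this function so RequestQueue can sync its requestsMade and burstRequestsMade counters to the server-reported values instead of relying only on its fixed timers.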
// usage script (file name not shown in the gist)
import fs from 'fs'
import axios from 'axios'
import axiosRetry from 'axios-retry'
import chalk from 'chalk'
import { Configuration, SystemsApi } from 'spacetraders-api'
import RequestQueue from './axiosRequestQueue'
import env from './.env.js'

const configuration = new Configuration({
  // read token from environment variables
  accessToken: env.TOKEN
})

// create our axios client
const instance = axios.create()
// set up retries
axiosRetry(instance)
// create rate limit queue
const rateLimitedAxiosQueue = new RequestQueue({
  // using 4 here to test the implementation, and also the retry behavior.
  // Interestingly, I don't know whether I messed something up, but at a 4 RPS limit
  // I only see around four 429s when making these 120 requests.
  // Something is off: either my code, the rate limit calculation, or my understanding!
  maxRequestsPerSecond: 4,
  burstRequests: 10,
  burstTime: 10,
  instance
})

// pass the axios instance with the rate limit queue
const systems = new SystemsApi(configuration, undefined, rateLimitedAxiosQueue.getInstance())

// let currentSystem: System | null = null

// dump the request log to disk when the process exits
process.on('exit', () => {
  const requestLogs = rateLimitedAxiosQueue.exportRequestLogs()
  fs.writeFileSync('run.json', JSON.stringify(requestLogs, null, 2));
})
async function run() {
  // fire all 120 requests without awaiting; the queue paces them
  for (let i = 0; i < 120; i++) {
    // console.log(`Running: ${i}`)
    systems.getSystem(env.HOME_SYSTEM)
      .then(res => {
        // const remaining = res.headers['x-ratelimit-remaining']
        // const reset = res.headers['x-ratelimit-reset']
        // console.log('')
        // console.log(`${i}:${res.status}: Remaining:${remaining} reset:${Math.abs(new Date(reset).getTime() - new Date().getTime())}`)
      })
      .catch((err: Error) => console.log(chalk.red(err.message)))
  }
}

run().catch(() => console.log("The loop threw"))
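Not part of the gist: a quick helper I'd use to sanity-check the run.json written on exit, bucketing the logged timestamps by second so the effective request rate can be compared against maxRequestsPerSecond and burstRequests. The file name checkRun.ts is made up.

// checkRun.ts (hypothetical helper)
import fs from 'fs'

const logs: { time: string }[] = JSON.parse(fs.readFileSync('run.json', 'utf8'))
const perSecond = new Map<string, number>()
for (const { time } of logs) {
  const second = time.slice(0, 19) // ISO 8601 timestamp truncated to the second
  perSecond.set(second, (perSecond.get(second) ?? 0) + 1)
}
for (const [second, count] of perSecond) {
  console.log(`${second}: ${count} request(s)`)
}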