addEventListener('fetch', event => event.respondWith(Router(event)))
async function cacheOnRequest (requestOrUrl, { headers, options }) {
  let request = new Request(requestOrUrl)

  /* You can pass a plain object or a Headers instance for headers.req / headers.res -
   * both are accepted by the Headers constructor in the Service Workers spec
   */
  if (headers && headers.req) {
    const reqHeaders = new Headers(request.headers)
    for (const [k, v] of new Headers(headers.req)) {
      reqHeaders.set(k, v)
    }
    request = new Request(request, { headers: reqHeaders })
  }

  /* redirect: manual and method: GET are the default options for the Request
   * constructor, so you don't need to define them explicitly.
   *
   * The reason we test the type of requestOrUrl below is that if requestOrUrl
   * is a Request object, rather than just a URL, we probably want to retain
   * the Request object's settings
   */
  if (!(requestOrUrl instanceof Request) && !options) {
    options = {
      cf: {
        cacheTtlByStatus: {
          "400-599": 0
        }
      }
    }
  }

  request = new Request(request, options)

  console.group(`Headers for ${request.url}`)
  console.info(JSON.stringify(Object.fromEntries(request.headers), null, 2))

  let response = await fetch(request)

  if (headers && headers.res) {
    /* Copy the response so its headers are mutable, then apply the overrides */
    response = new Response(response.body, response)
    for (const [k, v] of new Headers(headers.res)) {
      response.headers.set(k, v)
    }
  }

  console.info(JSON.stringify(Object.fromEntries(response.headers), null, 2))
  console.groupEnd()
  return response
}
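/* Example call (a sketch, not part of the original gist - the URL and header names
 * below are made up). It shows that headers.req / headers.res can be either a plain
 * object or a Headers instance, since both are normalised through new Headers() above.
 *
 * cacheOnRequest('https://example.com/asset.js', {
 *   headers: {
 *     req: { 'X-Demo-Request-Header': '1' },              // plain object
 *     res: new Headers({ 'X-Demo-Response-Header': '1' }) // Headers instance
 *   }
 * })
 */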
async function cacheBigChunks (event, requestOrUrl, { ttl, headers, mightBeOversized = false }) {
  const cache = caches.default
  let request = new Request(requestOrUrl)

  /* Merge any custom headers into a mutable copy of the request */
  if (headers) {
    const reqHeaders = new Headers(request.headers)
    for (const [k, v] of new Headers(headers)) {
      reqHeaders.set(k, v)
    }
    request = new Request(request, { headers: reqHeaders })
  }

  let response = await cache.match(request)
  if (response instanceof Response) {
    return response
  }

  if (mightBeOversized) {
    let checkResourceSize = await fetch(requestOrUrl, { method: 'HEAD' })
    let contentLength = parseInt(checkResourceSize.headers.get('Content-Length'), 10)
    if (contentLength > 499995555) {
      return oversizedResource(request, ttl) // we've already set headers on this request object
    }
  }

  let originResponse = await fetch(request)

  /* Responses with no body (e.g. 304s) cannot be split or streamed into cache */
  if (!originResponse.body) return originResponse

  /* Split the origin body in two: one stream goes back to the client, the other
   * is written into the Cache API without buffering the whole payload
   */
  let [clientStream, cacheStream] = originResponse.body.tee()
  let eyeballResponse = new Response(clientStream, originResponse)
  let cacheResponse = new Response(cacheStream, originResponse)

  if (ttl) {
    /* cache.put() honours the response's Cache-Control header, so set s-maxage on the cached copy */
    cacheResponse.headers.set('Cache-Control', `s-maxage=${ttl}`)
  }

  event.waitUntil(cache.put(request, cacheResponse))
  return eyeballResponse
}
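/* The note in Router() below mentions a 304 handler for cacheBigChunks. The original
 * gist does not include one, so this is only a sketch of what revalidating a client
 * against the cached copy could look like (the helper name and wiring are assumptions):
 */
function maybeNotModified (request, cachedResponse) {
  // Compare the client's If-None-Match with the ETag stored alongside the cached body
  const etag = cachedResponse.headers.get('ETag')
  const ifNoneMatch = request.headers.get('If-None-Match')
  if (etag && ifNoneMatch === etag) {
    // The client already has the current copy: send headers only, no body
    return new Response(null, { status: 304, headers: { 'ETag': etag } })
  }
  return cachedResponse
}
// e.g. inside cacheBigChunks: if (response instanceof Response) return maybeNotModified(request, response)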
async function oversizedResource (request, ttl = 86400) {
  return fetch(request, {
    cf: {
      cacheEverything: true,
      cacheTtlByStatus: {
        "200-299": ttl,
        "300-599": 0
      },
      cacheKey: request.url
    }
  })
}
async function neverCache (request) {
  return fetch(request, {
    cf: {
      cacheTtl: -1
    }
  })
}
async function Router (event) {
  /* These requests will be cached based on their URI. Cloudflare's cache will store whatever the origin
   * sends back to us - including Cache-Tags, which are always valid when sent from the origin. The major
   * difference here is that the headers you apply when caching on the request are applied AFTER the response
   * is stored in cache but before clients see the payload. Thus, you cannot set Cache-Tags in your Workers
   * script in this case
   */
  let cdndistro = cacheOnRequest('https://cdndistro.fast.io/api/current/server/cdn/', {
    headers: {
      req: {
        'cdn_auth': true,
        'cdnauth': '0#s8d)a09d',
        'cdnprovider': 'cloudflare',
        'cdnhost': 'fastdevtom.imfast.io',
        'cdnpath': '/counter.txt',
        'cdnfilename': 'counter.txt'
      }
    },
    options: {
      timeout: 5,
      cf: {
        // disable scrape shield
        scrapeShield: false,
        // disable apps
        apps: false,
        // cache time
        cacheTtlByStatus: {
          "200-299": -1,
          "301-599": 120
        }
      }
    }
  })
  /* This uses the Cache API. In this instance, you can set Cache-Tags directly in Workers and purge the resources later on.
   * Note: if you decide to rewrite this Worker, do not forget the 304 handler for the cacheBigChunks function above
   * (a sketch follows that function).
   *
   * When you set "mightBeOversized", I've programmed the Worker to make a HEAD request before requesting the file. If the
   * Content-Length is greater than 500MB, then we simply use a function to request the resource via cache-on-request, as described above.
   */
  let gzip480mb = cacheBigChunks(event, 'https://fastdevtom.imfast.io/480mbtest.zip', {
    ttl: 86400, // optional. defaults to the cache control response directives
    headers: {
      'X-my-header': 'can be added',
      'Cache-Tag': 'king-kong'
    },
    mightBeOversized: true
  })
  /* Never cache */
  let sensitiveFile = neverCache(new Request('https://fastdevtom.imfast.io/counter.txt', { method: 'POST' }))

  /* For this demo, hand the large-file response back to the client; swap in
   * cdndistro or sensitiveFile depending on the route you want to serve
   */
  return gzip480mb
}
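/* The comment above the cacheBigChunks() call mentions purging cached resources by
 * Cache-Tag later on. That purge happens through the Cloudflare API (an Enterprise
 * feature), not inside the Worker. A minimal sketch - the helper name is made up, and
 * the zone ID and API token are placeholders you supply yourself:
 */
async function purgeByCacheTag (tags, zoneId, apiToken) {
  return fetch(`https://api.cloudflare.com/client/v4/zones/${zoneId}/purge_cache`, {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${apiToken}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ tags })
  })
}
// e.g. purgeByCacheTag(['king-kong'], '<zone id>', '<api token>')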