-
-
Save domenic/440db8eac99c5b41bf95 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Fetches `url`, reporting download progress via the `progress` callback
// (a fraction of Content-Length, in [0, 1]), and resolves with the response
// body decoded as text.
//
// Fixes from the original:
//  - `async fetchWithProgress(...)` is object-method shorthand and is a
//    syntax error at top level; made it a function declaration.
//  - The draft-era `response.body.state` / `.ready` / `.read()` API never
//    shipped; the standard way to consume a body is a reader obtained via
//    `getReader()`.
//  - Flush the TextDecoder after the loop so a trailing multi-byte
//    sequence is not silently dropped.
async function fetchWithProgress(url, progress) {
  const response = await fetch(url);
  // Number(null) is 0 when the header is absent, so progress is simply
  // never reported in that case.
  const length = Number(response.headers.get('Content-Length'));
  const decoder = new TextDecoder();
  const reader = response.body.getReader();
  let body = '';
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    // stream: true keeps multi-byte sequences split across chunks intact.
    body += decoder.decode(value, { stream: true });
    if (length) {
      // NOTE(review): compares decoded string length against byte length,
      // as the original did — approximate for non-ASCII bodies.
      progress(body.length / length);
    }
  }
  body += decoder.decode(); // flush any buffered partial sequence
  return body;
}
// usage: wire download progress into a <progress> element, then parse the JSON.
const progressEl = document.querySelector('.progress');
progressEl.max = 1;
const onProgress = (fraction) => {
  progressEl.value = fraction;
};
fetchWithProgress('/blah.json', onProgress)
  .then(JSON.parse)
  .then(doStuff);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Based on https://github.com/whatwg/streams/blob/master/Examples.md#getting-the-next-piece-of-available-data
const EOF = Symbol("ReadableStream getNext EOF");

// Waits until the (draft-spec) stream has settled into a state, then returns
// the next available chunk, or the EOF sentinel once the stream has closed.
// If the stream is "errored", stream.read() throws, rejecting the promise.
async function getNext(stream) {
  await stream.ready;
  return stream.state === "closed" ? EOF : stream.read();
}
// v2: same contract as v1 — resolves with the decoded body text, reporting
// progress fractions of Content-Length — but pulls chunks through the
// getNext()/EOF helper defined earlier in this file.
//
// Fixes from the original:
//  - `async fetchWithProgress(...)` is object-method shorthand and is a
//    syntax error at top level; made it a function declaration.
//  - `(const chunk = ...)` inside a while-condition is a SyntaxError;
//    declarations cannot appear in expression position.
//  - Flush the TextDecoder after the loop so a trailing multi-byte
//    sequence is not dropped.
//
// NOTE(review): getNext assumes the draft response.body.state/.ready API —
// confirm the target environment actually exposes it.
async function fetchWithProgress(url, progress) {
  const response = await fetch(url);
  const length = Number(response.headers.get('Content-Length'));
  const decoder = new TextDecoder();
  let body = '';
  let chunk;
  while ((chunk = await getNext(response.body)) !== EOF) {
    // stream: true keeps multi-byte sequences split across chunks intact.
    body += decoder.decode(chunk, { stream: true });
    if (length) {
      progress(body.length / length);
    }
  }
  body += decoder.decode(); // flush any buffered partial sequence
  return body;
}
// usage: reflect download progress in a <progress> element and hand the
// completed JSON text off for parsing.
const progressEl = document.querySelector('.progress');
progressEl.max = 1;
fetchWithProgress('/blah.json', (fraction) => {
  progressEl.value = fraction;
})
  .then(JSON.parse)
  .then(doStuff);
Neither is very efficient compared to using ReadableByteStream's readInto
method (which would allow you to pre-allocate an ArrayBuffer up front and read data into it, instead of having the UA allocate new ones for each chunk). I'm unclear whether that'll be shipping in M43 though.
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
v2.js fires events more often than v1.js (and is a bit less efficient, since if multiple chunks are available at once it will still delay a microtask in between each of them).
Also I think all of these make the mistake of assuming the stream chunks are strings, whereas they're actually ArrayBuffers. Fixed with recent edits.