@heri16 · Last active October 22, 2020
Browser Download Multiple Files into ZIP with S3 GetObject (Pure Client-side)
<!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>Saving multiple files as zip</title>
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.773.0.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/amazon-cognito-identity-js@4.4.0/dist/amazon-cognito-identity.min.js"></script>
<script src="https://gist.githubusercontent.com/heri16/d4e325a13c3e57f7e24ada1f7c459843/raw/events.js"></script>
<script src="https://cdn.jsdelivr.net/npm/web-streams-polyfill@2.0.2/dist/ponyfill.min.js"></script>
<script>
// Helper function for browsers with no support for ReadableStream.pipeTo() (e.g. Firefox)
function pipeTo(readable, writable) {
try {
if (readable.pipeTo) return readable.pipeTo(writable);
} catch (_) {}
var reader = readable.getReader();
var writer = writable.getWriter();
var pump = function() {
return reader.read()
.then(function(res) {
return res.done ? writer.close() : writer.write(res.value).then(pump);
});
};
return pump();
}
// Helper function for browsers with no support for ReadableStream.pipeThrough() (e.g. Firefox)
function pipeThrough(readable, transform) {
try {
if (readable.pipeThrough && typeof TransformStream !== 'undefined') return readable.pipeThrough(transform);
} catch (_) {}
var reader = readable.getReader();
var writer = transform.writable.getWriter();
var pump = function() {
return reader.read()
.then(function(res) {
return res.done ? writer.close() : writer.write(res.value).then(pump);
});
};
pump();
return transform.readable;
}
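// Both helpers mirror the native stream methods, e.g. (hypothetical sink/transform names):
//   pipeTo(response.body, streamSaver.createWriteStream('file.bin'));
//   var counted = pipeThrough(response.body, new TransformStream({ transform: countBytes }));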
</script>
<script>
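// patchRequest() swaps the SDK's built-in AWS.EventListeners.Core.SEND listener for a
// variant that understands the fetch()-based client defined below (streamsApiVersion 3):
// it maps the FetchClient emitter's 'headers', data and 'end' events back onto the usual
// httpHeaders/httpData/httpDone request events, forwards send/receive progress, and
// re-signs the request if the signature is more than 10 minutes old.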
function patchRequest(request) {
request.removeListener('send', AWS.EventListeners.Core.SEND);
request.onAsync('send', function SEND(resp, done) {
resp.httpResponse._abortCallback = done;
resp.error = null;
resp.data = null;
function callback(httpResp) {
resp.httpResponse.stream = httpResp;
var stream = resp.request.httpRequest.stream;
var service = resp.request.service;
var api = service.api;
var operationName = resp.request.operation;
var operation = api.operations[operationName] || {};
httpResp.on('headers', function onHeaders(statusCode, headers, statusMessage) {
resp.request.emit(
'httpHeaders',
[statusCode, headers, resp, statusMessage]
);
if (!resp.httpResponse.streaming) {
if (AWS.HttpClient.streamsApiVersion >= 2) { // streams2 API check
// if we detect event streams, we're going to have to
// return the stream immediately
if (operation.hasEventOutput && service.successfulResponse(resp)) {
// skip reading the IncomingStream
resp.request.emit('httpDone');
done();
return;
}
if (AWS.HttpClient.streamsApiVersion === 3) {
var reader = resp.request.httpRequest.responseStream.getReader();
var pump = function() {
return reader.read().then(function(res) {
if (res.done) {
httpResp.emit('end');
} else {
resp.request.emit('httpData', [res.value, resp]);
pump();
}
});
};
pump();
} else {
httpResp.on('readable', function onReadable() {
var data = httpResp.read();
if (data !== null) {
resp.request.emit('httpData', [data, resp]);
}
});
}
} else { // legacy streams API
httpResp.on('data', function onData(data) {
resp.request.emit('httpData', [data, resp]);
});
}
}
});
httpResp.on('end', function onEnd() {
if (!stream || !stream.didCallback) {
if (AWS.HttpClient.streamsApiVersion >= 2 && (operation.hasEventOutput && service.successfulResponse(resp))) {
// don't concatenate response chunks when streaming event stream data and the response is successful
return;
}
resp.request.emit('httpDone');
done();
}
});
}
function progress(httpResp) {
httpResp.on('sendProgress', function onSendProgress(value) {
resp.request.emit('httpUploadProgress', [value, resp]);
});
httpResp.on('receiveProgress', function onReceiveProgress(value) {
resp.request.emit('httpDownloadProgress', [value, resp]);
});
}
function error(err) {
if (err.code !== 'RequestAbortedError') {
var errCode = err.code === 'TimeoutError' ? err.code : 'NetworkingError';
err = AWS.util.error(err, {
code: errCode,
region: resp.request.httpRequest.region,
hostname: resp.request.httpRequest.endpoint.hostname,
retryable: true
});
}
resp.error = err;
resp.request.emit('httpError', [resp.error, resp], function() {
done();
});
}
function executeSend() {
var http = AWS.HttpClient.getInstance();
var httpOptions = resp.request.service.config.httpOptions || {};
try {
var stream = http.handleRequest(resp.request.httpRequest, httpOptions,
callback, error);
progress(stream);
} catch (err) {
error(err);
}
}
var timeDiff = (resp.request.service.getSkewCorrectedDate() - this.signedAt) / 1000;
if (timeDiff >= 60 * 10) { // if we signed 10min ago, re-sign
this.emit('sign', [this], function(err) {
if (err) done(err);
else executeSend();
});
} else {
executeSend();
}
});
return request;
}
</script>
<script>
var global = (typeof window !== 'undefined' ? window : self);
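// AWS.FetchClient is a drop-in replacement for the SDK's XHR-based browser HttpClient.
// It sends the request with fetch(), exposes the response body as a WHATWG ReadableStream
// on httpRequest.responseStream, and pipes it through a TransformStream that emits
// 'receiveProgress' events so httpDownloadProgress keeps working for streamed bodies.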
AWS.FetchClient = AWS.util.inherit({
handleRequest: function handleRequest(httpRequest, httpOptions, callback, errCallback) {
var self = this;
var endpoint = httpRequest.endpoint;
if (!httpOptions) httpOptions = {};
var emitter = new EventEmitter(); // https://www.npmjs.com/package/events
// Construct href
var href = endpoint.protocol + '//' + endpoint.hostname;
if (endpoint.port !== 80 && endpoint.port !== 443) {
href += ':' + endpoint.port;
}
href += httpRequest.path;
// Construct headers
var headers = new Headers();
AWS.util.each(httpRequest.headers, function (key, value) {
// See: https://fetch.spec.whatwg.org/#forbidden-header-name
if (key !== 'Content-Length' && key !== 'Expect' && key !== 'Host') {
headers.set(key, value);
}
});
// Send cookies?
var credentials = 'omit';
if (httpOptions.xhrWithCredentials) {
credentials = 'include';
}
// Construct request
// See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
var req = {
method: httpRequest.method,
headers: headers,
credentials: credentials
};
if (httpRequest.body) req.body = httpRequest.body;
var request = new Request(href, req);
// Send request
fetch(request).then(function(response) {
if (!response.ok) throw Error(response.statusText);
return response;
}).then(function(response) {
emitter.statusCode = response.status;
emitter.headers = self.parseHeaders(response.headers);
emitter.abort = function() { response.body.cancel(); };
httpRequest.responseStream = self.readBody(emitter, response); // attach responseStream to httpRequest
httpRequest.stream = emitter; // attach emitter to httpRequest
emitter.didCallback = false; // Required for httpDone event
emitter.on('error', function() {
if (emitter.didCallback) return; emitter.didCallback = true;
errCallback.apply(emitter, arguments);
});
// See: https://github.com/aws/aws-sdk-js/blob/a006fee6a6bb67a98070bc3b6b8845cbb5dc3a5a/lib/event_listeners.js#L275
if (emitter.didCallback) return;
callback(emitter);
emitter.emit(
'headers',
emitter.statusCode,
emitter.headers,
response.statusText
);
}, function(err) {
if (emitter.didCallback) return; emitter.didCallback = true;
errCallback(AWS.util.error(new Error('Network Failure'), {
code: 'NetworkingError'
}));
}).catch(function(err) {
if (emitter.didCallback) return; emitter.didCallback = true;
errCallback(AWS.util.error(err, {
code: 'BadReplyError'
}));
});
// See: https://github.com/aws/aws-sdk-js/blob/a006fee6a6bb67a98070bc3b6b8845cbb5dc3a5a/lib/event_listeners.js#L348
return emitter;
},
readBody: function readBody(emitter, response) {
var body = response.body;
var totalBytes = parseInt(response.headers.get('Content-Length'), 10);
// For progress support of streaming content -
// pipe the data through a transform stream to emit 'sendProgress' events
var progressStream = this.progressStream(emitter, totalBytes);
return progressStream ? pipeThrough(body, progressStream) : body;
},
progressStream: function progressStream(emitter, totalBytes) {
// Use ponyfill: https://github.com/MattiasBuelens/web-streams-polyfill/blob/master/src/lib/transform-stream.ts
var TransformStream = global.TransformStream || (global.WebStreamsPolyfill && global.WebStreamsPolyfill.TransformStream);
if (typeof TransformStream === 'undefined') {
return;
}
var loadedBytes = 0;
var transform = function(chunk, controller, callback) {
if (chunk) {
loadedBytes += chunk.length;
emitter.emit('receiveProgress', {
loaded: loadedBytes,
total: totalBytes
});
}
if (typeof callback === 'function') {
callback(null, chunk);
} else {
controller.enqueue(chunk);
}
};
return new TransformStream({ transform: transform });
},
parseHeaders: function parseHeaders(rawHeaders) {
var headers = {};
rawHeaders.forEach(function(val,key){
headers[key] = val;
});
return headers;
},
emitter: null
});
AWS.HttpClient.prototype = AWS.FetchClient.prototype;
AWS.HttpClient.streamsApiVersion = 3;
// Monkey-patch AWS.Request#createReadStream: with streamsApiVersion 3 it returns the
// readable side of an identity TransformStream that the fetch() response body is piped into.
AWS.Request.prototype.createReadStream = function createReadStream() {
var streams = AWS.util.stream;
var req = this;
var stream = null;
if (AWS.HttpClient.streamsApiVersion === 3) {
// Use ponyfill: https://github.com/MattiasBuelens/web-streams-polyfill/blob/master/src/lib/transform-stream.ts
var TransformStream = global.TransformStream || (global.WebStreamsPolyfill && global.WebStreamsPolyfill.TransformStream);
if (typeof TransformStream !== 'undefined') {
// If no transform() method is supplied, the identity transform is used, which enqueues chunks unchanged from the writable side to the readable side.
// See: https://streams.spec.whatwg.org/#dom-transformer-transform
stream = new TransformStream();
}
var cb = function() { req.send(); };
if (global.setImmediate) { global.setImmediate(cb); } else { Promise.resolve().then(cb); }
} else if (AWS.HttpClient.streamsApiVersion === 2) {
stream = new streams.PassThrough();
process.nextTick(function() { req.send(); });
} else {
stream = new streams.Stream();
stream.readable = true;
stream.sent = false;
stream.on('newListener', function(event) {
if (!stream.sent && event === 'data') {
stream.sent = true;
process.nextTick(function() { req.send(); });
}
});
}
this.on('error', function(err) {
if (AWS.HttpClient.streamsApiVersion === 3) {
if (stream && stream.writable) stream.writable.abort();
} else {
stream.emit('error', err);
}
});
this.on('httpHeaders', function streamHeaders(statusCode, headers, resp) {
if (statusCode < 300) {
req.removeListener('httpData', AWS.EventListeners.Core.HTTP_DATA);
req.removeListener('httpError', AWS.EventListeners.Core.HTTP_ERROR);
req.on('httpError', function streamHttpError(error) {
resp.error = error;
resp.error.retryable = false;
});
var shouldCheckContentLength = false;
var expectedLen;
if (req.httpRequest.method !== 'HEAD') {
expectedLen = parseInt(headers['content-length'], 10);
}
if (expectedLen !== undefined && !isNaN(expectedLen) && expectedLen >= 0) {
shouldCheckContentLength = true;
var receivedLen = 0;
}
var checkContentLengthAndEmit = function checkContentLengthAndEmit(controller) {
// Some CORS config is missing <ExposeHeader>Content-Encoding</ExposeHeader>
if (shouldCheckContentLength && receivedLen < expectedLen) {
var err = new Error('Stream content length mismatch. Received ' +
receivedLen + ' of ' + expectedLen + ' bytes.');
if (controller) {
controller.error(err);
} else {
stream.emit('error', AWS.util.error(
err,
{ code: 'StreamContentLengthMismatch' }
));
}
} else if (AWS.HttpClient.streamsApiVersion === 3) {
// Do nothing
} else if (AWS.HttpClient.streamsApiVersion === 2) {
stream.end();
} else {
stream.emit('end');
}
};
var httpStream = resp.httpResponse.createUnbufferedStream();
if (AWS.HttpClient.streamsApiVersion === 3) {
var readableStream = resp.request.httpRequest.responseStream;
if (shouldCheckContentLength && typeof TransformStream !== 'undefined') {
var transform = function(chunk, controller) {
if (chunk && chunk.length) {
receivedLen += chunk.length;
}
controller.enqueue(chunk);
};
var flush = function(controller) {
checkContentLengthAndEmit(controller);
}
var lengthAccumulator = new TransformStream({ transform: transform, flush: flush });
readableStream = pipeThrough(readableStream, lengthAccumulator);
}
if (stream && stream.writable) {
const promise = pipeTo(readableStream, stream.writable);
promise.catch(function(err) {
shouldCheckContentLength = false;
});
}
} else if (AWS.HttpClient.streamsApiVersion === 2) {
if (shouldCheckContentLength) {
var lengthAccumulator = new streams.PassThrough();
lengthAccumulator._write = function(chunk) {
if (chunk && chunk.length) {
receivedLen += chunk.length;
}
return streams.PassThrough.prototype._write.apply(this, arguments);
};
lengthAccumulator.on('end', checkContentLengthAndEmit);
stream.on('error', function(err) {
shouldCheckContentLength = false;
httpStream.unpipe(lengthAccumulator);
lengthAccumulator.emit('end');
lengthAccumulator.end();
});
httpStream.pipe(lengthAccumulator).pipe(stream, { end: false });
} else {
httpStream.pipe(stream);
}
} else {
if (shouldCheckContentLength) {
httpStream.on('data', function(arg) {
if (arg && arg.length) {
receivedLen += arg.length;
}
});
}
httpStream.on('data', function(arg) {
stream.emit('data', arg);
});
httpStream.on('end', checkContentLengthAndEmit);
}
httpStream.on('error', function(err) {
shouldCheckContentLength = false;
if (AWS.HttpClient.streamsApiVersion === 3) {
if (stream && stream.writable) stream.writable.abort();
} else {
stream.emit('error', err);
}
});
}
});
return AWS.HttpClient.streamsApiVersion === 3 ? (stream && stream.readable) : stream;
};
</script>
</head>
<body>
<button id="auth-user">Login / Sign-in</button>
<button id="fetch-list">Fetch File List</button>
<button id="stream-many">Stream Many & Zip</button>
<div id="message"></div>
<div id="results"></div>
<!--
includes a blob.stream() polyfill,
and also makes the File constructor work in some browsers that don't support it
-->
<script src="https://cdn.jsdelivr.net/gh/eligrey/Blob.js/Blob.js"></script>
<script src="https://cdn.jsdelivr.net/npm/web-streams-polyfill@2.0.2/dist/ponyfill.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.5/StreamSaver.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.5/examples/zip-stream.js"></script>
<script>
let userId;
let prefix;
const roleArn = 'YOUR_ROLE_ARN';
const bucketName = 'YOUR_BUCKET_NAME'; // Bucket name must not contain dot/period
const bucketEndpoint = 'https://xxxx.cloudfront.net';
AWS.config.region = 'YOUR_BUCKET_REGION';
// AWS.config.accessKeyId = 'YOUR_ACCESS_KEY_ID';
// AWS.config.secretAccessKey = '';
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#constructor-property
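// s3BucketEndpoint: true tells the SDK that `endpoint` already addresses this bucket
// (here a CloudFront distribution in front of it), so the bucket name is not prepended
// to the hostname; correctClockSkew retries with a corrected clock if signatures fail
// because the local clock has drifted.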
const s3 = new AWS.S3({
params: { Bucket: bucketName },
endpoint: bucketEndpoint,
s3BucketEndpoint: true,
computeChecksums: true,
correctClockSkew: true,
});
const $ = (selector) => document.querySelector(selector);
const poolData = {
UserPoolId: 'YOUR_COGNITO_USER_POOL_ID', // Your user pool id here
ClientId: 'YOUR_COGNITO_CLIENT_ID', // Your client id here
};
const userPool = new AmazonCognitoIdentity.CognitoUserPool(poolData);
$('#auth-user').addEventListener('click', () => {
// Auth.currentSession().then((session) => {
// // Access Bucket with Web Identity (this provides simpler Cloudtrail logs)
// // See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/WebIdentityCredentials.html
// // See: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_oidc_manual.html
// s3.config.credentials = new AWS.WebIdentityCredentials({
// RoleArn: roleArn,
// WebIdentityToken: session.getIdToken().getJwtToken()
// });
// userId = session.getIdToken().decodePayload().sub;
// prefix = 'user-' + userId;
// }, (err) => {
// alert(err.message || JSON.stringify(err));
// });
const authenticationDetails = new AmazonCognitoIdentity.AuthenticationDetails({
Username: 'YOUR_USERNAME',
Password: 'YOUR_PASSWORD',
});
const cognitoUser = new AmazonCognitoIdentity.CognitoUser({
Username: authenticationDetails.getUsername(),
Pool: userPool,
});
cognitoUser.authenticateUser(authenticationDetails, {
onSuccess: (session) => {
// Access Bucket with Web Identity (this provides simpler Cloudtrail logs)
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/WebIdentityCredentials.html
// See: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_oidc_manual.html
s3.config.credentials = new AWS.WebIdentityCredentials({
RoleArn: roleArn,
WebIdentityToken: session.getIdToken().getJwtToken()
});
userId = session.getIdToken().decodePayload().sub;
prefix = 'user-' + userId;
},
onFailure: (err) => {
alert(err.message || JSON.stringify(err));
}
});
// FB.login((response) => {
// s3.config.credentials = new AWS.WebIdentityCredentials({
// ProviderId: 'graph.facebook.com',
// RoleArn: roleArn,
// WebIdentityToken: response.authResponse.accessToken
// });
// userId = response.authResponse.userID;
// prefix = 'facebook-' + userId;
// });
});
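// Downloads a single object: a HEAD request confirms size and content type, then the GET
// response is exposed as a ReadableStream via the patched createReadStream() and piped
// straight into StreamSaver's WritableStream, so the file is never buffered in memory.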
const streamOneHandler = (event) => {
const key = event.target.dataset.key;
console.log(key);
const regexpExt = /\.(?:gz|br)$/;
const fileName = key.split("/").pop().replace(regexpExt, "");
const head = patchRequest(s3.headObject({ Key: key }));
head.on("httpDone", () => console.log("httpDone"));
head.on("success", () => console.log("success"));
head.on("error", console.error);
head.on("complete", (resp) => console.log("complete"));
head.promise().then(({ ContentLength, ContentType }) => {
const yes = confirm(
`Download ${fileName}? (${ContentType}: ${formatBytes(ContentLength)})`
);
if (!yes) return;
const request = patchRequest(s3.getObject({ Key: key }));
request.on("httpDownloadProgress", ({ loaded, total }) => {
message.textContent = `${fileName} - ${formatBytes(loaded)} of ${formatBytes(total)}`;
});
const responsePromise = new Promise((resolve, reject) => {
// See: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/using-a-response-event-handler.html
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#httpHeaders-event
request
.on("httpHeaders", (statusCode, headers, response, statusMessage) => {
if (statusCode < 200 || statusCode > 299) {
reject(new Error(`${statusCode}: ${statusMessage}`));
} else {
resolve({ statusCode, headers, response, statusMessage });
}
})
.on("httpError", (error, response) => {
console.error(error);
reject(error);
});
});
const stream = request.createReadStream();
// Ensure file is accessible
responsePromise
.then(({ statusCode, statusMessage, headers, response }) => {
console.log(statusCode);
console.log(statusMessage);
console.log(headers);
// See: https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html#API_GetObject_ResponseSyntax
const contentType = headers["content-type"]; // image/jpeg
const size = headers["content-length"]
? parseInt(headers["content-length"], 10)
: undefined;
console.log(contentType);
console.log("Size: " + size);
const fileStream = streamSaver.createWriteStream(fileName, { size });
return pipeTo(stream, fileStream);
})
.then(() => console.log("done writing"));
}, console.error);
};
const file1 = {
name: "example/file1.txt",
stream: () => new Blob(["support blobs too"]).stream()
};
// Note: Windows gets confused when file and folder names start with "/"
const fileMap = new Map([
["file1", file1],
["file2", new File(["file1 content"], "example/file2.txt")]
]);
$("#fetch-list").addEventListener("click", () => {
fileMap.clear();
results.innerHTML = "";
// https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#listObjects-property
const request = patchRequest(s3.listObjects({ Prefix: prefix }));
request.promise().then(
(data) => {
let div = document.createElement("div");
const regexpExt = /\.(?:gz|br)$/;
data.Contents.forEach(function (obj) {
const found = obj.Key.indexOf("/");
const isFile = found !== -1 && found !== obj.Key.length - 1;
if (isFile) {
fileMap.set(obj.Key.replace(regexpExt, ""), obj.Key);
}
const a = document.createElement("a");
a.href = "#";
a.className = "stream-one";
a.dataset.key = obj.Key;
a.textContent = obj.Key;
a.addEventListener("click", streamOneHandler);
div.append(a);
div.append(document.createElement("br"));
});
results.append(div);
},
(err) => {
results.innerHTML = "ERROR: " + err;
}
);
});
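// Streams every entry in fileMap into a single zip: each S3 object is fetched with
// getObject, wrapped as a file-like { name, stream() } object, enqueued into the ZIP
// stream from zip-stream.js, and the resulting readable is piped to StreamSaver.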
$("#stream-many").addEventListener("click", () => {
const fileEntries = fileMap.entries();
const fileStream = streamSaver.createWriteStream("download.zip");
// In an ideal world I would just have used a TransformStream,
// where you would get `{ readable, writable } = new TransformStream()`,
// `readable` would be piped to StreamSaver, and the writer would accept
// file-like objects, but that would make it dependent on TransformStream and WritableStream.
// So ZIP-Stream was built similar to a ReadableStream, except that you enqueue
// file-like objects, meaning each entry should have at the very least { name, stream() }.
//
// It supports pull() too, which gets called when it asks for more files.
//
// NOTE: this zip library can't generate zips over 4 GB and has no compression;
// it was built solely for the purpose of saving multiple files in the browser.
const readableZipStream = new ZIP({
start(ctrl) {
// Insert UEN as an empty folder
ctrl.enqueue({ name: "info/SD21342412", directory: true });
// Insert company name as empty folder
ctrl.enqueue({ name: "info/Jardines Pte Ltd", directory: true });
},
async pull(ctrl) {
// Gets executed every time the zip stream asks for more data
const it = fileEntries.next();
if (!it.done) {
const [name, value] = it.value;
// See: https://javascript.info/instanceof
if (value.name && typeof value.stream === "function") {
// enqueue file-like object
ctrl.enqueue(value);
return;
}
// Create a request without sending it
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getObject-property
const request = patchRequest(s3.getObject({ Key: value }));
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#sign-event
//request.onAsync('sign', signCloudfront)
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#httpDownloadProgress-event
request.on("httpDownloadProgress", function ({ loaded, total }) {
// Display progress
message.textContent = `${name} - ${formatBytes(loaded)} of ${formatBytes(total)}`;
});
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#error-event
request.on("error", (error, response) => {
// Display request error
});
const responsePromise = new Promise((resolve, reject) => {
// See: https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/using-a-response-event-handler.html
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#httpHeaders-event
request
.on(
"httpHeaders",
(statusCode, headers, response, statusMessage) => {
if (statusCode < 200 || statusCode > 299) {
reject(new Error(`${statusCode}: ${statusMessage}`));
} else {
resolve({ statusCode, headers, response, statusMessage });
}
}
)
.on("httpError", (error, response) => {
reject(error);
});
});
// Sends the request and converts the request object into a readable stream that can be read from or piped into a writable stream.
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Request.html#createReadStream-property
const stream = request.createReadStream();
// Ensure file is accessible
const {
statusCode,
statusMessage,
headers,
response
} = await responsePromise;
// See: https://docs.aws.amazon.com/AmazonS3/latest/API/API_GetObject.html#API_GetObject_ResponseSyntax
//const contentType = headers["content-type"]; // image/jpeg
// enqueue file-like object
ctrl.enqueue({ name, stream: () => stream });
} else {
// Done adding all files; close the zip stream
ctrl.close();
}
}
});
const promise = pipeTo(readableZipStream, fileStream);
promise.then(() => {
console.log("done writing");
message.textContent = "Download complete";
}).catch(console.error);
});
function signCloudfront(req, done) {
// This listener is called after the AWS Signature Version 4 headers have been added to req.httpRequest.headers,
// but before the request is sent.
const service = req.service
// REST or GRAPHQL Query to remote server which will call AWS.CloudFront.Signer.getSignedUrl()
// Remote server stores the Cloudfront privateKey
// See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/CloudFront/Signer.html#getSignedUrl-property
// See: https://javascript.info/fetch
const requestData = {
url: `${prefix}/*`,
}
const request = new Request('https://api.lawkin.com/cloudfront/sign', {
method: 'POST',
cache: 'no-store',
headers: {
'Content-Type': 'application/json;charset=utf-8',
'Authorization': 'Bearer xxxxxxxxx',
},
body: JSON.stringify(requestData)
})
fetch(request).then((response) => {
if (!response.ok) { throw new Error(response.statusText) }
return response.json()
}).then((data) => {
// Append search to href
req.httpRequest.path += new URL(data.signedUrl).search
}).then(() => {
done()
}, (err) => {
req.response.error = err
done()
})
}
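// To append a CloudFront signed-URL query string to a request, attach this listener
// before the request is sent, e.g. request.onAsync('sign', signCloudfront)
// (shown commented out in the stream-many handler above).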
function formatBytes(bytes, precision) {
if (isNaN(parseFloat(bytes)) || !isFinite(bytes)) return '-';
if (bytes === 0) return '0 bytes';
if (typeof precision === 'undefined') precision = 1;
var units = ['bytes', 'kiB', 'MiB', 'GiB', 'TiB', 'PiB'],
number = Math.floor(Math.log(bytes) / Math.log(1024));
return (bytes / Math.pow(1024, Math.floor(number))).toFixed(precision) + ' ' + units[number];
}
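// e.g. formatBytes(1536) === '1.5 kiB'; formatBytes(10485760, 0) === '10 MiB'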
</script>
</body>
</html>
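<!-- Loads the 'events' npm package in the browser by faking a CommonJS module object,
so the EventEmitter used by AWS.FetchClient above is available as a global. -->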
<script>var module = {};</script>
<script src="https://cdn.jsdelivr.net/npm/events@3.2.0/events.js"></script>
<script>var EventEmitter = module.exports; delete module;</script>
<?xml version="1.0" encoding="UTF-8"?>
<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<CORSRule>
<AllowedOrigin>https://*</AllowedOrigin>
<AllowedMethod>HEAD</AllowedMethod>
<AllowedMethod>GET</AllowedMethod>
<AllowedMethod>PUT</AllowedMethod>
<AllowedMethod>POST</AllowedMethod>
<AllowedMethod>DELETE</AllowedMethod>
<AllowedHeader>*</AllowedHeader>
<MaxAgeSeconds>3000</MaxAgeSeconds>
<!-- https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers -->
<!-- https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html -->
<!-- https://github.com/w3c/ServiceWorker/issues/339#issuecomment-372304884 -->
<ExposeHeader>Content-Range</ExposeHeader>
<ExposeHeader>Content-Disposition</ExposeHeader>
<ExposeHeader>Content-Location</ExposeHeader>
<ExposeHeader>Content-Encoding</ExposeHeader>
<ExposeHeader>Content-Length</ExposeHeader>
<ExposeHeader>Date</ExposeHeader>
<ExposeHeader>ETag</ExposeHeader>
<ExposeHeader>Server</ExposeHeader>
<ExposeHeader>x-amz-delete-marker</ExposeHeader>
<ExposeHeader>x-amz-id-2</ExposeHeader>
<ExposeHeader>x-amz-request-id</ExposeHeader>
<ExposeHeader>x-amz-version-id</ExposeHeader>
<ExposeHeader>x-amz-server-side-encryption</ExposeHeader>
</CORSRule>
</CORSConfiguration>
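The CORS rules above can also be applied programmatically. A minimal sketch using the aws-sdk v2 S3 client (hypothetical helper name; assumes the caller's credentials are allowed to call s3:PutBucketCORS on the bucket):

// Hypothetical helper mirroring the XML CORS file above via S3.putBucketCors()
const applyCorsConfig = (s3Client, bucket) => s3Client.putBucketCors({
  Bucket: bucket,
  CORSConfiguration: {
    CORSRules: [{
      AllowedOrigins: ['https://*'],
      AllowedMethods: ['HEAD', 'GET', 'PUT', 'POST', 'DELETE'],
      AllowedHeaders: ['*'],
      MaxAgeSeconds: 3000,
      ExposeHeaders: [
        'Content-Range', 'Content-Disposition', 'Content-Location', 'Content-Encoding',
        'Content-Length', 'Date', 'ETag', 'Server', 'x-amz-delete-marker', 'x-amz-id-2',
        'x-amz-request-id', 'x-amz-version-id', 'x-amz-server-side-encryption'
      ]
    }]
  }
}).promise();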
heri16 commented Oct 21, 2020

Support for Safari has been fixed. The problem was due to Safari's unique implementation of ReadableStream.pipeThrough().
