Skip to content

Instantly share code, notes, and snippets.

@heri16
Last active November 18, 2020 05:28
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save heri16/348064570f59d126c268c0e26e9866b0 to your computer and use it in GitHub Desktop.
Save heri16/348064570f59d126c268c0e26e9866b0 to your computer and use it in GitHub Desktop.
S3 Fetch Folder to Zip file with aws-sdk-js
<?xml version="1.0" encoding="UTF-8"?>
<CORSConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<CORSRule>
<AllowedOrigin>https://*</AllowedOrigin>
<AllowedMethod>HEAD</AllowedMethod>
<AllowedMethod>GET</AllowedMethod>
<AllowedMethod>PUT</AllowedMethod>
<AllowedMethod>POST</AllowedMethod>
<AllowedMethod>DELETE</AllowedMethod>
<AllowedHeader>*</AllowedHeader>
<MaxAgeSeconds>3000</MaxAgeSeconds>
<!-- https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Expose-Headers -->
<!-- https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html -->
<!-- https://github.com/w3c/ServiceWorker/issues/339#issuecomment-372304884 -->
<ExposeHeader>Content-Range</ExposeHeader>
<ExposeHeader>Content-Disposition</ExposeHeader>
<ExposeHeader>Content-Location</ExposeHeader>
<ExposeHeader>Content-Encoding</ExposeHeader>
<ExposeHeader>Content-Length</ExposeHeader>
<ExposeHeader>Date</ExposeHeader>
<ExposeHeader>ETag</ExposeHeader>
<ExposeHeader>Server</ExposeHeader>
<ExposeHeader>x-amz-delete-marker</ExposeHeader>
<ExposeHeader>x-amz-id-2</ExposeHeader>
<ExposeHeader>x-amz-request-id</ExposeHeader>
<ExposeHeader>x-amz-version-id</ExposeHeader>
<ExposeHeader>x-amz-server-side-encryption</ExposeHeader>
</CORSRule>
</CORSConfiguration>
<script src="https://sdk.amazonaws.com/js/aws-sdk-2.773.0.min.js"></script>
<!--
includes a blob.stream() polyfill,
while also making the File constructor work in some browsers that don't support it
-->
<script src="https://cdn.jsdelivr.net/gh/eligrey/Blob.js/Blob.js"></script>
<script src="https://cdn.jsdelivr.net/npm/web-streams-polyfill@2.0.2/dist/ponyfill.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.5/StreamSaver.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/streamsaver@2.0.5/examples/zip-stream.js"></script>
<h1>👋 Hello AWS-SDK-JS!</h1>
<button id="fetch-list">Fetch File List</button>
<button id="stream-all">Download All & Zip</button>
<pre id="message"></pre>
<div id="results"></div>
// Page state (expected to be assigned elsewhere, e.g. after login).
let userId;
let prefix;
const bucketName = "lawkin-userfiles-dev";
AWS.config.region = "ap-southeast-1";
// SECURITY: never ship long-lived IAM credentials in browser code — anyone
// who loads this page can read them. Rotate these keys and switch to
// temporary credentials (Cognito Identity Pools / STS) instead.
// Bug fix: the access key ID previously contained a leading space, which
// makes every signed request fail (InvalidAccessKeyId).
AWS.config.accessKeyId = "AKIASPBJHQTBMIGWCN7P";
AWS.config.secretAccessKey = "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";
// Optional CloudFront fronting (currently disabled):
// const cf = {
//   hostname: "xxxx",
//   originHostname: "xxxx"
// }
const s3 = new AWS.S3({
  params: { Bucket: bucketName }, // default Bucket for every operation
  // endpoint: "https://" + cf.originHostname,
  // s3BucketEndpoint: true,
  s3DisableBodySigning: false, // keep request-body signing enabled
  computeChecksums: true,
  correctClockSkew: true, // let the SDK compensate for local clock skew
});
// Tiny shorthand for document.querySelector.
const $ = function (selector) {
  return document.querySelector(selector);
};
// Click handler for a single listed object: HEAD it to confirm access and
// size, ask the user, then stream the GET response straight to disk via
// StreamSaver with live progress in #message.
const streamOneHandler = (event) => {
  const key = event.target.dataset.key;
  // Drop a trailing .gz/.br extension so the saved name matches the
  // browser-decoded content.
  const regexpExt = /\.(?:gz|br)$/u;
  const filename = key.split('/').pop().replace(regexpExt, '');
  const head = s3.headObject({ Key: key });
  head.on('success', () => console.log(`File is accessible: ${key}`));
  head.promise().then(async ({ ContentLength, ContentType }) => {
    // Bug fix: the confirm/progress interpolations were garbled
    // ("$(unknown)"); interpolate the file name instead.
    const yes = confirm(
      `Download ${filename}? (${ContentType}: ${formatBytes(ContentLength)})`
    );
    if (!yes) throw new Error('Download Cancelled');
    // Build the signed request without sending it, then perform it with
    // fetch() so the body arrives as a ReadableStream.
    const httpRequest = await getHttpRequestOnly(s3.getObject({ Key: key }));
    // const cfSignedUrl = new URL(await remoteSigner.getSignedUrl({ url: httpRequest.path }).promise());
    // httpRequest.endpoint.hostname = cf.hostname;
    // httpRequest.path += cfSignedUrl.searchParams.toString();
    const response = await handleRequest(httpRequest);
    // See: https://developer.mozilla.org/en-US/docs/Web/API/Response
    if (!response.ok) throw new Error(`${response.status} ${response.statusText}`);
    const totalBytes = parseInt(response.headers.get('Content-Length'), 10);
    const progressStream = makeProgressStream(totalBytes, ({ loaded, total }) => {
      message.textContent = `${filename} - ${formatBytes(loaded)} of ${formatBytes(total)}`;
    });
    // Fall back to the raw body when no TransformStream implementation exists.
    const responseStream = progressStream ? pipeThrough(response.body, progressStream) : response.body;
    const fileStream = streamSaver.createWriteStream(filename, { size: totalBytes });
    return pipeTo(responseStream, fileStream);
  }).then(() => {
    message.textContent += '\n(Download complete)';
  }).catch((err) => {
    // Display request error
    message.textContent = String(err);
  });
};
// Demo entries available before a real S3 listing is fetched.
// Note: Windows gets confused when file & folder names start with "/".
const file1 = {
  name: "example/file1.txt",
  stream: () => new Blob(["support blobs too"]).stream(),
};
const fileMap = new Map();
fileMap.set("file1", file1);
fileMap.set("file2", new File(["file1 content"], "example/file2.txt"));
// "Fetch File List" button: list objects under `prefix`, rebuild fileMap,
// and render one clickable link per key into #results.
$("#fetch-list").addEventListener("click", () => {
  fileMap.clear();
  results.innerHTML = "";
  // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#listObjects-property
  const request = s3.listObjects({ Prefix: prefix });
  request.promise().then(
    (data) => {
      const div = document.createElement("div");
      const regexpExt = /\.(?:gz|br)$/u;
      data.Contents.forEach((obj) => {
        // Folder placeholder objects end with "/". Bug fix: the original
        // tested `indexOf("/") !== key.length - 1`, which looks at the
        // FIRST slash and misclassifies nested folder keys like "a/b/"
        // as files.
        const isFile = !obj.Key.endsWith("/");
        if (isFile) {
          const filename = obj.Key.replace(regexpExt, "");
          fileMap.set(filename, { name: filename, key: obj.Key });
        }
        const a = document.createElement("a");
        a.href = "#";
        a.className = "stream-one";
        a.dataset.key = obj.Key;
        a.textContent = obj.Key;
        a.addEventListener("click", streamOneHandler);
        div.append(a);
        div.append(document.createElement("br"));
      });
      results.append(div);
    },
    (err) => {
      results.innerHTML = "ERROR: " + err;
    }
  );
});
// "Download All & Zip" button: stream every entry in fileMap into a single
// zip written incrementally to disk via StreamSaver + zip-stream.js.
$("#stream-all").addEventListener("click", () => {
  const fileEntries = fileMap.entries();
  const fileStream = streamSaver.createWriteStream("download.zip");
  // In an ideal world we would just use a TransformStream, where
  // `{ readable, writable } = new TransformStream()`: `readable` piped to
  // StreamSaver, and the writer accepting file-like objects. But that would
  // depend on TransformStream/WritableStream support, so zip-stream.js
  // provides ZIP — similar to a ReadableStream, except you enqueue
  // file-like objects with at least { name, stream() }.
  //
  // It also supports pull(), called whenever it asks for more files.
  //
  // NOTE: this zip library cannot generate zips over 4 GB and does no
  // compression; it exists solely to save multiple files in the browser.
  const readableZipStream = new ZIP({
    start(ctrl) {
      // Insert UEN as an empty folder
      ctrl.enqueue({ name: "info/SD21342412", directory: true });
      // Insert company name as an empty folder
      ctrl.enqueue({ name: "info/Jardines Pte Ltd", directory: true });
    },
    async pull(ctrl) {
      // Called each time the zip stream asks for the next file.
      const it = fileEntries.next();
      if (it.done) {
        // Done adding all files.
        ctrl.close();
        return;
      }
      const [, value] = it.value;
      if (value.name && typeof value.stream === "function") {
        // Already a file-like object (e.g. the demo entries).
        // Bug fix: the original enqueued an undefined identifier `file`
        // (ReferenceError at runtime); enqueue the map value itself.
        ctrl.enqueue(value);
        return;
      }
      const filename = value.name;
      const key = value.key;
      // Create a request without sending it
      // See: https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#getObject-property
      const httpRequest = await getHttpRequestOnly(s3.getObject({ Key: key }));
      const response = await handleRequest(httpRequest);
      // See: https://developer.mozilla.org/en-US/docs/Web/API/Response
      if (!response.ok) throw new Error(`${response.status} ${response.statusText}`);
      // enqueue file-like object
      ctrl.enqueue({
        name: filename,
        stream: () => {
          const totalBytes = parseInt(response.headers.get('Content-Length'), 10);
          // Bug fix: the progress label interpolation was garbled
          // ("$(unknown)"); show the file name being downloaded.
          const progressStream = makeProgressStream(totalBytes, ({ loaded, total }) => {
            message.textContent = `${filename} - ${formatBytes(loaded)} of ${formatBytes(total)}`;
          });
          return progressStream ? pipeThrough(response.body, progressStream) : response.body;
        },
      });
    }
  });
  const promise = pipeTo(readableZipStream, fileStream);
  promise.then(() => {
    message.textContent = 'ZIP Download complete';
  }).catch((err) => {
    // Display request error
    message.textContent = String(err);
  });
});
/* ---------------------------- BEGIN: Helper functions ---------------------------- */
/* Extract the fully-signed AWS.HttpRequest from an unsent AWS.Request
 * without letting the SDK dispatch it. It removes the SDK's core SEND
 * listener and substitutes one that resolves with the signed low-level
 * request, then injects an abort error so the SDK state machine settles
 * without performing any network I/O.
 * @param {AWS.Request} request - an unsent SDK request, e.g. s3.getObject(...).
 * @returns {Promise<AWS.HttpRequest>} the signed low-level HTTP request.
 */
function getHttpRequestOnly(request) {
return new Promise((resolve, reject) => {
// Prevent the SDK from actually sending the request.
request.removeListener('send', AWS.EventListeners.Core.SEND);
request.onAsync('send', function send(resp, done) {
var ready = function() {
// Hand the signed httpRequest to the caller...
resolve(resp.request.httpRequest);
// ...then mark the SDK-side response as aborted so request.promise()
// settles cleanly instead of hanging.
resp.error = AWS.util.error(new Error('Request aborted by user'), {
code: 'RequestAbortedError', retryable: false
});
resp.request.emit('httpError', [resp.error, resp], function() {
done();
});
};
// Staleness check mirroring the SDK's own SEND listener: re-sign if the
// signature is ≥10 minutes old (presumably matching the signature
// validity window — TODO confirm against AWS.EventListeners.Core.SEND).
var timeDiff = (resp.request.service.getSkewCorrectedDate() - this.signedAt) / 1000;
if (timeDiff >= 60 * 10) { // if we signed 10min ago, re-sign
this.emit('sign', [this], function(err) {
if (err) done(err);
else ready();
});
} else {
ready();
}
// The injected abort error makes this promise reject, but by then the
// outer promise has already resolved, so `reject` is a no-op safety net
// for failures occurring before signing completes; successes are logged.
}).promise().then(console.debug, reject);
});
}
// Perform a signed AWS.HttpRequest with the Fetch API and return the
// fetch() promise, so the response body can be consumed as a stream.
function handleRequest(httpRequest, httpOptions) {
  const options = httpOptions || {};
  const endpoint = httpRequest.endpoint;

  // Rebuild the absolute URL from the endpoint parts; omit default ports.
  const portSuffix =
    endpoint.port === 80 || endpoint.port === 443 ? '' : ':' + endpoint.port;
  const href =
    endpoint.protocol + '//' + endpoint.hostname + portSuffix + httpRequest.path;

  // Copy the signed headers, skipping names the browser forbids setting.
  // See: https://fetch.spec.whatwg.org/#forbidden-header-name
  const headers = new Headers();
  objectEach(httpRequest.headers, function (key, value) {
    if (key === 'Content-Length' || key === 'Expect' || key === 'Host') return;
    headers.set(key, value);
  });

  // Construct request
  // See: https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
  const init = {
    method: httpRequest.method,
    headers: headers,
    // Send cookies only when explicitly requested.
    credentials: options.xhrWithCredentials ? 'include' : 'omit'
  };
  if (httpRequest.body) init.body = httpRequest.body;
  return fetch(new Request(href, init));
}
function makeProgressStream(totalBytes, callback) {
if (typeof TransformStream === 'undefined' && typeof WebStreamsPolyfill !== 'undefined') {
var TransformStream = WebStreamsPolyfill.TransformStream;
}
var start = function() { this.loadedBytes = 0; }
var transform = function(chunk, controller) {
if (chunk) {
this.loadedBytes += chunk.length;
callback({
loaded: this.loadedBytes,
total: totalBytes
});
}
controller.enqueue(chunk);
};
return new TransformStream({ start, transform });
}
// Pipe `readable` into `writable`, preferring the native pipeTo when the
// environment supports it, otherwise pumping chunks manually.
function pipeTo(readable, writable) {
  try {
    if (typeof WritableStream !== 'undefined' && readable.pipeTo) {
      return readable.pipeTo(writable);
    }
  } catch (_) {}
  const reader = readable.getReader();
  const writer = writable.getWriter();
  const pump = () =>
    reader.read().then((res) =>
      res.done ? writer.close() : writer.write(res.value).then(pump)
    );
  return pump();
}
// Route `readable` through a transform pair, preferring the native
// pipeThrough; the fallback pumps into transform.writable and hands back
// transform.readable immediately.
function pipeThrough(readable, transform) {
  try {
    if (typeof TransformStream !== 'undefined' && readable.pipeThrough) {
      return readable.pipeThrough(transform);
    }
  } catch (_) {}
  const reader = readable.getReader();
  const writer = transform.writable.getWriter();
  const pump = () =>
    reader.read().then((res) =>
      res.done ? writer.close() : writer.write(res.value).then(pump)
    );
  pump();
  return transform.readable;
}
// Invoke `iterFunction(key, value)` for each own enumerable property of
// `object`; stop early if the callback returns exactly `false`.
function objectEach(object, iterFunction) {
  for (const key of Object.keys(object)) {
    if (iterFunction.call(this, key, object[key]) === false) break;
  }
}
// Format a byte count as a human-readable binary-unit string,
// e.g. formatBytes(1536) -> "1.5 kiB". Returns '-' for non-numeric input.
// `precision` defaults to 1 decimal place.
function formatBytes(bytes, precision) {
  if (isNaN(parseFloat(bytes)) || !isFinite(bytes)) return '-';
  if (typeof precision === 'undefined') precision = 1;
  bytes = Number(bytes); // accept numeric strings like the original's division did
  var units = ['bytes', 'kiB', 'MiB', 'GiB', 'TiB', 'PiB'];
  // Bug fix: Math.log(0) is -Infinity (and log of a negative is NaN), so the
  // original indexed units[-Infinity] and produced "0.0 undefined" for zero.
  if (bytes <= 0) return bytes.toFixed(precision) + ' ' + units[0];
  // Clamp to the largest unit so values beyond PiB don't index off the array.
  var exponent = Math.min(
    Math.floor(Math.log(bytes) / Math.log(1024)),
    units.length - 1
  );
  return (bytes / Math.pow(1024, exponent)).toFixed(precision) + ' ' + units[exponent];
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment