Uploading a directory or a file to an AWS S3 bucket with Node.js

Important notes:

  • uploads a directory and its sub-directories recursively (a single-file example using uploadFile directly is shown after the directory example);
  • path can be an absolute or a relative path to a directory;
  • params and options are the same as in the AWS SDK documentation, so these functions are very flexible;
  • rootKey is the root S3 key (prefix) to use; by default it is the S3 root. Note that the uploaded directory's own name is not prepended to the keys: to upload /Users/you/my-project/images to s3://bucket/public/images, set rootKey to public/images (see the short sketch after this list);
  • aws-sdk automatically checks the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment variables; in my opinion this is the safest way to deal with credentials;
  • without clustering, I found that uploading a directory of 1254 files was nearly 2 times faster than the native AWS CLI sync command (it's Python underneath; Node.js should be faster);
  • don't forget to set each file's content type (mostly relevant for static websites), otherwise it defaults to application/octet-stream and can lead to unexpected behavior;
  • use your favorite debugger/logger over console;
  • const x = { ...params }; is equivalent to Object.assign BUT does not deep clone nested objects, which could lead to unexpected object mutations; prefer a safe clone function or similar if needed;
  • tested with Node.js 12.15.0;
  • improve this by clustering the whole upload; some extra code/controls will be needed (based on file sizes, number of files, available cores, etc.). A simpler batched alternative is sketched at the very end of this page.
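
As a quick standalone illustration of the rootKey mapping (separate from the main script below; the prefix and file names are placeholders):

const { join } = require('path');

// a file found at the top level of the uploaded directory
console.log(join('public/images', 'logo.png')); // public/images/logo.png (POSIX)

// a file in a sub-directory is handled by the recursive call, which passes
// join(rootKey, subDirectoryName) down as the new rootKey
console.log(join(join('public/images', 'icons'), 'favicon.png')); // public/images/icons/favicon.png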
const { createReadStream, promises: { readdir, stat: getStats } } = require('fs');
const { resolve, join } = require('path');
const S3 = require('aws-sdk/clients/s3');
const { getMIMEType } = require('node-mime-types');

const s3 = new S3({
  signatureVersion: 'v4',
});
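
// note: no credentials are passed here; the SDK resolves them itself,
// typically from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
// environment variables mentioned in the notes above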

// upload file
const uploadFile = async function uploadFile({ path, params, options } = {}) {
  const parameters = { ...params };
  const opts = { ...options };

  try {
    const rstream = createReadStream(resolve(path));

    rstream.once('error', (err) => {
      console.error(`unable to upload file ${path}, ${err.message}`);
    });

    parameters.Body = rstream;
    parameters.ContentType = getMIMEType(path);
    await s3.upload(parameters, opts).promise();

    console.info(`${parameters.Key} (${parameters.ContentType}) uploaded in bucket ${parameters.Bucket}`);
  } catch (e) {
    throw new Error(`unable to upload file ${path} at ${parameters.Key}, ${e.message}`);
  }

  return true;
};

// upload directory and its sub-directories if any
const uploadDirectory = async function uploadDirectory({
  path,
  params,
  options,
  rootKey,
} = {}) {
  const parameters = { ...params };
  const opts = { ...options };
  const root = rootKey && rootKey.constructor === String ? rootKey : '';
  let dirPath;

  try {
    dirPath = resolve(path);
    const dirStats = await getStats(dirPath);

    if (!dirStats.isDirectory()) {
      throw new Error(`${dirPath} is not a directory`);
    }

    console.info(`uploading directory ${dirPath}...`);

    const filenames = await readdir(dirPath);

    if (Array.isArray(filenames)) {
      await Promise.all(filenames.map(async (filename) => {
        const filepath = `${dirPath}/${filename}`;
        const fileStats = await getStats(filepath);

        if (fileStats.isFile()) {
          parameters.Key = join(root, filename);
          await uploadFile({
            path: filepath,
            params: parameters,
            options: opts,
          });
        } else if (fileStats.isDirectory()) {
          await uploadDirectory({
            params,
            options,
            path: filepath,
            rootKey: join(root, filename),
          });
        }
      }));
    }
  } catch (e) {
    throw new Error(`unable to upload directory ${path}, ${e.message}`);
  }

  console.info(`directory ${dirPath} successfully uploaded`);
  return true;
};

// example
(async () => {
  try {
    console.time('s3 upload');

    await uploadDirectory({
      path: '../front/dist',
      params: {
        Bucket: 'my-bucket',
      },
      options: {},
      rootKey: '',
    });

    console.timeEnd('s3 upload');
  } catch (e) {
    console.error(e);
  }
})();
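
uploadFile can also be called directly for a single file; here is a minimal sketch (the bucket name, Key and local path are placeholders):

// example: single file
(async () => {
  try {
    await uploadFile({
      path: '../front/dist/index.html',
      params: {
        Bucket: 'my-bucket',
        Key: 'index.html',
      },
      options: {},
    });
  } catch (e) {
    console.error(e);
  }
})();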
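
As mentioned in the notes, the upload could be improved by clustering; a full cluster-based version needs extra controls, but a simpler way to bound concurrency (not true multi-process clustering) is to upload in fixed-size batches. A sketch, assuming the caller prepares the { path, key } pairs and that a batch size of 50 is acceptable:

// sketch: bound concurrency by uploading files in fixed-size batches
// instead of one big Promise.all
const uploadFilesInBatches = async function uploadFilesInBatches({
  files = [], // array of { path, key } objects prepared by the caller
  params,
  options,
  batchSize = 50,
} = {}) {
  for (let i = 0; i < files.length; i += batchSize) {
    const batch = files.slice(i, i + batchSize);

    // wait for the current batch to finish before starting the next one
    await Promise.all(batch.map(({ path, key }) => uploadFile({
      path,
      params: { ...params, Key: key },
      options,
    })));
  }

  return true;
};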