Last active
January 23, 2019 15:27
-
-
Save sboli/9215b18c5ad9cfed4d019cce729256fb to your computer and use it in GitHub Desktop.
Resize images and push to S3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import Config from "./config";
import { IHash } from "./models/hash";
import { IItemModel, ItemModel } from "./models/item";
import sharp = require("sharp");
import S3 = require('aws-sdk/clients/s3');
import { rejects } from "assert";
class PictureLibrary { | |
async resizeAndSave(filePath: string, item: IItemModel) { | |
const options: sharp.ResizeOptions = { fit: 'inside' }; | |
const sizes = []; | |
sizes.push({ width: 750, height: 500, tag: 'lg' }); | |
sizes.push({ width: 750 / 2, height: 500 / 2, tag: 'md' }); | |
sizes.push({ width: 750 / 4, height: 500 / 4, tag: 'sm' }); | |
sizes.push({ width: 21, height: 14, tag: 'xs' }); | |
const wait = []; | |
// Enveloppe les actions pour pouvoir les effectuer en parallèle | |
const execute = async (filePath: string, size: any) => { | |
const update: IHash = {}; | |
try { | |
const buffer = await this.resize(filePath, size, options); | |
const info = await this.pushToS3(buffer, item, size.tag); | |
const key = 'pictureUrl' + size.tag.charAt(0).toUpperCase() + size.tag.slice(1); | |
update[key] = info.Location; | |
ItemModel.findOneAndUpdate({ _id: item._id }, update).exec(); | |
} catch(e) { | |
console.log(e); | |
} | |
}; | |
for (const ea of sizes) { | |
wait.push(execute(filePath, ea)); | |
} | |
await Promise.all(wait); | |
return ItemModel.findOne({ _id: item._id }); | |
} | |
resize(filePath: string, size: any, options: sharp.ResizeOptions) { | |
const s = sharp(filePath) | |
.resize(Math.round(size.width), Math.round(size.height), options) | |
.jpeg({ | |
quality: size.tag === 'xs' ? 20 : 80 | |
}); | |
if (size.tag === 'xs') { | |
s.blur(2); | |
} | |
return s.toBuffer(); | |
} | |
async pushToS3(buffer: Buffer, item: IItemModel, size: string) { | |
const uploadedFileName = this.getUploadedFileName(item, size); | |
console.log('Uploading to S3: ' + uploadedFileName); | |
const aws = this.getAws(); | |
try { | |
const data = await aws.upload({ | |
Bucket: Config.AWS_BUCKET, | |
Key: Config.AWS_ITEM_PICTURES_PATH + '/' + uploadedFileName, | |
Body: buffer, | |
ACL: 'public-read' | |
}).promise(); | |
return data; | |
} catch (err) { | |
console.log('error while uploading to S3'); | |
console.log(err); | |
} | |
return undefined; | |
} | |
async deletePicturesFromS3(item: IItemModel) { | |
const aws = this.getAws(); | |
try { | |
const wait = []; | |
for (const size of ['lg', 'md', 'sm', 'xs']) { | |
wait.push(aws.deleteObject({ | |
Bucket: Config.AWS_BUCKET, | |
Key: Config.AWS_ITEM_PICTURES_PATH + '/' + this.getUploadedFileName(item, size) | |
}).promise()); | |
} | |
await Promise.all(wait); | |
console.log('Deleted ' + item.name + ' images'); | |
} catch (err) { | |
console.log(err); | |
} | |
} | |
getAws(): S3 { | |
return new S3({ | |
accessKeyId: Config.AWS_USER_KEY, | |
secretAccessKey: Config.AWS_USER_SECRET | |
}); | |
} | |
getUploadedFileName(item: IItemModel, size: string) { | |
return item._id + '_' + size + '.jpg'; | |
} | |
} | |
export { PictureLibrary }; |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment