Upload images to S3 with resizing by Lambda functions triggered via S3 hooks
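1-server-generate-sign-url.js (Express endpoint that issues a signed S3 PUT URL plus the public URLs of the resized copies):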
/* @flow */
/* eslint-disable no-param-reassign */
import express, { type $Request, type $Response } from 'express';
import uniqid from 'uniqid';
import aws from 'aws-sdk';
export type SignResult = {
publicUrl: string,
signedUrl: string,
publicUrl1200: string,
publicUrl500: string,
publicUrl100: string,
filename: string,
};
if (process.env.AWS_ACCESS_KEY_ID && process.env.AWS_SECRET_ACCESS_KEY) {
aws.config.update({
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
});
}
const UPLOAD_IMG_S3_BUCKET: string = process.env.UPLOAD_IMG_S3_BUCKET || '';
const UPLOAD_IMG_S3_REGION: string = process.env.UPLOAD_IMG_S3_REGION || '';
const s3Options = {
region: UPLOAD_IMG_S3_REGION,
signatureVersion: 'v4',
authType: 'REST-QUERY-STRING',
};
export default function getUploadRouter() {
const router = express.Router(); // eslint-disable-line
router.get('/sign', (req: $Request, res: $Response) => {
const ext = req.query.ext || 'jpg';
const date = new Date();
const folder = `${date.getFullYear()}/${date.getMonth() + 1}/${date.getDate()}/`;
const filename = `${folder}${uniqid()}.${ext}`;
const s3 = new aws.S3(s3Options);
const params = {
Bucket: UPLOAD_IMG_S3_BUCKET,
Key: filename,
Expires: 60,
ContentType: req.query.contentType,
ACL: 'public-read', // 'private',
Metadata: {
'cabinet-id': req.cabinet ? req.cabinet.getId() : '',
},
};
s3.getSignedUrl('putObject', params, (err, data) => {
if (err) {
req.raven.captureError(err, {
tags: { aws: 's3upload' },
extra: params,
});
res.status(500).send('Cannot create S3 signed URL');
return;
}
const bucketUrl = `https://s3-${UPLOAD_IMG_S3_REGION}.amazonaws.com/${UPLOAD_IMG_S3_BUCKET}`;
const result: SignResult = {
signedUrl: data,
publicUrl: `${bucketUrl}/${filename}`,
publicUrl1200: `${bucketUrl}-1200/${filename}`,
publicUrl500: `${bucketUrl}-500/${filename}`,
publicUrl100: `${bucketUrl}-100/${filename}`,
filename,
};
res.json(result);
});
});
return router;
}
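A minimal sketch of mounting this router (assuming a standard Express app; the /upload prefix matches the client's default signingUrl of /upload/sign further below, and the req.raven / req.cabinet middleware used above are provided elsewhere in the host app):

// Hypothetical server entry point; getUploadRouter is the default export above.
import express from 'express';
import getUploadRouter from './1-server-generate-sign-url';

const app = express();
app.use('/upload', getUploadRouter()); // exposes GET /upload/sign?ext=jpg&contentType=image%2Fjpeg
app.listen(3000);

2-aws-s3-lambda-index.js (Lambda function that resizes each uploaded image into the -1200/-500/-100 buckets):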
/* eslint-disable no-console, strict */
'use strict';
const async = require('async');
const AWS = require('aws-sdk');
const s3 = new AWS.S3();
const gm = require('gm').subClass({
imageMagick: true,
});
const sizeConfigs = [
{ dstBucketPostfix: '-1200', width: 1200 },
{ dstBucketPostfix: '-500', width: 500 },
{ dstBucketPostfix: '-100', width: 100 },
];
// eslint-disable-next-line no-unused-vars
exports.handler = (event, context, done) => {
console.time('totalExecutionTime');
const srcBucket = event.Records[0].s3.bucket.name;
const srcKey = event.Records[0].s3.object.key;
const elements = srcKey.split('.');
const dstFolderName = elements[0];
const ext = elements[1] || 'jpg';
if (!dstFolderName) {
context.fail();
return;
}
const typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error(`unable to infer image type for key ${srcKey}`);
context.fail();
return;
}
const imageType = typeMatch[1].toLowerCase();
if (imageType !== 'jpg' && imageType !== 'jpeg' && imageType !== 'png') {
console.log(`skipping non-image ${srcKey}`);
context.fail();
return;
}
async.waterfall(
[
function download(next) {
console.time('downloadImage');
s3.getObject({ Bucket: srcBucket, Key: srcKey }, (err, response) => {
console.timeEnd('downloadImage');
next(err, response);
});
},
function convert(response, next) {
console.time('convertImage');
gm(response.Body)
.density(300)
.noProfile()
.autoOrient()
.toBuffer('jpg', (err, buffer) => {
if (err) {
next(err);
} else {
console.timeEnd('convertImage');
next(null, buffer);
}
});
},
function resizeUpToDown(buffer, next) {
let lastBuffer = buffer;
async.mapSeries(
sizeConfigs,
(config, callback) => {
console.time(`resize ${config.width}`);
gm(lastBuffer)
.resize(config.width, null, '>')
.quality(95)
.toBuffer('jpg', (err, resizedBuffer) => {
console.timeEnd(`resize ${config.width}`);
if (err) {
console.error(err);
callback(err);
} else {
lastBuffer = resizedBuffer;
const obj = config;
obj.contentType = 'image/jpeg';
obj.data = resizedBuffer;
obj.dstKey = `${dstFolderName}.${ext}`;
callback(null, obj);
}
});
},
(err, items) => {
next(err, items);
}
);
},
function upload(items, next) {
console.time('totalUpload');
async.each(
items,
(item, callback) => {
const tMsg = ` asyncUpload ${item.width} ${parseInt(item.data.length / 1024, 10)}kb`;
console.time(tMsg);
const dstBucket = event.Records[0].s3.bucket.name + item.dstBucketPostfix;
if (srcBucket === dstBucket) {
console.error('Destination bucket must not match source bucket.');
callback(new Error('Destination bucket must not match source bucket.'));
return;
}
s3.putObject(
{
Bucket: dstBucket,
Key: item.dstKey,
Body: item.data,
ContentType: item.contentType,
ACL: 'public-read',
CacheControl: 'max-age=2592000,public',
},
err => {
console.timeEnd(tMsg);
if (err) {
console.error(`Unable putObject to ${dstBucket} ${item.dstKey} ${err}`);
}
callback(err);
}
);
},
err => {
console.timeEnd('totalUpload');
next(err, items);
}
);
},
],
err => {
console.timeEnd('totalExecutionTime');
console.log('');
if (err) {
console.error(`Unable to resize [${srcBucket}]/${srcKey} due to an error: ${err}`);
context.fail(err);
} else {
console.log(`Successfully resized ${srcBucket} ${srcKey}`);
}
}
);
};
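package.json for the Lambda function (lambda-image-resizer):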
{
"name": "lambda-image-resizer",
"version": "0.0.0",
"description": "",
"main": "index.js",
"author": "nodkz",
"dependencies": {
"async": "~0.2.8",
"aws-sdk": "^2.1.24",
"gm": "^1.17.0"
}
}
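For a quick smoke test outside AWS, the handler can be invoked with a hand-built S3 event record. This is only a sketch: the bucket name and key are placeholders, local AWS credentials and the destination buckets must exist, and gm needs ImageMagick installed.

// Hypothetical local invocation of the resize handler.
const { handler } = require('./2-aws-s3-lambda-index');

const event = {
  Records: [
    { s3: { bucket: { name: 'my-upload-bucket' }, object: { key: '2018/4/13/abc123.jpg' } } },
  ],
};

handler(event, { fail: err => console.error('failed', err) }, () => {});

3-client-S3UploadApi.js (framework-agnostic browser uploader: fetches a signed URL, PUTs the file to S3, reports progress):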
/* @flow */
/* eslint-disable no-console */
import type { SignResult } from './server';
export type S3UploaderOpts = {|
files: FileList,
onProgress: (percent: number, message: string) => any,
onFinish: (result: SignResult, file: File) => any,
onError: (error: string) => any,
server: string,
uploadRequestHeaders: {
[header: string]: string,
},
signingUrlHeaders: {
[header: string]: string,
},
|};
export default class S3UploaderApi {
server: string;
signingUrl: string;
files: ?FileList;
uploadRequestHeaders: {
[header: string]: string,
};
httprequest: ?XMLHttpRequest;
signingUrlQueryParams: Object = {};
signingUrlHeaders: {
[header: string]: string,
};
constructor(options: $Shape<S3UploaderOpts> = {}) {
this.server = '';
this.signingUrl = '/upload/sign';
this.files = null;
this.uploadRequestHeaders = {
'Cache-Control': 'max-age=2592000,public',
...options.uploadRequestHeaders,
};
this.signingUrlHeaders = options.signingUrlHeaders;
// signingUrlHeaders: this.props.signingUrlHeaders,
// signingUrlQueryParams: this.props.signingUrlQueryParams,
// uploadRequestHeaders: this.props.uploadRequestHeaders,
// contentDisposition: this.props.contentDisposition,
Object.keys(options).forEach(k => {
if (options[k] !== undefined) {
// $FlowFixMe
this[k] = options[k];
}
});
this.handleFileSelect(this.files);
}
onFinish(signResult: SignResult, file: File): void {
console.log('base.onFinish()', signResult.publicUrl, file);
}
onProgress(percent: number, status: string, file: File): void {
console.log('base.onProgress()', percent, status, file);
}
onError(status: string, file: File): void {
console.log('base.onError()', status, file);
}
handleFileSelect(files: ?FileList) {
const result = [];
if (!files) return result;
// IMPORTANT: `files` is a `FileList`, not an array, so traverse it with a plain `for` loop.
for (let i = 0; i < files.length; i++) {
const file = files[i];
this.onProgress(0, 'Waiting', file);
result.push(this.uploadFile(file));
}
return result;
}
createCORSRequest(method: 'POST' | 'GET' | 'PUT', url: string): XMLHttpRequest {
let xhr = new XMLHttpRequest();
if (xhr.withCredentials != null) {
xhr.open(method, url, true);
} else if (typeof XDomainRequest !== 'undefined') {
xhr = new XDomainRequest();
xhr.open(method, url);
}
if (!xhr) {
throw new Error('S3Upload.createCORSRequest() can not create xhr');
}
// $FlowFixMe
return xhr;
}
executeOnSignedUrl(file: File, callback: (result: any) => any) {
const queryParams = Object.assign({}, this.signingUrlQueryParams, {
fileName: file.name,
ext: file.name.split('.').pop(),
contentType: file.type,
});
const queryString = [];
Object.keys(queryParams).forEach(key => {
queryString.push(`${key}=${encodeURIComponent(queryParams[key])}`);
});
const xhr = this.createCORSRequest(
'GET',
`${this.server}${this.signingUrl}?${queryString.join('&')}`
);
if (this.signingUrlHeaders) {
Object.keys(this.signingUrlHeaders).forEach(key => {
xhr.setRequestHeader(key, this.signingUrlHeaders[key]);
});
}
if (xhr.overrideMimeType) {
xhr.overrideMimeType('text/plain; charset=x-user-defined');
}
xhr.onreadystatechange = () => {
if (xhr.readyState === 4 && xhr.status === 200) {
let result;
try {
result = JSON.parse(xhr.responseText);
} catch (error) {
this.onError('Invalid response from server', file);
return;
}
callback(result);
} else if (xhr.readyState === 4 && xhr.status !== 200) {
this.onError(`Could not contact request signing server. Status = ${xhr.status}`, file);
}
};
return xhr.send();
}
uploadToS3(file: File, signResult: SignResult): void {
const xhr = this.createCORSRequest('PUT', signResult.signedUrl);
if (!xhr) {
this.onError('CORS not supported', file);
return;
}
xhr.onload = () => {
if (xhr.status === 200) {
this.onProgress(100, 'Upload completed', file);
this.onFinish(signResult, file);
} else {
this.onError(`Upload error: ${xhr.status}`, file);
}
};
xhr.onerror = () => this.onError('XHR error', file);
xhr.upload.onprogress = e => {
let percentLoaded;
if (e.lengthComputable) {
percentLoaded = Math.round(e.loaded / e.total * 100);
this.onProgress(percentLoaded, percentLoaded === 100 ? 'Finalizing' : 'Uploading', file);
}
};
xhr.setRequestHeader('Content-Type', file.type);
if (this.uploadRequestHeaders) {
Object.keys(this.uploadRequestHeaders).forEach(key => {
xhr.setRequestHeader(key, this.uploadRequestHeaders[key]);
});
// } else {
// xhr.setRequestHeader('x-amz-acl', 'public-read');
}
this.httprequest = xhr;
xhr.send(file);
}
uploadFile(file: File) {
return this.executeOnSignedUrl(file, signResult => this.uploadToS3(file, signResult));
}
abortUpload(): void {
if (this.httprequest) {
this.httprequest.abort();
}
}
}
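A usage sketch with a bare file input, independent of any UI framework (assumes the sign endpoint from the server file is mounted at /upload on the same origin):

// Hypothetical: wire the uploader to a plain <input type="file"> element.
const input = document.querySelector('input[type="file"]');
input.addEventListener('change', () => {
  new S3UploaderApi({
    files: input.files,
    onProgress: (percent, message) => console.log(`${percent}% ${message}`),
    onFinish: (signResult, file) => console.log('uploaded', file.name, signResult.publicUrl),
    onError: error => console.error(error),
  });
});

4-client-react-PhotoUpload.js (React component that uses the uploader and polls until the resized thumbnail exists):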
/* @flow */
/* eslint-disable jsx-a11y/label-has-for */
import * as React from 'react';
import { cabinetApi } from 'clientStores';
import S3UploaderApi from 'app/upload/S3UploadApi';
import ProgressBar from '_components/Loader/ProgressBar';
import SvgIcon from '_components/SvgIcon';
import type { SignResult } from 'app/upload/server';
import s from './PhotoUpload.scss';
function b64toBlob(b64Data, contentType = '', sliceSize = 512) {
const byteCharacters = atob(b64Data);
const byteArrays = [];
for (let offset = 0; offset < byteCharacters.length; offset += sliceSize) {
const slice = byteCharacters.slice(offset, offset + sliceSize);
const byteNumbers = new Array(slice.length);
for (let i = 0; i < slice.length; i++) {
byteNumbers[i] = slice.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
byteArrays.push(byteArray);
}
const blob = new Blob(byteArrays, { type: contentType });
return blob;
}
type Props = {
value?: { big?: string, thumb?: string, mid?: string } | string, // eslint-disable-line
onChange?: Function,
onBlur?: Function,
};
type State = {
photoUploaded: boolean,
photoUrl1200: string,
photoUrl500: string,
photoUrl100: string,
progress: number,
errorMsg: string,
photoThumb: string,
photoUrl?: string,
};
export default class PhotoUpload extends React.Component<Props, State> {
static NOD_FORM_ELEMENT = 'PhotoUpload';
state: State;
_fileNode: ?HTMLInputElement;
constructor(props: Props) {
super(props);
let isPhotoUploaded;
const value = props.value;
if (typeof value === 'object') {
if (!value.big && !value.mid && !value.thumb) {
isPhotoUploaded = false;
} else {
isPhotoUploaded = value || false;
}
}
this.state = {
photoUploaded: !!isPhotoUploaded,
photoUrl1200: '',
photoUrl500: '',
photoUrl100: '',
progress: 0,
errorMsg: '',
photoThumb: '',
};
// TODO remove ability
if (
props.value &&
typeof props.value === 'string' &&
props.value.substr(0, 22).indexOf('base64') !== -1
) {
this.uploadBase64File();
}
}
onProgress = (percent: number) => {
// console.log(`Upload progress: ${percent}% ${message}`);
this.setState({ progress: parseInt(percent * 0.5, 10) });
};
onPhotoFinish = () => {
const avatar = {
thumb: this.state.photoUrl100,
mid: this.state.photoUrl500,
big: this.state.photoUrl1200,
};
if (this.props.onChange) this.props.onChange({}, avatar);
if (this.props.onBlur) this.props.onBlur({}, avatar);
};
onPhotoUploaded = (signResult: SignResult, file: File) => {
/*
console.log(`Upload finished:
${signResult.publicUrl}
${signResult.publicUrl1200}
${signResult.publicUrl500}
${signResult.publicUrl100}`);
*/
/* Roma! Here are all the file variants, just for you */
this.setState({
// photoUrl: signResult.publicUrl,
photoUrl1200: signResult.publicUrl1200,
photoUrl500: signResult.publicUrl500,
photoUrl100: signResult.publicUrl100,
});
const aliveProgressId = setInterval(() => {
this.setState({
progress: this.state.progress + ((100 - this.state.progress) * 0.2 - 2),
});
}, 1000);
const checkIsResizedImgReady = (attempt = 0) => {
if (document) {
const image = new Image();
image.onload = () => {
clearInterval(aliveProgressId);
this.setState(
{
progress: 0,
photoUploaded: true,
photoThumb: signResult.publicUrl100,
errorMsg: '',
},
this.onPhotoFinish
);
};
image.onerror = () => {
setTimeout(() => checkIsResizedImgReady(attempt + 1), 1.7 ** (attempt + 8) * 100);
};
image.src = signResult.publicUrl100;
}
};
const waitBigFileInSec = (file && file.size && parseInt(file.size / 200000, 10)) || 0;
setTimeout(checkIsResizedImgReady, waitBigFileInSec * 1000);
};
onError = () => {
this.setState({
errorMsg: 'Не удалось загрузить фотографию, попробуйте позже', // "Could not upload the photo, please try again later"
});
};
uploadBase64File = (): ?S3UploaderApi => {
const { value } = this.props;
if (typeof value === 'string') {
const myFile = new File([b64toBlob(value.substr(22), 'image/png')], 'avatar.png', {
type: 'image/png',
lastModified: Date.now(),
});
return new S3UploaderApi({
files: ([myFile]: any),
onProgress: this.onProgress,
onFinish: this.onPhotoUploaded,
onError: this.onError,
signingUrlHeaders: {
...cabinetApi.token.getHeaders(),
},
});
}
return null;
};
uploadFile = () => {
if (!this._fileNode) {
return null;
}
return new S3UploaderApi({
files: this._fileNode.files,
onProgress: this.onProgress,
onFinish: this.onPhotoUploaded,
onError: this.onError,
signingUrlHeaders: {
...cabinetApi.token.getHeaders(),
},
});
};
deletePhoto = () => {
this.setState({ photoUploaded: false }, () => {
if (this.props.onChange) this.props.onChange({}, null);
});
};
abortUpload = () => {
// TODO
console.log('abort');
};
render() {
const { photoUploaded, photoThumb, progress, errorMsg } = this.state;
const { value = '' } = this.props;
let thumb = '';
if (typeof value === 'object') {
thumb = value.thumb;
} else {
thumb = value;
}
return (
<div className={`${s.photoUpload} ${photoUploaded ? s.uploaded : ''}`}>
<div className={s.photo}>
<div className={s.photoWrap} style={{ backgroundImage: `url(${thumb || photoThumb})` }}>
<img src={thumb || photoThumb} alt="" />
</div>
</div>
<div className={s.loader}>
<label className={s.label}>
<span className={s.text}>
{photoUploaded ? 'Загрузить другое фото' /* "Upload another photo" */ : 'Загрузить фото' /* "Upload photo" */}
</span>
<div>
<input
type="file"
ref={ref => {
this._fileNode = ref;
}}
onChange={this.uploadFile}
value=""
/>
</div>
</label>
</div>
{progress > 0 && (
<div className={s.photoAction} onClick={this.abortUpload}>
<SvgIcon color="#95A5A6" wh="22px" file="close" />
</div>
)}
{photoUploaded &&
progress === 0 && (
<div className={s.photoAction} onClick={this.deletePhoto}>
<SvgIcon color="#95A5A6" wh="22px" file="delete" />
</div>
)}
{progress > 0 && (
<div className={s.progressBar}>
<ProgressBar
type="circular"
mode="determinate"
value={progress}
style={{ width: '110px', height: '110px', overflow: 'hidden' }}
circleLineWidth="1"
multicolor
/>
</div>
)}
{errorMsg && <div className={s.errorMess}>{errorMsg}</div>}
</div>
);
}
}
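A sketch of how the component might sit inside a parent form (the avatar field name and the parent's state shape are assumptions):

// Hypothetical parent render: PhotoUpload passes { thumb, mid, big } URLs to onChange.
<PhotoUpload
  value={this.state.avatar}
  onChange={(e, avatar) => this.setState({ avatar })}
/>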
@nodkz (Owner) commented Apr 13, 2018:
1-server-generate-sign-url.js) On the server, implement an endpoint that generates an upload link (signedUrl) for you plus four bucket addresses: the original is uploaded to publicUrl, while the publicUrl100|500|1200 buckets receive the resized images produced by the Lambda function. A sample of the JSON it returns is sketched below.
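A sketch of what GET /upload/sign?ext=jpg&contentType=image/jpeg might return (bucket, region, and key are made-up placeholders, and the signed query string is shortened):

{
  "signedUrl": "https://s3-eu-west-1.amazonaws.com/my-upload-bucket/2018/4/13/abc123.jpg?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Expires=60&...",
  "publicUrl": "https://s3-eu-west-1.amazonaws.com/my-upload-bucket/2018/4/13/abc123.jpg",
  "publicUrl1200": "https://s3-eu-west-1.amazonaws.com/my-upload-bucket-1200/2018/4/13/abc123.jpg",
  "publicUrl500": "https://s3-eu-west-1.amazonaws.com/my-upload-bucket-500/2018/4/13/abc123.jpg",
  "publicUrl100": "https://s3-eu-west-1.amazonaws.com/my-upload-bucket-100/2018/4/13/abc123.jpg",
  "filename": "2018/4/13/abc123.jpg"
}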

2-aws-s3-lambda-index.js) As a second step, set up a hook that fires when a new image appears in the publicUrl bucket, so that the Lambda function picks the image up and resizes it into the remaining buckets at the required sizes. One way to wire this trigger from code is sketched below.
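A sketch of configuring the S3 trigger (the bucket name and function ARN are placeholders; S3 must also be granted permission to invoke the function, e.g. via lambda addPermission with principal s3.amazonaws.com, which is omitted here):

// Hypothetical one-off setup script using the aws-sdk already present in the project.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

s3.putBucketNotificationConfiguration(
  {
    Bucket: 'my-upload-bucket',
    NotificationConfiguration: {
      LambdaFunctionConfigurations: [
        {
          LambdaFunctionArn: 'arn:aws:lambda:eu-west-1:123456789012:function:lambda-image-resizer',
          Events: ['s3:ObjectCreated:*'],
        },
      ],
    },
  },
  err => console.log(err || 'S3 -> Lambda trigger configured')
);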

3-client-S3UploadApi.js) A file uploader for S3 that takes a File object, asks the server for a signed upload URL, uploads the image, tracks progress, and reports completion. This API does not depend on any client framework; it is used in step 4.

4-client-react-PhotoUpload.js) A React component that renders an already-existing picture or, if there is none, shows a file input that waits for a file to be chosen and, once it gets one, calls the API from step 3. After that API has uploaded the image, the resize magic of the step 2 Lambda function happens on AWS, which can take a few seconds. That is why the component has a checkIsResizedImgReady method that requests the thumbnail from the resized-images bucket and keeps retrying until it gets a 200 status. It then reports that the upload and resize succeeded, returning the four URLs from which the uploaded image can be fetched.

These URLs can then be passed to your own API as plain strings, instead of burdening your main server with processing the image and working out where it is stored. Everything is uploaded to S3 and processed there first, and only after that do you make a request to your own API to persist the data. A sketch of that final call follows.
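A sketch of that final call (the /api/profile endpoint is a made-up example; the avatar object is exactly what PhotoUpload hands to onChange):

// Hypothetical: persist only the URL strings, no image bytes ever touch your API server.
const saveAvatar = avatar =>
  fetch('/api/profile', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ avatar }), // { thumb, mid, big }
  });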
