Skip to content

Instantly share code, notes, and snippets.

@kibolho
Last active May 28, 2024 13:48
Show Gist options
  • Select an option

  • Save kibolho/99cd18c72baf56780ce0423a2fa7be94 to your computer and use it in GitHub Desktop.
Upload to AWS S3 using Django, Boto3 and S3Direct
import { httpClient } from '@/infra/http';
import { HttpStatusCode } from '@/infra/http/http-client';
import { logErrorSentry } from '@/infra/monitoring/sentry';
import { generateRandomUUID } from '@/utils/uuid';
/**
 * Requests a pre-signed S3 upload URL from the backend s3direct endpoint.
 *
 * Posts the file metadata as multipart form data and returns the response
 * body (expected to contain `upload_url` — see S3ParamsResponseData).
 *
 * @throws Error when the request fails or returns a non-200 status; the
 *         failure is also reported to Sentry together with the caller ids.
 */
export const getS3PreSignedUrl = async ({
  token,
  dest = 'client_destination',
  name = generateRandomUUID(),
  type = 'video/quicktime',
  size = 12000,
  coach_id,
  client_id,
}: {
  token: string;
  dest?: string;
  name?: string;
  type?: string;
  size?: number;
  coach_id: string;
  client_id: string;
}) => {
  const formData = new FormData();
  formData.append('dest', dest);
  formData.append('name', name);
  formData.append('type', type);
  formData.append('size', String(size));
  formData.append('creator_id', coach_id);
  formData.append('client_id', client_id);
  const response = await httpClient.request<S3ParamsResponseData>({
    url: '/api/s3direct/get_presigned_url/',
    method: 'post',
    body: formData,
    token,
  });
  if (response.statusCode === HttpStatusCode.ok && !!response.body) {
    return response.body;
  }
  logErrorSentry(
    { coach_id, client_id, ...response },
    { message: 'Getting s3 pre signed url failed' }
  );
  // Fix: the original threw a bare `new Error()` with no message, which makes
  // the rejection reason useless to callers and crash reports.
  throw new Error('Getting s3 pre signed url failed');
};
// Shape of the backend response from /api/s3direct/get_presigned_url/.
// NOTE(review): only `upload_url` is modeled here; presumably the backend
// returns exactly this one key (see the Django view below) — confirm.
type S3ParamsResponseData = {
upload_url: string; //'https://bucket_name.s3.amazonaws.com/client_id/2/dc3ddb2334254f0ba210a533b8dcd85f.mov?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAUF7OFQ6GOUKOUVMR%2F20240509%2Fus-east-2%2Fs3%2Faws4_request&X-Amz-Date=20240509T230425Z&X-Amz-Expires=3600&X-Amz-SignedHeaders=host&X-Amz-Signature=d8ad2caf1bfd4722b7449bfc7d5e60f28983930add86c99690f049b42c56f33f"';
};
from typing import Optional
import boto3
from botocore.client import Config
from django.conf import settings
import logging
from botocore.exceptions import ClientError
def create_presigned_post(bucket_name, object_name, expiration=3600):
    """Generate a presigned S3 PUT URL for uploading an object.

    :param bucket_name: string, target S3 bucket
    :param object_name: string, key of the object inside the bucket
    :param expiration: time in seconds the presigned URL remains valid
    :return: dict ``{"upload_url": <presigned url>}``, or None on error.
    """
    # Fix: the original referenced an undefined name ``service`` and was
    # missing the closing parenthesis of the boto3.client(...) call.
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
        region_name=settings.AWS_S3_REGION_NAME,
        # s3v4 signing is required for presigned URLs in newer AWS regions.
        config=Config(signature_version="s3v4"),
    )
    try:
        presigned_url = s3_client.generate_presigned_url(
            ClientMethod="put_object",
            Params={"Bucket": bucket_name, "Key": object_name},
            ExpiresIn=expiration,
        )
    except ClientError as e:
        logging.error(e)
        return None
    # The response contains the presigned URL the client will PUT to.
    return {"upload_url": presigned_url}
import { logErrorSentry } from '@/infra/monitoring/sentry';
/**
 * Uploads a file to S3 with an XMLHttpRequest PUT against a presigned URL.
 *
 * Real upload progress is not available (see the TODO at the bottom), so
 * when `fileSize` and `onProgress` are supplied, progress is simulated:
 * +8% per timer tick (interval scaled by the file size), capped at 90%
 * until the request actually finishes.
 *
 * Resolves with `{ success: true }` on HTTP 200; rejects otherwise.
 */
export const uploadS3 = async ({
  url,
  uri,
  fileSize,
  type = 'video/quicktime',
  onProgress,
}: {
  url: string;
  uri: string;
  fileSize?: number;
  type?: string;
  onProgress?: ({ progress }: { progress: number }) => void;
}) => {
  try {
    return new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      let interval: NodeJS.Timeout;
      // Fake progress if fileSize is provided
      if (fileSize && onProgress) {
        let progress = 0;
        interval = setInterval(() => {
          progress += 8; // Increment progress by 8% every interval
          if (progress >= 90) {
            clearInterval(interval);
            onProgress({ progress: 90 }); // Cap the fake progress at 90%
          } else {
            onProgress({ progress });
          }
        }, fileSize / 10000); // Interval duration (ms) scales with fileSize
      }
      xhr.onreadystatechange = function () {
        if (xhr.readyState === 4) {
          // Fix: always stop the fake-progress timer once the request is
          // done; the original only cleared it on success, so it kept
          // firing onProgress callbacks after a failed upload.
          clearInterval(interval);
          if (xhr.status === 200) {
            resolve({ success: true });
          } else {
            reject(Error('Failed to upload'));
          }
        }
      };
      xhr.open('PUT', url);
      xhr.setRequestHeader('Content-Type', type);
      xhr.send({ uri, type });
      // @TODO it is not working
      // if (xhr.upload) {
      //   xhr.upload.onprogress = ({ total, loaded }) => console.log(loaded / total);
      // }
    });
  } catch (e) {
    logErrorSentry(e);
    throw new Error('Upload error');
  }
};
from django.conf.urls import url
from .views import get_upload_params, generate_aws_v4_signature, get_presigned_url
# URL routes for the s3direct wrapper views defined in views.py.
# NOTE(review): ``django.conf.urls.url`` (imported above) is deprecated since
# Django 2.0 and removed in Django 4.0 — migrate to ``django.urls.re_path``
# when upgrading.
urlpatterns = [
url("^get_upload_params/", get_upload_params, name="s3direct"),
url("^get_aws_v4_signature/", generate_aws_v4_signature, name="s3direct-signing"),
url("^get_presigned_url/", get_presigned_url, name="s3direct-url-signing"),
]
import json
import os
import uuid
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseServerError
from django.views.decorators.http import require_POST
from s3direct import views as s3direct_views
from s3direct.utils import get_s3direct_destinations
from knox.auth import TokenAuthentication
from django.views.decorators.csrf import csrf_exempt
from project.utils.textutil import as_int
from .s3helper import create_presigned_post
# Shared auth helper used by the view functions below.
def validate_and_authenticate_user(request):
    """Return the authenticated user for *request*.

    Falls back to Knox token authentication when the session user is not
    authenticated. On failure, returns an HttpResponse (400, JSON error)
    that the caller should return directly.
    """
    if not request.user.is_authenticated:
        token_auth = TokenAuthentication()
        # Fix: authenticate() returns None when no token credentials are
        # present; the original unpacked the result unconditionally and
        # crashed with a TypeError in that case.
        result = token_auth.authenticate(request)
        if result is not None:
            user, _auth = result
            if user is not None:
                request.user = user
    if not request.user.is_authenticated:
        return _as_resp_bad_req_logged_out()
    return request.user
# Shared validation helper used by the view functions below.
def validate_file_size(dest, file_size):
    """Check *file_size* against the destination's ``content_length_range``.

    Returns an HttpResponseBadRequest (JSON error) describing the violation,
    or None when the size is acceptable or no range is configured.
    """
    cl_range = dest.get("content_length_range")
    if not cl_range:
        return None
    if file_size < cl_range[0]:
        return _as_resp_bad_req(
            "File too small. Please upload a file greater than %s MB."
            % (cl_range[0] / 1000000,)
        )
    if file_size > cl_range[1]:
        return _as_resp_bad_req(
            "File too large. Please upload a file less than %s MB."
            % (cl_range[1] / 1000000,)
        )
    return None
def handle_uuid_names(request, file_name):
    """Rewrite the upload name to ``{user_id}/{uuid}{ext}`` when the s3direct
    destination has ``uuid_names`` enabled.

    Returns the (possibly modified) request, or an HttpResponse error that
    the caller should return directly (e.g. when the user is logged out).
    """
    dest = get_s3direct_destinations().get(request.POST.get("dest", None), None)
    # Fix: an unknown or missing "dest" yields dest=None; the original then
    # crashed with AttributeError on dest.get(...).
    if not dest or not dest.get("uuid_names"):
        return request
    # "uuid_names" means we build a filename like {user_id}/{uuid}.{ext}
    user = validate_and_authenticate_user(request)
    if isinstance(user, HttpResponse):
        return user
    new_file_name = f"{user.id}/{_as_uuid_filename(file_name)}"
    # request.POST is immutable; copy it before rewriting the name.
    post = request.POST.copy()
    post["name"] = new_file_name
    request.POST = post
    return request
# TODO (security) - have this conditionally perform csrf_protect if the request is cookie-based
# @csrf_protect
@csrf_exempt
@require_POST
def get_presigned_url(request):
    """Authorizes user and validates given file properties, then returns a
    presigned S3 PUT URL as JSON ``{"upload_url": ...}``."""
    dest = get_s3direct_destinations().get(request.POST.get("dest", None), None)
    # Fix: an unknown destination or missing/non-numeric POST fields
    # previously raised (AttributeError / KeyError / ValueError) and
    # surfaced as 500s; return a 400 with a JSON error instead.
    if dest is None:
        return _as_resp_bad_req("Invalid destination.")
    file_name = request.POST.get("name")
    if not file_name:
        return _as_resp_bad_req("Missing file name.")
    try:
        file_size = int(request.POST["size"])
    except (KeyError, ValueError):
        return _as_resp_bad_req("Invalid file size.")
    # Validate file size
    file_size_error = validate_file_size(dest, file_size)
    if file_size_error:
        return file_size_error
    # Handle UUID names
    request = handle_uuid_names(request, file_name)
    if isinstance(request, HttpResponse):
        return request
    # HACK - this disables CSRF checking for the rest of this request and was done so the app can make requests to this endpoint without CSRF.
    # TODO - can we do this conditionally on whether the app used token-based authentication or not?
    request._dont_enforce_csrf_checks = True
    upload_params = s3direct_views.get_upload_params(request)
    try:
        upload_params = json.loads(upload_params.content.decode("utf-8"))
    except (ValueError, AttributeError):
        resp = json.dumps({"error": "Invalid upload parameters."})
        return HttpResponseServerError(resp, content_type="application/json")
    bucket = upload_params.get("bucket")
    object_key = upload_params.get("object_key")
    url = create_presigned_post(
        bucket_name=bucket,
        object_name=object_key,
    )
    # Fix: create_presigned_post returns None on a ClientError; the original
    # serialized that to the JSON body "null" with a 200 status.
    if url is None:
        return HttpResponseServerError(
            json.dumps({"error": "Could not generate upload URL."}),
            content_type="application/json",
        )
    return HttpResponse(json.dumps(url), content_type="application/json")
# TODO (security) - have this conditionally perform csrf_protect if the request is cookie-based
# @csrf_protect
@csrf_exempt
@require_POST
def get_upload_params(request):
    """Authorizes user and validates given file properties, then delegates to
    django-s3direct's own ``get_upload_params`` view.

    Consistency fix: decorators reordered (@csrf_exempt outermost) to match
    the sibling views in this module.
    """
    dest = get_s3direct_destinations().get(request.POST.get("dest", None), None)
    # Fix: an unknown destination or missing/non-numeric POST fields
    # previously raised and surfaced as 500s; return a 400 instead.
    if dest is None:
        return _as_resp_bad_req("Invalid destination.")
    file_name = request.POST.get("name")
    if not file_name:
        return _as_resp_bad_req("Missing file name.")
    try:
        file_size = int(request.POST["size"])
    except (KeyError, ValueError):
        return _as_resp_bad_req("Invalid file size.")
    # Validate file size
    file_size_error = validate_file_size(dest, file_size)
    if file_size_error:
        return file_size_error
    # Handle UUID names
    request = handle_uuid_names(request, file_name)
    if isinstance(request, HttpResponse):
        return request
    # HACK - this disables CSRF checking for the rest of this request and was done so the app can make requests to this endpoint without CSRF.
    # TODO - can we do this conditionally on whether the app used token-based authentication or not?
    request._dont_enforce_csrf_checks = True
    return s3direct_views.get_upload_params(request)
def _as_uuid_filename(file_name: str):
ext = os.path.splitext(file_name)[1]
return uuid.uuid4().hex + ext
def _as_resp_bad_req(msg: str):
    """Wrap *msg* in a JSON 400 response of the form ``{"error": msg}``."""
    body = json.dumps({"error": msg})
    return HttpResponseBadRequest(body, content_type="application/json")
def _as_resp_bad_req_logged_out():
    """Standard JSON 400 response for unauthenticated requests."""
    message = (
        "You must be logged in to perform this action."
        " Please refresh and try again."
    )
    return _as_resp_bad_req(message)
# TODO (security) - have this conditionally perform csrf_protect if the request is cookie-based
# @csrf_protect
@csrf_exempt
@require_POST
def generate_aws_v4_signature(request):
    """Authenticates the caller, then delegates signing to django-s3direct.

    Consistency fix: the original duplicated the session/token auth logic
    inline (with the same error message); reuse the shared helper instead.
    """
    user = validate_and_authenticate_user(request)
    if isinstance(user, HttpResponse):
        return user
    # HACK - this disables CSRF checking for the rest of this request and was done so the app can make requests to this endpoint without CSRF.
    # TODO - can we do this conditionally on whether the app used token-based authentication or not?
    request._dont_enforce_csrf_checks = True
    return s3direct_views.generate_aws_v4_signature(request)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment