Skip to content

Instantly share code, notes, and snippets.

@Legends
Last active November 1, 2019 20:11
Show Gist options
  • Select an option

  • Save Legends/a00afe60724322175266237eee82f8c9 to your computer and use it in GitHub Desktop.

Select an option

Save Legends/a00afe60724322175266237eee82f8c9 to your computer and use it in GitHub Desktop.
// **************************************** HTML *****************************************************
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<title>Chunked file upload demo</title>
</head>
<body>
<!-- method="post" is required: a multipart form submitted without it defaults
     to GET, which silently discards the file payload on a non-JS fallback. -->
<form enctype="multipart/form-data" method="post" action="http://localhost:14864/upload/upload">
<input type="file" id="files" name="files[]" multiple />
<div id="drop_zone">
Drop files here
</div>
<output id="list"></output>
</form>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"
integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo="
crossorigin="anonymous"></script>
</body>
</html>
<script type="module">
import { ChunkedUploader } from "./scripts/chunkedUpload.js"; // this happens async
$(document).ready(() => {
$('#drop_zone').on({
'dragover dragenter': (e) => {
$(e.currentTarget).css({ opacity: 0.5 });
e.preventDefault();
e.stopPropagation();
},
'drop': (e) => {
console.log("Drop");
//Upload file here
onFileDrop(e);
e.preventDefault();
e.stopPropagation();
},
'dragexit dragend dragleave': (e) => {
$(e.currentTarget).css({ opacity: 1 });
}
});
});
function onFileDrop(evt) {
$(evt.currentTarget).css({ opacity: 1 });
try {
var files = evt.originalEvent.dataTransfer.files;
for (var i = 0; i < files.length; i++) {
let cu = new ChunkedUploader('/upload/UploadChunks', 50000);
cu.addEventListener('progress', progressHandler);
cu.addEventListener('uploadFinished', uploadFinishedHandler);
cu.uploadFile(files[i]);
}
} catch (e) {
debugger;
throw e;
}
}
const progressHandler = (p) => { console.log("progress handler: "); console.log(p); };
const uploadFinishedHandler = (p) => { console.log("uploadFinished handler: "); console.log(p);};
</script>
// **************************************** JavaScript (chunkedUpload.js) **************************************
// while this script and server code are far from production use, it is a good starting point
// ToDo's:
// - Upload chunks in parallel? does it make sense?
// - retry logic on error
// - what happens when connection is lost half the way -> resume logic?
class BaseClass {
constructor() {
this.events = {};
this.addEventListener = function (name, handler) {
if (this.events.hasOwnProperty(name))
this.events[name].push(handler);
else
this.events[name] = [handler];
};
this.removeEventListener = function (name, handler) {
/* This is a bit tricky, because how would you identify functions?
This simple solution should work if you pass THE SAME handler. */
if (!this.events.hasOwnProperty(name))
return;
var index = this.events[name].indexOf(handler);
if (index !== -1)
this.events[name].splice(index, 1);
};
this.fireEvent = function (name, args) {
if (!this.events.hasOwnProperty(name))
return;
if (!args)// || !args.length)
args = [];
var evs = this.events[name], l = evs.length;
for (var i = 0; i < l; i++) {
evs[i].call(null, args);
}
};
}
}
// Uploads a file in chunks
// Uploads a single file to `uploadUrl` by slicing it into Blob chunks and
// POSTing them sequentially. Emits 'progress', 'uploadFinished', 'chunkError'
// and 'error' events via BaseClass.
class ChunkedUploader extends BaseClass {
// uploadUrl: endpoint that receives each chunk.
// chunkSize: slice size in KB (it is multiplied by 1024 in uploadFile);
// defaults to 500000 KB when falsy. NOTE(review): the name suggests bytes —
// confirm the intended unit with callers (the demo passes 50000 -> ~51 MB chunks).
constructor(uploadUrl, chunkSize) {
super();
this.uploadUrl = uploadUrl;
this.chunkSize = chunkSize || 500000;
}
// Slice `file` into equal-sized chunks and start the sequential upload chain.
uploadFile(file) {
// chunkSize is treated as a KB count here and converted to bytes below.
var maxFileSizeKB = this.chunkSize;
var fileChunks = [];
var bufferChunkSizeInBytes = maxFileSizeKB * (1024);
var currentStreamPosition = 0;
var endPosition = bufferChunkSizeInBytes;
var size = file.size;
// Blob.slice clamps the end offset to the file size, so the final chunk
// may be shorter than bufferChunkSizeInBytes.
while (currentStreamPosition < size) {
fileChunks.push(file.slice(currentStreamPosition, endPosition));
currentStreamPosition = endPosition;
endPosition = currentStreamPosition + bufferChunkSizeInBytes;
}
// Part numbers are 1-based; guid() correlates all chunks of one upload
// so the server can group them into one directory.
this.uploadFileChunk(fileChunks, file.name, 1, fileChunks.length, this.guid(), size);
}
// Upload chunk `currentSlicePartNumber` (1-based); on success recurses into
// the next chunk until all `totalPart` chunks have been sent.
uploadFileChunk(fileChunks, fileName, currentSlicePartNumber, totalPart, requestId, totalFileSize) {
var self = this;
// Field names must match the server action's parameter names.
var formData = new FormData();
formData.append('file', fileChunks[currentSlicePartNumber - 1], fileName);
formData.append("requestId", requestId);
formData.append("currentSlicePartNumber", currentSlicePartNumber);
formData.append("totalPart", totalPart);
formData.append("totalFileSize", totalFileSize);
// contentType/processData false: let the browser set the multipart
// boundary and send the FormData as-is.
var xhr = $.ajax({
type: "POST",
url: this.uploadUrl,
contentType: false,
processData: false,
data: formData,
success: function (data) {
// NOTE(review): this guard is always true given how the chain is
// started (part numbers never exceed totalPart) — confirm intent.
if (totalPart >= currentSlicePartNumber) {
console.log("Request-Id: " + requestId + ", uploading file part no: " + currentSlicePartNumber, " out of " + totalPart);
if (data.status === true) {
if (totalPart === currentSlicePartNumber) {
// Last chunk acknowledged: the whole file has been uploaded.
console.log("Request-Id: " + requestId + ", uploaded completed successfully!");
self.fireEvent('uploadFinished', { requestId, currentSlicePartNumber });
} else {
self.fireEvent('progress', { requestId, fileName, currentSlicePartNumber, totalPart, totalFileSize });
// Continue with the next chunk (sequential, not parallel).
self.uploadFileChunk(fileChunks, fileName, ++currentSlicePartNumber, totalPart, requestId, totalFileSize);
}
} else {
// Server reported failure for this chunk; the chain stops here.
self.fireEvent('chunkError', { requestId, fileName, currentSlicePartNumber, totalPart, totalFileSize });
// TODO: retry logic for the remaining chunks (see header ToDo's).
console.log("Request-Id: " + requestId + ", failed to upload file part no: " + currentSlicePartNumber);
}
}
},
error: function (info) {
// Transport-level failure (network, HTTP error); chain stops here.
self.fireEvent('error', info);
// TODO: retry logic for the remaining chunks (see header ToDo's).
console.log("Request-Id: " + requestId + ", error to upload file part no: " + currentSlicePartNumber);
}
});
// xhr.abort(); --> kills the request --> cancelation
}
// RFC 4122 version-4 style GUID built from crypto.getRandomValues;
// used as the per-upload correlation id.
guid() {
return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c =>
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
);
}
}
export { ChunkedUploader };
// **************************************** ASP.NET C# *****************************************************
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
namespace CustomModelBindingSample.Controllers
{
public class UploadController : Controller
{
    /// <summary>
    /// Receives one chunk of a file upload, stores it under a per-request
    /// directory, and — when the last chunk arrives — merges all chunks
    /// (ordered by part number) into the original file.
    /// </summary>
    /// <param name="file">The chunk payload posted by the client.</param>
    /// <param name="requestId">Client-generated id correlating all chunks of one upload.</param>
    /// <param name="currentSlicePartNumber">1-based index of this chunk.</param>
    /// <param name="totalPart">Total number of chunks in the upload.</param>
    /// <param name="totalFileSize">Expected size of the fully assembled file, in bytes.</param>
    /// <returns>JSON <c>{ status = true }</c> on success, <c>{ status = false }</c> when the merged size check fails.</returns>
    [DisableRequestSizeLimit]
    [HttpPost]
    public async Task<JsonResult> UploadChunks(IFormFile file, string requestId, int currentSlicePartNumber, int totalPart, long totalFileSize)
    {
        const string delimiter = "-####-";
        var _targetFilePath = @"D:\temp";

        // requestId and file.FileName are client-controlled: strip any
        // directory components so they cannot traverse out of _targetFilePath.
        var safeRequestId = Path.GetFileName(requestId ?? string.Empty);
        var safeFileName = Path.GetFileName(file.FileName);

        var uploadPath = Path.Combine(_targetFilePath, safeRequestId);
        Directory.CreateDirectory(uploadPath); // no-op when it already exists

        // Chunk file name: <hash>-####-<part>-####-.upload — FileComparer
        // sorts on the part number between the delimiters during the merge.
        var trustedFileNameForFileStorage = GetHashString(safeFileName) + delimiter + currentSlicePartNumber + delimiter + ".upload";
        using (var targetStream = System.IO.File.Create(Path.Combine(uploadPath, trustedFileNameForFileStorage)))
        {
            await file.CopyToAsync(targetStream);
        }

        var dirFiles = new DirectoryInfo(uploadPath).GetFiles();
        if (totalPart == dirFiles.Length) // all parts are uploaded
        {
            // If the chunk sizes do not add up to the announced total, data
            // was lost or corrupted — report failure instead of merging a
            // broken file. (Replaces the previous inverted check that only
            // delayed 3 s when the sizes DID match and then merged anyway.)
            if (dirFiles.Sum(fi => fi.Length) != totalFileSize)
            {
                return Json(new { status = false });
            }

            Array.Sort(dirFiles, new FileComparer());
            var outputFinalFilePath = Path.Combine(uploadPath, safeFileName);
            using (var outputStream = System.IO.File.Create(outputFinalFilePath))
            {
                foreach (var chunkFile in dirFiles)
                {
                    using (var inputStream = System.IO.File.OpenRead(chunkFile.FullName))
                    {
                        // Buffer size can be passed as the second argument.
                        await inputStream.CopyToAsync(outputStream);
                    }
                }
            }

            // Cleanup: delete the chunk files, keeping only the merged file.
            dirFiles.ForEach(f =>
            {
                if (!f.Name.EndsWith(safeFileName, StringComparison.Ordinal))
                {
                    f.Delete();
                }
            });
        }
        return Json(new { status = true });
    }

    /// <summary>
    /// Orders chunk files by the part number embedded between the delimiters
    /// in their names (e.g. "HASH-####-3-####-.upload" -> 3).
    /// </summary>
    class FileComparer : Comparer<FileInfo>
    {
        const string delimiter = "-####-";
        public override int Compare([AllowNull] FileInfo xFile, [AllowNull] FileInfo yFile)
        {
            var x = int.Parse(xFile.Name.Split(new string[] { delimiter }, StringSplitOptions.None)[1]);
            var y = int.Parse(yFile.Name.Split(new string[] { delimiter }, StringSplitOptions.None)[1]);
            // int.CompareTo yields the same -1/0/1 ordering as the previous
            // hand-rolled three-way comparison.
            return x.CompareTo(y);
        }
    }

    /// <summary>
    /// Returns the first <paramref name="maxCharCount"/> hex characters
    /// (10 by default) of the SHA-256 hash of <paramref name="inputString"/>.
    /// </summary>
    /// <param name="inputString">Text to hash (e.g. a file name).</param>
    /// <param name="maxCharCount">Number of leading hex characters to keep.</param>
    /// <returns>Uppercase hex prefix of the hash.</returns>
    public static string GetHashString(string inputString, int maxCharCount = 10)
    {
        StringBuilder sb = new StringBuilder();
        foreach (byte b in GetHash(inputString))
            sb.Append(b.ToString("X2"));
        return sb.ToString().Substring(0, maxCharCount);
    }

    /// <summary>Computes the SHA-256 hash of the UTF-8 bytes of <paramref name="inputString"/>.</summary>
    public static byte[] GetHash(string inputString)
    {
        // HashAlgorithm is IDisposable — dispose it rather than leaking the handle.
        using (HashAlgorithm algorithm = SHA256.Create())
        {
            return algorithm.ComputeHash(Encoding.UTF8.GetBytes(inputString));
        }
    }
}
/// <summary>Helper extensions for <see cref="IEnumerable{T}"/>.</summary>
public static class IEnumerableExtensions
{
    /// <summary>
    /// Invokes <paramref name="action"/> on every element of <paramref name="source"/>.
    /// </summary>
    /// <param name="source">Sequence to iterate; enumerated exactly once.</param>
    /// <param name="action">Callback applied to each element.</param>
    /// <exception cref="ArgumentNullException">
    /// Thrown up front when either argument is null, instead of surfacing as a
    /// NullReferenceException from inside the loop.
    /// </exception>
    public static void ForEach<T>(this IEnumerable<T> source, Action<T> action)
    {
        if (source is null) throw new ArgumentNullException(nameof(source));
        if (action is null) throw new ArgumentNullException(nameof(action));

        foreach (var item in source)
        {
            action(item);
        }
    }
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment