Skip to content

Instantly share code, notes, and snippets.

@subena22jf
Last active January 27, 2024 06:48
Show Gist options
  • Save subena22jf/3358b8609966203502a5 to your computer and use it in GitHub Desktop.
c# download multi part
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
namespace TestApp
{
internal class Program
{
    // Console entry point: starts the multi-part download and blocks
    // until the whole transfer has completed.
    private static void Main(string[] args)
    {
        var downloader = new Downloader();
        Task.Run(() => downloader.Download(
            "url_file_download",
            "url_save_to"
        )).Wait();
    }
}
public class Downloader
{
    // One shared client for all requests: creating a new HttpClient per part
    // exhausts sockets and defeats connection reuse.
    private static readonly HttpClient Client = new HttpClient();

    /// <summary>
    /// Downloads <paramref name="url"/> to the file <paramref name="saveAs"/>.
    /// A HEAD request probes the server first; when it advertises
    /// "Accept-Ranges: bytes" the body is fetched with
    /// <paramref name="numberOfParts"/> parallel ranged GET requests,
    /// otherwise a single plain GET is used.
    /// </summary>
    /// <param name="url">Absolute URL of the resource to download.</param>
    /// <param name="saveAs">Destination file path (created/truncated).</param>
    /// <param name="numberOfParts">Maximum number of concurrent range requests.</param>
    public async Task Download(string url, string saveAs, int numberOfParts = 5)
    {
        var head = await Client.SendAsync(new HttpRequestMessage(HttpMethod.Head, url));
        var parallelDownloadSupported = head.Headers.AcceptRanges.Contains("bytes");
        var contentLength = head.Content.Headers.ContentLength ?? 0;

        // Pre-create (and truncate) the target so each part can open it
        // with FileMode.Open and seek to its own offset.
        File.Create(saveAs).Dispose();

        if (parallelDownloadSupported && contentLength > 0)
        {
            var partSize = (contentLength + numberOfParts - 1) / numberOfParts; // ceiling division
            var tasks = new List<Task>();
            for (var i = 0; i < numberOfParts; i++)
            {
                // HTTP byte ranges are inclusive: part i covers [start, end].
                // (The original requested byte index contentLength, one past the end.)
                var start = i * partSize;
                var end = Math.Min(start + partSize, contentLength) - 1;
                if (start > end)
                {
                    break; // file smaller than numberOfParts: fewer parts suffice
                }
                // DownloadPart returns a Task (not async void), so WhenAll below
                // genuinely waits for every part to finish writing.
                tasks.Add(DownloadPart(url, saveAs, start, end));
            }
            await Task.WhenAll(tasks);
        }
        else
        {
            // Server does not support ranges (or length is unknown):
            // fall back to one plain download instead of silently doing nothing.
            await DownloadPart(url, saveAs, null, null);
        }
    }

    // Fetches bytes [start, end] (or the whole body when start is null) and
    // writes them at the matching offset of saveAs.
    private async Task DownloadPart(string url, string saveAs, long? start, long? end)
    {
        var request = new HttpRequestMessage(HttpMethod.Get, url);
        if (start.HasValue)
        {
            request.Headers.Range = new RangeHeaderValue(start, end);
        }

        // ResponseHeadersRead streams the body straight to disk instead of
        // buffering the whole part in memory first.
        using (var response = await Client.SendAsync(request, HttpCompletionOption.ResponseHeadersRead))
        using (var fileStream = new FileStream(saveAs, FileMode.Open, FileAccess.Write, FileShare.Write))
        {
            response.EnsureSuccessStatusCode();
            if (start.HasValue)
            {
                fileStream.Position = start.Value;
            }
            await response.Content.CopyToAsync(fileStream);
        }
    }
}
}
@ramtinak
Copy link

ramtinak commented May 19, 2021

For anyone who is looking to fix the ZIP file problem, just replace message.Headers.Add("Range", string.Format("bytes={0}-{1}", start, end)) with this one>

message.Headers.Range = new RangeHeaderValue(start, end);

Also modified a little>

using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;
using System.Net;

namespace DownloadMultipart
{
    class Program
    {
        // Entry point: runs the download to completion, then waits for a
        // key press so the console window stays open.
        static void Main(string[] args)
        {
            var downloader = new Downloader();
            Task.Run(() => downloader.Download(
                "url_to_download",
                "file_path_to_save"
                )).Wait();

            Console.Read();
        }
    }
    public class Downloader
    {
        // Shared client: a new HttpClient per request/part exhausts sockets
        // and defeats connection reuse.
        private static readonly HttpClient Client = new HttpClient();

        /// <summary>
        /// Downloads <paramref name="url"/> to <paramref name="saveAs"/>.
        /// A small probe GET (bytes 0-255) discovers whether the server
        /// supports ranges and the total length; on 206 the body is fetched
        /// with 5 parallel ranged requests, on 200 with a single request.
        /// </summary>
        /// <param name="url">Absolute URL of the resource to download.</param>
        /// <param name="saveAs">Destination file path (created/truncated).</param>
        public async Task Download(string url, string saveAs)
        {
            using (var request = new HttpRequestMessage(HttpMethod.Get, url))
            {
                request.Headers.Range = new RangeHeaderValue(0, 255); // bugfix for 2147483647 exception [ max buffer size => int.MaxValue ]
                // why not HttpCompletionOption.ResponseHeadersRead ?
                // because it will respond 200 only, you won't get partial/206 response
                using (var response = await Client.SendAsync(request))
                {
                    // Pre-create the file so every part can open it with FileMode.Open.
                    File.Create(saveAs).Dispose();
                    if (response.StatusCode == HttpStatusCode.PartialContent)
                    {
                        // Total size comes from Content-Range, not Content-Length
                        // (Content-Length of the probe is just 256).
                        var contentLength = response.Content.Headers.ContentRange?.Length ?? 0;
                        const int numberOfParts = 5;
                        var partSize = (contentLength + numberOfParts - 1) / numberOfParts; // ceiling division
                        var tasks = new List<Task>();
                        for (var i = 0; i < numberOfParts; i++)
                        {
                            // Byte ranges are inclusive: part i covers [start, end].
                            // (The original asked for byte index contentLength, one past the end.)
                            var start = i * partSize;
                            var end = Math.Min(start + partSize, contentLength) - 1;
                            if (start > end)
                            {
                                break; // file smaller than numberOfParts
                            }

                            // DownloadPart now returns Task (not async void), so
                            // WhenAll really waits for every part to finish writing.
                            tasks.Add(DownloadPart(url, saveAs, start, end));
                        }

                        await Task.WhenAll(tasks);
                    }
                    else if (response.StatusCode == HttpStatusCode.OK)
                    {
                        // Server ignored the Range header: plain single download.
                        await DownloadPart(url, saveAs);
                    }
                }
            }
        }

        // Fetches bytes [start, end] (whole body when start is null) and writes
        // them at the matching offset of saveAs.
        private async Task DownloadPart(string url, string saveAs, long? start = null, long? end = null)
        {
            using (var fileStream = new FileStream(saveAs, FileMode.Open, FileAccess.Write, FileShare.Write))
            {
                var message = new HttpRequestMessage(HttpMethod.Get, url);
                if (start.HasValue)
                {
                    message.Headers.Range = new RangeHeaderValue(start, end);
                    fileStream.Position = start.Value;
                }

                Console.WriteLine($"{start}-{end} Started");
                // await the send directly; the original's SendAsync(...).Result
                // blocked a thread pool thread inside an async method.
                using (var response = await Client.SendAsync(message))
                {
                    response.EnsureSuccessStatusCode();
                    await response.Content.CopyToAsync(fileStream);
                }
                Console.WriteLine($"{start}-{end} Downloaded");
            }
        }
    }
}

@KyleC69
Copy link

KyleC69 commented May 28, 2021

i've been pulling out my now gray hair looking for a solid resume partial download method for 2+ weeks. Thank you for this fast easy solution. +100 points for all :)

@ssbroad
Copy link

ssbroad commented Nov 17, 2021

It didn't work for me, but the code from ramtinak does work, once you remove the "else if (response.StatusCode == HttpStatusCode.OK)" branch.

@BlueMystical
Copy link

@ramtinak 's code works; however, no matter the number of chunks, it always takes the same amount of time to download the whole file.
In fact, I have other code that downloads in a single thread, and it takes exactly the same amount of time.
Therefore it defeats the purpose of parallel downloads: saving time.

2 Chunks should download in half the time than 1.
4 Chunks should download in half the time than 2.
...

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment