@0x4445565A
Created December 28, 2016 23:55
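
// This program downloads each URL passed on the command line in partsPerFile concurrent
// HTTP range requests, then reassembles the pieces into a single file named after the
// MD5 hash of the URL's final location.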
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"strconv"
	"sync"
	"time"
)
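
// downloadPart fetches the inclusive byte range startByte-endByte of url with an HTTP
// Range request and writes the body to its own temporary file, outfileName.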
func downloadPart(url string, startByte int, endByte int, outfileName string, downloadWaitGroup *sync.WaitGroup) {
	var client http.Client
	defer downloadWaitGroup.Done()

	log.Println("Creating", outfileName)
	outfile, err := os.Create(outfileName)
	if err != nil {
		log.Println("ERROR", err)
		return
	}
	defer outfile.Close()

	log.Println("Downloading bytes", startByte, "-", endByte, "from", url)
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		log.Println("ERROR", err)
		return
	}
	// Ask the server for only this part's slice of the file.
	request.Header.Add("Range", "bytes="+strconv.Itoa(startByte)+"-"+strconv.Itoa(endByte))
	response, err := client.Do(request)
	if err != nil {
		log.Println("ERROR", err)
		return
	}
	defer response.Body.Close()

	// A server that honors the Range header replies 206 Partial Content; anything else
	// means the ranged request was rejected, so skip this part.
	if response.StatusCode != http.StatusPartialContent {
		log.Println("ERROR", "Non-206 status, assuming rejected request, skipping file")
		return
	}

	log.Println("Writing response", startByte, "-", endByte, "to file")
	_, err = io.Copy(outfile, response.Body)
	if err != nil {
		log.Println("ERROR", err)
	}
}
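
// grabUrlSizeAndPath issues a HEAD request and returns the final URL after any redirects
// together with the size reported in the Content-Length header.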
func grabUrlSizeAndPath(url string) (string, int, error) {
	log.Println("Retrieving", url, "header")
	response, err := http.Head(url)
	if err != nil {
		log.Println("ERROR", err)
		return url, 0, err
	}
	defer response.Body.Close()
	// response.Request.URL is the URL the request actually ended up at, so the later
	// range requests go straight to the final location.
	url = response.Request.URL.String()
	size, err := strconv.Atoi(response.Header.Get("Content-Length"))
	if err != nil {
		log.Println("ERROR", err)
		return url, 0, err
	}
	return url, size, nil
}
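
// concatAndRemoveFiles appends the numbered .part files, in order, to a single file named
// after the URL's MD5 hash, deleting each part once it has been copied.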
func concatAndRemoveFiles(md5 string) {
	endFileName := "./" + md5
	// Remove any leftover file from a previous run, since the parts are appended below.
	os.Remove(endFileName)
	log.Println("Creating", endFileName)
	endFile, err := os.OpenFile(endFileName, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)
	if err != nil {
		log.Println("ERROR", err)
		return
	}
	defer endFile.Close()

	for i := 0; i < partsPerFile; i++ {
		tempFileName := "./" + md5 + strconv.Itoa(i) + ".part"
		log.Println("Copying", tempFileName, "into", endFileName)
		data, err := ioutil.ReadFile(tempFileName)
		if err != nil {
			log.Println("ERROR", err)
		}
		_, err = endFile.Write(data)
		if err != nil {
			log.Println("ERROR", err)
		}
		log.Println("Removing temp", tempFileName)
		os.Remove(tempFileName)
	}
}
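
// stringToMd5 returns the hex-encoded MD5 hash of a string. It is only used to derive a
// filesystem-friendly name for each URL, not for anything security sensitive.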
func stringToMd5(url string) string {
	hash := md5.Sum([]byte(url))
	return hex.EncodeToString(hash[:])
}
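
// downloadURL resolves the URL's final location and size, splits the file into
// partsPerFile byte ranges, downloads them concurrently, and reassembles the parts
// once every range has finished.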
func downloadURL(url string, primaryWaitGroup *sync.WaitGroup) {
	var downloadWaitGroup sync.WaitGroup
	defer primaryWaitGroup.Done()

	url, size, err := grabUrlSizeAndPath(url)
	if err != nil {
		log.Println("Error returned, skipping", url)
		return
	}

	// Temp parts are named after the MD5 of the final URL so concurrent downloads don't collide.
	urlMd5 := stringToMd5(url)
	for i := 0; i < partsPerFile; i++ {
		tempFileName := "./" + urlMd5 + strconv.Itoa(i) + ".part"
		lengthToGrab := size / partsPerFile
		startByte := lengthToGrab * i
		endByte := startByte + lengthToGrab - 1
		// The last part also picks up the remainder left over by the integer division;
		// byte ranges are inclusive, so the final byte is size-1.
		if i == partsPerFile-1 {
			endByte = size - 1
		}
		downloadWaitGroup.Add(1)
		go downloadPart(url, startByte, endByte, tempFileName, &downloadWaitGroup)
	}
	downloadWaitGroup.Wait()

	log.Println(url, "download complete, building file")
	concatAndRemoveFiles(urlMd5)
}
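
// partsPerFile is the number of concurrent range requests made for each URL.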
var partsPerFile int

func main() {
	// Log everything to a timestamped file in the working directory instead of stdout.
	logFile, err := os.OpenFile(strconv.Itoa(int(time.Now().Unix()))+".log", os.O_RDWR|os.O_CREATE|os.O_APPEND, 0666)
	if err != nil {
		panic(err)
	}
	defer logFile.Close()
	log.SetOutput(logFile)

	partsPerFile = 40

	var urls []string
	if len(os.Args) > 1 {
		urls = os.Args[1:]
	} else {
		fmt.Println("No URL arguments detected; please include downloadable content as CLI arguments")
		fmt.Println("Example:", os.Args[0], "https://i.imgur.com/TyUrNWk.mp4")
		return
	}

	// Kick off one goroutine per URL and wait for all downloads to finish.
	var primaryWaitGroup sync.WaitGroup
	for _, url := range urls {
		log.Println("Initializing", url)
		primaryWaitGroup.Add(1)
		log.Println("Downloading", url, "into", partsPerFile, "parts")
		go downloadURL(url, &primaryWaitGroup)
	}
	primaryWaitGroup.Wait()
}
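
Usage (a minimal sketch, assuming the gist is saved as main.go): run go run main.go https://i.imgur.com/TyUrNWk.mp4. Each downloaded file is written to the working directory under the MD5 hash of its final URL, and progress is logged to a Unix-timestamp-named .log file.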