|
package main |
|
|
|
import (
	"crypto/sha1"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"path"
	"strings"
	"sync"
	"time"

	"github.com/dwarvesf/glod"
	"github.com/dwarvesf/glod/facebook"
	"github.com/dwarvesf/glod/soundcloud"
	"github.com/dwarvesf/glod/vimeo"
	"github.com/dwarvesf/glod/youtube"
)
|
|
|
// Hostname fragments used to route a submitted URL to the matching
// glod extractor.
const (
	initYoutube    = "youtube"
	initSoundCloud = "soundcloud"
	initFacebook   = "facebook"
	initVimeo      = "vimeo"
)
|
|
|
// goody tracks one media download: the live HTTP response it streams
// from, where it is stored on disk, and its progress state.
type goody struct {
	// resp is the in-flight HTTP response; its body is consumed and
	// closed by download().
	resp *http.Response
	// name is the file name extracted from the stream link.
	name string
	// key is the sha1 hex digest of name; it is both the on-disk file
	// name (under directory) and the lookup key in the db map.
	key string
	// originalLink is the URL the user submitted (key into the done map).
	originalLink string
	// timestamp records creation time; entries older than 24h are purged.
	timestamp time.Time
	// finished is set by download() once the copy attempt ends.
	finished bool
}
|
|
|
func extractName(link, fullLink string) (name string) { |
|
if strings.Contains(fullLink, initYoutube) { |
|
splitName := strings.Split(link, "~") |
|
name = strings.Trim(splitName[1], " ") |
|
} else if strings.Contains(fullLink, initSoundCloud) { |
|
splitName := strings.Split(link, "/") |
|
name = strings.Trim(splitName[4]+".mp3", " ") |
|
} else if strings.Contains(fullLink, initFacebook) { |
|
splitName := strings.Split(fullLink, "/") |
|
name = strings.Trim(splitName[len(splitName)-2]+".mp4", " ") |
|
} else if strings.Contains(fullLink, initVimeo) { |
|
splitName := strings.Split(link, "~") |
|
name = strings.Trim(splitName[1]+".mp4", " ") |
|
} |
|
return name |
|
} |
|
|
|
func download(g *goody) { |
|
defer g.resp.Body.Close() |
|
fmt.Printf("downloading %v\n", g) |
|
|
|
out, err := os.Create(path.Join(directory, g.key)) |
|
defer out.Close() |
|
if err != nil { |
|
fmt.Println(err.Error()) |
|
fmt.Println("Cannot create file") |
|
return |
|
} |
|
|
|
_, err = io.Copy(out, g.resp.Body) |
|
if err != nil { |
|
fmt.Println(err) |
|
} |
|
g.finished = true |
|
fmt.Printf("Finished %v\n", g) |
|
} |
|
|
|
// consume queue one download at a time (safety first) |
|
func consumeQueue() { |
|
defer wg.Done() |
|
for { |
|
select { |
|
case g := <-queue: |
|
download(g) |
|
} |
|
} |
|
} |
|
|
|
func start(link string) ([]*goody, error) { |
|
var glod glod.Source |
|
var goodies []*goody |
|
// when download started, do not retry: |
|
done[link] = goodies |
|
|
|
if strings.Contains(link, initYoutube) { |
|
glod = &youtube.Youtube{} |
|
} else if strings.Contains(link, initSoundCloud) { |
|
glod = &soundcloud.SoundCloud{} |
|
} else if strings.Contains(link, initFacebook) { |
|
glod = &facebook.Facebook{} |
|
} else if strings.Contains(link, initVimeo) { |
|
glod = &vimeo.Vimeo{} |
|
} else { |
|
return goodies, nil |
|
} |
|
|
|
listStream, err := glod.GetDirectLink(link) |
|
if err != nil { |
|
fmt.Println("GetDirectLink failed:") |
|
fmt.Println(err) |
|
return goodies, err |
|
} |
|
|
|
for _, l := range listStream { |
|
name := extractName(l, link) |
|
key := fmt.Sprintf("%x", sha1.Sum([]byte(name))) |
|
|
|
// youtube and Vimeo links cleanup |
|
cleanuplink := l |
|
if strings.Contains(link, initYoutube) || strings.Contains(link, initVimeo) { |
|
splitUrl := strings.Split(l, "~") |
|
cleanuplink = splitUrl[0] |
|
} |
|
wg.Add(1) |
|
|
|
resp, err := http.Get(cleanuplink) |
|
if err != nil { |
|
fmt.Println(err) |
|
return goodies, err |
|
} |
|
|
|
g := &goody{ |
|
name: name, |
|
key: key, |
|
resp: resp, |
|
originalLink: link, |
|
timestamp: time.Now(), |
|
} |
|
|
|
queue <- g |
|
db[g.key] = g |
|
|
|
goodies = append(goodies, g) |
|
} |
|
done[link] = goodies |
|
return goodies, nil |
|
} |
|
|
|
var queue chan *goody |
|
var db map[string]*goody // map key -> *goody |
|
var done map[string][]*goody // map originalLink -> [*goody] |
|
var wg sync.WaitGroup |
|
var directory string |
|
var proto string |
|
|
|
func getLink(g *goody, r *http.Request) string { |
|
return fmt.Sprintf("%s://%s/get?key=%s", proto, r.Host, g.key) |
|
} |
|
|
|
func formatHTML(g *goody, r *http.Request) string { |
|
url := getLink(g, r) |
|
return fmt.Sprintf("you are being redirected to <a href='%s'>%s</a>", url, url) |
|
} |
|
|
|
func formatJSON(g *goody, r *http.Request) string { |
|
url := getLink(g, r) |
|
return fmt.Sprintf("{\"name\": \"%s\", \"url\": \"%s\"}", g.name, url) |
|
} |
|
|
|
func startHandler(w http.ResponseWriter, r *http.Request) { |
|
url := strings.Trim(r.PostFormValue("url"), " ") |
|
human := r.PostFormValue("human") |
|
|
|
if url == "" { |
|
return |
|
} |
|
|
|
var err error |
|
goodies, present := done[url] |
|
|
|
if !present { |
|
goodies, err = start(url) |
|
if err != nil { |
|
fmt.Println(err) |
|
http.Error(w, "urls extraction failed\n", 500) |
|
} |
|
} |
|
|
|
for _, g := range goodies { |
|
if human == "yes" { |
|
w.Header().Set("Content-Type", "text/html; charset=utf-8") |
|
http.Redirect(w, r, getLink(g, r), 303) |
|
fmt.Fprintf(w, "%s\n", formatHTML(g, r)) |
|
} else { |
|
w.Header().Set("Content-Type", "application/json; charset=utf-8") |
|
fmt.Fprintf(w, "%s\n", formatJSON(g, r)) |
|
} |
|
} |
|
} |
|
|
|
func getStatusHandler(w http.ResponseWriter, r *http.Request) { |
|
key := r.FormValue("key") |
|
if key == "" { |
|
return |
|
} |
|
|
|
g, present := db[key] |
|
if !present { |
|
http.NotFound(w, r) |
|
return |
|
} |
|
|
|
f, err := os.Open(path.Join(directory, g.key)) |
|
if err != nil { |
|
http.NotFound(w, r) |
|
} |
|
|
|
fileinfo, err := f.Stat() |
|
if err != nil { |
|
http.Error(w, "f.Stat() failed\n", 500) |
|
return |
|
} |
|
size := fileinfo.Size() |
|
var percentage float64 |
|
if g.finished { |
|
percentage = 100 |
|
} else if g.resp.ContentLength == 0 { |
|
percentage = 0 |
|
} else { |
|
percentage = 100 * float64(size) / float64(g.resp.ContentLength) |
|
} |
|
fmt.Fprintf(w, "%.2f %%", percentage) |
|
} |
|
|
|
func getHandler(w http.ResponseWriter, r *http.Request) { |
|
key := r.FormValue("key") |
|
if key == "" { |
|
return |
|
} |
|
|
|
g, present := db[key] |
|
if !present { |
|
http.NotFound(w, r) |
|
return |
|
} |
|
|
|
f, err := os.Open(path.Join(directory, g.key)) |
|
if err != nil { |
|
http.NotFound(w, r) |
|
} |
|
|
|
fileinfo, err := f.Stat() |
|
if err != nil { |
|
http.Error(w, "f.Stat() failed\n", 500) |
|
return |
|
} |
|
size := fileinfo.Size() |
|
|
|
if g.finished && size == 0 { |
|
w.Header().Set("Content-Type", "text/html; charset=utf-8") |
|
output := fmt.Sprintf("Could not retreive link, sorry.\nHere some debug:\n\n\n%v\n%v\n", g, r) |
|
http.Error(w, output, 500) |
|
return |
|
} |
|
percentage := 100 * float64(size) / float64(g.resp.ContentLength) |
|
if percentage == 100 { |
|
http.ServeFile(w, r, path.Join(directory, g.key)) |
|
} else { |
|
http.ServeFile(w, r, "get.html") |
|
} |
|
} |
|
|
|
// homeHandler serves the landing page for all otherwise-unmatched routes.
func homeHandler(w http.ResponseWriter, r *http.Request) {
	const landingPage = "home.html"
	http.ServeFile(w, r, landingPage)
}
|
|
|
// we do not want persistence. Just remove all files at startup. |
|
func resetDir() { |
|
os.RemoveAll(directory) |
|
os.MkdirAll(directory, 0755) |
|
} |
|
|
|
// remove files older than 24h |
|
func purgeOld() { |
|
for key, value := range db { |
|
if time.Since(value.timestamp) > 24*time.Hour { |
|
delete(db, key) |
|
delete(done, value.originalLink) |
|
os.Remove(path.Join(directory, value.key)) |
|
fmt.Printf("%s %s removed\n", value.key, value.name) |
|
} |
|
} |
|
} |
|
|
|
func purgeDaemon() { |
|
defer wg.Done() |
|
for { |
|
purgeOld() |
|
//time.Sleep(10 * time.Minute) |
|
time.Sleep(10 * time.Minute) |
|
} |
|
} |
|
|
|
func main() { |
|
proto = os.Getenv("WEBPROTO") |
|
if proto == "" { |
|
proto = "http" |
|
} |
|
|
|
directory = os.Getenv("DATADIR") |
|
if directory == "" { |
|
directory = "files" |
|
} |
|
resetDir() |
|
wg.Add(1) |
|
go purgeDaemon() |
|
|
|
queue = make(chan *goody) |
|
db = make(map[string]*goody) |
|
done = make(map[string][]*goody) |
|
|
|
wg.Add(1) |
|
go consumeQueue() |
|
|
|
http.HandleFunc("/start", startHandler) |
|
http.HandleFunc("/get", getHandler) |
|
http.HandleFunc("/getstatus", getStatusHandler) |
|
http.HandleFunc("/", homeHandler) |
|
log.Fatal(http.ListenAndServe(":8080", nil)) |
|
} |