@NuarkNoir · Created May 9, 2022 16:06

#!/usr/bin/env python3
from http.server import BaseHTTPRequestHandler, HTTPServer
import asyncio
import json
import logging

import youtube_dl

# Event loop shared between the asyncio coroutines and the HTTP handler thread.
ioloop = asyncio.get_event_loop()
downloading_queue = asyncio.Queue(maxsize=1000)
ydl = youtube_dl.YoutubeDL({"outtmpl": "%(id)s.%(ext)s"})
is_stopping = False


async def downloading_handler(queue: asyncio.Queue):
    """Consume links from the queue and download them one at a time."""
    print("Started downloading coroutine")
    while not is_stopping:
        print("Waiting for link...")
        link = await queue.get()
        if link is None:  # None is the shutdown sentinel
            break
        print("Got", link)
        with ydl:
            ydl.download([link])
    print("Finished downloading coroutine")


class S(BaseHTTPRequestHandler):
    def _set_response(self):
        self.send_response(200)
        self.send_header("Content-type", "text/plain")
        self.end_headers()

    def do_GET(self):
        logging.info("GET request,\nPath: %s\nHeaders:\n%s\n",
                     str(self.path), str(self.headers))
        self._set_response()
        self.wfile.write("GET request for {}".format(self.path).encode("utf-8"))

    def do_POST(self):
        content_length = int(self.headers["Content-Length"])
        post_data = self.rfile.read(content_length)
        logging.info("POST request,\nPath: %s\nHeaders:\n%s\n\nBody:\n%s\n",
                     str(self.path), str(self.headers), post_data.decode("utf-8"))
        self._set_response()
        self.wfile.write("POST request for {}".format(self.path).encode("utf-8"))
        data = json.loads(post_data.decode("utf-8"))
        if "vlink" in data:
            # The handler runs in the server's worker thread, so hand the link
            # over to the event loop thread instead of calling put_nowait()
            # on the asyncio queue directly.
            ioloop.call_soon_threadsafe(downloading_queue.put_nowait, data["vlink"])
            print("Put", data["vlink"], "to downloading queue")
        else:
            print("No suitable data found in", data)


# Created at module level so the shutdown code in __main__ can reach it.
httpd = HTTPServer(("", 8000), S)


async def run():
    logging.info("Starting httpd...\n")
    # serve_forever() blocks, so run it in a worker thread; otherwise the event
    # loop would never get a chance to run the downloading coroutine.
    await ioloop.run_in_executor(None, httpd.serve_forever)
    httpd.server_close()
    logging.info("Stopping httpd...\n")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    main_task = asyncio.gather(downloading_handler(downloading_queue), run())
    try:
        ioloop.run_until_complete(main_task)
    except KeyboardInterrupt:
        is_stopping = True
        httpd.shutdown()                    # makes serve_forever() return
        downloading_queue.put_nowait(None)  # unblocks the downloading coroutine
        # Let both coroutines finish their cleanup before exiting.
        ioloop.run_until_complete(main_task)
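
# A minimal sketch of how a client could enqueue a link, assuming the server is
# running locally on port 8000; the video URL is only a placeholder, and the
# handler only looks for a top-level "vlink" key in the JSON body:
#
#   import json
#   import urllib.request
#
#   req = urllib.request.Request(
#       "http://localhost:8000/",
#       data=json.dumps({"vlink": "https://www.youtube.com/watch?v=..."}).encode("utf-8"),
#       headers={"Content-Type": "application/json"},
#   )
#   print(urllib.request.urlopen(req).read().decode("utf-8"))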