@kgantsov
Created April 14, 2017 11:18
Example of serving large files with an aiohttp server without reading the entire file into memory
import os

from aiohttp import web
from aiohttp import streamer


@streamer
async def file_sender(writer, file_path=None):
    """
    Read a large file chunk by chunk and send it over HTTP
    without loading it into memory all at once.
    """
    with open(file_path, 'rb') as f:
        chunk = f.read(2 ** 16)  # 64 KiB per chunk
        while chunk:
            await writer.write(chunk)
            chunk = f.read(2 ** 16)


async def download_file(request):
    file_name = request.match_info['file_name']  # Could be a HUGE file

    headers = {
        "Content-disposition": "attachment; filename={file_name}".format(file_name=file_name)
    }

    file_path = os.path.join('data', file_name)

    if not os.path.exists(file_path):
        return web.Response(
            text='File <{file_name}> does not exist'.format(file_name=file_name),
            status=404
        )

    # Passing the streamer-wrapped coroutine as the body makes aiohttp
    # stream the file chunk by chunk instead of buffering it.
    return web.Response(
        body=file_sender(file_path=file_path),
        headers=headers
    )


if __name__ == '__main__':
    app = web.Application()
    app.router.add_get('/file/{file_name}', download_file)
    # web.run_app manages the event loop itself, so no manual
    # asyncio loop handling is needed here.
    web.run_app(app, host='0.0.0.0', port=8000)
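
The @streamer helper used above has since been deprecated in newer aiohttp releases. A rough equivalent built on the stable web.StreamResponse API might look like the sketch below; the handler name download_file_streamed is illustrative and not part of the original gist.

import os

from aiohttp import web


async def download_file_streamed(request):
    # Hypothetical variant of download_file that streams the file via
    # web.StreamResponse instead of the deprecated aiohttp.streamer decorator.
    file_name = request.match_info['file_name']
    file_path = os.path.join('data', file_name)

    if not os.path.exists(file_path):
        return web.Response(
            text='File <{}> does not exist'.format(file_name),
            status=404
        )

    response = web.StreamResponse(
        headers={
            'Content-Disposition': 'attachment; filename={}'.format(file_name)
        }
    )
    await response.prepare(request)  # send status line and headers first

    with open(file_path, 'rb') as f:
        chunk = f.read(2 ** 16)
        while chunk:
            await response.write(chunk)  # push each 64 KiB chunk to the client
            chunk = f.read(2 ** 16)

    await response.write_eof()
    return response

Either handler can be exercised with, for example, curl http://localhost:8000/file/<file_name>. For plain downloads from disk, recent aiohttp versions also offer web.FileResponse(file_path) as a simpler built-in option.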