Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Solution to large dictionaries not being released from memory: run the allocation in a separate process
import random
import optparse
import gc
from flask import redirect, Flask
from memory_profiler import memory_usage
from multiprocessing import Process
# Create flask app
app = Flask(__name__)  # module-level WSGI application object used by the route decorators below
app.debug = True  # NOTE(review): debug mode enables the interactive debugger — not safe for production
@app.route('/')
def hello_world():
    """Serve '/': build a ~2440 MB dict in a child process, then return ''.

    The test: generate a huge dictionary and return a blank page. Because
    the allocation happens in a separate process that exits, its memory is
    returned to the OS instead of staying resident in the long-lived
    server process.
    """
    def separate_process():
        # Build the large dict entirely inside the child; it is freed
        # when the child process terminates.
        large_dict = {}
        # `range` works on both Python 2 and 3; the original `xrange`
        # is Python 2-only and raises NameError on Python 3.
        for _ in range(125000):
            large_row = {}
            for _ in range(125):
                large_row[random.randint(1, 10000000000)] = random.randint(1, 10000000000)
            large_dict[random.randint(1, 10000000000)] = large_row

    p = Process(target=separate_process)
    p.start()
    p.join()  # blocks until the child terminates (and its memory is released)
    return ''
@app.after_request
def after(response):
    """Log this process's memory usage after each request, then pass the
    response through unchanged.

    Fix: the original used the Python 2-only ``print`` statement, which is
    a SyntaxError on Python 3; ``print()`` emits the same space-separated
    output on Python 3.
    """
    print(memory_usage(-1, interval=.2, timeout=1), "after request")
    return response
def main():
    """Configure the Flask app from command-line options and run it."""
    app.config['SECRET_KEY'] = "BLAH"

    parser = optparse.OptionParser()
    # -h is already reserved for --help, so the host flag is long-form only.
    parser.add_option("--host", default="0.0.0.0")
    parser.add_option("-p", "--port", default="5123")
    opts, positional = parser.parse_args()

    # Reject any stray positional arguments.
    if positional:
        parser.error("Unexpected arguments: %s" % positional)

    app.run(host=opts.host, port=int(opts.port))
# Run the development server only when executed directly, not on import.
if __name__ == '__main__':
    # Start app
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.