Skip to content

Instantly share code, notes, and snippets.

Last active July 24, 2019 01:34
What would you like to do?
Workaround for large dictionaries not being released back to the OS: run the allocation in a separate process so the memory is freed when that process exits.
import random
import optparse
import gc
from flask import redirect, Flask
from memory_profiler import memory_usage
from multiprocessing import Process
# Create flask app
# Module-level Flask application; route handlers below are attached to it.
app = Flask(__name__)
# Debug mode enables the interactive debugger and auto-reloader --
# intended for this memory experiment only, not for production use.
app.debug = True
def _fill_memory(rows, cols):
    """Allocate a throwaway nested dict of random ints (runs in a child process).

    The data is never read; its only purpose is to consume memory inside a
    short-lived process so the OS reclaims it when the child exits.
    Module-level (rather than nested) so the target is picklable under the
    'spawn' multiprocessing start method.
    """
    large_dict = {}
    for _ in range(rows):  # range (not xrange): same iteration, Py2/Py3 compatible
        large_row = {}
        for _ in range(cols):
            large_row[random.randint(1, 10000000000)] = random.randint(1, 10000000000)
        large_dict[random.randint(1, 10000000000)] = large_row


def hello_world(rows=125000, cols=125):
    """The test: generate a ~2440 MB dictionary and return a blank page.

    The allocation happens in a separate process, so the memory is returned
    to the OS when the child exits instead of being retained by the server
    process's allocator.

    NOTE(review): this handler was presumably registered with
    @app.route('/'); the decorator appears to have been lost in
    transcription -- confirm against the original gist.

    rows/cols default to the original hard-coded sizes; they are exposed as
    keyword parameters (backward-compatible) so the allocation can be scaled
    down, e.g. for testing.

    Returns:
        '' -- an empty response body.
    """
    p = Process(target=_fill_memory, args=(rows, cols))
    # BUG FIX: the original constructed the Process and called join()
    # without ever calling start(); join() on an unstarted process raises.
    p.start()
    p.join()  # this blocks until the process terminates
    return ''
def after(response):
    # Prints the current process's memory usage (via memory_profiler)
    # after each request, then passes the response through unchanged.
    # NOTE(review): presumably registered with @app.after_request -- the
    # decorator appears to have been lost in transcription; confirm
    # against the original gist.
    print memory_usage(-1, interval=.2, timeout=1), "after request"
    return response
def main():
    """Parse command-line options and start the Flask development server.

    Options:
        --host       bind address (default "" = all interfaces)
        -p / --port  listen port (default "5123")

    Raises via parser.error() (exits) if unexpected positional arguments
    are supplied.
    """
    app.config['SECRET_KEY'] = "BLAH"
    parser = optparse.OptionParser()
    parser.add_option("--host", default="")  # can't use -h, it's already taken by help
    parser.add_option("-p", "--port", default="5123")
    options, args = parser.parse_args()
    if len(args) > 0:
        parser.error("Unexpected arguments: %s" % args)
    # BUG FIX: the original source line was a garbled fusion of
    # parser.error(...) and the tail of this call -- the app.run(host=...,
    # head was lost in transcription.  Reconstructed from the surviving
    # ", port=int(options.port))" fragment and the option definitions above.
    app.run(host=options.host, port=int(options.port))
if __name__ == '__main__':
    # Start app
    # BUG FIX: the guard body was truncated in transcription (only the
    # comment survived); without this call the script does nothing.
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment