"""
This is a standalone script demonstrating some memory leaks that are troubling me. It's a torn-down version of
the project I'm currently working on.
To run this, you'll need panda3d, pandas and tqdm. You should be able to install these with
```
pip install panda3d pandas tqdm
```
You'll **also need to enable memory tracking**. Do that by setting `track-memory-usage 1` in `panda3d.__file__`'s
`etc/Config.prc` file. Setting it anywhere else is too late! (It's a unique setting in that way - thanks rdb!)
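If you're not sure where that file lives, something like this should print its path (a sketch assuming a
standard pip install layout; the exact location may differ on your system):
```python
import os
import panda3d
print(os.path.join(os.path.dirname(panda3d.__file__), 'etc', 'Config.prc'))
```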
With those things done, you can run this from the commandline with
```
python standalone.py
```
and you should get a report on how much memory has been leaked and which pointers have been left lying around after
each iteration.
You can also run this in a Jupyter/IPython console, in which case you'll likely want to look into setting up
'autoreload' and then running
```python
from standalone import *
stats = cumulative()
stats.counts
```
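If you haven't used autoreload before, the standard IPython setup (run before the import) is
```python
%load_ext autoreload
%autoreload 2
```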
"""
from panda3d.core import Texture, GeomNode, PointLight, NodePath, RenderState, TransformState
### PROCEDURAL GEOMETRY ###

def add_scenery(root):
    root.set_shader_auto()

    # Add a textured surface
    surf = root.attach_new_node(GeomNode('wall'))
    tex = Texture()
    tex.setup_2d_texture(256, 1, Texture.T_unsigned_byte, Texture.F_luminance)
    surf.set_texture(tex)  # TODO: This is the line that generates half the memory leaks.

    # Add a point light
    point = root.attach_new_node(PointLight('point_light'))
    root.set_light(point)  # TODO: This is the line that generates the other half of the memory leaks.

    root.flatten_strong()
def descendents(p):
    # Gather a node and all of its descendents in the scene graph
    return [p] + [d for c in p.children for d in descendents(c)]
def create_destroy():
    root = NodePath('root')
    add_scenery(root)

    # Remove all the nodes I created
    for p in descendents(root):
        p.remove_node()

    # Clear various caches
    TransformState.garbage_collect()
    RenderState.garbage_collect()
    RenderState.garbage_collect()
### MEMORY TRACKING ###
from panda3d.core import MemoryUsage, MemoryUsagePointers
from tqdm.auto import tqdm
import pandas as pd

class dotdict(dict):
    # A dict whose items can also be accessed as attributes
    __getattr__ = dict.__getitem__
    __setattr__ = dict.__setitem__
class Leaks:
    def __init__(self, clean=True):
        self._last = None
        self._count = 0
        self._clean = clean

    def __enter__(self):
        MemoryUsage.freeze()
        return self

    def __call__(self):
        if self._last:
            return self._last

        ptrs = MemoryUsagePointers()
        MemoryUsage.get_pointers(ptrs)
        pointers = [ptrs.get_python_pointer(i) for i in range(ptrs.get_num_pointers())]

        # Group the pointers by type
        groups = {}
        for p in pointers:
            groups.setdefault(type(p).__name__, []).append(p)

        if self._clean:
            # Get rid of the stuff that's so common it's uninformative
            boring = ['CopyOnWriteObject', 'NodeReferenceCount', 'TypedReferenceCount', 'ReferenceCount']
            groups = {k: vs for k, vs in groups.items() if k not in boring}

        # Number of examples in each group
        counts = pd.Series({k: len(v) for k, v in groups.items()}).sort_index()
        # Average ref count across the examples in each group
        refs = pd.Series({k: sum(v.ref_count for v in vs)/len(vs) for k, vs in groups.items()}).sort_index()

        size = MemoryUsage.get_current_cpp_size()

        count = self._count
        self._count += 1

        return dotdict(pointers=pointers, groups=groups, counts=counts, size=size, count=count, refs=refs)

    def __exit__(self, t, v, tb):
        self._last = self()
        return False
class CumulativeLeaks:
    def __init__(self, count=10, clean=True):
        self._count = count
        self._clean = clean

    def __enter__(self):
        self._results = []
        self._pbar = tqdm(total=self._count).__enter__()
        self._leaks = Leaks(self._clean).__enter__()
        return self

    def __iter__(self):
        for _ in range(self._count):
            yield
            result = self._leaks()
            self._results.append(result)
            self._pbar.update(1)
            self._pbar.set_description(f'{result.size/1e6:.1f}MB')

    def __call__(self):
        counts = pd.DataFrame.from_dict({r.count: r.counts for r in self._results}, orient='index')
        return dotdict(
            size=pd.Series({r.count: r.size/1e6 for r in self._results}),
            counts=counts,
            diff=counts.diff().iloc[-1] if counts.size > 0 else pd.Series())

    def __exit__(self, t, v, tb):
        self._leaks.__exit__(t, v, tb)
        self._pbar.__exit__(t, v, tb)
        return False
### MAIN ###

def cumulative():
    # Warm up the caches so they don't pollute our pointer counts
    create_destroy()

    # Now create and destroy the environment a bunch of times, tracking the memory as we go
    with CumulativeLeaks(10) as leaks:
        for _ in leaks:
            create_destroy()
    return leaks()

def single():
    create_destroy()
    with Leaks() as leaks:
        create_destroy()
    return leaks()

if __name__ == '__main__':
    onestats = single()
    cumstats = cumulative()
    print(f'''
Leaked {cumstats.size.iloc[-1]:.2f}MB of memory in {len(cumstats.size)} loops. Pointer counts are
{cumstats.counts}
And the average ref counts are
{onestats.refs}''')