Skip to content

Instantly share code, notes, and snippets.

@nim65s
Last active April 16, 2024 21:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save nim65s/295bc6eda86dc8250e982dac566a96f5 to your computer and use it in GitHub Desktop.
reduce mesh sizes
#!/usr/bin/env python
from os import chdir, walk
from pathlib import Path
from tempfile import TemporaryDirectory
from gepetto.corbaserver.client import Client
from meshcat import Visualizer, geometry, transformations
from pymeshlab import MeshSet
def humansize(size: int) -> str:
for unit in " KMGT":
if size < 1_000:
return f"{size:7.3f}{unit}"
size /= 1_000
def init_meshcat():
    """Create a meshcat Visualizer, open it in the browser, and return it."""
    viewer = Visualizer()
    viewer.open()
    return viewer
def init_gv():
    """Connect to gepetto-viewer and prepare "src" and "dst" windows.

    For each of the two names, reuses or creates a window, ensures a scene
    of the same name exists and is attached to it, and creates a root group
    node if missing.  Returns the gepetto GUI client.
    """
    gui = Client().gui
    for name in ("src", "dst"):
        # Reuse an existing window rather than opening a duplicate.
        if name in gui.getWindowList():
            window_id = gui.getWindowID(name)
        else:
            window_id = gui.createWindow(name)
        if name not in gui.getSceneList():
            gui.createScene(name)
        # NOTE(review): original indentation was lost in transcription —
        # assuming the scene is (re)attached to the window on every run.
        gui.addSceneToWindow(name, window_id)
        if not gui.nodeExists(name):
            gui.createGroup(name)
    return gui
def find_big_files(root: Path, max_size: int = 0) -> list[tuple[int, Path]]:
    """List files under *root* strictly larger than *max_size* bytes.

    Prints each matching file (human-readable size, path relative to
    *root*) plus a TOTAL line, and returns the matches sorted by
    ascending size as (size, relative_path) tuples.

    :param root: directory to scan recursively.
    :param max_size: size threshold in bytes; files must exceed it.
    :return: sorted list of (size_in_bytes, path_relative_to_root).
    """
    # The original implementation chdir()'d into root and walked ".",
    # which mutated process-global state and left the cwd changed if
    # stat() raised.  Walking root directly and relativizing the paths
    # yields the same (size, relative path) results with no side effect.
    big_files = []
    for dirpath, _dirs, files in walk(root):
        for name in files:
            full = Path(dirpath) / name
            size = full.stat().st_size
            if size > max_size:
                big_files.append((size, full.relative_to(root)))
    big_files.sort()
    big_total = sum(size for size, _ in big_files)
    for size, rel in big_files:
        print(humansize(size), rel)
    print(humansize(big_total), "TOTAL")
    return big_files
class Main:
    """Interactively reduce mesh files, previewing results in two viewers.

    Candidate reductions are shown side by side in gepetto-viewer and
    meshcat; the user confirms with a "y" answer in both before the
    reduced file replaces the original.
    """

    def __init__(self):
        # Both viewers are connected once and reused for every file.
        self.gv = init_gv()
        self.mc = init_meshcat()

    def disp_meshcat(self, path: Path, src: Path, dst: Path) -> bool:
        """Show original and reduced mesh in meshcat, offset so both are
        visible, then ask on stdin whether to keep the reduction."""
        self.mc["src"].set_object(geometry.DaeMeshGeometry.from_file(src / path))
        self.mc["dst"].set_object(geometry.DaeMeshGeometry.from_file(dst / path))
        self.mc["src"].set_transform(
            transformations.translation_matrix([0.1, 0.1, 0.1])
        )
        self.mc["dst"].set_transform(
            transformations.translation_matrix([-0.1, -0.1, -0.1])
        )
        return input().lower().startswith("y")

    def disp_gv(self, path: Path, src: Path, dst: Path) -> bool:
        """Show original and reduced mesh in gepetto-viewer, then ask on
        stdin whether to keep the reduction."""
        # Drop any meshes left over from the previous file (no refresh yet).
        self.gv.deleteNode("src/src", False)
        self.gv.deleteNode("dst/dst", False)
        self.gv.addMesh("src/src", str(Path.cwd() / src / path))
        self.gv.addMesh("dst/dst", str(Path.cwd() / dst / path))
        self.gv.refresh()
        return input().lower().startswith("y")

    def process_file(self, path: Path, src: Path, dst: Path, decimation: bool = True):
        """Reduce one mesh into dst, and replace the src copy if the user
        approves it in both viewers.

        Runs a decimation pass first, then recurses once for a
        vertex-merge pass.
        """
        (dst / path).parent.mkdir(parents=True, exist_ok=True)
        mesh_set = MeshSet()
        mesh_set.load_new_mesh(str(src / path))
        if decimation:
            mesh_set.meshing_decimation_quadric_edge_collapse()
            print("decimation:", end=" ")
        else:
            mesh_set.meshing_merge_close_vertices()
            print("merge:", end=" ")
        mesh_set.save_current_mesh(str(dst / path))
        src_size = (src / path).stat().st_size
        dst_size = (dst / path).stat().st_size
        if dst_size < src_size:
            # Only bother the user when the pass actually shrank the file.
            print(f"{path} ({humansize(src_size)} → {humansize(dst_size)}). Keep it?")
            if self.disp_gv(path, src, dst) and self.disp_meshcat(path, src, dst):
                (dst / path).rename(src / path)
        if decimation:
            # Second pass: merge close vertices on the (possibly replaced) file.
            self.process_file(path, src, dst, decimation=False)

    def process_dir(self, src: Path, dst: Path, max_size: int):
        """Process every file under src larger than max_size bytes."""
        print(f"reducing files in {src} bigger than {max_size}")
        for _, rel_path in find_big_files(root=src, max_size=max_size):
            self.process_file(path=rel_path, src=src, dst=dst)
if __name__ == "__main__":
    # Reduce every mesh under robots/ larger than 10 MB, staging the
    # candidate reductions in a throw-away temporary directory.
    app = Main()
    with TemporaryDirectory(prefix="clean") as tmp:
        app.process_dir(Path("robots"), Path(tmp), 10_000_000)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment