Skip to content

Instantly share code, notes, and snippets.

@NicolasHug
Created March 17, 2021 15:31
Show Gist options
  • Save NicolasHug/fb2c28d758951959da94d3db326ce21b to your computer and use it in GitHub Desktop.
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "worst-parallel",
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2\n",
"import numpy as np\n",
"import pandas as pd\n",
"import matplotlib as mpl\n",
"import matplotlib.pyplot as plt\n",
"from PIL import Image\n",
"import torch\n",
"import torchvision\n",
"import torchvision.transforms as t\n",
"from torchvision import models\n",
"import torchvision.ops as ops"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "center-template",
"metadata": {},
"outputs": [],
"source": [
"def get_available_classification_models():\n",
"    # A model factory is any public callable in torchvision.models whose name\n",
"    # starts with a lower-case letter (classes like ResNet are excluded).\n",
"    names = []\n",
"    for k, v in models.__dict__.items():\n",
"        if callable(v) and k[0].lower() == k[0] and k[0] != \"_\":\n",
"            names.append(k)\n",
"    return names\n",
"\n",
"all_classification_models = get_available_classification_models()"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "antique-entity",
"metadata": {},
"outputs": [],
"source": [
"# Instantiate every classification model with pretrained weights so the\n",
"# checkpoints land in the local hub cache; a few names have no released\n",
"# checkpoint and are skipped.\n",
"no_checkpoint = ('mnasnet0_75', 'mnasnet1_3', 'shufflenet_v2_x1_5', 'shufflenet_v2_x2_0')\n",
"for model_name in all_classification_models:\n",
"    if model_name not in no_checkpoint:\n",
"        getattr(models, model_name)(pretrained=True)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "meaningful-florida",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"['/Users/nicolashug/.cache/torch/hub/checkpoints/resnet34-333f7ec4.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/alexnet-owt-4df8aa71.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/squeezenet1_0-a815701f.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/resnet152-b121ed2d.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/squeezenet1_1-f364aa15.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/resnet50-19c8e357.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/resnet101-5d3b4d8f.pth',\n",
" '/Users/nicolashug/.cache/torch/hub/checkpoints/resnet18-5c106cde.pth']"
]
},
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from glob import glob\n",
"import io\n",
"from pickle import UnpicklingError\n",
"\n",
"# Collect every cached checkpoint that torch.load cannot unpickle\n",
"# (legacy serialization format that the current torch rejects).\n",
"all_files = glob('/Users/nicolashug/.cache/torch/hub/checkpoints/*.pth')\n",
"baddies = []\n",
"\n",
"for filename in all_files:\n",
"    with open(filename, \"rb\") as fd:\n",
"        buf = io.BytesIO(fd.read())\n",
"    try:\n",
"        state_dict = torch.load(buf, \"cpu\")\n",
"    except UnpicklingError:\n",
"        baddies.append(filename)\n",
"baddies"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "specified-force",
"metadata": {},
"outputs": [],
"source": [
"import hashlib\n",
"import os\n",
"import uuid\n",
"\n",
"# Re-serialize each broken checkpoint with the current torch, then rename it\n",
"# to the torchvision convention: <model_name>-<first 8 hex chars of sha256>.pth\n",
"output_dir = '/Users/nicolashug/new_models'\n",
"os.makedirs(output_dir, exist_ok=True)  # fix: torch.save below fails if the dir is missing\n",
"tmp_path = os.path.join(output_dir, 'blah')\n",
"\n",
"for filename in baddies:\n",
"    # Strip the trailing '-<hash>' from the old basename to recover the model name.\n",
"    model_name = '-'.join(filename.split('/')[-1].split('-')[:-1])\n",
"    model = torch.load(filename)\n",
"    torch.save(model, tmp_path)\n",
"\n",
"    # Hash the re-saved file in 4K blocks to avoid reading it all into memory.\n",
"    sha256_hash = hashlib.sha256()\n",
"    with open(tmp_path, \"rb\") as f:\n",
"        for byte_block in iter(lambda: f.read(4096), b\"\"):\n",
"            sha256_hash.update(byte_block)\n",
"    hh = sha256_hash.hexdigest()\n",
"\n",
"    output_path = os.path.join(output_dir, model_name + \"-\" + hh[:8] + \".pth\")\n",
"    os.replace(tmp_path, output_path)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "living-defense",
"metadata": {},
"outputs": [],
"source": [
"# Make sure they can be loaded now\n",
"for filename in glob(output_dir + '/*'):\n",
"    with open(filename, \"rb\") as fd:\n",
"        buf = io.BytesIO(fd.read())\n",
"    state_dict = torch.load(buf, \"cpu\")\n",
"\n",
"# Also make sure the new dicts are the same as the old ones\n",
"new_files = sorted(glob(output_dir + '/*'))\n",
"for new_path, old_path in zip(new_files, sorted(baddies)):\n",
"    new_sd = torch.load(new_path)\n",
"    old_sd = torch.load(old_path)\n",
"    assert new_sd.keys() == old_sd.keys()\n",
"    for new_t, old_t in zip(new_sd.values(), old_sd.values()):\n",
"        torch.testing.assert_allclose(new_t, old_t)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "simplified-oxygen",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "rapid-merchandise",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "regulation-telling",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "entire-martial",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"id": "perceived-biography",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment