Skip to content

Instantly share code, notes, and snippets.

@phizaz
Last active September 12, 2023 20:31
Show Gist options
  • Star 2 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save phizaz/664e9f040b157cce4166d602daf730cc to your computer and use it in GitHub Desktop.
Pytorch: Inspect Activations Layer-by-layer in Trained Model
Display the source blob
Display the rendered blob
Raw
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn as nn"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"from torchvision import models"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"class Model(nn.Module):\n",
"    \"\"\"Pretrained VGG11 that also returns every intermediate activation.\"\"\"\n",
"\n",
"    def __init__(self):\n",
"        super().__init__()\n",
"        backbone = models.vgg11(pretrained=True)\n",
"        # Re-wrap as plain Sequentials so forward() can step layer by layer.\n",
"        self.features = nn.Sequential(*list(backbone.features))\n",
"        self.classifier = nn.Sequential(*list(backbone.classifier))\n",
"\n",
"    def forward(self, x):\n",
"        \"\"\"Return (logits, list of every layer's output, in order).\"\"\"\n",
"        acts = []\n",
"\n",
"        for module in self.features:\n",
"            x = module(x)\n",
"            acts.append(x)\n",
"\n",
"        # flatten the conv feature map before the fully connected head\n",
"        x = x.view(x.size(0), -1)\n",
"\n",
"        for module in self.classifier:\n",
"            x = module(x)\n",
"            acts.append(x)\n",
"\n",
"        return x, acts"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"m = Model().cuda()\n",
"x = torch.randn(10, 3, 224, 224).cuda()\n",
"x, activations = m(x)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"torch.Size([10, 1000])\n"
]
}
],
"source": [
"print(x.shape)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"torch.Size([10, 64, 224, 224])\n",
"torch.Size([10, 64, 224, 224])\n",
"torch.Size([10, 64, 112, 112])\n",
"torch.Size([10, 128, 112, 112])\n",
"torch.Size([10, 128, 112, 112])\n",
"torch.Size([10, 128, 56, 56])\n",
"torch.Size([10, 256, 56, 56])\n",
"torch.Size([10, 256, 56, 56])\n",
"torch.Size([10, 256, 56, 56])\n",
"torch.Size([10, 256, 56, 56])\n",
"torch.Size([10, 256, 28, 28])\n",
"torch.Size([10, 512, 28, 28])\n",
"torch.Size([10, 512, 28, 28])\n",
"torch.Size([10, 512, 28, 28])\n",
"torch.Size([10, 512, 28, 28])\n",
"torch.Size([10, 512, 14, 14])\n",
"torch.Size([10, 512, 14, 14])\n",
"torch.Size([10, 512, 14, 14])\n",
"torch.Size([10, 512, 14, 14])\n",
"torch.Size([10, 512, 14, 14])\n",
"torch.Size([10, 512, 7, 7])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 4096])\n",
"torch.Size([10, 1000])\n"
]
}
],
"source": [
"for a in activations:\n",
" print(a.shape)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment