pytorch-upsampling
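Compares 2x bilinear upsampling with PyTorch's built-in interpolation against a transposed convolution that uses a fixed bilinear kernel.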
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
def make_bilinear_weights(size, num_channels):
    ''' Make a 2D bilinear kernel suitable for upsampling,
    stacked so it can be applied to a multi-channel tensor '''
    factor = (size + 1) // 2
    if size % 2 == 1:
        center = factor - 1
    else:
        center = factor - 0.5
    og = np.ogrid[:size, :size]
    filt = (1 - abs(og[0] - center) / factor) * \
           (1 - abs(og[1] - center) / factor)
    print(filt)
    filt = torch.from_numpy(filt.astype(np.float32))
    # Place the same bilinear kernel on the diagonal so each channel
    # is upsampled independently of the others
    w = torch.zeros(num_channels, num_channels, size, size)
    for i in range(num_channels):
        w[i, i] = filt
    return w
# Define a toy 2x2 grid with shape (N, C, H, W) = (1, 1, 2, 2)
x = np.array([[1, 2], [3, 4]], dtype=np.float32)
x = torch.from_numpy(x[np.newaxis, np.newaxis, :, :])
# Upsample 2x using PyTorch's built-in bilinear interpolation
# (F.upsample is deprecated; F.interpolate with align_corners=True
# reproduces the behaviour shown in the output below)
out1 = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=True)
# Upsample using transposed convolution with a fixed bilinear kernel;
# the kernel size is 2x the upsample rate for smoothing,
# and the output will need to be cropped back to the target size
out2 = F.conv_transpose2d(x, make_bilinear_weights(4, 1), stride=2)
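# One possible way to crop out2 back to the 4x4 target size (out2_cropped and
# offset are additions for illustration, not part of the original gist):
# drop (kernel_size - stride) // 2 = 1 border pixel on each side. The interior
# approximates bilinear upsampling; the borders differ from out1 because the
# two methods treat image edges differently.
offset = (4 - 2) // 2
out2_cropped = out2[:, :, offset:offset + 4, offset:offset + 4]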
'''
Output
out1 = [ 1. 1.33333325 1.66666675 2. ]
[ 1.66666663 2. 2.33333349 2.66666651]
[ 2.33333325 2.66666675 3.00000024 3.33333349]
[ 3. 3.33333325 3.66666675 4. ]
out2 = [ 0.0625 0.1875 0.3125 0.4375 0.375 0.125 ]
[ 0.1875 0.5625 0.9375 1.3125 1.125 0.375 ]
[ 0.375 1.125 1.75 2.25 1.875 0.625 ]
[ 0.625 1.875 2.75 3.25 2.625 0.875 ]
[ 0.5625 1.6875 2.4375 2.8125 2.25 0.75 ]
[ 0.1875 0.5625 0.8125 0.9375 0.75 0.25 ]
'''
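# A minimal sketch (an addition, not part of the original gist) of a common
# pattern: use the bilinear weights to initialize a learnable
# nn.ConvTranspose2d layer, as in FCN-style segmentation networks.
upsample_layer = nn.ConvTranspose2d(1, 1, kernel_size=4, stride=2, bias=False)
with torch.no_grad():
    upsample_layer.weight.copy_(make_bilinear_weights(4, 1))
out3 = upsample_layer(x)  # equals out2 above, since the weights are identical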