Fabio M. Graetz, Ph.D. (fg91)

fg91 / example.py
Last active November 17, 2021 20:22
Flyte remote build and register
from flytekit import task  # the @task decorator comes from flytekit

from x import task2  # a task defined in another module

@task
def my_task():
    ...

def relocate(task, context="."):
    ...  # body truncated in the gist preview
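For context, this is a hedged sketch of what "remote build and register" can look like with flytekit's FlyteRemote API. It does not use the gist's relocate helper, and the project, domain, and version strings are placeholders; Config.auto, ImageConfig.auto_default_image, and register_task are standard flytekit calls, not necessarily the exact approach this gist takes.

from flytekit.configuration import Config, ImageConfig, SerializationSettings
from flytekit.remote import FlyteRemote

# connect to the Flyte backend configured via config file / environment
remote = FlyteRemote(config=Config.auto(),
                     default_project="flytesnacks",
                     default_domain="development")

# serialize the task against a container image and register it under a version
settings = SerializationSettings(image_config=ImageConfig.auto_default_image())
remote.register_task(my_task, serialization_settings=settings, version="v1")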
import torch.nn.functional as F

def cos_loss(input, target):
    # cosine distance: 1 minus the mean cosine similarity between input and target
    return 1 - F.cosine_similarity(input, target).mean()
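A minimal usage sketch of cos_loss; the batch size and embedding dimension here are just an illustration, not values from the gist.

import torch

pred = torch.randn(8, 512, requires_grad=True)  # batch of predicted embeddings
target = torch.randn(8, 512)                    # batch of target embeddings
loss = cos_loss(pred, target)                   # scalar in [0, 2]; 0 when perfectly aligned
loss.backward()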
import torch.nn as nn
import torchvision.models as models

model = models.resnet34(pretrained=True).cuda()

# Freeze the base of the network and only train the new custom layers
for param in model.parameters():
    param.requires_grad = False

p = 0.1
num_classes = 10  # placeholder; the gist preview is cut off after the Dropout layer
model.fc = nn.Sequential(nn.BatchNorm1d(512),
                         nn.Dropout(p),
                         nn.Linear(512, num_classes))  # assumed final classification layer
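Since only the new head has trainable parameters after freezing, the optimizer can be built over model.fc alone. A minimal sketch; the choice of Adam and the learning rate are assumptions, not taken from the gist.

import torch

optimizer = torch.optim.Adam(model.fc.parameters(), lr=1e-3)  # only the new layers are updated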
layer = 40    # index of the layer to inspect (position in the model's children)
filter = 265  # index of the filter within that layer
FV = FilterVisualizer(size=56, upscaling_steps=12, upscaling_factor=1.2)
FV.visualize(layer, filter, blur=5)
import numpy as np
from fastai.conv_learner import *  # fastai 0.7-era import assumed to provide vgg16(pre=...) and set_trainable

class FilterVisualizer():
    def __init__(self, size=56, upscaling_steps=12, upscaling_factor=1.2):
        self.size, self.upscaling_steps, self.upscaling_factor = size, upscaling_steps, upscaling_factor
        self.model = vgg16(pre=True).cuda().eval()  # pretrained VGG16
        set_trainable(self.model, False)            # freeze all weights

    def visualize(self, layer, filter, lr=0.1, opt_steps=20, blur=None):
        sz = self.size
        img = np.uint8(np.random.uniform(150, 180, (sz, sz, 3))) / 255  # generate random image
        activations = SaveFeatures(list(self.model.children())[layer])  # register hook
        # ... the optimization/upscaling loop is cut off in the gist preview
        #     (see the plain-PyTorch sketch after SaveFeatures below)
class SaveFeatures():
    def __init__(self, module):
        self.hook = module.register_forward_hook(self.hook_fn)
    def hook_fn(self, module, input, output):
        # keep the activations attached to the graph so the loss can backpropagate
        # to the input image (torch.tensor(output, requires_grad=True) would copy-construct
        # and detach them in current PyTorch versions)
        self.features = output
    def close(self):
        self.hook.remove()
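Putting the two classes together, the core of visualize is activation maximization: optimize a random image so that the chosen filter's mean activation becomes as large as possible. The following is a minimal plain-PyTorch sketch of that loop under stated assumptions: torchvision's vgg16 replaces the fastai helper, layer indices refer to positions in vgg16().features, and the function name visualize_filter, the Adam optimizer, and its settings are illustrative, not the gist's exact code. It relies on SaveFeatures above storing graph-attached activations.

import numpy as np
import torch
import torchvision.models as models

def visualize_filter(layer_idx, filter_idx, size=56, lr=0.1, opt_steps=20):
    model = models.vgg16(pretrained=True).features.eval()
    for p in model.parameters():
        p.requires_grad_(False)

    saved = SaveFeatures(model[layer_idx])  # hook the chosen layer

    # start from a small random image and optimize it directly
    img = np.float32(np.random.uniform(150, 180, (size, size, 3))) / 255
    img = torch.tensor(img.transpose(2, 0, 1)[None], requires_grad=True)
    optimizer = torch.optim.Adam([img], lr=lr, weight_decay=1e-6)

    for _ in range(opt_steps):
        optimizer.zero_grad()
        model(img)
        loss = -saved.features[0, filter_idx].mean()  # maximize the filter's mean activation
        loss.backward()
        optimizer.step()

    saved.close()
    return img.detach()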
fg91 / truncate.py
Last active December 17, 2018 15:26
how to truncate a base network
import torch.nn as nn
from torchvision.models import resnet34

class YourCustomModel(nn.Module):
    def __init__(self):
        super().__init__()
        # truncated base network; "True" refers to pretrained weights
        self.backbone = nn.Sequential(*list(resnet34(True).children())[:8])
        # and your custom layers
        self.features = nn.Sequential(
            self.backbone,
            # custom layers (the gist preview is cut off here), e.g.:
            nn.AdaptiveAvgPool2d(1),
            nn.Flatten(),
            nn.Linear(512, 10),  # assumed example head with 10 output classes
        )
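A quick check of the truncation: forwarding a dummy ImageNet-sized input through the backbone shows the 512-channel feature map that the custom layers receive (the input size is an illustration).

import torch

model = YourCustomModel()
x = torch.randn(1, 3, 224, 224)   # dummy ImageNet-sized input
print(model.backbone(x).shape)    # torch.Size([1, 512, 7, 7]) for a truncated resnet34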