Francisco Massa (fmassa)

  • Facebook AI Research
@fmassa
fmassa / query_distribution.py
Created June 28, 2020 16:11
Code to reproduce Fig. 7 in "End-to-End Object Detection with Transformers"
# this file needs to be added to the root folder of detr github repo
import torch
import time
import torchvision
import numpy as np
import tqdm
import matplotlib.pyplot as plt
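
The preview above shows only the imports. Below is a minimal sketch of the idea behind Fig. 7 (the spatial distribution of box predictions per decoder query slot over validation images), assuming the pretrained DETR checkpoint from torch.hub; the actual gist builds the model from the detr repo this file is meant to live in, so treat the model loading and plotting details here as assumptions.

import torch
import torchvision.transforms as T
import matplotlib.pyplot as plt

# Pretrained DETR from torch.hub (assumption: the gist instead uses the repo-local model).
model = torch.hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=True).eval()

transform = T.Compose([
    T.Resize(800),
    T.ToTensor(),
    T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
])

@torch.no_grad()
def collect_box_centers(pil_images):
    # Returns a (num_images, 100, 2) tensor of normalized (cx, cy) box centers,
    # one row of 100 query slots per image.
    centers = []
    for img in pil_images:
        out = model(transform(img).unsqueeze(0))
        centers.append(out['pred_boxes'][0, :, :2])  # boxes are normalized cxcywh
    return torch.stack(centers)

def plot_slot(centers, slot):
    # Scatter of the centers a single query slot predicts across all images,
    # which is what each small panel in Fig. 7 shows.
    plt.scatter(centers[:, slot, 0], centers[:, slot, 1], s=2)
    plt.xlim(0, 1)
    plt.ylim(0, 1)
    plt.title('query slot {}'.format(slot))
    plt.show()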
@fmassa
fmassa / compute_flops.py
Created June 28, 2020 13:34
Utility functions used to compute FLOPs in DETR.
# this is the main entrypoint
# as we describe in the paper, we compute the flops over the first 100 images
# on COCO val2017, and report the average result
import torch
import time
import torchvision
import numpy as np
import tqdm
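
Only the header comments and imports survive in the preview. A hedged sketch of one way to get the number those comments describe, using fvcore's FlopCountAnalysis (an assumption: the gist may rely on a different counter) averaged over the first 100 COCO val2017 images:

import torch
from fvcore.nn import FlopCountAnalysis

@torch.no_grad()
def average_flops(model, images, n_images=100, device='cuda'):
    # images: an iterable of 3xHxW tensors, e.g. COCO val2017 loaded with ToTensor().
    model = model.eval().to(device)
    totals = []
    for i, img in enumerate(images):
        if i >= n_images:
            break
        totals.append(FlopCountAnalysis(model, (img.unsqueeze(0).to(device),)).total())
    return sum(totals) / len(totals)

# Example with the torch.hub DETR model (hypothetical usage):
# model = torch.hub.load('facebookresearch/detr', 'detr_resnet50', pretrained=True)
# print(average_flops(model, coco_val_images))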
@fmassa
fmassa / visualize_maskrcnn_predictions.py
Created December 2, 2019 10:52
Script to visualize predictions from torchvision Mask R-CNN
import torch
import numpy as np
import matplotlib.pyplot as plt
import cv2
def select_top_predictions(predictions, threshold):
    # keep only the detections whose score exceeds the threshold
    idx = (predictions["scores"] > threshold).nonzero().squeeze(1)
    new_predictions = {}
    for k, v in predictions.items():
        new_predictions[k] = v[idx]
    return new_predictions
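
A minimal usage sketch for the helper above, assuming the standard torchvision Mask R-CNN and a local image path (both hypothetical here); the full gist presumably also draws masks and labels:

import torch
import torchvision
import matplotlib.pyplot as plt
from PIL import Image

model = torchvision.models.detection.maskrcnn_resnet50_fpn(pretrained=True).eval()

image = Image.open('example.jpg').convert('RGB')  # hypothetical input image
tensor = torchvision.transforms.functional.to_tensor(image)

with torch.no_grad():
    predictions = model([tensor])[0]  # dict with boxes, labels, scores, masks

top = select_top_predictions(predictions, threshold=0.7)

plt.imshow(image)
for box in top['boxes']:
    x1, y1, x2, y2 = box.tolist()
    plt.gca().add_patch(plt.Rectangle((x1, y1), x2 - x1, y2 - y1, fill=False, color='red'))
plt.show()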
@fmassa
fmassa / sparse_adam_jit.py
Created July 31, 2019 17:41
Fused sparse Adam with JIT
import math
import torch
from torch.optim.optimizer import Optimizer
class SparseAdam(Optimizer):
    r"""Implements lazy version of Adam algorithm suitable for sparse tensors.

    In this variant, only moments that show up in the gradient get updated, and
    only those portions of the gradient get applied to the parameters.

    Arguments:
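
The preview cuts off inside the docstring. As a hedged sketch of the kind of fused step the gist scripts (the real implementation may fuse more of the bookkeeping), here is a torch.jit.script function that applies the lazy Adam update only to the rows present in a sparse gradient:

import torch

@torch.jit.script
def sparse_adam_step(param: torch.Tensor, exp_avg: torch.Tensor, exp_avg_sq: torch.Tensor,
                     grad_rows: torch.Tensor, grad_values: torch.Tensor,
                     lr: float, beta1: float, beta2: float, eps: float, step: int):
    # Gather the optimizer state only for the rows present in the sparse gradient.
    old_avg = exp_avg.index_select(0, grad_rows)
    old_avg_sq = exp_avg_sq.index_select(0, grad_rows)

    new_avg = old_avg * beta1 + grad_values * (1.0 - beta1)
    new_avg_sq = old_avg_sq * beta2 + grad_values * grad_values * (1.0 - beta2)

    # Write the updated moments back in place.
    exp_avg.index_copy_(0, grad_rows, new_avg)
    exp_avg_sq.index_copy_(0, grad_rows, new_avg_sq)

    step_f = float(step)
    bias_correction1 = 1.0 - beta1 ** step_f
    bias_correction2 = 1.0 - beta2 ** step_f
    step_size = lr * (bias_correction2 ** 0.5) / bias_correction1

    # Apply the update only at the touched rows.
    update = new_avg / (new_avg_sq.sqrt() + eps)
    param.index_add_(0, grad_rows, update * (-step_size))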
@fmassa
fmassa / recover_optnet.lua
Created November 1, 2016 16:25
Script to recover saved optnet models for which the tensor pointers changed after optimization
local optnet = require 'optnet'
local net = torch.load('celeba_24_G.t7')
local keys
-- the tensor pointers that were saved in the model
-- have changed and are not valid anymore.
-- In order to try to recover, let's suppose that
-- the mapping is given by the offsets in ascending
-- order.
do
@fmassa
fmassa / optnet_test_resnet101.lua
Created August 10, 2016 16:18
Test script to verify that ResNet-101 fits in memory with batch size 64 using the latest optnet
local optnet = require 'optnet'
require 'cudnn'
require 'cunn'
local createModel = require 'resnet'
local opt = {dataset='imagenet', depth=101}
local model = createModel(opt)
local input = torch.zeros(4,3,224,224):cuda()
-- suppose you have a model called model
lrs_model = model:clone()
lrs = lrs_model:getParameters()
lrs:fill(1) -- setting the base learning rate to 1
-- now let's set the learning rate factor of the bias of module 5 to 2
lrs_model:get(5).bias:fill(2)
-- same thing for the weights of module 2, let's set them to 3
lrs_model:get(2).weight:fill(3)
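
For reference, a PyTorch analogue of the same per-module learning-rate idea; this is a swapped-in mechanism (optimizer parameter groups) rather than the per-parameter learning-rate tensor the Torch7 snippet builds, and the scaled modules here only mirror the example above:

import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(10, 20), nn.ReLU(), nn.Linear(20, 5))

base_lr = 1.0
optimizer = torch.optim.SGD([
    # weights of one module get 3x the base learning rate
    {'params': [model[0].weight], 'lr': 3 * base_lr},
    # bias of another module gets 2x the base learning rate
    {'params': [model[2].bias], 'lr': 2 * base_lr},
    # everything else keeps the base learning rate
    {'params': [p for n, p in model.named_parameters()
                if n not in ('0.weight', '2.bias')]},
], lr=base_lr)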
@fmassa
fmassa / convertLinear2Conv1x1.lua
Last active May 30, 2018 20:16
Simple example of how to convert a Linear module to a 1x1 convolution
require 'nn'
-- you just need to provide the linear module you want to convert,
-- and the dimensions of the field of view of the linear layer
function convertLinear2Conv1x1(linmodule,in_size)
   local s_in = linmodule.weight:size(2)/(in_size[1]*in_size[2])
   local s_out = linmodule.weight:size(1)
   local convmodule = nn.SpatialConvolutionMM(s_in,s_out,in_size[1],in_size[2],1,1)
   convmodule.weight:copy(linmodule.weight)
   convmodule.bias:copy(linmodule.bias)
   return convmodule
end
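
A PyTorch analogue of the same conversion, included as a hedged sketch (the gist above targets Torch7's nn.SpatialConvolutionMM, whose 2D weight layout lets it copy the Linear weights directly; in PyTorch the weights need an explicit reshape):

import torch
import torch.nn as nn

def linear_to_conv(linear: nn.Linear, field_h: int, field_w: int) -> nn.Conv2d:
    # The Linear layer is assumed to act on inputs flattened from (C, field_h, field_w).
    in_channels = linear.in_features // (field_h * field_w)
    conv = nn.Conv2d(in_channels, linear.out_features, kernel_size=(field_h, field_w))
    with torch.no_grad():
        conv.weight.copy_(linear.weight.view(linear.out_features, in_channels, field_h, field_w))
        conv.bias.copy_(linear.bias)
    return conv

# Sanity check: both modules agree on a matching input.
lin = nn.Linear(3 * 7 * 7, 10)
conv = linear_to_conv(lin, 7, 7)
x = torch.randn(1, 3, 7, 7)
assert torch.allclose(lin(x.flatten(1)), conv(x).flatten(1), atol=1e-5)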