A grab-bag of small code snippets and command-line tricks for deep-learning and point-cloud work: NumPy, PyTorch, Faiss, ImageMagick, and shell one-liners, collected here for quick reference.
# argmax is not differentiable; a soft, differentiable surrogate is
# softmax(beta * x)^T * range(len(x))
# numpy
import numpy as np

beta = 12
y_est = np.array([[1.1, 3.0, 1.1, 1.3, 0.8]])
# multiplying by a large constant beta makes the resulting
# distribution more peaky near the max
a = np.exp(beta * y_est)
b = np.sum(np.exp(beta * y_est))
softmax = a / b
# soft argmax: the expected index under the peaky softmax (close to 1 here)
soft_argmax = np.sum(softmax * np.arange(y_est.shape[1]))
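The same trick ported to PyTorch, where the soft argmax stays differentiable end to end (a minimal sketch reusing the beta and values above):

import torch
import torch.nn.functional as F

beta = 12
y = torch.tensor([[1.1, 3.0, 1.1, 1.3, 0.8]], requires_grad=True)
idx = torch.arange(y.shape[1], dtype=y.dtype)
# expected index under the sharpened softmax; y.argmax() would block gradients
soft_argmax = (F.softmax(beta * y, dim=-1) * idx).sum()
soft_argmax.backward()  # gradients flow back to y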
import faiss
import numpy as np

# example data
xb = np.random.rand(200000, 32).astype('float32')
xq = np.random.rand(500, 32).astype('float32')
# get reference result with a flat (exact) L2 index
index = faiss.IndexFlatL2(xb.shape[1])
index.add(xb)
# return every database vector within squared-L2 distance 1.5 of each query
lims, D, I = index.range_search(xq, 1.5)
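range_search returns flattened results: lims has length len(xq) + 1, and the matches for query i sit in D[lims[i]:lims[i+1]] and I[lims[i]:lims[i+1]]. For example:

# distances and database ids of everything within the radius of query 0
d0 = D[lims[0]:lims[1]]
i0 = I[lims[0]:lims[1]]
print(len(i0), 'neighbours found for query 0')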
import torch
import torch.nn as nn
import torch.nn.functional as F

class FocalLoss(nn.Module):
    def __init__(self, alpha=1, gamma=2, logits=False, reduce=True):
        super(FocalLoss, self).__init__()
        self.alpha = alpha
        self.gamma = gamma
        self.logits = logits
        self.reduce = reduce
    def forward(self, inputs, targets):
        if self.logits:
            bce = F.binary_cross_entropy_with_logits(inputs, targets, reduction='none')
        else:
            bce = F.binary_cross_entropy(inputs, targets, reduction='none')
        pt = torch.exp(-bce)  # probability assigned to the true class
        focal = self.alpha * (1 - pt) ** self.gamma * bce  # down-weight easy examples
        return torch.mean(focal) if self.reduce else focal
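A quick usage sketch with made-up shapes (binary labels, raw logits in, so logits=True):

criterion = FocalLoss(alpha=1, gamma=2, logits=True)
preds = torch.randn(8, 1, requires_grad=True)  # raw model outputs
labels = torch.randint(0, 2, (8, 1)).float()   # binary targets
loss = criterion(preds, labels)
loss.backward()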
# Rename every file in the current folder, replacing ".obj.ply" with ".ply"
for filename in *; do newname=$(echo "$filename" | sed 's/\.obj\.ply/.ply/g'); mv "$filename" "$newname"; done
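For consistency with the rest of these snippets, the same rename in Python via pathlib (a sketch; adjust the pattern as needed):

from pathlib import Path

for p in Path('.').glob('*.obj.ply'):
    # foo.obj.ply -> foo.ply
    p.rename(p.with_name(p.name.replace('.obj.ply', '.ply')))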
classnames = {
    0: 'ceiling',
    1: 'floor',
    2: 'wall',
    3: 'beam',
    4: 'column',
    5: 'window',
    6: 'door',
    7: 'table',
    8: 'chair',
    # remaining ids completed here following the standard S3DIS label order
    9: 'sofa',
    10: 'bookcase',
    11: 'board',
    12: 'clutter',
}
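To go the other way (name to id), the mapping can be inverted; a one-liner sketch:

# invert id -> name into name -> id
name2id = {v: k for k, v in classnames.items()}
assert name2id['chair'] == 8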
Source: https://stackoverflow.com/questions/2853334/glueing-tile-images-together-using-imagemagicks-montage-command-without-resizin

# tile all JPGs in the folder into a 12-column grid without resizing
montage -mode concatenate -tile 12x *.jpg out.jpg
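If ImageMagick is not available, a rough Python equivalent with Pillow (assumed installed; it packs images into fixed-size cells and pads shorter rows with white):

from PIL import Image
import glob

paths = sorted(glob.glob('*.jpg'))
imgs = [Image.open(p) for p in paths]
cols = 12
rows = (len(imgs) + cols - 1) // cols
w = max(im.width for im in imgs)
h = max(im.height for im in imgs)
sheet = Image.new('RGB', (cols * w, rows * h), 'white')
for k, im in enumerate(imgs):
    # place image k at column k % cols, row k // cols, without resizing
    sheet.paste(im, ((k % cols) * w, (k // cols) * h))
sheet.save('out.jpg')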
import torch
def chamfer_distance_without_batch(p1, p2, debug=False):
    '''
    Calculate Chamfer Distance between two point sets
    :param p1: size[1, N, D]
    :param p2: size[1, M, D]
    :param debug: whether to output debug info
    :return: sum of nearest-neighbour squared distances in both directions
    '''
    # pairwise squared L2 distances between the two sets, size [N, M]
    dist = torch.cdist(p1.squeeze(0), p2.squeeze(0)) ** 2
    if debug:
        print('pairwise distance matrix:', dist.shape)
    return torch.min(dist, dim=1)[0].sum() + torch.min(dist, dim=0)[0].sum()
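A quick sanity check with random point sets (shapes are arbitrary):

p1 = torch.rand(1, 100, 3)  # N = 100 points in 3D
p2 = torch.rand(1, 50, 3)   # M = 50 points in 3D
print(chamfer_distance_without_batch(p1, p2, debug=True))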
def exp_lr_scheduler(optimizer, global_step, init_lr, decay_steps, decay_rate, lr_clip, staircase=True):
    """Exponentially decay the learning rate by decay_rate every decay_steps steps, clipped at lr_clip."""
    if staircase:
        lr = init_lr * decay_rate ** (global_step // decay_steps)
    else:
        lr = init_lr * decay_rate ** (global_step / decay_steps)
    lr = max(lr, lr_clip)
    if global_step % decay_steps == 0:
        print('LR is set to {}'.format(lr))
    # write the new rate back into the optimizer
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return optimizer
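Called once per step inside the training loop; a sketch with a stand-in model and made-up hyperparameters:

import torch

model = torch.nn.Linear(10, 2)  # stand-in model
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
for step in range(0, 6001, 2000):
    optimizer = exp_lr_scheduler(optimizer, step, init_lr=1e-3,
                                 decay_steps=2000, decay_rate=0.7,
                                 lr_clip=1e-5, staircase=True)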
# pytorch equivalent of tensorflow's tf.nn.softmax_cross_entropy_with_logits
# works for soft targets as well as one-hot encodings
import torch
import torch.nn.functional as F

logits = model(inputs)  # raw, unnormalised scores from the network
loss = torch.sum(-target * F.log_softmax(logits, -1), -1)
mean_loss = loss.mean()
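As a sanity check, with one-hot targets this reduces to the standard cross entropy (hypothetical shapes):

logits = torch.randn(4, 10)
hard = torch.randint(0, 10, (4,))
target = F.one_hot(hard, num_classes=10).float()
soft_ce = torch.sum(-target * F.log_softmax(logits, -1), -1).mean()
assert torch.allclose(soft_ce, F.cross_entropy(logits, hard))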