Van-Quang Nguyen (davidnvq)
davidnvq / tmux_local_install.sh
Last active August 27, 2020 04:30 — forked from ryin/tmux_local_install.sh
bash script for installing tmux without root access
#!/bin/bash
# Script for installing tmux on systems where you don't have root access.
# tmux will be installed in $HOME/local/bin.
# It's assumed that wget and a C/C++ compiler are installed.
# exit on error
set -e
TMUX_VERSION=1.8
davidnvq / test_dropout.py
Created June 5, 2019 09:55
Test Dropout
import torch
import torch.nn as nn

# Dropout in eval mode (and with p=0.0) is a no-op: the input passes through unchanged.
nn.Dropout(p=0.0).eval()(torch.ones(4, 10))
"""Output
tensor([[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]])
"""
davidnvq / param_dict.py
Last active June 5, 2019 09:53
Initialize a class instance from a parameter dictionary
class Foo:
    def __init__(self, a, b, c, d=0):
        self.a = a
        self.b = b
        self.c = c
        self.d = d

# Build an instance by unpacking a parameter dictionary (the values are illustrative)
params = {'a': 1, 'b': 2, 'c': 3}
foo = Foo(**params)
print(foo.a, foo.b, foo.c, foo.d)  # 1 2 3 0
davidnvq / lr_adjust.py
Last active April 25, 2019 02:06
Learning rate decay example from the PyTorch ImageNet example
# https://github.com/pytorch/examples/blob/master/imagenet/main.py#L392-L396
def adjust_learning_rate(optimizer, epoch, args):
    """Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
    lr = args.lr * (0.1 ** (epoch // 30))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr

# Create a learning rate adjustment function that divides the learning rate
# by 10 every 30 epochs (the base LR of 0.1 here is illustrative)
def adjust_learning_rate(epoch, base_lr=0.1):
    return base_lr * (0.1 ** (epoch // 30))
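PyTorch also ships a built-in scheduler with the same step-decay schedule; a minimal sketch (the model and loop body are stand-ins):

import torch.nn as nn
import torch.optim as optim
from torch.optim.lr_scheduler import StepLR

model = nn.Linear(10, 2)  # stand-in model for illustration
optimizer = optim.SGD(model.parameters(), lr=0.1)
scheduler = StepLR(optimizer, step_size=30, gamma=0.1)  # LR *= 0.1 every 30 epochs
for epoch in range(90):
    # ... run one training epoch here ...
    scheduler.step()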
davidnvq / dynamic_rnn.py
Last active October 17, 2023 21:33
Define a Dynamic RNN with pack_padded_sequence and pad_packed_sequence
import torch
from torch import nn
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
class DynamicRNN(nn.Module):
    """
    A wrapper around recurrent modules (RNN, LSTM)
    that supports packed sequence batches.
    """
davidnvq / parser.py
Created April 19, 2019 15:18
Parse a list argument with argparse
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--list",
    nargs="+",
    help="This will accept a list of values or a single value",
    default=['1']
)
args = parser.parse_args()
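Since no type= is given, nargs="+" collects every value as a string. Example invocations (the script name is hypothetical):

# python parser.py --list 1 2 3   -> args.list == ['1', '2', '3']
# python parser.py --list 5       -> args.list == ['5']
# python parser.py                -> args.list == ['1']  (the default)
print(args.list)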
davidnvq / normalize.py
Last active April 20, 2019 09:50
Image transforms and Normalize in PyTorch
"""
Reference: https://pytorch.org/docs/master/torchvision/models.html
"""
from urllib.request import urlopen
from PIL import Image
from torchvision import transforms
# Read an example image
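Pretrained torchvision models expect a batched NCHW input, so a single image needs a leading batch dimension (continuing from img_tensor above):

batch = img_tensor.unsqueeze(0)  # shape: (1, 3, 224, 224)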