abhishek thakur (abhishekkrthakur)

🏠
Working from home
View GitHub Profile
abhishekkrthakur / gist:7116671
Created October 23, 2013 11:06
opencv failed to build on 10.9
==> Installing dependencies for opencv: cmake, pkg-config, libpng
==> Installing opencv dependency: cmake
==> Downloading http://www.cmake.org/files/v2.8/cmake-2.8.12.tar.gz
######################################################################## 100.0%
==> ./bootstrap --prefix=/usr/local/Cellar/cmake/2.8.12 --system-libs --no-system-libarchive --datadir=/shar
==> make
==> make install
Warning: Could not link cmake. Unlinking...
Error: The `brew link` step did not complete successfully
abhishekkrthakur / km_latlong
Created July 17, 2014 16:32
kmeans_based_latitude_longitude_clustering_haversine
# k-Means clustering for Normal Distributions - Almost from scratch!
import numpy as np
import scipy as sp
import random
from math import radians, cos, sin, asin, sqrt


def haversine(lon1, lat1, lon2, lat2):
    """
import cv2

scaling = 10
webcam = cv2.VideoCapture(0)
haar = cv2.CascadeClassifier("/usr/local/Cellar/opencv/2.4.8.2/share/OpenCV/lbpcascades/lbpcascade_frontalface.xml")
if webcam.isOpened():  # try to get the first frame
    rval, frame = webcam.read()
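    # The preview stops here; a plausible continuation (assumed, not from the
    # original gist) downscales each frame by `scaling`, runs the LBP cascade,
    # and draws the detections until ESC is pressed.
    while rval:
        small = cv2.resize(frame, (frame.shape[1] // scaling, frame.shape[0] // scaling))
        gray = cv2.cvtColor(small, cv2.COLOR_BGR2GRAY)
        faces = haar.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5)
        for (x, y, w, h) in faces:
            cv2.rectangle(frame, (x * scaling, y * scaling),
                          ((x + w) * scaling, (y + h) * scaling), (0, 255, 0), 2)
        cv2.imshow("preview", frame)
        rval, frame = webcam.read()
        if cv2.waitKey(20) == 27:  # exit on ESC
            break
    cv2.destroyAllWindows()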
abhishekkrthakur / min-char-rnn.py
Created November 25, 2015 14:24 — forked from karpathy/min-char-rnn.py
Minimal character-level language model with a Vanilla Recurrent Neural Network, in Python/numpy
"""
Minimal character-level Vanilla RNN model. Written by Andrej Karpathy (@karpathy)
BSD License
"""
import numpy as np
# data I/O
data = open('input.txt', 'r').read() # should be simple plain text file
chars = list(set(data))
data_size, vocab_size = len(data), len(chars)
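# The preview is cut off here; in Karpathy's original file the data I/O section
# continues (roughly) by building the char <-> index lookups and setting the
# hyperparameters:
char_to_ix = {ch: i for i, ch in enumerate(chars)}
ix_to_char = {i: ch for i, ch in enumerate(chars)}
# hyperparameters
hidden_size = 100   # size of hidden layer of neurons
seq_length = 25     # number of steps to unroll the RNN for
learning_rate = 1e-1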
abhishekkrthakur / collections_dataset.py
Last active June 11, 2019 19:52
finetuning_collections_dataset
import os

import pandas as pd
import torch
from PIL import Image
from torch.utils.data import Dataset


class CollectionsDataset(Dataset):
    def __init__(self,
                 csv_file,
                 root_dir,
                 num_classes,
                 transform=None):
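        # The preview ends at the signature; the body below mirrors the test
        # dataset in collections_dataset_test.py, and __len__/__getitem__ are an
        # assumed sketch for a multi-label setup. The "id"/"attribute_ids" column
        # names and the os/torch imports above are assumptions, not from the gist.
        self.data = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.num_classes = num_classes
        self.transform = transform

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        img_name = os.path.join(self.root_dir, str(self.data.loc[idx, "id"]) + ".png")
        image = Image.open(img_name).convert("RGB")
        if self.transform:
            image = self.transform(image)
        # build a multi-hot target vector from space-separated attribute ids
        label = torch.zeros(self.num_classes)
        for attr in str(self.data.loc[idx, "attribute_ids"]).split():
            label[int(attr)] = 1.0
        return image, label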
abhishekkrthakur / collections_dataset_test.py
Created June 11, 2019 19:52
collections dataset test
class CollectionsDatasetTest(Dataset):
    def __init__(self,
                 csv_file,
                 root_dir,
                 transform=None):
        self.data = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.transform = transform
import torch.nn as nn
import pretrainedmodels as pm

model = pm.__dict__["resnet50"](pretrained='imagenet')
model.avg_pool = nn.AdaptiveAvgPool2d(1)
model.last_linear = nn.Sequential(
    nn.BatchNorm1d(2048),
    nn.Dropout(p=0.25),
    nn.Linear(in_features=2048, out_features=2048),
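    # The preview truncates the Sequential head here; the remaining layers are
    # an assumed completion (activation, regularisation, and a final linear
    # layer sized for the 1103 target classes used elsewhere in this series).
    nn.ReLU(),
    nn.BatchNorm1d(2048),
    nn.Dropout(p=0.5),
    nn.Linear(in_features=2048, out_features=1103),
)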
def train_model(model,
                data_loader,
                dataset_size,
                optimizer,
                scheduler,
                num_epochs):
    criterion = nn.BCEWithLogitsLoss()
    for epoch in range(num_epochs):
        print('Epoch {}/{}'.format(epoch, num_epochs - 1))
        print('-' * 10)
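        # The gist preview stops here; the rest of the epoch loop is an assumed
        # sketch of a standard training phase (the original may also run a
        # validation phase and track metrics).
        device = next(model.parameters()).device  # assume model is already on its target device
        model.train()
        running_loss = 0.0
        for inputs, labels in data_loader:
            inputs, labels = inputs.to(device), labels.to(device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item() * inputs.size(0)
        scheduler.step()
        epoch_loss = running_loss / dataset_size
        print('Loss: {:.4f}'.format(epoch_loss))
    return model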
import torch
from torchvision import transforms
# define some re-usable stuff
IMAGE_SIZE = 224
NUM_CLASSES = 1103
BATCH_SIZE = 32
device = torch.device("cuda:0")
IMG_MEAN = model_ft.mean
IMG_STD = model_ft.std
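# The preview ends before the transforms themselves are built; a plausible
# continuation (assumed, not from the gist) composes train/test pipelines from
# the constants above:
train_transform = transforms.Compose([
    transforms.Resize((IMAGE_SIZE, IMAGE_SIZE)),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize(IMG_MEAN, IMG_STD),
])
test_transform = transforms.Compose([
    transforms.Resize((IMAGE_SIZE, IMAGE_SIZE)),
    transforms.ToTensor(),
    transforms.Normalize(IMG_MEAN, IMG_STD),
])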
import torch.optim as optim
from torch.optim import lr_scheduler
plist = [
    {'params': model_ft.layer4.parameters(), 'lr': 1e-5},
    {'params': model_ft.last_linear.parameters(), 'lr': 5e-3}
]
optimizer_ft = optim.Adam(plist, lr=0.001)
lr_sch = lr_scheduler.StepLR(optimizer_ft, step_size=10, gamma=0.1)