Shuchen Du (shuuchen)
@shuuchen
shuuchen / nms.csv
Last active December 30, 2020 08:52
object_name,anchor_no,confidence
A,1,0.8
A,2,0.75
B,3,0.7
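The file pairs each candidate anchor with a confidence score, the usual input to non-maximum suppression. A minimal sketch of the confidence-ranking step, assuming the CSV holds exactly these columns and that one anchor is kept per object (no box coordinates are given, so the IoU-suppression part of full NMS is omitted):

import pandas as pd

# Hypothetical use of nms.csv: keep the highest-confidence anchor per object.
df = pd.read_csv('nms.csv')
kept = df.sort_values('confidence', ascending=False).groupby('object_name').head(1)
print(kept)
# object_name  anchor_no  confidence
# A            1          0.80
# B            3          0.70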
@shuuchen
shuuchen / test_sync_batch_ddp.py
Last active December 1, 2020 03:27
Multi-GPU sync-batch-norm test
import os
import argparse
import torch
import shutil
import torch.optim as optim
import torch.nn as nn
import numpy as np
import pandas as pd
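Only the import block of the sync-batch-norm test survives in this preview. A minimal sketch of the usual setup such a test exercises, assuming the standard torch.distributed workflow (the init method, model, and shapes below are placeholders, not the gist's actual code):

import torch
import torch.distributed as dist
import torch.nn as nn

def main(rank, world_size):
    # One process per GPU; NCCL is the usual backend for multi-GPU training.
    dist.init_process_group('nccl', init_method='tcp://127.0.0.1:23456',
                            rank=rank, world_size=world_size)
    torch.cuda.set_device(rank)
    model = nn.Sequential(nn.Conv2d(3, 8, 3, padding=1),
                          nn.BatchNorm2d(8)).cuda(rank)
    # Replace every BatchNorm layer with SyncBatchNorm so batch statistics
    # are averaged across all processes instead of computed per GPU.
    model = nn.SyncBatchNorm.convert_sync_batchnorm(model)
    model = nn.parallel.DistributedDataParallel(model, device_ids=[rank])
    out = model(torch.randn(4, 3, 32, 32, device=rank))
    print(rank, out.shape)

# typically launched with torch.multiprocessing.spawn(main, args=(world_size,),
# nprocs=world_size) or via torchrun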
import torch
from torch import nn

class SelfAttnBottleneck(nn.Module):
    expansion = 8

    def __init__(self, in_channel, out_channel):
        super().__init__()
        # inside a class __init__ function: standard conv/batch-norm weight initialization
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.normal_(m.weight, std=0.001)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
# prediction head: fuse features, then project to the output channels
head = nn.Sequential(
    Conv(ch, ch, 1),           # fusion with a 1x1 conv module
    nn.Conv2d(ch, out_ch, 1))  # final prediction with a 1x1 conv
class Conv(nn.Module):
    def __init__(self, in_ch, out_ch, kernel_size=3, stride=1, relued=True):
        super(Conv, self).__init__()
        # 'same' padding for odd kernel sizes
        padding = (kernel_size - 1) // 2
        self.conv_bn = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size, stride, padding, bias=False),
            nn.BatchNorm2d(out_ch, momentum=BN_MOMENTUM))  # BN_MOMENTUM assumed defined elsewhere in the gist
        self.relu = nn.ReLU()
        self.relued = relued

    def forward(self, x):
        # conv + batch norm with an optional ReLU; the preview is truncated,
        # so this forward follows the obvious reading of the attributes above
        x = self.conv_bn(x)
        return self.relu(x) if self.relued else x

# stride-2 3x3 convs that halve the spatial size and double the channels
nn.Conv2d(ch, ch * 2, 3, 2, 1)
nn.BatchNorm2d(ch * 2)
nn.Conv2d(ch, ch * 2, 3, 2, 1)
@shuuchen
shuuchen / unet.py
Created July 5, 2020 02:59
Implementation of U-Net with attention mechanism using PyTorch
import torch
from torch import nn

class Conv(nn.Module):
    def __init__(self, in_ch, out_ch):
        super(Conv, self).__init__()
        # the preview cuts off mid-Sequential; the closing layers below follow
        # the usual conv-BN-ReLU pattern and are an assumption, not the gist's code
        self.conv = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True))
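The description mentions an attention mechanism, but none of it survives the preview. As a sketch of the additive attention gate commonly used in attention U-Nets, where a decoder feature reweights the encoder skip connection (all names and channel choices here are assumptions, not the gist's code):

import torch
from torch import nn

class AttentionGate(nn.Module):
    # Additive attention gate: the decoder feature g 'gates' the encoder
    # skip feature x before the two are concatenated in the decoder.
    def __init__(self, g_ch, x_ch, mid_ch):
        super().__init__()
        self.w_g = nn.Conv2d(g_ch, mid_ch, 1)
        self.w_x = nn.Conv2d(x_ch, mid_ch, 1)
        self.psi = nn.Sequential(nn.Conv2d(mid_ch, 1, 1), nn.Sigmoid())

    def forward(self, g, x):
        # assumes g and x share spatial size; otherwise upsample g first
        a = self.psi(torch.relu(self.w_g(g) + self.w_x(x)))  # (N, 1, H, W) in [0, 1]
        return x * a  # suppress irrelevant skip-connection activations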
@shuuchen
shuuchen / ContrastiveLoss.py
Last active June 8, 2020 13:08
Contrastive Loss
import torch
from torch import nn
from torch.nn import functional as F

class ContrastiveLoss(nn.Module):
    def __init__(self, margin=5.0):
        super(ContrastiveLoss, self).__init__()
        self.margin = margin

    def forward(self, out1, out2, label):
        # the preview is truncated; this forward is the standard Hadsell-style
        # contrastive loss, not necessarily the gist's exact formulation
        d = F.pairwise_distance(out1, out2)
        # label = 1 for similar pairs, 0 for dissimilar (assumed convention)
        loss = label * d.pow(2) + (1 - label) * F.relu(self.margin - d).pow(2)
        return loss.mean()
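A quick usage sketch with random embeddings; the shapes and similar/dissimilar labels are illustrative only:

criterion = ContrastiveLoss(margin=5.0)
out1 = torch.randn(8, 128, requires_grad=True)  # paired embeddings
out2 = torch.randn(8, 128, requires_grad=True)
label = torch.randint(0, 2, (8,)).float()       # 1 = similar, 0 = dissimilar
loss = criterion(out1, out2, label)
loss.backward()  # in practice the embeddings come from a twin (Siamese) network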