@jinyup100
Created July 30, 2020 16:43
import numpy as np
import os
import onnx
import torch
import torch.nn as nn
import torch.nn.functional as F
import cv2
# Class for RPN
class RPN(nn.Module):
    "Region Proposal Network"
    def __init__(self):
        super(RPN, self).__init__()

    def forward(self, z_f, x_f):
        raise NotImplementedError

class DepthwiseXCorr(nn.Module):
    "Depthwise Correlation Layer"
    def __init__(self, in_channels, hidden, out_channels, kernel_size=3, hidden_kernel_size=5):
        super(DepthwiseXCorr, self).__init__()
        self.conv_kernel = nn.Sequential(
            nn.Conv2d(in_channels, hidden, kernel_size=kernel_size, bias=False),
            nn.BatchNorm2d(hidden),
            nn.ReLU(inplace=True),
        )
        self.conv_search = nn.Sequential(
            nn.Conv2d(in_channels, hidden, kernel_size=kernel_size, bias=False),
            nn.BatchNorm2d(hidden),
            nn.ReLU(inplace=True),
        )
        self.head = nn.Sequential(
            nn.Conv2d(hidden, hidden, kernel_size=1, bias=False),
            nn.BatchNorm2d(hidden),
            nn.ReLU(inplace=True),
            nn.Conv2d(hidden, out_channels, kernel_size=1)
        )

    def forward(self, kernel, search):
        kernel = self.conv_kernel(kernel)
        search = self.conv_search(search)
        feature = xcorr_depthwise(search, kernel)
        out = self.head(feature)
        return out

class DepthwiseRPN(RPN):
    def __init__(self, anchor_num=5, in_channels=256, out_channels=256):
        super(DepthwiseRPN, self).__init__()
        self.cls = DepthwiseXCorr(in_channels, out_channels, 2 * anchor_num)
        self.loc = DepthwiseXCorr(in_channels, out_channels, 4 * anchor_num)

    def forward(self, z_f, x_f):
        cls = self.cls(z_f, x_f)
        loc = self.loc(z_f, x_f)
        return cls, loc

class MultiRPN(RPN):
    def __init__(self, anchor_num, in_channels, weighted=False):
        super(MultiRPN, self).__init__()
        self.weighted = weighted
        for i in range(len(in_channels)):
            self.add_module('rpn'+str(i+2),
                            DepthwiseRPN(anchor_num, in_channels[i], in_channels[i]))
        if self.weighted:
            self.cls_weight = nn.Parameter(torch.ones(len(in_channels)))
            self.loc_weight = nn.Parameter(torch.ones(len(in_channels)))

    def forward(self, z_fs, x_fs):
        cls = []
        loc = []

        rpn2 = self.rpn2
        z_f2 = z_fs[0]
        x_f2 = x_fs[0]
        c2, l2 = rpn2(z_f2, x_f2)
        cls.append(c2)
        loc.append(l2)

        rpn3 = self.rpn3
        z_f3 = z_fs[1]
        x_f3 = x_fs[1]
        c3, l3 = rpn3(z_f3, x_f3)
        cls.append(c3)
        loc.append(l3)

        rpn4 = self.rpn4
        z_f4 = z_fs[2]
        x_f4 = x_fs[2]
        c4, l4 = rpn4(z_f4, x_f4)
        cls.append(c4)
        loc.append(l4)

        if self.weighted:
            cls_weight = F.softmax(self.cls_weight, 0)
            loc_weight = F.softmax(self.loc_weight, 0)

        def avg(lst):
            return sum(lst) / len(lst)

        def weighted_avg(lst, weight):
            s = 0
            fixed_len = 3
            for i in range(fixed_len):
                s += lst[i] * weight[i]
            return s

        if self.weighted:
            weighted_avg_cls = weighted_avg(cls, cls_weight)
            weighted_avg_loc = weighted_avg(loc, loc_weight)
            return weighted_avg_cls, weighted_avg_loc
        else:
            avg_cls = avg(cls)
            avg_loc = avg(loc)
            return avg_cls, avg_loc

# End of class for RPN
def conv3x3(in_planes, out_planes, stride=1, dilation=1):
    "3x3 convolution with padding"
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=dilation, bias=False, dilation=dilation)
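
# Quick sanity sketch (not part of the original gist): with stride=1 and
# padding=dilation, conv3x3 preserves the spatial size of its input.
# The channel counts and map size below are arbitrary illustrative values.
_conv_check = conv3x3(8, 16, stride=1, dilation=2)
_conv_dummy = torch.zeros(1, 8, 16, 16)
assert _conv_check(_conv_dummy).shape == (1, 16, 16, 16)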
def xcorr_depthwise(x, kernel):
    """
    Depthwise cross-correlation: each channel of the search feature `x` is
    correlated with the corresponding channel of the template feature `kernel`,
    implemented as a grouped convolution with groups = batch * channel.
    """
    batch = kernel.size(0)
    channel = kernel.size(1)
    # Fold batch and channel together so one grouped conv handles every channel.
    x = x.view(1, batch*channel, x.size(2), x.size(3))
    kernel = kernel.view(batch*channel, 1, kernel.size(2), kernel.size(3))
    conv = nn.Conv2d(batch*channel, batch*channel,
                     kernel_size=(kernel.size(2), kernel.size(3)),
                     bias=False, groups=batch*channel)
    conv.weight = nn.Parameter(kernel)
    out = conv(x)
    out = out.view(batch, channel, out.size(2), out.size(3))
    out = out.detach()
    return out
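
# Shape sanity sketch (assumption, not part of the original gist): with the
# feature sizes the SiamRPN++ neck typically produces (256-channel maps,
# 7x7 for the template and 31x31 for the search region), the depthwise
# correlation gives a 25x25 response, and DepthwiseRPN returns 2*anchor_num
# classification channels and 4*anchor_num localisation channels.
with torch.no_grad():
    _z_feat = torch.randn(1, 256, 7, 7)
    _x_feat = torch.randn(1, 256, 31, 31)
    assert xcorr_depthwise(_x_feat, _z_feat).shape == (1, 256, 25, 25)
    _rpn_check = DepthwiseRPN(anchor_num=5, in_channels=256, out_channels=256)
    _cls_check, _loc_check = _rpn_check(_z_feat, _x_feat)
    assert _cls_check.shape == (1, 10, 25, 25)
    assert _loc_check.shape == (1, 20, 25, 25)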
# Load the target and search
z_crop = np.load('numpy_z_crop.npy')
x_crop = np.load('numpy_x_crop.npy')
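
# Shape check (not in the original gist): the .npy crops are assumed to be
# pre-processed NCHW blobs saved from the tracker, typically (1, 3, 127, 127)
# for the template and (1, 3, 255, 255) for the search region.
print('z_crop:', z_crop.shape, 'x_crop:', x_crop.shape)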
# Load the ONNX models
backbone_search = cv2.dnn.readNetFromONNX('resnet_search.onnx')
backbone_target = cv2.dnn.readNetFromONNX('resnet_target.onnx')
neck_1_out_1 = cv2.dnn.readNetFromONNX('neck_1_out_1.onnx')
neck_1_out_2 = cv2.dnn.readNetFromONNX('neck_1_out_2.onnx')
neck_2_out_1 = cv2.dnn.readNetFromONNX('neck_2_out_1.onnx')
neck_2_out_2 = cv2.dnn.readNetFromONNX('neck_2_out_2.onnx')
neck_3_out_1 = cv2.dnn.readNetFromONNX('neck_3_out_1.onnx')
neck_3_out_2 = cv2.dnn.readNetFromONNX('neck_3_out_2.onnx')
rpn_head = cv2.dnn.readNetFromONNX('rpn_head.onnx')
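
# Optional (assumption, not in the original gist): pin every imported network
# to the default OpenCV CPU backend so the comparison below behaves the same
# across machines.
for _net in (backbone_search, backbone_target,
             neck_1_out_1, neck_1_out_2,
             neck_2_out_1, neck_2_out_2,
             neck_3_out_1, neck_3_out_2,
             rpn_head):
    _net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
    _net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)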
# Use the Imported ONNX Models
backbone_target.setInput(z_crop)
outNames = ['output_1', 'output_2', 'output_3']
zf_1, zf_2, zf_3 = backbone_target.forward(outNames)
neck_1_out_1.setInput(zf_1)
zf_s_1 = neck_1_out_1.forward()
neck_2_out_1.setInput(zf_2)
zf_s_2 = neck_2_out_1.forward()
neck_3_out_1.setInput(zf_3)
zf_s_3 = neck_3_out_1.forward()
zf_s = np.stack([zf_s_1, zf_s_2, zf_s_3])
backbone_search.setInput(x_crop)
outNames = ['output_1', 'output_2', 'output_3']
xf_1, xf_2, xf_3 = backbone_search.forward(outNames)
neck_1_out_2.setInput(xf_1)
xf_s_1 = neck_1_out_2.forward()
neck_2_out_2.setInput(xf_2)
xf_s_2 = neck_2_out_2.forward()
neck_3_out_2.setInput(xf_3)
xf_s_3 = neck_3_out_2.forward()
xf_s = np.stack([xf_s_1, xf_s_2, xf_s_3])
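
# Sanity print (not in the original gist): both stacks should hold the three
# neck outputs along a new leading axis, e.g. (3, 1, 256, 7, 7) for the
# template features and (3, 1, 256, 31, 31) for the search features
# (the exact spatial sizes depend on the exported models).
print('zf_s:', zf_s.shape, 'xf_s:', xf_s.shape)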
# Outputs from Imported RPN
rpn_head.setInput(zf_s, 'input_1')
rpn_head.setInput(xf_s, 'input_2')
cls = rpn_head.forward('output_1')
loc = rpn_head.forward('output_2')
print(cls)
print(loc)
# Outputs from torch RPN
torch_rpn_head = MultiRPN(anchor_num=5, in_channels=[256, 256, 256], weighted=False)
torch_cls, torch_loc = torch_rpn_head(torch.Tensor(zf_s), torch.Tensor(xf_s))
print(torch_cls)
print(torch_loc)
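
# Rough numerical comparison (not in the original gist): this is only meaningful
# if the PyTorch MultiRPN above is loaded with the same weights that were
# exported to rpn_head.onnx; with a randomly initialised module it merely
# confirms that both paths agree on output shapes.
np_torch_cls = torch_cls.detach().numpy()
np_torch_loc = torch_loc.detach().numpy()
print('cls shapes:', cls.shape, np_torch_cls.shape,
      'max abs diff:', np.max(np.abs(cls - np_torch_cls)))
print('loc shapes:', loc.shape, np_torch_loc.shape,
      'max abs diff:', np.max(np.abs(loc - np_torch_loc)))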