Net Surgery
import sys
import numpy as np
import scipy.io
import caffe

# Take args: model architecture (prototxt), input/output weights (binaryproto),
# the layer name, and the channel permutation (e.g. 2 1 0 to swap BGR <-> RGB).
arch_f, in_f, out_f = sys.argv[1:4]
layer = sys.argv[4]
channel_order = [int(c) for c in sys.argv[5:]]

# Load the model architecture with its trained weights.
net = caffe.Net(arch_f, in_f, caffe.TEST)

# Remap the input channels of the layer's weights. The bias is indexed by
# output channel, so an input-channel permutation leaves it unchanged.
remapped_W = net.params[layer][0].data[:, channel_order, :, :]
net.params[layer][0].data[...] = remapped_W

# Save the remapped model.
net.save(out_f)
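
A typical use of this script is converting a model between BGR and RGB input order (the Caffe reference models expect BGR) by reversing the input channels of the first convolution. Below is a minimal sketch of the same remapping done directly in Python; the file and layer names are hypothetical.

import caffe

# Hypothetical deploy prototxt / caffemodel pair and first conv layer name.
net = caffe.Net('deploy.prototxt', 'bgr_model.caffemodel', caffe.TEST)

# Reverse the input-channel axis of the first convolution's weights: BGR <-> RGB.
# Copy first so the reversed view does not overlap the destination buffer.
W = net.params['conv1'][0].data.copy()
net.params['conv1'][0].data[...] = W[:, ::-1, :, :]

net.save('rgb_model.caffemodel')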
The second snippet defines transplant(), which copies parameters from one net into another by layer name, coercing mismatched shapes by copying the flattened data:

from __future__ import division, print_function

import caffe
import numpy as np


def transplant(new_net, net):
    # Copy parameters layer-by-layer, matching layers by name.
    for p in net.params:
        if p not in new_net.params:
            print('dropping', p)
            continue
        for i in range(len(net.params[p])):
            if net.params[p][i].data.shape != new_net.params[p][i].data.shape:
                print('coercing', p, i, 'from', net.params[p][i].data.shape,
                      'to', new_net.params[p][i].data.shape)
            else:
                print('copying', p, i)
            # Copy the flattened data in either case; coercion pours the old
            # parameters into the new layer's shape.
            new_net.params[p][i].data.flat = net.params[p][i].data.flat
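
transplant() is useful, for example, when casting a classifier into a fully convolutional net: fully connected layers recast as convolutions keep the same number of parameters, so their weights are coerced into the new shape while matching layers are copied as-is. A minimal usage sketch with hypothetical file names:

import caffe

# Hypothetical files: source net with trained weights and a target net whose
# architecture differs (e.g. fc layers recast as convolutions).
net = caffe.Net('source.prototxt', 'source.caffemodel', caffe.TEST)
new_net = caffe.Net('target.prototxt', caffe.TEST)

transplant(new_net, net)
new_net.save('target.caffemodel')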