@hasnainv · Last active December 4, 2018
Convert Caffe models to Keras models

## Convert Caffe VGG and other models to Keras

Use this keras fork to convert the model.

How to convert the VGG16 model:

python -m keras.caffe.caffe2keras -load_path [model_directory] -prototxt [protoxt_name] -caffemodel [caffemodel_name] -store_path [output_path] -network_type Sequential
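For example, if the VGG16 prototxt and caffemodel have been downloaded into ./vgg16 (the file names below follow the original Caffe Model Zoo release; substitute whatever names your files actually use):

python -m keras.caffe.caffe2keras -load_path ./vgg16 -prototxt VGG_ILSVRC_16_layers_deploy.prototxt -caffemodel VGG_ILSVRC_16_layers.caffemodel -store_path ./vgg16 -network_type Sequential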

Load the keras model file using loadvgg.py

# How to load the model
def build_model(img_width=224, img_height=224):
    from keras.models import Sequential
    from keras.layers import Convolution2D, ZeroPadding2D, MaxPooling2D, Activation, Flatten, Dense, Dropout

    model = Sequential()

    # Block 1: two 64-filter conv layers
    model.add(ZeroPadding2D((1, 1), input_shape=(3, img_width, img_height)))
    model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_2'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 2: two 128-filter conv layers
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_1'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(128, 3, 3, activation='relu', name='conv2_2'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 3: three 256-filter conv layers
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_1'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_2'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(256, 3, 3, activation='relu', name='conv3_3'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 4: three 512-filter conv layers
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_1'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_2'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv4_3'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Block 5: three 512-filter conv layers
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_1'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_2'))
    model.add(Activation('relu'))
    model.add(ZeroPadding2D((1, 1)))
    model.add(Convolution2D(512, 3, 3, activation='relu', name='conv5_3'))
    model.add(Activation('relu'))
    model.add(MaxPooling2D((2, 2), strides=(2, 2)))

    # Classifier: two 4096-unit FC layers and a 1000-way softmax
    model.add(Flatten())
    model.add(Dense(4096))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(4096))
    model.add(Activation('relu'))
    model.add(Dropout(0.5))
    model.add(Dense(1000))
    model.add(Activation('softmax'))
    return model
import h5py

def loadweights(model, weights_path):
    f = h5py.File(weights_path, 'r')
    for k in range(f.attrs['nb_layers']):
        if k >= len(model.layers):
            # we don't look at the last (fully-connected) layers in the savefile
            break
        g = f['layer_{}'.format(k)]
        weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
        try:
            model.layers[k].set_weights(weights)
        except Exception:
            # skip layers whose saved weights don't apply (e.g. parameter-free layers)
            pass
    f.close()

model = build_model()
weights_path = '/path/to/your/keras/model.h5'
loadweights(model, weights_path)
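
Once the weights are loaded you can run a forward pass. The sketch below is an assumption, not part of the original gist: Caffe-trained VGG nets expect 224x224 inputs in BGR channel order with the ImageNet channel means subtracted, and `load_image` stands in for whatever image-loading code you use.

import numpy as np

def preprocess(img):
    # img: 224x224x3 RGB uint8 array
    img = img[:, :, ::-1].astype('float32')            # RGB -> BGR, as Caffe expects
    img -= np.array([103.939, 116.779, 123.68])        # subtract ImageNet BGR channel means
    return img.transpose((2, 0, 1))[np.newaxis, ...]   # HWC -> CHW plus batch dimension

model.compile(optimizer='sgd', loss='categorical_crossentropy')  # older Keras requires compile before predict
x = preprocess(load_image('cat.jpg'))                  # load_image is a hypothetical helper
preds = model.predict(x)
print(preds.argmax())                                  # top-1 ImageNet class index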
@hadikazemi commented Oct 19, 2016

Nice job, but why are the activation functions of the Convolution2D layers duplicated? The activation of each Convolution2D layer is defined both in the 'activation=' argument of the layer and in the 'Activation' layer that follows it.
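For what it's worth, either form alone applies the ReLU; since relu(relu(x)) == relu(x), the duplication is harmless but redundant. A single specification is equivalent:

model.add(Convolution2D(64, 3, 3, activation='relu', name='conv1_1'))
# no separate Activation('relu') needed; the activation argument already applies it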
