import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.layers import Dense, Dropout, LeakyReLU
from tensorflow.keras import regularizers

def get_CheXNet_model(HEIGHT, WIDTH, N_CHANNELS):
    """Build a DenseNet121 backbone and load the pre-trained CheXNet weights."""
    base_model = keras.applications.DenseNet121(
        weights=None,
        include_top=False,
        input_shape=(HEIGHT, WIDTH, N_CHANNELS),
        pooling="avg",
    )
    # Recreate the 14-class CheXNet head so the published weight file loads cleanly.
    predictions = keras.layers.Dense(14, activation='sigmoid', name='predictions')(base_model.output)
    chexnet = keras.Model(inputs=base_model.input, outputs=predictions, name='CheXNet')
    chexnet.load_weights("brucechou1983_CheXNet_Keras_0.3.0_weights.h5")
    # Drop the 14-class head: `layers.pop()` is a no-op on tf.keras functional models,
    # so expose the global-average-pooled features as the model output instead.
    chexnet = keras.Model(inputs=chexnet.input, outputs=chexnet.layers[-2].output, name='CheXNet')
    # chexnet.trainable = False  # uncomment to freeze the backbone
    return chexnet

tf.random.set_seed(100)        # set the global random seed for reproducibility
keras.backend.clear_session()  # reset the Keras graph state between notebook runs
# IMG_HEIGHT, IMG_WIDTH and N_CHANNELS are assumed to be defined earlier in the notebook.
base_model = get_CheXNet_model(IMG_HEIGHT, IMG_WIDTH, N_CHANNELS)
# Stack an L2-regularized fully connected head on top of the CheXNet features.
x = Dense(1024, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(base_model.output)
x = Dropout(0.2)(x)
x = Dense(512, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(x)
x = Dropout(0.2)(x)
x = Dense(256, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(x)
x = Dropout(0.2)(x)
x = Dense(128, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(x)
x = Dropout(0.2)(x)
x = Dense(64, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(x)
x = Dropout(0.2)(x)
x = Dense(32, activation=LeakyReLU(), kernel_initializer='he_normal', kernel_regularizer=regularizers.l2(0.0001))(x)
x = Dropout(0.2)(x)
# Two-class softmax output layer.
predictions = Dense(2, activation='softmax', name='Final')(x)
model = keras.models.Model(inputs=base_model.input, outputs=predictions)
model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.0001),  # `lr` is deprecated in tf.keras
              loss='categorical_crossentropy',
              metrics=[keras.metrics.Recall()])
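
# --- Usage sketch (assumption; not part of the original gist) ---
# `train_ds` and `val_ds` are hypothetical tf.data.Dataset pipelines yielding
# (image, one-hot label) batches that match the (IMG_HEIGHT, IMG_WIDTH, N_CHANNELS)
# input and the 2-class softmax output; kept commented out so the file runs as-is.
# checkpoint = keras.callbacks.ModelCheckpoint('chexnet_transfer.h5',
#                                              monitor='val_recall', save_best_only=True)
# history = model.fit(train_ds, validation_data=val_ds, epochs=10, callbacks=[checkpoint])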