from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, LSTM, GlobalMaxPooling1D, Dense
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

model = Sequential()

# Embedding layer: pre-trained 300-d word vectors, frozen during training, input length 100
model.add(Embedding(size_of_vocabulary, 300, weights=[embedding_matrix], input_length=100, trainable=False))
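# Note: size_of_vocabulary and embedding_matrix are assumed to have been prepared
# earlier from a fitted tokenizer and pre-trained 300-d word vectors (e.g. GloVe).
# Illustrative sketch only (hypothetical names; this step would run before building the model):
#   embedding_matrix = np.zeros((size_of_vocabulary, 300))
#   for word, i in tokenizer.word_index.items():
#       vector = pretrained_vectors.get(word)      # dict mapping word -> 300-d vector
#       if vector is not None and i < size_of_vocabulary:
#           embedding_matrix[i] = vector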
# LSTM layer
model.add(LSTM(128, return_sequences=True, dropout=0.2))

# Global max pooling over the time dimension
model.add(GlobalMaxPooling1D())

# Dense layers
model.add(Dense(64, activation='relu'))
model.add(Dense(1, activation='sigmoid'))

# Add loss function, metrics, optimizer
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['acc'])
# Add callbacks: early stopping on validation loss, checkpoint on best validation accuracy
es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=3)
mc = ModelCheckpoint('best_model.h5', monitor='val_acc', mode='max', save_best_only=True, verbose=1)

# Print summary of the model
print(model.summary())
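# Usage sketch (not part of the original snippet): train with the callbacks defined
# above and reload the best checkpoint. x_train/y_train and x_val/y_val are assumed
# to be padded sequences of length 100 with binary labels; batch size and epoch count
# are illustrative.
history = model.fit(x_train, y_train,
                    batch_size=128,
                    epochs=10,
                    validation_data=(x_val, y_val),
                    callbacks=[es, mc])

# Reload the weights that scored the highest validation accuracy
from tensorflow.keras.models import load_model
best_model = load_model('best_model.h5')
_, val_acc = best_model.evaluate(x_val, y_val, batch_size=128, verbose=0)
print('Best validation accuracy: %.3f' % val_acc)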