Skip to content

Instantly share code, notes, and snippets.

@shubham0204
Created May 1, 2019 03:13
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save shubham0204/e6875fad1b15a8b54e603dd4960b7f53 to your computer and use it in GitHub Desktop.
import tensorflow as tf
from tensorflow.keras import optimizers,losses,activations
from tensorflow.keras.layers import *
# Hyperparameters for the classifier head.
dropout_rate = 0.5
input_shape = ( maxlen , )      # flat input: a sequence of `maxlen` scalars (maxlen defined elsewhere — TODO confirm)
target_shape = ( maxlen , 1 )   # same sequence with an explicit channel axis, as Conv1D requires

# NOTE: tf.nn.leaky_relu is the official TensorFlow implementation of leaky ReLU;
# the previously referenced `activations.leaky_relu` does not exist in the
# official tf.keras builds this snippet targets.
self.model_scheme = [
    # (maxlen,) -> (maxlen, 1): give Conv1D its expected channel dimension.
    # Reuse `target_shape` instead of re-hardcoding ( maxlen , 1 ).
    Reshape( input_shape=input_shape , target_shape=target_shape ),
    Conv1D( 128 , kernel_size=2 , strides=1 , activation=tf.nn.leaky_relu , kernel_regularizer='l1' ),
    MaxPooling1D( pool_size=2 ),
    Flatten() ,
    Dense( 64 , activation=tf.nn.leaky_relu ) ,
    BatchNormalization(),
    Dropout( dropout_rate ),
    # One unit per class with softmax, matching the categorical-crossentropy loss below.
    Dense( number_of_classes , activation=activations.softmax )
]
self.__model = tf.keras.Sequential( self.model_scheme )
self.__model.compile(
    # `lr` is deprecated (and removed in newer TF releases); `learning_rate`
    # is the supported keyword and accepts the same value.
    optimizer=optimizers.Adam( learning_rate=0.0001 ),
    loss=losses.categorical_crossentropy ,
    metrics=[ 'accuracy' ] ,
)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment