import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Embedding, Conv1D, GlobalMaxPooling1D
from keras.preprocessing import sequence

# Hyperparameters for the network
max_words, batch_size, maxlen, epochs = 10000, 64, 500, 2
embedding_dims, filters, kernel_size, hidden_dims = 50, 250, 5, 150
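# --- Hypothetical data preparation (not part of the original gist) ---
# The code below assumes x_train/x_test already hold integer word-index
# sequences and y_train/y_test hold integer class labels. One way to build
# them from raw text, sketched here with placeholder lists `train_sentences`,
# `train_labels`, `test_sentences`, and `test_labels`:
#
#   from keras.preprocessing.text import Tokenizer
#   tokenizer = Tokenizer(num_words=max_words)
#   tokenizer.fit_on_texts(train_sentences)
#   x_train = tokenizer.texts_to_sequences(train_sentences)
#   x_test = tokenizer.texts_to_sequences(test_sentences)
#   y_train, y_test = np.array(train_labels), np.array(test_labels)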
# Determine the number of categories (i.e. sentence types); +1 because labels are zero-indexed
num_classes = np.max(y_train) + 1
# Vectorize the output sentence type classifications to Keras readable format
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# Pad the input vectors to ensure a consistent length
x_train = sequence.pad_sequences(x_train, maxlen=maxlen)
x_test = sequence.pad_sequences(x_test, maxlen=maxlen)
model = Sequential()
# Create the Embedding (input) layer (max_words) --> Convolutional Layer
model.add(Embedding(max_words, embedding_dims, input_length=maxlen))
model.add(Dropout(0.2)) # randomly zeroes 20% of the embedding outputs to reduce overfitting
# Create the convolutional layer
model.add(Conv1D(filters, kernel_size, padding='valid', activation='relu', strides=1))
# Create the pooling layer
model.add(GlobalMaxPooling1D())
# Create the fully connected layer
model.add(Dense(hidden_dims))
model.add(Dropout(0.2))
model.add(Activation('relu'))
# Create the output layer (num_classes)
model.add(Dense(num_classes))
model.add(Activation('softmax'))
# Add optimization method, loss function and optimization value
model.compile(loss='categorical_crossentropy',
              optimizer='adam', metrics=['accuracy'])
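# Optional addition (not in the original gist): print a layer-by-layer summary
# to verify output shapes and parameter counts before training
model.summary()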
# "Fit the model" (train model), using training data (80% of datset)
model.fit(x_train, y_train, batch_size=batch_size,
          epochs=epochs, validation_data=(x_test, y_test))
# Evaluate the trained model, using the test data (20% of the dataset)
score = model.evaluate(x_test, y_test, batch_size=batch_size)
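# Because metrics=['accuracy'] was passed to compile(), evaluate() returns
# [loss, accuracy]; printing them is an addition to the original gist
print('Test loss: %.4f, test accuracy: %.4f' % (score[0], score[1]))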