# Creating the confusion matrix heatmaps for the train and test predictions
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns

# train_predictions / test_predictions come from the fitted model (not shown in this snippet)
cf_train_matrix = confusion_matrix(y_train, train_predictions)
plt.figure(figsize=(10, 8))
sns.heatmap(cf_train_matrix, annot=True, fmt='d')

cf_test_matrix = confusion_matrix(y_test, test_predictions)
plt.figure(figsize=(10, 8))
sns.heatmap(cf_test_matrix, annot=True, fmt='d')
plt.show()
"""
===============================================
Objective: Building email classifier with spacy
Author: saimadhu.polamuri
Blog: dataaspirant.com
Date: 2020-07-17
===============================================
"""
"""
===============================================
Objective: Implementing confusion matrix in different ways.
Author: saimadhu.polamuri
Blog: https://dataaspirant.com
Date: 2020-08-02
===============================================
"""
"""
===============================================
Objective: Building email classifier with spacy
Author: sharmila.polamuri
Blog: https://dataaspirant.com
Date: 2020-08-05
===============================================
"""
"""
===============================================
Objective: Handling imbalanced data
Author: Jaiganesh Nagidi
Blog: https://dataaspirant.com
Date: 2020-08-09
===============================================
"""
"""
===============================================
Objective: Implementing Markov Chains model
Author: Venkatesh Nagilla
Blog: https://dataaspirant.com
Date: 2020-08-09
===============================================
"""
import pymc3 as pm
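The gist itself imports PyMC3, but as a plain illustration of a discrete Markov chain here is a minimal NumPy-only sketch: a transition matrix over toy weather states and a short simulated sequence. The state names and probabilities are assumptions, not values from the original gist.

# Simulating a simple two-state Markov chain with NumPy (illustrative sketch)
import numpy as np

states = ["sunny", "rainy"]
# transition_matrix[i][j] = P(next state is j | current state is i)
transition_matrix = np.array([[0.8, 0.2],
                              [0.4, 0.6]])

rng = np.random.default_rng(42)
current = 0  # start in "sunny"
sequence = [states[current]]
for _ in range(10):
    current = rng.choice(2, p=transition_matrix[current])
    sequence.append(states[current])
print(sequence)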
## Moons dataset
import numpy as np
from sklearn.datasets import make_moons
np.random.seed(800)
x, y = make_moons(n_samples=100, noise=0.2, random_state=1)
# plot the graph
import matplotlib.pyplot as plt
# importing libraries
import tensorflow as tf
import warnings
from mlxtend.plotting import plot_decision_regions
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=1)  # split ratio assumed
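The baseline (unregularized) model fit is missing from this scrape; a minimal sketch that would produce the history object plotted below, mirroring the regularized architecture further down (layer size and epoch count are assumptions):

# Baseline model without regularization (illustrative sketch; hyperparameters assumed)
model = Sequential()
model.add(Dense(500, input_dim=2, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
history = model.fit(x_train, y_train,
                    validation_data=(x_test, y_test),
                    epochs=100, verbose=0)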
## Plot train and test loss
plt.plot(history.history['loss'], label='train')
plt.plot(history.history['val_loss'], label='test')
plt.legend()
plt.show()
## Model after applying Regularization techniques
model = Sequential()
model.add(Dense(500, input_dim=2, activation='relu',kernel_regularizer='l2'))
model.add(Dense(1, activation='sigmoid',kernel_regularizer='l2'))
model.compile(loss='binary_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])
history = model.fit(x_train, y_train,
                    validation_data=(x_test, y_test),