Skip to content

Instantly share code, notes, and snippets.

View rdisipio's full-sized avatar

Riccardo Di Sipio rdisipio

View GitHub Profile
# Compare training vs. validation accuracy for two training runs:
# run 1 in red, run 2 in blue; dashed lines are the validation curves.
for run, colour in ((history1, 'red'), (history2, 'blue')):
    plt.plot(run.history['binary_accuracy'], color=colour)
    plt.plot(run.history['val_binary_accuracy'], color=colour, linestyle='dashed')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train 1', 'val 1', 'train 2', 'val 2'], loc='lower right')
plt.ylim([0.8, 1])
plt.show()
# Hyper-parameters for the embedding model.
embed_dim = 16  # size of each per-bin embedding vector (passed to QuantizedFeaturesEmbedding)
batch_size = 32  # NOTE(review): presumably the fit() batch size — confirm against the training call
n_epochs = 20  # NOTE(review): presumably the fit() epoch count — confirm against the training call
import tensorflow as tf
from tensorflow.keras.layers import Conv1D, Flatten, Dense, Dropout
model2 = tf.keras.models.Sequential([
QuantizedFeaturesEmbedding(n_features, n_bins, embed_dim),
from tensorflow.keras import layers
from tensorflow.keras import initializers
class QuantizedFeaturesEmbedding(layers.Layer):
    """Keras layer embedding quantized (integer-binned) features.

    NOTE(review): this chunk is truncated — only the constructor signature and
    the super() call are visible. The lines that presumably store
    n_features / n_bins / embed_dim (and any build()/call() methods) are cut
    off; confirm against the full source. Indentation restored here, as the
    original paste lost it.
    """

    def __init__(self,
                 n_features,  # number of input features (columns)
                 n_bins,      # number of quantization bins per feature
                 embed_dim,   # size of each bin's embedding vector
                 **kwargs):
        super(QuantizedFeaturesEmbedding, self).__init__(**kwargs)
from sklearn.preprocessing import KBinsDiscretizer

# Quantile-based discretization: map each continuous feature to an
# integer bin index in [0, n_bins).
n_bins = 10 # the more, the merrier
scaled_feature_names = [f"q_{feature}" for feature in feature_names]
qt = KBinsDiscretizer(n_bins=n_bins, encode='ordinal', strategy='quantile')
qt.fit(X)
# nb: bin number must be an integer
X_qt = qt.transform(X).astype(np.int32)
# Loss curves for the first model: training loss vs. held-out validation loss.
for series in ('loss', 'val_loss'):
    plt.plot(history1.history[series])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.ylim([0., 1.])
plt.show()
# Baseline dense classifier: a single ReLU hidden layer feeding a
# 2-unit softmax head.
hidden_layer = tf.keras.layers.Dense(128, activation='relu')
output_layer = tf.keras.layers.Dense(2, activation='softmax') # binary classification
model1 = tf.keras.Sequential([hidden_layer, output_layer])
# Softmax already produces probabilities, hence from_logits=False.
loss = tf.keras.losses.BinaryCrossentropy(from_logits=False)
metrics = [tf.keras.metrics.BinaryAccuracy()]
model1.compile(optimizer='adam',
loss=loss,
import numpy as np
import tensorflow as tf
from sklearn import preprocessing
# Standardize features to zero mean / unit variance.
X_scaled = preprocessing.StandardScaler().fit_transform(X)
# One-hot encode the labels as a dense array. The `sparse` keyword was
# renamed to `sparse_output` in scikit-learn 1.2 and removed in 1.4, so
# the old spelling raises a TypeError on current releases.
y_onehot = preprocessing.OneHotEncoder(sparse_output=False).fit_transform(y.reshape(-1, 1))
(
X_train,
from dtreeviz.trees import dtreeviz

# Render the fitted decision tree on the held-out split. The bare `viz`
# expression on the last line displays the figure in a notebook cell.
viz = dtreeviz(
    clf,
    X_test,
    y_test,
    target_name="target",
    feature_names=feature_names,
    class_names=class_names,
)
viz
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split # Import train_test_split function
from sklearn import metrics
(
X_train,
X_test,
y_train,
y_test
import matplotlib.pyplot as plt
# 2-D PCA scatter coloured by class label; legend_elements() yields one
# proxy artist per distinct colour, which we pair with the class names.
plot = plt.scatter(X_pca[:, 0], X_pca[:, 1], c=y)
class_handles, _ = plot.legend_elements()
plt.legend(handles=class_handles, labels=class_names)