# Import Library
# Import other necessary libraries such as pandas and numpy as needed
from sklearn import tree
# Assumes you have X (predictors) and y (target) for the training set and x_test (predictors) for the test set
# Create tree object
model = tree.DecisionTreeClassifier(criterion='gini')  # for classification; the split criterion can be 'gini' or 'entropy' (information gain), with 'gini' as the default
# model = tree.DecisionTreeRegressor()  # for regression
# Train the model on the training set and check its score
model.fit(X, y)
model.score(X, y)
# Predict on the test set
predicted = model.predict(x_test)
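# A minimal self-contained sketch of the same workflow, assuming the scikit-learn
# iris dataset stands in for X, y and x_test (the dataset choice and variable
# names below are illustrative, not part of the original snippet).
from sklearn import tree
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
iris = load_iris()
X_train, x_test, y_train, y_test = train_test_split(iris.data, iris.target, random_state=0)
model = tree.DecisionTreeClassifier(criterion='gini')
model.fit(X_train, y_train)
print(model.score(X_train, y_train))  # training accuracy
print(model.predict(x_test[:5]))      # predicted classes for the first five test samples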
from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
digits = datasets.load_digits()
# create the KNeighborsClassifier
clf = KNeighborsClassifier(n_neighbors=6)
# use every sample except the last as the training set; the last digit is held out
x, y = digits.data[:-1], digits.target[:-1]
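# A hedged completion of the snippet above: fitting the classifier and predicting
# the held-out last digit is an assumption about where the gist was going, based
# on the train/test split used above.
clf.fit(x, y)
print('Prediction:', clf.predict(digits.data[-1:]))
print('Actual label:', digits.target[-1])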
import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model
xmin, xmax = -7, 7  # plotting range for the toy data set
n_samples = 77
np.random.seed(0)
# Generate a toy data set: Gaussian-distributed x with binary labels y = 1 where x > 0
x = np.random.normal(size=n_samples)
y = (x > 0).astype(float)  # np.float is removed in recent NumPy; use the builtin float
# Import Library
from sklearn.linear_model import LogisticRegression
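# A hedged sketch of how these fragments fit together: fit a LogisticRegression
# model to the toy (x, y) data generated above and plot the fitted probability
# curve (the plotting details are an assumption, not part of the original gist).
clf = LogisticRegression(C=1e5)
clf.fit(x.reshape(-1, 1), y)  # scikit-learn expects a 2-D feature matrix
x_plot = np.linspace(xmin, xmax, 300).reshape(-1, 1)
plt.scatter(x, y, color='black', zorder=20)
plt.plot(x_plot, clf.predict_proba(x_plot)[:, 1], color='blue')
plt.xlabel('x')
plt.ylabel('P(y = 1)')
plt.show()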
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model
from sklearn.metrics import mean_squared_error, r2_score
diabetes = datasets.load_diabetes()
diabetes_X = diabetes.data[:, np.newaxis, 2]  # use a single feature (column 2, BMI) as the predictor
# Split the data into training and test sets: the last 30 samples form the test set
diabetes_X_train = diabetes_X[:-30]
diabetes_X_test = diabetes_X[-30:]
# Split the targets the same way
diabetes_y_train = diabetes.target[:-30]
diabetes_y_test = diabetes.target[-30:]
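# A hedged continuation matching the otherwise unused mean_squared_error / r2_score
# imports above: fit an ordinary least-squares model on the training split and
# evaluate it on the 30 held-out samples.
regr = linear_model.LinearRegression()
regr.fit(diabetes_X_train, diabetes_y_train)
diabetes_y_pred = regr.predict(diabetes_X_test)
print('Coefficients:', regr.coef_)
print('Mean squared error: %.2f' % mean_squared_error(diabetes_y_test, diabetes_y_pred))
print('Coefficient of determination (R^2): %.2f' % r2_score(diabetes_y_test, diabetes_y_pred))
plt.scatter(diabetes_X_test, diabetes_y_test, color='black')
plt.plot(diabetes_X_test, diabetes_y_pred, color='blue', linewidth=3)
plt.show()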
import numpy as np
def gaussian(x, derivative=False):
    # Gaussian activation: f(x) = exp(-x**2); its derivative is -2*x*exp(-x**2)
    if derivative:
        for i in range(0, len(x)):
            for k in range(0, len(x[i])):
                x[i][k] = -2 * x[i][k] * np.exp(-x[i][k] ** 2)
        return x
    for i in range(0, len(x)):
        for k in range(0, len(x[i])):
            x[i][k] = np.exp(-x[i][k] ** 2)
    return x
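# A small illustrative check (the input values are arbitrary and assumed, not part
# of the original gist); note that the function modifies the array in place.
x_demo = np.array([[-1.0, 0.0, 1.0]])
print(gaussian(x_demo.copy()))                   # ≈ [[0.368 1.    0.368]]
print(gaussian(x_demo.copy(), derivative=True))  # ≈ [[ 0.736 -0.    -0.736]]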
import numpy as np
def squash(x, derivative=False):
    # Softsign ("squash") activation: f(x) = x / (1 + |x|); its derivative is 1 / (1 + |x|)**2
    if derivative:
        for i in range(0, len(x)):
            for k in range(0, len(x[i])):
                x[i][k] = 1 / (1 + abs(x[i][k])) ** 2
        return x
    for i in range(0, len(x)):
        for k in range(0, len(x[i])):
            if x[i][k] > 0:
                x[i][k] = x[i][k] / (1 + x[i][k])
            else:
                x[i][k] = x[i][k] / (1 - x[i][k])
    return x
import numpy as np
def step(x, derivative=False):
    # Binary step activation: f(x) = 1 if x > 0 else 0; its derivative is 0 everywhere (undefined at 0)
    if derivative:
        for i in range(0, len(x)):
            for k in range(len(x[i])):
                x[i][k] = 0
        return x
    for i in range(0, len(x)):
        for k in range(len(x[i])):
            x[i][k] = 1 if x[i][k] > 0 else 0
    return x
import numpy as np
def arctan(x, derivative=False):
    # Arctan activation: f(x) = arctan(x); expressed in terms of the output y = arctan(x),
    # the derivative is cos(y) ** 2 (equivalently 1 / (1 + x ** 2) in terms of the input)
    if derivative:
        return np.cos(x) ** 2
    return np.arctan(x)
# Example input (assumed; the original gist does not define x before using it)
x = np.array([[-1.0, 0.0, 1.0]])
x = arctan(x)
print(x)
import numpy as np
def relu(x, derivative=False):
    # ReLU activation: f(x) = max(0, x); its derivative is 1 for x > 0 and 0 otherwise
    if derivative:
        for i in range(0, len(x)):
            for k in range(len(x[i])):
                x[i][k] = 1 if x[i][k] > 0 else 0
        return x
    for i in range(0, len(x)):
        for k in range(len(x[i])):
            x[i][k] = max(0, x[i][k])
    return x
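# A small illustrative check (the input values are arbitrary and assumed, not part
# of the original gist); the function modifies the array in place.
x_demo = np.array([[-2.0, 0.0, 3.0]])
print(relu(x_demo.copy()))                   # [[0. 0. 3.]]
print(relu(x_demo.copy(), derivative=True))  # [[0. 0. 1.]]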
import numpy as np
def tanh(x, derivative=False):
    # Tanh activation: f(x) = tanh(x); expressed in terms of the output y = tanh(x),
    # the derivative is 1 - y ** 2
    if derivative:
        return 1 - (x ** 2)
    return np.tanh(x)
# Example input (assumed; the original gist does not define x before using it)
x = np.array([[-1.0, 0.0, 1.0]])
x = tanh(x)
print(x)