Skip to content

Instantly share code, notes, and snippets.

Avatar
👀
Look out, working on exciting things :)

Khyati Mahendru KhyatiMahendru

👀
Look out, working on exciting things :)
View GitHub Profile
@KhyatiMahendru
KhyatiMahendru / get_art_galleries_Places_API.py
Created Apr 11, 2019
This script was developed by me for my Data Analytics Internship at CSIR-CDRI from December 2017 to July 2018. The script is used to collect contact and location details about art galleries in a country, say Malta. Through experiments on various ways of running the script, I found that dividing our search into smaller units than countries like s…
View get_art_galleries_Places_API.py
# Setup for scraping art-gallery contact details (Google Places results) into a CSV.
# NOTE(review): the gist preview is truncated here -- the requests/BeautifulSoup
# scraping code that presumably fills the CSV rows is cut off below this point.
import requests
from bs4 import BeautifulSoup
import csv
# NOTE(review): opened without newline='' -- csv.writer on Windows will emit
# blank rows; also never closed in the visible snippet. Confirm in full source.
file = open('Art-galleries-in-Malta.csv', 'w')
f = csv.writer(file)
# Header row for the collected gallery records.
f.writerow(['Name', 'Phone_no', 'Website', 'Address', 'State/Province'])
# Search is restricted to a single country.
country = 'Malta'
View weight_update_MSE.py
def update_weights_MSE(m, b, X, Y, learning_rate):
    """One gradient-descent step for a line y = m*x + b under MSE loss.

    NOTE(review): the gist preview is truncated -- only the m_deriv
    accumulation is visible; the b_deriv accumulation, the averaging /
    learning-rate update and the return statement are cut off.
    """
    m_deriv = 0
    b_deriv = 0
    N = len(X)
    for i in range(N):
        # Calculate partial derivatives of (y - (mx + b))^2:
        # d/dm = -2x(y - (mx + b))
        m_deriv += -2*X[i] * (Y[i] - (m*X[i] + b))
        # d/db = -2(y - (mx + b))  [accumulation cut off in this preview]
View weight_update_MAE.py
def update_weights_MAE(m, b, X, Y, learning_rate):
    """One gradient-descent step for a line y = m*x + b under MAE loss.

    NOTE(review): the gist preview is truncated -- only the m_deriv
    accumulation is visible; b_deriv and the weight update are cut off.
    NOTE(review): if any residual Y[i] - (m*X[i] + b) is exactly 0 this
    raises ZeroDivisionError (|r|/r is the sign of the residual) -- confirm
    how the full source guards this.
    """
    m_deriv = 0
    b_deriv = 0
    N = len(X)
    for i in range(N):
        # Calculate partial derivatives of |y - (mx + b)|:
        # d/dm = -x * (y - (mx + b)) / |y - (mx + b)|  (i.e. -x * sign(residual))
        m_deriv += - X[i] * (Y[i] - (m*X[i] + b)) / abs(Y[i] - (m*X[i] + b))
        # d/db = -(y - (mx + b)) / |y - (mx + b)|  [accumulation cut off here]
View weight_update_Hinge.py
def update_weights_Hinge(m1, m2, b, X1, X2, Y, learning_rate):
    """One gradient step for a linear classifier m1*x1 + m2*x2 + b under hinge loss.

    Y[i] is expected in {-1, +1} (standard hinge-loss labels -- confirm against
    caller). NOTE(review): the gist preview is truncated -- the b_deriv update
    inside the margin branch and the final weight update are cut off.
    """
    m1_deriv = 0
    m2_deriv = 0
    b_deriv = 0
    N = len(X1)
    for i in range(N):
        # Calculate partial derivatives: hinge loss max(0, 1 - y*f(x)) has a
        # nonzero gradient only when the margin condition y*f(x) <= 1 holds.
        if Y[i]*(m1*X1[i] + m2*X2[i] + b) <= 1:
            m1_deriv += -X1[i] * Y[i]
            m2_deriv += -X2[i] * Y[i]
View weight_update_BCE.py
def update_weights_BCE(m1, m2, b, X1, X2, Y, learning_rate):
    """Accumulate gradient terms for logistic regression (binary cross-entropy).

    Y[i] is expected in {0, 1}. NOTE(review): the gist preview is truncated --
    only the m1_deriv accumulation is visible; m2_deriv/b_deriv and the actual
    weight update are cut off, so this visible portion returns None.
    """
    m1_deriv = 0
    m2_deriv = 0
    b_deriv = 0
    N = len(X1)
    for i in range(N):
        # Sigmoid prediction s = 1 / (1 + e^-(m1*x1 + m2*x2 + b)).
        # BUG FIX: the original computed 1 / (1 / (1 + exp(-z))), which
        # algebraically equals 1 + exp(-z), NOT the sigmoid. The spurious
        # double reciprocal is removed here.
        s = 1 / (1 + math.exp(-m1*X1[i] - m2*X2[i] - b))
        # Calculate partial derivatives. dBCE/dm1 is x1*(s - y); the leading
        # minus sign below follows the original -- presumably the truncated
        # update step compensates for it. TODO confirm against full source.
        m1_deriv += -X1[i] * (s - Y[i])
View generate_clustering_data.py
from sklearn.datasets import make_blobs

# Build a toy 2-D clustering dataset: 1000 shuffled points drawn around
# 3 random centres, seeded so runs are reproducible.
x, y = make_blobs(
    n_samples=1000,
    centers=3,
    n_features=2,
    shuffle=True,
    random_state=31,
)
View model_MCE.py
# importing requirements
from keras.layers import Dense
from keras.models import Sequential
# NOTE(review): modern Keras exposes `Adam` (capitalised class); lowercase
# `adam` only resolves on old Keras versions -- confirm the pinned version.
from keras.optimizers import adam
# alpha = 0.001 as given in the lr parameter in adam() optimizer
# build the model
# NOTE(review): gist preview truncated -- only the first hidden layer is
# visible; the output layer, compile() (multiclass cross-entropy loss per the
# snippet name) and fit() are cut off.
model_alpha1 = Sequential()
model_alpha1.add(Dense(50, input_dim=2, activation='relu'))
View model_KL.py
# importing requirements
from keras.layers import Dense
from keras.models import Sequential
# NOTE(review): modern Keras exposes `Adam` (capitalised class); lowercase
# `adam` only resolves on old Keras versions -- confirm the pinned version.
from keras.optimizers import adam
# alpha = 0.001 as given in the lr parameter in adam() optimizer
# build the model
# NOTE(review): gist preview truncated -- only the first hidden layer is
# visible; the output layer, compile() (KL-divergence loss per the snippet
# name) and fit() are cut off.
model_alpha1 = Sequential()
model_alpha1.add(Dense(50, input_dim=2, activation='relu'))
View decisiontree_entropy.py
from sklearn.tree import DecisionTreeClassifier

# Train a decision tree that chooses splits by information gain (entropy);
# the fixed seed makes any random tie-breaking reproducible.
# X and Y are assumed defined earlier in the surrounding script.
clf_entropy = DecisionTreeClassifier(criterion='entropy', random_state=33)
clf_entropy.fit(X, Y)
View decisiontree_gini.py
from sklearn.tree import DecisionTreeClassifier

# Train a decision tree that chooses splits by Gini impurity; the fixed
# seed makes any random tie-breaking reproducible.
# X and Y are assumed defined earlier in the surrounding script.
clf_gini = DecisionTreeClassifier(criterion='gini', random_state=33)
clf_gini.fit(X, Y)