Daniel Tello Gaete (dtellogaete)

💭
Work hard
View GitHub Profile
# Logistic regression
# Import the dataset
dataset = read.csv('Social_Network_Ads.csv')
dataset = dataset[, 3:5]

# Split into training and test sets
library(caTools)
set.seed(0)
split = sample.split(dataset$Purchased, SplitRatio = 0.75)
training = subset(dataset, split == TRUE)
testing = subset(dataset, split == FALSE)
# Import libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Import the training dataset and fill missing values with the column mean
dataset = pd.read_csv('Social_Network_Ads.csv')
dataset['Age'] = dataset['Age'].fillna(dataset['Age'].mean())
dataset['EstimatedSalary'] = dataset['EstimatedSalary'].fillna(dataset['EstimatedSalary'].mean())
X = dataset.iloc[:len(dataset), [2, 3]].values

# Gradient descent algorithm
class LogisticRegressionGD(object):
    def __init__(self, l_rate=0.1, n_iter=10000, random_state=1):
        self.l_rate = l_rate
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        # Seeded random generator for reproducible weight initialization
        rgen = np.random.RandomState(self.random_state)
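The gist preview cuts fit off right after the random generator is created. Below is a minimal sketch of how the class could be completed, assuming the usual batch gradient descent for logistic regression (sigmoid activation, cross-entropy gradient). Only the constructor and the rgen line come from the snippet above; the weight attribute w_, the update rule, and predict are assumptions, not the original gist.

import numpy as np

class LogisticRegressionGD(object):
    """Logistic regression via batch gradient descent (sketch, not the original gist)."""

    def __init__(self, l_rate=0.1, n_iter=10000, random_state=1):
        self.l_rate = l_rate
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        # Small random initial weights; w_[0] is the bias term (assumed convention)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        for _ in range(self.n_iter):
            net_input = np.dot(X, self.w_[1:]) + self.w_[0]
            output = 1.0 / (1.0 + np.exp(-np.clip(net_input, -250, 250)))  # sigmoid
            errors = y - output
            # Gradient step on the cross-entropy loss, averaged over the batch
            self.w_[1:] += self.l_rate * X.T.dot(errors) / X.shape[0]
            self.w_[0] += self.l_rate * errors.sum() / X.shape[0]
        return self

    def predict(self, X):
        # Class 1 when the linear score is non-negative (i.e. sigmoid >= 0.5)
        return np.where(np.dot(X, self.w_[1:]) + self.w_[0] >= 0.0, 1, 0)

With raw Age and EstimatedSalary values (salaries in the tens of thousands), the gradient steps are unstable at l_rate = 0.1, so standardizing the two features first usually matters here.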
# Import the dataset
dataset = read.csv('Admission_Predict_Ver1.1.csv', sep = ",")
dataset = dataset[1:length(dataset$GRE.Score), c(2, 9)]

# Split into training and test sets
library(caTools)
set.seed(0)
split = sample.split(dataset$GRE.Score, SplitRatio = 0.75)
training = subset(dataset, split == TRUE)
testing = subset(dataset, split == FALSE)

# Gradient descent function
LinearRegressionGD = function(lrate = 0.1, niter = 10000,
                              X, y, theta){
  const = lrate*(1/length(X))
  for(i in 1:niter){
    h = X*theta[2] + theta[1]
    theta[1] = theta[1] - const*sum(h - y)
    theta[2] = theta[2] - const*sum((h - y)*X)
  }
  return(theta)
}
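For reference, the update rule this function implements for simple linear regression with hypothesis h(x_i) = theta_1*x_i + theta_0, learning rate alpha (lrate) and m training points is the standard batch gradient descent step (textbook material, not part of the gist):

\theta_0 \leftarrow \theta_0 - \frac{\alpha}{m}\sum_{i=1}^{m}\bigl(h(x_i) - y_i\bigr), \qquad \theta_1 \leftarrow \theta_1 - \frac{\alpha}{m}\sum_{i=1}^{m}\bigl(h(x_i) - y_i\bigr)\,x_i

The theta_1 gradient carries the extra factor x_i inside the sum, which is why the second update uses sum((h - y)*X).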
# Import libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Import the training dataset
dataset = pd.read_csv('Admission_Predict_Ver1.1.csv')
# GRE Score column as the single feature, with a leading column of ones for the intercept
X = dataset.iloc[:len(dataset), 1].values
X = X.reshape(-1, 1)
X = np.insert(X, 0, 1, axis=1)
dtellogaete / linear_regression.py (last active February 13, 2020)
Linear Regression Gradient Descent in Python
class LinearRegressionGD(object):
    def __init__(self, l_rate=0.1, n_iter=10000):
        self.l_rate = l_rate
        self.n_iter = n_iter

    def fit(self, X, y, theta):
        self.theta = theta
        # Second column of X holds the feature values (the first column is the intercept term)
        X_value = X[:, 1].reshape(-1, 1)
        const = self.l_rate*(1/X.shape[0])
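The gist preview also cuts this fit method off after the const line. A minimal sketch of how the class could be completed so that it mirrors the R LinearRegressionGD function above; the loop body, the return value, and the predict method are assumptions, not the original code.

import numpy as np

class LinearRegressionGD(object):
    """Simple linear regression via batch gradient descent (sketch, not the original gist)."""

    def __init__(self, l_rate=0.1, n_iter=10000):
        self.l_rate = l_rate
        self.n_iter = n_iter

    def fit(self, X, y, theta):
        # theta[0] is the intercept, theta[1] the slope; X has a leading column of ones
        self.theta = np.asarray(theta, dtype=float)
        X_value = X[:, 1].reshape(-1, 1)
        y = np.asarray(y, dtype=float).reshape(-1, 1)
        const = self.l_rate * (1 / X.shape[0])
        for _ in range(self.n_iter):
            h = X_value * self.theta[1] + self.theta[0]          # current predictions
            self.theta[0] -= const * np.sum(h - y)               # intercept gradient step
            self.theta[1] -= const * np.sum((h - y) * X_value)   # slope gradient step
        return self

    def predict(self, X):
        return X[:, 1] * self.theta[1] + self.theta[0]

With GRE scores in the hundreds, a learning rate of 0.1 diverges quickly; scaling the feature (or lowering l_rate substantially) is usually needed before these updates converge.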