Skip to content

Instantly share code, notes, and snippets.

@mmuratarat
Last active September 16, 2017 02:27
Show Gist options
  • Save mmuratarat/14f306c4e2697ef68d64150e1b930964 to your computer and use it in GitHub Desktop.
Save mmuratarat/14f306c4e2697ef68d64150e1b930964 to your computer and use it in GitHub Desktop.
GradientDescent <- function(data, alpha, iteration, epsilon) {
  # Batch gradient descent for linear regression with standardized inputs.
  #
  # data      : data.frame or matrix; the LAST column is the response,
  #             all preceding columns are predictors.
  # alpha     : learning rate.
  # iteration : maximum number of iterations (history capacity).
  # epsilon   : convergence threshold on the per-coefficient change.
  #
  # Returns a list with:
  #   costs : data.frame, cost at each recorded step
  #   theta : data.frame, coefficient vector at each recorded step
  #   norm  : data.frame, mean/sd used to standardize each predictor
  data <- matrix(unlist(data), ncol = ncol(data), byrow = FALSE)

  # Split predictors from the response (response is the last column).
  # BUG FIX: the original `1:ncol(data)-1` parsed as `(1:ncol(data)) - 1`,
  # i.e. 0:(ncol-1); it only worked because a 0 index is silently dropped.
  # `drop = FALSE` keeps a matrix even with a single predictor, so the
  # `apply()` calls below do not fail on 2-column input.
  independent.variable <- data[, 1:(ncol(data) - 1), drop = FALSE]
  dependent.variable <- data[, ncol(data)]

  # Standardize predictors (z-scores) and keep the scaling parameters so
  # new observations can be transformed the same way later.
  normalized <- function(x) (x - mean(x)) / sd(x)
  independent.variable.mean <- apply(independent.variable, 2, mean)
  independent.variable.sd <- apply(independent.variable, 2, sd)
  independent.variable <- apply(independent.variable, 2, normalized)

  # Prepend a column of 1s so the intercept (theta0) is estimated jointly.
  independent.variable <- cbind(theta0 = 1, independent.variable)

  # theta_new holds the current estimate, theta_old the previous one.
  # They start apart so the convergence test does not fire immediately.
  theta_new <- matrix(1, ncol = ncol(independent.variable))
  theta_old <- matrix(2, ncol = ncol(independent.variable))

  # Mean squared error cost: J(theta) = 1/(2m) * sum((X theta - y)^2).
  CostFunction <- function(independent.variable, dependent.variable, theta) {
    1 / (2 * NROW(dependent.variable)) *
      sum(((independent.variable %*% t(theta)) - dependent.variable)^2)
  }

  # History of theta and the corresponding cost at every step.
  thetas <- vector(mode = "list", length = iteration)
  thetas[[1]] <- theta_new
  J <- numeric(length = iteration)
  # BUG FIX: the original evaluated the first cost at theta_old, which did
  # not match the coefficients recorded in thetas[[1]] (theta_new).
  J[1] <- CostFunction(independent.variable, dependent.variable, theta_new)

  # Gradient of the cost: t(X) %*% (X theta - y) / m, returned as a row
  # vector to match the shape of theta.
  derivative <- function(independent.variable, dependent.variable, theta) {
    descent <- (t(independent.variable) %*%
      ((independent.variable %*% t(theta)) - dependent.variable)) /
      NROW(dependent.variable)
    t(descent)
  }

  # Iterate until every coefficient moves by less than epsilon, or the
  # iteration budget is exhausted.
  # BUG FIXES: scalar `&&` instead of vectorized `&`; `step < iteration`
  # (not `<=`) so `step + 1` never indexes past the preallocated history.
  step <- 1
  while (any(abs(theta_new - theta_old) > epsilon) && step < iteration) {
    step <- step + 1
    theta_old <- theta_new
    theta_new <- theta_old -
      alpha * derivative(independent.variable, dependent.variable, theta_old)
    thetas[[step]] <- theta_new
    J[step] <- CostFunction(independent.variable, dependent.variable, theta_new)
  }

  # Trim the unused tail of the history when convergence came early, so
  # `costs` and `theta` always have the same number of rows (the original
  # left trailing zeros in J while rbind dropped the NULL theta entries).
  costs <- data.frame(costs = J[seq_len(step)])
  theta <- data.frame(do.call(rbind, thetas[seq_len(step)]), row.names = NULL)
  norm <- data.frame(
    input_mean = independent.variable.mean,
    input_sd = independent.variable.sd
  )
  return(list(costs = costs, theta = theta, norm = norm))
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment