Skip to content

Instantly share code, notes, and snippets.

@anirudhacharya
Created December 12, 2018 01:20
Show Gist options
  • Save anirudhacharya/0e4bfbf489efb1487aba3b22f8b5f43d to your computer and use it in GitHub Desktop.
mxnet-R stacked autoencoder: greedy layer-wise pretraining on MNIST pixel data, followed by supervised fine-tuning of a digit classifier.
# Load MNIST-style data (label in column 1, 784 pixel columns) and split it
# into train/test sets in the column-major layout mxnet-R expects.
library(mxnet)
mx.set.seed(0)

rawdata <- read.csv("train.csv", header = TRUE)  # TRUE, not reassignable T
rawdata <- as.matrix(rawdata)

# Random 30k-row training split; the remaining rows become the test set.
# seq_len() is the safe form of 1:nrow() (same values here, same RNG draw).
train.index <- sample(x = seq_len(nrow(rawdata)), size = 30000)
train <- rawdata[train.index, ]
test <- rawdata[-train.index, ]

# Drop the label column, scale pixels into [0, 1], and transpose so each
# COLUMN is one example ("colmajor" layout used throughout this script).
train.x <- t(train[, -1] / 255)
train.y <- train[, 1]
test.x <- t(test[, -1] / 255)
test.y <- test[, 1]
# Train a one-hidden-layer autoencoder on `input` (features in rows,
# examples in columns) and return the fitted mxnet model. The encoder
# layer is named "encoder" so its weights can be harvested later.
#
# input:       numeric matrix, colmajor (nrow = feature count).
# hidden_size: width of the bottleneck ("encoder") layer.
pretrain <- function(input, hidden_size) {
  n_features <- nrow(input)  # colmajor: rows are features

  # data -> encoder FC -> relu -> decoder FC back to input width -> L2 loss
  net <- mx.symbol.Variable("data")
  net <- mx.symbol.FullyConnected(data = net, name = "encoder", num_hidden = hidden_size)
  net <- mx.symbol.Activation(data = net, name = "encoder_act", act_type = "relu")
  net <- mx.symbol.FullyConnected(data = net, name = "decoder", num_hidden = n_features)
  net <- mx.symbol.LinearRegressionOutput(data = net, name = "output")

  # Reconstruction task: the target y is the input itself.
  mx.model.FeedForward.create(
    symbol = net,
    X = input, y = input,
    ctx = mx.cpu(),
    num.round = 10, array.batch.size = 100,
    optimizer = "sgd", learning.rate = 0.01, momentum = 0.9,
    initializer = mx.init.Xavier(rnd_type = "uniform", factor_type = "in", magnitude = 2),
    eval.metric = mx.metric.rmse,
    batch.end.callback = mx.callback.log.train.metric(10),
    array.layout = "colmajor"
  )
}
# Push `input` through only the encoder half of a pretrained autoencoder
# (as returned by pretrain()) and return the hidden-layer activations.
encode <- function(input, model) {
# Reuse just the encoder weights/bias from the trained autoencoder.
arg.params = model$arg.params[c("encoder_weight", "encoder_bias")]
symbol <- mx.symbol.Variable("data")
# NOTE(review): assumes ncol() of the encoder weight NDArray equals the
# hidden size under mxnet-R's dim ordering — TODO confirm orientation.
symbol <- mx.symbol.FullyConnected(data = symbol, name = "encoder", num_hidden = ncol(arg.params$encoder_weight))
symbol <- mx.symbol.Activation(data = symbol, name = "encoder_act", act_type = "relu")
# Hand-build a minimal model object so predict() can run the encoder alone;
# layer names match pretrain(), so the stored params bind to these layers.
model <- list(symbol = symbol, arg.params = arg.params, aux.params = list())
class(model) <- "MXFeedForwardModel"
output <- predict(model, input, array.layout = "colmajor")
return(output)
}
# Greedy layer-wise pretraining: train each autoencoder on the previous
# layer's encoded output, halving the width each time (784 -> 392 -> 196
# -> 98 -> 49). The model.N objects are kept — their encoder weights seed
# the fine-tuning network built below.
input.1 <- train.x
model.1 <- pretrain(input = input.1, hidden_size = 392)
input.2 <- encode(input = input.1, model = model.1)
model.2 <- pretrain(input = input.2, hidden_size = 196)
input.3 <- encode(input = input.2, model = model.2)
model.3 <- pretrain(input = input.3, hidden_size = 98)
input.4 <- encode(input = input.3, model = model.3)
model.4 <- pretrain(input = input.4, hidden_size = 49)
# Assemble the full classifier: the four pretrained encoder layers stacked
# (named encoder_1..encoder_4 so the pretrained params bind by name),
# topped with a 10-way affine layer and softmax for digit classification.
hidden_sizes <- c(392, 196, 98, 49)
symbol <- mx.symbol.Variable("data")
for (i in seq_along(hidden_sizes)) {
  symbol <- mx.symbol.FullyConnected(
    data = symbol,
    name = paste0("encoder_", i),
    num_hidden = hidden_sizes[i]
  )
  symbol <- mx.symbol.Activation(
    data = symbol,
    name = paste0("encoder_act_", i),
    act_type = "relu"
  )
}
symbol <- mx.symbol.FullyConnected(data = symbol, name = "affine", num_hidden = 10)
symbol <- mx.symbol.SoftmaxOutput(data = symbol, name = "output")
# Seed the classifier with the greedily pretrained encoder weights; names
# must match the layer names in the fine-tuning symbol ("encoder_N_weight"
# / "encoder_N_bias"). Only the final affine layer starts from scratch,
# with He-style (sqrt(2/fan_in)) random weights and zero biases.
arg.params <- list(
  encoder_1_weight = model.1$arg.params$encoder_weight,
  encoder_1_bias   = model.1$arg.params$encoder_bias,
  encoder_2_weight = model.2$arg.params$encoder_weight,
  encoder_2_bias   = model.2$arg.params$encoder_bias,
  encoder_3_weight = model.3$arg.params$encoder_weight,
  encoder_3_bias   = model.3$arg.params$encoder_bias,
  encoder_4_weight = model.4$arg.params$encoder_weight,
  encoder_4_bias   = model.4$arg.params$encoder_bias,
  affine_weight    = mx.rnorm(c(49, 10), 0, sqrt(2 / 49), mx.cpu()),
  affine_bias      = mx.nd.zeros(10, mx.cpu())
)
# Fine-tune the whole stacked network on the supervised digit-classification
# task, starting from the pretrained encoder parameters assembled above.
model <- mx.model.FeedForward.create(
  symbol = symbol,
  X = train.x, y = train.y,
  ctx = mx.cpu(),
  num.round = 10, array.batch.size = 100,
  optimizer = "sgd", learning.rate = 0.01, momentum = 0.9,
  eval.metric = mx.metric.accuracy,
  batch.end.callback = mx.callback.log.train.metric(10),
  array.layout = "colmajor",
  arg.params = arg.params, aux.params = NULL
)

# Evaluate on the held-out split: predict() returns class probabilities
# with one example per column, so the row index of the per-column maximum
# (minus 1, for 0-based digit labels) is the predicted class.
score <- predict(model, test.x, array.layout = "colmajor")
label <- max.col(t(score)) - 1
table(test.y, label)  # confusion matrix: true digit vs predicted digit
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment