library(keras)
library(dplyr)
# logit and inverse-logit (sigmoid) helpers
logit <- function(p) log(p) - log(1 - p)
inv_logit <- function(x) 1 / (1 + exp(-x))
n <- 10000
set.seed(19880923)
df <- data.frame(x = rnorm(n)) %>%
  mutate(
    y_1 = rbinom(n, 1, prob = inv_logit(-1 + 4 * x)) %>% as.factor(),
    y_2 = rbinom(n, 1, prob = inv_logit(-1 + 1 * inv_logit(-3 * 3 * x))) %>% as.factor()
  )
# glm model 1 ------------------------------------------------------
modelo_lm_1 <- glm(y_1 ~ x, data = df, family = binomial)
# coefficients
coef(modelo_lm_1)
# accuracy
conf_matrix_lm_1 <- table(modelo_lm_1$fitted.values > 0.5, df$y_1)
sum(diag(conf_matrix_lm_1))/sum(conf_matrix_lm_1)
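# For context, the majority-class baseline accuracy (an illustrative extra check,
# computed from the same simulated data), to compare against the glm accuracy above
max(table(df$y_1)) / n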
# keras model 1 -------------------------------------------------------
modelo_keras_1 <- keras_model_sequential(name = "modelo_keras_1") %>%
  layer_dense(
    units = 1, input_shape = 1, name = "camada_unica",
    use_bias = TRUE, bias_constraint = NULL, bias_regularizer = NULL,
    bias_initializer = "RandomNormal"
  ) %>%
  # sigmoid, not softmax: softmax over a single output unit always returns 1,
  # which breaks binary cross-entropy training
  layer_activation(activation = 'sigmoid', name = "sigmoid")
summary(modelo_keras_1)
modelo_keras_1 %>% compile(
  loss = 'binary_crossentropy',
  optimizer = optimizer_sgd(lr = 1),
  metrics = c('accuracy')
)
modelo_keras_1_fit <- modelo_keras_1 %>% fit(
  x = df$x,
  # keras expects a numeric 0/1 target, not a factor
  y = as.numeric(as.character(df$y_1)),
  epochs = 10,
  batch_size = 128
)
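# Optional: the fit history returned above has a plot() method in the R keras
# package, showing loss and accuracy per epoch (assumes an interactive session)
plot(modelo_keras_1_fit)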
# coefficients
modelo_keras_1 %>% get_layer("camada_unica") %>% get_weights()
# accuracy
loss_and_metrics_1 <- modelo_keras_1 %>%
  evaluate(df$x, as.numeric(as.character(df$y_1)))
loss_and_metrics_1[[2]]
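# Sanity check (a minimal sketch, assuming the fit above converged): a one-unit
# sigmoid layer trained with binary cross-entropy is a logistic regression, so
# the dense layer's bias and weight should land near the glm coefficients.
# `pesos_keras_1` is a helper name introduced here just for this comparison.
pesos_keras_1 <- modelo_keras_1 %>% get_layer("camada_unica") %>% get_weights()
rbind(
  glm   = coef(modelo_lm_1),
  keras = c(pesos_keras_1[[2]], pesos_keras_1[[1]][1, 1])
)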