@statcompute
Created October 15, 2019 05:04
library(keras)

### DATA: keep cardholder records only; DEFAULT is the binary target and
### columns 3-14 are the predictors, standardized before training
df <- read.csv("credit_count.txt")
Y <- matrix(df[df$CARDHLDR == 1, ]$DEFAULT)
X <- scale(df[df$CARDHLDR == 1, ][3:14])
### SHARED INPUT LAYER feeding both branches
inputs <- layer_input(shape = c(ncol(X)))

### BRANCH 1: two-layer perceptron with dropout
mlp <- inputs %>%
  layer_dense(units = 64, activation = 'relu', kernel_initializer = 'he_uniform') %>%
  layer_dropout(rate = 0.2, seed = 1) %>%
  layer_dense(units = 64, activation = 'relu', kernel_initializer = 'he_uniform') %>%
  layer_dropout(rate = 0.2, seed = 1) %>%
  layer_dense(1, activation = 'sigmoid')
### BRANCH 2: 1-d convolution over the standardized predictor vector
cnv <- inputs %>%
  layer_reshape(c(ncol(X), 1)) %>%
  layer_conv_1d(32, 4, activation = 'relu', padding = "same", kernel_initializer = 'he_uniform') %>%
  layer_max_pooling_1d(2) %>%
  layer_spatial_dropout_1d(0.2) %>%
  layer_flatten() %>%
  layer_dense(1, activation = 'sigmoid')
### ENSEMBLE: average the two sigmoid outputs into a single default probability
avg <- layer_average(c(mlp, cnv))
mdl <- keras_model(inputs = inputs, outputs = avg)

### note: newer versions of the keras package use learning_rate in place of lr
mdl %>% compile(optimizer = optimizer_sgd(lr = 0.1, momentum = 0.9),
                loss = 'binary_crossentropy', metrics = c('binary_accuracy'))
mdl %>% fit(x = X, y = Y, epochs = 50, batch_size = 1000, verbose = 0)
mdl %>% predict(x = X)
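
### NOT IN THE ORIGINAL GIST: a minimal sketch of one way to check the in-sample
### fit, assuming the pROC package is installed; any ROC/AUC utility would work.
library(pROC)
p <- mdl %>% predict(x = X)                  # predicted default probabilities
print(auc(roc(as.vector(Y), as.vector(p))))  # in-sample area under the ROC curve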