# do not evaluate chunks when the vignette is built
knitr::opts_chunk$set(eval = FALSE)

Ultimate model: a sequential convolutional network that maps captcha images to one softmax distribution per character position.

library(keras)
# inputs
set.seed(474747)

n_test <- 2000
n_epochs <- 30
n_units <- 512
batch_size <- 64
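
The rest of the script assumes a `prepared_data` object already exists: a list holding the sample count `n`, a 4-D input array `x` (samples x height x width x channels) and a 3-D one-hot label array `y` (samples x character positions x vocabulary). To dry-run the architecture without the real data, a placeholder with compatible shapes might look like the sketch below; every dimension here is an illustrative assumption, not the real ESAJ shape.

# illustrative placeholder only -- all shapes below are assumptions
prepared_data <- list(
  n = 3000,
  x = array(runif(3000 * 25 * 60 * 1), dim = c(3000, 25, 60, 1)),
  y = array(0L, dim = c(3000, 5, 36))  # 5 characters, 36-symbol vocabulary
)
# give each character position a random one-hot label
for (i in seq_len(3000)) for (j in 1:5) prepared_data$y[i, j, sample(36, 1)] <- 1L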

# setup ------------------------------------------------------------------------
n_tot <- prepared_data$n
if (n_tot <= n_test) stop("n_test must be smaller than the number of rows in the data.")
# hold out n_test rows for validation and train on the rest
my_sample <- sample(seq_len(n_tot), n_tot - n_test, replace = FALSE)

# DANIEL'S MODEL ---------------------------------------------------------------
# sequential CNN: three conv + max-pooling blocks, then a dense head reshaped
# into one softmax distribution per character position
model <- keras_model_sequential()
model %>%
  layer_conv_2d(
    input_shape = dim(prepared_data$x)[-1],
    filters = 32,
    kernel_size = c(5,5),
    padding = "same",
    activation = "relu"
  ) %>%
  layer_max_pooling_2d() %>%
  layer_conv_2d(
    filters = 64,
    kernel_size = c(5,5),
    padding = "same",
    activation = "relu"
  ) %>%
  layer_max_pooling_2d() %>%
  layer_conv_2d(
    filters = 128,
    kernel_size = c(5,5),
    padding = "same",
    activation = "relu"
  ) %>%
  layer_max_pooling_2d() %>%
  layer_flatten() %>%
  layer_dense(units = n_units) %>%
  layer_dropout(rate = 0.1) %>%
  layer_dense(units = prod(dim(prepared_data$y)[-1])) %>%
  layer_reshape(target_shape = dim(prepared_data$y)[-1]) %>%
  layer_activation("softmax")
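
As a quick sanity check, printing the architecture confirms that the final reshape and softmax produce one probability distribution per character position:

summary(model)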

# compile-----------------------------------------------------------------------
model %>%
  compile(
    optimizer = "adam",
    loss = "categorical_crossentropy",
    metrics = "accuracy"
  )
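
With a (positions x vocabulary) output, Keras computes categorical_crossentropy and the accuracy metric over the last axis, so both are calculated per character position and averaged across positions.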

# fit---------------------------------------------------------------------------
# train on the sampled rows, validating on the held-out rows
model %>%
  fit(
    x = prepared_data$x[my_sample,,,, drop = FALSE],
    y = prepared_data$y[my_sample,,, drop = FALSE],
    batch_size = batch_size,
    epochs = n_epochs,
    shuffle = TRUE,
    validation_data = list(
      prepared_data$x[-my_sample,,,, drop = FALSE], 
      prepared_data$y[-my_sample,,, drop = FALSE]
    )
  )
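
After training, the held-out rows can be scored directly; a minimal sketch using the same split as above:

# loss and per-position accuracy on the held-out rows
model %>%
  evaluate(
    prepared_data$x[-my_sample,,,, drop = FALSE],
    prepared_data$y[-my_sample,,, drop = FALSE]
  )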

save_model_hdf5(model, "esaj-06.hdf5")
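
To reuse the saved model, it can be reloaded and applied to new images. The decoding below is a sketch: it takes the argmax per character position and assumes (which may not hold for every `prepared_data`) that the vocabulary is stored as dimnames on the label array.

model <- load_model_hdf5("esaj-06.hdf5")
preds <- predict(model, prepared_data$x[1:5, , , , drop = FALSE])
# argmax over the vocabulary axis: one predicted symbol index per position
idx <- apply(preds, c(1, 2), which.max)
# assumption: the vocabulary travels as dimnames on the label array
vocab <- dimnames(prepared_data$y)[[3]]
if (!is.null(vocab)) apply(idx, 1, function(i) paste(vocab[i], collapse = ""))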

