```r
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
if (Sys.info()[["sysname"]] == "Darwin")
  Sys.setenv(KMP_DUPLICATE_LIB_OK = TRUE)
# reticulate::use_virtualenv("tf-2.2.0", TRUE)
```
# autograph()
Here is a full MNIST training loop implemented in R using tfautograph (originally adapted from here).
```r
library(magrittr)
library(purrr, warn.conflicts = FALSE)
library(tensorflow)
library(tfdatasets)
library(keras)
library(tfautograph)

tf$version$VERSION
```
First, some helpers so we can capture `tf.print()` output in the R Markdown vignette.
```r
# Redirect tf$print() output to a temp file so it can be captured and
# echoed into the rendered vignette.
TEMPFILE <- tempfile("tf-print-out", fileext = ".txt")

print_tempfile <- function(clear_after_read = TRUE) {
  if (clear_after_read)
    on.exit(unlink(TEMPFILE))
  writeLines(readLines(TEMPFILE, warn = FALSE))
}

tf_print <- function(...)
  tf$print(..., output_stream = sprintf("file://%s", TEMPFILE))
```
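As a quick check that the helpers behave as intended (a small sketch, not part of the original example; the output comment is what we'd expect in eager mode):

```r
tf_print("hello from tf$print")
print_tempfile()
#> hello from tf$print
```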
```r
c(c(x_train, y_train), .) %<-% tf$keras$datasets$mnist$load_data()

train_dataset <- list(x_train, y_train) %>%
  tensor_slices_dataset() %>%
  dataset_map(function(x, y) {
    x <- tf$cast(x, tf$float32) / 255  # scale pixels to [0, 1]
    y <- tf$cast(y, tf$int64)
    list(x, y)
  }) %>%
  dataset_take(20000) %>%
  dataset_shuffle(20000) %>%
  dataset_batch(100)
```
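If you want to sanity-check the pipeline, one way (a sketch using reticulate's generic iterator helpers; the shape annotations are written by hand, not captured output) is to pull a single batch eagerly:

```r
it <- reticulate::as_iterator(train_dataset)
batch <- reticulate::iter_next(it)
str(batch)
# expected: batch[[1]] is a float32 tensor of shape (100, 28, 28) in [0, 1],
#           batch[[2]] is an int64 tensor of shape (100)
```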
```r
new_model_and_optimizer <- function() {
  model <- keras_model_sequential() %>%
    layer_reshape(target_shape = c(28 * 28),
                  input_shape = shape(28, 28)) %>%
    layer_dense(100, activation = 'relu') %>%
    layer_dense(100, activation = 'relu') %>%
    layer_dense(10)
  model$build()  # instantiate weights so trainable_variables is populated
  optimizer <- tf$keras$optimizers$Adam()
  list(model, optimizer)
}
c(model, optimizer) %<-% new_model_and_optimizer()
```
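To confirm the architecture built as intended (an optional aside, not in the original example):

```r
summary(model)
length(model$trainable_variables)  # 6: one kernel and one bias per dense layer
```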
```r
compute_loss <- tf$keras$losses$SparseCategoricalCrossentropy(from_logits = TRUE)
compute_accuracy <- tf$keras$metrics$SparseCategoricalAccuracy()

train_one_step <- function(model, optimizer, x, y) {
  # Record the forward pass on a gradient tape
  with(tf$GradientTape() %as% tape, {
    logits <- model(x)
    loss <- compute_loss(y, logits)
  })
  grads <- tape$gradient(loss, model$trainable_variables)
  optimizer$apply_gradients(
    transpose(list(grads, model$trainable_variables)))
  compute_accuracy(y, logits)  # update the running accuracy metric
  loss
}

train <- autograph(function(model, optimizer) {
  step <- 0L
  loss <- 0
  for (batch in train_dataset) {
    c(x, y) %<-% batch
    step %<>% add(1L)
    loss <- train_one_step(model, optimizer, x, y)
    if (compute_accuracy$result() > 0.8) {
      tf_print("Accuracy over 0.8; breaking early")
      break
    } else if (step %% 10L == 0L)
      tf_print('Step', step, ': loss', loss,
               '; accuracy', compute_accuracy$result())
  }
  tf_print('Final step', step, ': loss', loss,
           '; accuracy', compute_accuracy$result())
  list(step, loss)
})
```
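The important part above is that `train()` is written with ordinary R control flow: a `for` over a tfdataset, and `if`/`break` conditioned on tensor values. `autograph()` stages these as graph ops when the values involved are tensors. Here is a minimal toy sketch of the same mechanism (the function and names are illustrative only, not from the original vignette):

```r
# A tensor-valued `if`: tfautograph stages this as a graph conditional
# when `x` is a tensor (and simply evaluates it when running eagerly).
abs_ag <- autograph(function(x) {
  if (x >= 0) {
    out <- x
  } else {
    out <- -x
  }
  out
})

abs_ag(tf$constant(-3))               # eager
tf_function(abs_ag)(tf$constant(-3))  # traced into a graph
```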
```r
# autograph also works in eager mode
c(model, optimizer) %<-% new_model_and_optimizer()
c(step, loss) %<-% train(model, optimizer)
print_tempfile()
```
To run the same loop as a TensorFlow graph, wrap `train()` with `tf_function()`:

```r
c(model, optimizer) %<-% new_model_and_optimizer()
train_on_graph <- tf_function(train)
c(step, loss) %<-% train_on_graph(model, optimizer)
print_tempfile()
```
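One caveat worth flagging (an observation about this example, with a hedged sketch): `compute_accuracy` is a stateful tf.keras metric shared across both runs above, so its accumulated result carries over between them. To start a run with fresh state you could reset it first; `reset_states()` is the metric method in this generation of TensorFlow (newer releases rename it to `reset_state()`):

```r
compute_accuracy$reset_states()  # clear accumulated accuracy state
c(model, optimizer) %<-% new_model_and_optimizer()
c(step, loss) %<-% train_on_graph(model, optimizer)
print_tempfile()
```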