#' Keras Model (Functional API)
#'
#' A model is a directed acyclic graph of layers.
#'
#' @param inputs Input tensor(s) (from [`keras_input()`])
#' @param outputs Output tensors (from calling layers with `inputs`)
#' @param ... Any additional arguments
#'
#' @details
#'
#' # Examples
#' ```{r}
#' library(keras3)
#'
#' # input tensor
#' inputs <- keras_input(shape = c(784))
#'
#' # outputs compose input + dense layers
#' predictions <- inputs |>
#'   layer_dense(units = 64, activation = 'relu') |>
#'   layer_dense(units = 64, activation = 'relu') |>
#'   layer_dense(units = 10, activation = 'softmax')
#'
#' # create and compile model
#' model <- keras_model(inputs = inputs, outputs = predictions)
#' model |> compile(
#'   optimizer = 'rmsprop',
#'   loss = 'categorical_crossentropy',
#'   metrics = c('accuracy')
#' )
#' ```
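#'
#' A minimal sketch of a multi-input model (the layer sizes here are
#' arbitrary):
#'
#' ```{r}
#' input_a <- keras_input(shape = c(16))
#' input_b <- keras_input(shape = c(32))
#' output <- layer_concatenate(list(input_a, input_b)) |>
#'   layer_dense(units = 1)
#' model <- keras_model(inputs = list(input_a, input_b), outputs = output)
#' ```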
#'
#' @returns A `Model` instance.
#' @export
#' @family model functions
#' @family model creation
#' @tether keras.Model
keras_model <- function(inputs = NULL, outputs = NULL, ...) {
  keras$models$Model(inputs = inputs, outputs = outputs, ...)
}
#' Create a Keras tensor (Functional API input).
#'
#' @description
#' A Keras tensor is a symbolic tensor-like object, which we augment with
#' certain attributes that allow us to build a Keras model just by knowing the
#' inputs and outputs of the model.
#'
#' For instance, if `a`, `b` and `c` are Keras tensors,
#' it becomes possible to do:
#' `model <- keras_model(inputs = c(a, b), outputs = c)`
#'
#' # Examples
#' ```{r}
#' # This is a logistic regression in Keras
#' input <- keras_input(shape = c(32))
#' output <- input |> layer_dense(16, activation = 'softmax')
#' model <- keras_model(input, output)
#' ```
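#'
#' As a further sketch, a dimension given as `NA` (e.g., a variable sequence
#' length) is left unspecified in the input shape:
#'
#' ```{r}
#' # batches of variable-length sequences of 32-dimensional vectors
#' seq_input <- keras_input(shape = c(NA, 32))
#' seq_input
#' ```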
#'
#' @returns
#' A Keras tensor,
#' which can be passed to the `inputs` argument of [`keras_model()`].
#'
#' @param shape
#' A shape list (list of integers or `NULL` objects),
#' not including the batch size.
#' For instance, `shape = c(32)` indicates that the expected input
#' will be batches of 32-dimensional vectors. Elements of this list
#' can be `NULL` or `NA`; `NULL`/`NA` elements represent dimensions where the shape
#' is not known and may vary (e.g. sequence length).
#'
#' @param batch_size
#' Optional static batch size (integer).
#'
#' @param dtype
#' The data type expected by the input, as a string
#' (e.g. `"float32"`, `"int32"`...)
#'
#' @param sparse
#' A boolean specifying whether the expected input will be sparse
#' tensors. Note that, if `sparse` is `FALSE`, sparse tensors can still
#' be passed into the input - they will be densified with a default
#' value of 0. This feature is only supported with the TensorFlow
#' backend. Defaults to `FALSE`.
#'
#' @param batch_shape
#' Optional shape list (list of integers or `NULL` objects),
#' including the batch size.
#'
#' @param name
#' Optional name string for the layer.
#' Should be unique in a model (do not reuse the same name twice).
#' It will be autogenerated if it isn't provided.
#'
#' @param tensor
#' Optional existing tensor to wrap into the `Input` layer.
#' If set, the layer will use this tensor rather
#' than creating a new placeholder tensor.
#'
#' @param optional
#' Boolean, whether the input is optional or not.
#' An optional input can accept `NULL` values.
#'
#' @export
#' @family model creation
# @seealso
# + <https://keras.io/api/layers/core_layers/input/>
#'
#' @tether keras.layers.Input
keras_input <-
function (shape = NULL, batch_size = NULL, dtype = NULL, sparse = NULL,
          batch_shape = NULL, name = NULL, tensor = NULL, optional = FALSE)
{
  args <- capture_args(list(shape = normalize_shape, batch_size = as_integer,
                            input_shape = normalize_shape,
                            batch_input_shape = normalize_shape,
                            batch_shape = normalize_shape))
  do.call(keras$Input, args)
}
#' Keras Model composed of a linear stack of layers
#'
#' @param input_shape
#' A shape integer vector,
#' not including the batch size.
#' For instance, `input_shape = c(32)` indicates that the expected input
#' will be batches of 32-dimensional vectors. Elements of this shape
#' can be `NA`; `NA` elements represent dimensions where the shape
#' is not known and may vary (e.g. sequence length).
#'
#' @param name Name of model
#'
#' @param input_batch_size Optional static batch size (integer).
#'
#' @param input_dtype
#' The data type expected by the input, as a string
#' (e.g. `"float32"`, `"int32"`...)
#'
#' @param input_sparse
#' A boolean specifying whether the expected input will be sparse
#' tensors. Note that, if `sparse` is `FALSE`, sparse tensors can still
#' be passed into the input - they will be densified with a default
#' value of `0`. This feature is only supported with the TensorFlow
#' backend. Defaults to `FALSE`.
#'
#' @param input_batch_shape
#' An optional way to specify `batch_size` and `input_shape` as one argument.
#'
#' @param input_name
#' Optional name string for the input layer.
#' Should be unique in a model (do not reuse the same name twice).
#' It will be autogenerated if it isn't provided.
#'
#' @param input_tensor
#' Optional existing tensor to wrap into the `InputLayer`.
#' If set, the layer will use this tensor rather
#' than creating a new placeholder tensor.
#'
#' @param input_optional
#' Boolean, whether the input is optional or not.
#' An optional input can accept `NULL` values.
#'
#' @param ... additional arguments passed on to `keras.layers.InputLayer`.
#'
#' @param layers List of layers to add to the model.
#'
#' @param trainable Boolean, whether the model's variables should be trainable.
#' You can also change the trainable status of a model/layer with
#' [`freeze_weights()`] and [`unfreeze_weights()`].
#'
#' @note
#'
#' If `input_shape` is omitted, then the model layer
#' shapes, including the final model output shape, will not be known until
#' the model is built, either by calling the model with an input tensor/array
#' like `model(input)` (possibly via `fit()`/`evaluate()`/`predict()`), or by
#' explicitly calling `model$build(input_shape)`.
#'
#' @details
#'
#' # Examples
#'
#' ```{r}
#' model <- keras_model_sequential(input_shape = c(784))
#' model |>
#'   layer_dense(units = 32) |>
#'   layer_activation('relu') |>
#'   layer_dense(units = 10) |>
#'   layer_activation('softmax')
#'
#' model |> compile(
#'   optimizer = 'rmsprop',
#'   loss = 'categorical_crossentropy',
#'   metrics = c('accuracy')
#' )
#'
#' model
#' ```
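#'
#' As a sketch of the behavior when `input_shape` is omitted (see Note): the
#' layer shapes become known only once the model is built, e.g. by calling it
#' on an input tensor (the input used here is arbitrary):
#'
#' ```{r}
#' model <- keras_model_sequential() |>
#'   layer_dense(units = 10)
#'
#' # build the model by calling it on an input
#' pred <- model(op_ones(c(1, 4)))
#' model
#' ```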
#'
#' @returns A `Sequential` model instance.
#' @export
#' @family model functions
#' @family model creation
#' @tether keras.Sequential
keras_model_sequential <-
function(input_shape = NULL, name = NULL,
         ...,
         input_dtype = NULL,
         input_batch_size = NULL,
         input_sparse = NULL,
         input_batch_shape = NULL,
         input_name = NULL,
         input_tensor = NULL,
         input_optional = FALSE,
         trainable = TRUE,
         layers = list())
{
  args <- capture_args(list(layers = as_list))
  Sequential_arg_names <- c("layers", "name", "trainable")
  Sequential_args <- args[intersect(names(args), Sequential_arg_names)]
  InputLayer_args <- args[setdiff(names(args), Sequential_arg_names)]

  if (length(InputLayer_args)) {
    # If we received `layers` for the first positional arg, throw a nicer error
    # message. (The first positional arg used to be `layers`.)
    if (is_layer(input_shape) ||
        (is.list(input_shape) && any(map_lgl(input_shape, is_layer))))
      stop("`layers` must be passed in as a named argument.")

    prepend(Sequential_args$layers) <- do.call(InputLayer, InputLayer_args)
  }

  do.call(keras$models$Sequential, Sequential_args)
}
#' @tether keras.layers.InputLayer
# Internal helper: builds a keras.layers.InputLayer from the R-facing
# (and legacy) `input_*` argument names.
InputLayer <-
function(input_shape = NULL,
         ...,
         input_batch_size = NULL,
         input_dtype = NULL,
         input_sparse = NULL,
         input_batch_shape = NULL,
         input_name = NULL,
         input_tensor = NULL,
         input_optional = FALSE)
{
  args <- capture_args(list(
    input_shape = normalize_shape,
    shape = normalize_shape,
    batch_shape = normalize_shape,
    input_batch_shape = normalize_shape,
    batch_input_shape = normalize_shape,
    input_batch_size = as_integer,
    batch_size = as_integer
  ))

  # Map the R-facing (and legacy) argument names onto the names that
  # keras.layers.InputLayer expects.
  args <- rename(args,
                 name = "input_layer_name", # legacy
                 name = "input_name",
                 shape = "input_shape",
                 batch_shape = "batch_input_shape", # legacy
                 batch_shape = "input_batch_shape",
                 batch_size = "input_batch_size",
                 dtype = "input_dtype",
                 sparse = "input_sparse",
                 optional = "input_optional",
                 .skip_existing = TRUE)

  do.call(keras$layers$InputLayer, args)
}
#' Clone a Functional or Sequential `Model` instance.
#'
#' @description
#' Model cloning is similar to calling a model on new inputs,
#' except that it creates new layers (and thus new weights) instead
#' of sharing the weights of the existing layers.
#'
#' Note that
#' `clone_model()` will not preserve the uniqueness of shared objects within the
#' model (e.g. a single variable attached to two distinct layers will be
#' restored as two separate variables).
#'
#'
#' # Examples
#' ```{r}
#' # Create a test Sequential model.
#' model <- keras_model_sequential(input_shape = c(728)) |>
#'   layer_dense(32, activation = 'relu') |>
#'   layer_dense(1, activation = 'sigmoid')
#'
#' # Create a copy of the test model (with freshly initialized weights).
#' new_model <- clone_model(model)
#' ```
#'
#' Using a `clone_function` to make a model deterministic by setting the
#' random seed everywhere:
#'
#' ```{r}
#' clone_function <- function(layer) {
#'   config <- layer$get_config()
#'   if ("seed" %in% names(config))
#'     config$seed <- 1337L
#'   layer$`__class__`$from_config(config)
#' }
#'
#' new_model <- clone_model(model, clone_function = clone_function)
#' ```
#'
#' Using a `call_function` to add a `Dropout` layer after each `Dense` layer
#' (without recreating new layers):
#'
#' ```{r}
#' call_function <- function(layer, ...) {
#'   out <- layer(...)
#'   if (inherits(layer, keras$layers$Dense))
#'     out <- out |> layer_dropout(0.5)
#'   out
#' }
#'
#' inputs <- keras_input(c(728))
#' outputs <- inputs |>
#'   layer_dense(32, activation = 'relu') |>
#'   layer_dense(1, activation = 'sigmoid')
#' model <- keras_model(inputs, outputs)
#'
#' new_model <- clone_model(
#'   model,
#'   clone_function = function(x) x, # Reuse the same layers.
#'   call_function = call_function
#' )
#' new_model
#' ```
#'
#' Note that subclassed models cannot be cloned by default,
#' since their internal layer structure is not known.
#' To achieve equivalent functionality
#' as `clone_model` in the case of a subclassed model, simply make sure
#' that the model class implements `get_config()`
#' (and optionally `from_config()`), and call:
#'
#' ```r
#' new_model <- model$`__class__`$from_config(model$get_config())
#' ```
#'
#' In the case of a subclassed model, you cannot use a custom
#' `clone_function`.
#'
#' @returns
#' An instance of `Model` reproducing the behavior
#' of the original model, on top of new input tensors,
#' using newly instantiated weights. The cloned model may behave
#' differently from the original model if a custom `clone_function`
#' or `call_function` modifies a layer or layer call.
#'
#' @param model
#' Instance of `Model`
#' (could be a Functional model or a Sequential model).
#'
#' @param input_tensors
#' Optional list of input tensors
#' to build the model upon. If not provided,
#' new `keras_input()` objects will be created.
#'
#' @param clone_function
#' Callable with signature `function(layer)`
#' to be used to clone each layer in the target
#' model (except `Input` instances). It takes as argument the
#' layer instance to be cloned, and returns the corresponding layer
#' instance to be used in the model copy. If unspecified, this callable
#' defaults to the following serialization/deserialization function:
#' ``` function(layer) layer$`__class__`$from_config(layer$get_config()) ```.
#' By passing a custom callable, you can customize your copy of the
#' model, e.g. by wrapping certain layers of interest (you might want
#' to replace all `LSTM` instances with equivalent
#' `Bidirectional(LSTM(...))` instances, for example).
#' Defaults to `NULL`.
#'
#' @param call_function
#' Callable with signature
#' `function(layer, ...)` to be used to call each
#' cloned layer and a set of inputs. It takes the layer instance,
#' and the call arguments, and returns the
#' call outputs. If unspecified, this callable defaults to
#' the regular `call()` method:
#' `function(layer, ...) do.call(layer, list(...))`.
#' By passing a custom callable, you can insert new layers before or
#' after a given layer.
#'
#' @param recursive
#' Boolean. Note: this argument can only be used with
#' Functional models.
#' Whether to recursively clone any Sequential
#' or Functional models encountered in the original
#' Sequential/Functional model. If `FALSE`,
#' then inner models are cloned by calling `clone_function()`.
#' If `TRUE`, then inner models are cloned by calling `clone_model()`
#' with the same `clone_function`, `call_function`, and `recursive`
#' arguments. Note that in this case, `call_function`
#' will not be propagated to any Sequential model
#' (since it is not applicable to Sequential models).
#'
#' @param ...
#' For forward/backward compatibility.
#'
#'
#' @export
#' @tether keras.models.clone_model
clone_model <-
function(model, input_tensors = NULL, clone_function = NULL,
         call_function = NULL, recursive = FALSE, ...)
{
  args <- capture_args()
  do.call(keras$models$clone_model, args)
}
# ---- Model methods ----
#' Retrieves a layer based on either its name (unique) or index.
#'
#' Indices are based on order of horizontal graph traversal (bottom-up) and are
#' 1-based. If `name` and `index` are both provided, `index` will take
#' precedence.
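#'
#' # Examples
#' A minimal sketch (the model and layer names here are arbitrary):
#' ```{r}
#' model <- keras_model_sequential(input_shape = c(8)) |>
#'   layer_dense(4, name = "hidden") |>
#'   layer_dense(1, name = "out")
#'
#' model |> get_layer("hidden")   # by name
#' model |> get_layer(index = -1) # last layer
#' ```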
#'
#' @param object Keras model object
#' @param name String, name of layer.
#' @param index Integer, index of layer (1-based). Also valid are negative
#' values, which count from the end of model.
#'
#' @returns A layer instance.
#'
#' @family model functions
#'
#' @export
get_layer <- function(object, name = NULL, index = NULL) {
  object$get_layer(
    name = name,
    index = as_layer_index(index)
  )
}
#' Remove the last layer in a Sequential model
#'
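#' # Examples
#' A minimal sketch (the layers here are arbitrary):
#' ```{r}
#' model <- keras_model_sequential(input_shape = c(8)) |>
#'   layer_dense(4) |>
#'   layer_dense(1)
#' length(model$layers)
#'
#' model |> pop_layer()
#' length(model$layers)
#' ```
#'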
#' @param object Sequential keras model object
#' @returns The input `object`, invisibly.
#'
#' @family model functions
#'
#' @export
pop_layer <- function(object) {
  object$pop()
  invisible(object)
}
#' Retrieves tree-like structure of model variables.
#'
#' @description
#' This method allows retrieval of different model variables (trainable,
#' non-trainable, optimizer, and metrics). The variables are returned as a
#' nested named list, where the names correspond to the variable
#' names and the values are the nested representations of the variables.
#'
#' # Examples
#' ```{r}
#' model <- keras_model_sequential(name = "my_sequential",
#'                                 input_shape = c(1),
#'                                 input_name = "my_input") |>
#'   layer_dense(1, activation = "sigmoid", name = "my_dense")
#'
#' model |> compile(optimizer = "adam", loss = "mse", metrics = c("mae"))
#' model |> fit(matrix(1), matrix(1), verbose = 0)
#' state_tree <- model |> get_state_tree()
#' ```
#'
#' The `state_tree` list returned looks like:
#'
#' ```r
#' list(
#'   metrics_variables = list(
#'     loss = list(
#'       count = ...,
#'       total = ...
#'     ),
#'     mean_absolute_error = list(
#'       count = ...,
#'       total = ...
#'     )
#'   ),
#'   trainable_variables = list(
#'     my_sequential = list(
#'       my_dense = list(
#'         bias = ...,
#'         kernel = ...
#'       )
#'     )
#'   ),
#'   non_trainable_variables = list(),
#'   optimizer_variables = list(
#'     adam = list(
#'       iteration = ...,
#'       learning_rate = ...,
#'       my_sequential_my_dense_bias_momentum = ...,
#'       my_sequential_my_dense_bias_velocity = ...,
#'       my_sequential_my_dense_kernel_momentum = ...,
#'       my_sequential_my_dense_kernel_velocity = ...
#'     )
#'   )
#' )
#' ```
#'
#' For example:
#' ```{r}
#' str(state_tree)
#' ```
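#'
#' To get the leaves back as plain R arrays instead of backend tensors, a
#' usage sketch:
#'
#' ```{r}
#' state_tree <- model |> get_state_tree(value_format = "array")
#' str(state_tree$trainable_variables)
#' ```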
#'
#' @returns
#' A named list containing the nested representations of the
#' requested variables. The names are the variable names, and the
#' values are the corresponding nested named lists.
#'
#' @param object A Keras Model.
#'
#' @param value_format
#' One of `"backend_tensor"`, `"numpy_array"`, `"array"`.
#' The kind of array to return as the leaves of the nested
#' state tree.
#'
#' @export
#' @tether keras.Model.get_state_tree
#' @family model functions
get_state_tree <- function (object, value_format = "backend_tensor")
{
  switch(
    value_format,
    "numpy_array" = {
      # call via a py reference so reticulate does not auto-convert the
      # numpy array leaves into R arrays
      getter <- object$get_state_tree
      r_to_py(getter)("numpy_array")
    },
    "r_array" = ,
    "array" = {
      # reticulate converts the numpy array leaves into R arrays
      object$get_state_tree("numpy_array")
    },
    # "backend_tensor" =
    {
      object$get_state_tree(value_format)
    }
  )
}
#' Assigns values to variables of the model.
#'
#' @description
#' This method takes a dictionary (named list) of nested variable values, which
#' represents the state tree of the model, and assigns them to the
#' corresponding variables of the model. The keys (list names) represent the
#' variable names (e.g., `'trainable_variables'`, `'optimizer_variables'`),
#' and the values are nested dictionaries containing the variable
#' paths and their corresponding values.
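#'
#' # Examples
#' A minimal round-trip sketch with [`get_state_tree()`] (the model here is
#' arbitrary):
#'
#' ```r
#' model <- keras_model_sequential(input_shape = c(1)) |>
#'   layer_dense(1)
#'
#' state_tree <- model |> get_state_tree(value_format = "array")
#' # ... modify values in `state_tree` ...
#' model |> set_state_tree(state_tree)
#' ```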
#'
#' @param object A keras model.
#'
#' @param state_tree
#' A dictionary representing the state tree of the model.
#' The keys are the variable names, and the values are nested
#' dictionaries representing the variable paths and their values.
#'
#' @export
#' @family model functions
#' @tether keras.Model.set_state_tree
set_state_tree <-
function (object, state_tree)
{
  object$set_state_tree(state_tree)
}