#' Input layer
#'
#' Layer to be used as an entry point into a graph.
#'
#' @param shape Shape, not including the batch size. For instance,
#' `shape=c(32)` indicates that the expected input will be batches
#' of 32-dimensional vectors.
#' @param batch_shape Shape, including the batch size. For instance,
#' `batch_shape = c(10, 32)` indicates that the expected input will be
#' batches of 10 32-dimensional vectors. `batch_shape = list(NULL, 32)`
#' indicates batches of an arbitrary number of 32-dimensional vectors.
#' @param name An optional name string for the layer. Should be unique in a
#' model (do not reuse the same name twice). It will be autogenerated if it
#' isn't provided.
#' @param dtype The data type expected by the input, as a string (`float32`,
#' `float64`, `int32`...)
#' @param sparse Boolean, whether the placeholder created is meant to be sparse.
#' @param tensor Existing tensor to wrap into the `Input` layer. If set, the
#' layer will not create a placeholder tensor.
#'
#' @return A tensor
#'
#' @family core layers
#'
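#' @examples
#' \dontrun{
#' # A minimal sketch of the functional API (assumes a working Keras
#' # installation): a 32-dimensional input feeding a dense softmax layer.
#' input <- layer_input(shape = c(32))
#' output <- input %>% layer_dense(units = 10, activation = "softmax")
#' model <- keras_model(input, output)
#' }
#'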
#' @export
layer_input <- function(shape = NULL, batch_shape = NULL, name = NULL,
                        dtype = NULL, sparse = FALSE, tensor = NULL) {
  args <- list(
    name = name,
    dtype = if (is.null(dtype)) keras$backend$floatx() else dtype,
    sparse = sparse,
    tensor = tensor
  )
  if (!missing(shape))
    args$shape <- normalize_shape(shape)
  if (!missing(batch_shape))
    args$batch_shape <- normalize_shape(batch_shape)
  do.call(keras$layers$Input, args)
}
#' Add a densely-connected NN layer to an output
#'
#' Implements the operation: `output = activation(dot(input, kernel) + bias)`
#' where `activation` is the element-wise activation function passed as the
#' `activation` argument, `kernel` is a weights matrix created by the layer, and
#' `bias` is a bias vector created by the layer (only applicable if `use_bias`
#' is `TRUE`). Note: if the input to the layer has a rank greater than 2, then
#' it is flattened prior to the initial dot product with `kernel`.
#'
#' @inheritParams layer_input
#'
#' @param object Model or layer object
#' @param units Positive integer, dimensionality of the output space.
#' @param activation Name of the activation function to use. If you don't
#' specify anything, no activation is applied (i.e. "linear" activation:
#' `a(x) = x`).
#' @param use_bias Whether the layer uses a bias vector.
#' @param kernel_initializer Initializer for the `kernel` weights matrix.
#' @param bias_initializer Initializer for the bias vector.
#' @param kernel_regularizer Regularizer function applied to the `kernel`
#' weights matrix.
#' @param bias_regularizer Regularizer function applied to the bias vector.
#' @param activity_regularizer Regularizer function applied to the output of
#' the layer (its "activation").
#' @param kernel_constraint Constraint function applied to the `kernel` weights
#' matrix.
#' @param bias_constraint Constraint function applied to the bias vector.
#' @param input_shape Dimensionality of the input (integer) not including the
#' samples axis. This argument is required when using this layer as the first
#' layer in a model.
#' @param batch_input_shape Shape, including the batch size. For instance,
#' `batch_input_shape = c(10, 32)` indicates that the expected input will be
#' batches of 10 32-dimensional vectors. `batch_input_shape = list(NULL, 32)`
#' indicates batches of an arbitrary number of 32-dimensional vectors.
#' @param batch_size Fixed batch size for the layer.
#' @param trainable Whether the layer weights will be updated during training.
#' @param weights Initial weights for the layer.
#'
#' @section Input and Output Shapes:
#'
#' Input shape: nD tensor with shape: `(batch_size, ..., input_dim)`. The most
#' common situation would be a 2D input with shape `(batch_size, input_dim)`.
#'
#' Output shape: nD tensor with shape: `(batch_size, ..., units)`. For
#' instance, for a 2D input with shape `(batch_size, input_dim)`, the output
#' would have shape `(batch_size, units)`.
#'
#' @family core layers
#'
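#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): a sequential
#' # model stacking two dense layers on 20-dimensional inputs.
#' model <- keras_model_sequential() %>%
#'   layer_dense(units = 64, activation = "relu", input_shape = c(20)) %>%
#'   layer_dense(units = 1, activation = "sigmoid")
#' }
#'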
#' @export
layer_dense <- function(object, units, activation = NULL, use_bias = TRUE,
                        kernel_initializer = 'glorot_uniform', bias_initializer = 'zeros',
                        kernel_regularizer = NULL, bias_regularizer = NULL,
                        activity_regularizer = NULL, kernel_constraint = NULL,
                        bias_constraint = NULL, input_shape = NULL,
                        batch_input_shape = NULL, batch_size = NULL, dtype = NULL,
                        name = NULL, trainable = NULL, weights = NULL) {
  create_layer(keras$layers$Dense, object, list(
    units = as.integer(units),
    activation = activation,
    use_bias = use_bias,
    kernel_initializer = kernel_initializer,
    bias_initializer = bias_initializer,
    kernel_regularizer = kernel_regularizer,
    bias_regularizer = bias_regularizer,
    activity_regularizer = activity_regularizer,
    kernel_constraint = kernel_constraint,
    bias_constraint = bias_constraint,
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Reshapes an output to a certain shape.
#'
#' @inheritParams layer_activation
#'
#' @param target_shape List of integers, does not include the samples dimension
#' (batch size).
#'
#' @section Input and Output Shapes:
#'
#' Input shape: Arbitrary, although all dimensions in the input shape must be
#' fixed.
#'
#' Output shape: `(batch_size,) + target_shape`.
#'
#' @family core layers
#'
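#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): reshape a flat
#' # 12-element input into a 3 x 4 tensor, i.e. (batch, 12) -> (batch, 3, 4).
#' model <- keras_model_sequential() %>%
#'   layer_reshape(target_shape = c(3, 4), input_shape = c(12))
#' }
#'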
#' @export
layer_reshape <- function(object, target_shape, input_shape = NULL,
                          batch_input_shape = NULL, batch_size = NULL, dtype = NULL,
                          name = NULL, trainable = NULL, weights = NULL) {
  create_layer(keras$layers$Reshape, object, list(
    target_shape = normalize_shape(target_shape),
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Permute the dimensions of an input according to a given pattern
#'
#' @param dims List of integers. Permutation pattern, does not include the
#' samples dimension. Indexing starts at 1. For instance, `c(2, 1)` permutes
#' the first and second dimensions of the input.
#'
#' @inheritParams layer_activation
#'
#' @section Input and Output Shapes:
#'
#' Input shape: Arbitrary
#'
#' Output shape: Same as the input shape, but with the dimensions re-ordered
#' according to the specified pattern.
#'
#' @note Useful for e.g. connecting RNNs and convnets together.
#'
#' @family core layers
#'
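#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): swap the two
#' # non-batch dimensions, i.e. (batch, 10, 64) -> (batch, 64, 10).
#' model <- keras_model_sequential() %>%
#'   layer_permute(dims = c(2, 1), input_shape = c(10, 64))
#' }
#'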
#' @export
layer_permute <- function(object, dims, input_shape = NULL,
                          batch_input_shape = NULL, batch_size = NULL, dtype = NULL,
                          name = NULL, trainable = NULL, weights = NULL) {
  create_layer(keras$layers$Permute, object, list(
    dims = as_integer_tuple(dims, force_tuple = TRUE),
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Repeats the input n times.
#'
#' @inheritParams layer_dense
#'
#' @param n integer, repetition factor.
#'
#' @section Input shape: 2D tensor of shape `(num_samples, features)`.
#'
#' @section Output shape: 3D tensor of shape `(num_samples, n, features)`.
#'
#' @family core layers
#'
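#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): repeat a
#' # 32-dimensional vector 3 times, i.e. (batch, 32) -> (batch, 3, 32).
#' model <- keras_model_sequential() %>%
#'   layer_dense(units = 32, input_shape = c(100)) %>%
#'   layer_repeat_vector(3)
#' }
#'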
#' @export
layer_repeat_vector <- function(object, n,
                                batch_size = NULL, name = NULL,
                                trainable = NULL, weights = NULL) {
  create_layer(keras$layers$RepeatVector, object, list(
    n = as.integer(n),
    batch_size = as_nullable_integer(batch_size),
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Wraps arbitrary expression as a layer
#'
#' @inheritParams layer_dense
#'
#' @param f The function to be evaluated. Takes the input tensor as its first
#' argument.
#' @param output_shape Expected output shape from the function (not required
#' when using the TensorFlow backend).
#' @param mask Optional mask for the layer output (`NULL` means no masking).
#' @param arguments Optional named list of keyword arguments to be passed to
#' the function.
#'
#' @section Input shape: Arbitrary. Use the keyword argument `input_shape`
#' (list of integers, does not include the samples axis) when using this
#' layer as the first layer in a model.
#'
#' @section Output shape: Arbitrary (based on tensor returned from the function)
#'
#' @family core layers
#'
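#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation with the
#' # TensorFlow backend, where `^` is overloaded for tensors): square
#' # each element of the input.
#' model <- keras_model_sequential() %>%
#'   layer_lambda(function(x) x^2, input_shape = c(10))
#' }
#'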
#' @export
layer_lambda <- function(object, f, output_shape = NULL, mask = NULL, arguments = NULL,
                         input_shape = NULL, batch_input_shape = NULL, batch_size = NULL,
                         dtype = NULL, name = NULL, trainable = NULL, weights = NULL) {
  args <- list(
    `function` = f,
    mask = mask,
    arguments = arguments,
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  )
  # only the Theano and CNTK backends require an explicit output_shape;
  # for other backends pass it along only when supplied
  if (backend()$backend() %in% c("theano", "cntk"))
    args$output_shape <- as_integer_tuple(output_shape, force_tuple = TRUE)
  else if (!is.null(output_shape))
    args$output_shape <- normalize_shape(output_shape)
  create_layer(keras$layers$Lambda, object, args)
}
#' Layer that applies an update to the cost function based on input activity.
#'
#' @inheritParams layer_dense
#'
#' @param l1 L1 regularization factor (positive float).
#' @param l2 L2 regularization factor (positive float).
#'
#' @section Input shape: Arbitrary. Use the keyword argument `input_shape` (list
#' of integers, does not include the samples axis) when using this layer as
#' the first layer in a model.
#'
#' @section Output shape: Same shape as input.
#'
#' @family core layers
#'
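#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): add an L1
#' # penalty on the activations of the preceding dense layer.
#' model <- keras_model_sequential() %>%
#'   layer_dense(units = 64, activation = "relu", input_shape = c(20)) %>%
#'   layer_activity_regularization(l1 = 0.001)
#' }
#'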
#' @export
layer_activity_regularization <- function(object, l1 = 0.0, l2 = 0.0, input_shape = NULL,
                                          batch_input_shape = NULL, batch_size = NULL,
                                          dtype = NULL, name = NULL, trainable = NULL,
                                          weights = NULL) {
  create_layer(keras$layers$ActivityRegularization, object, list(
    l1 = l1,
    l2 = l2,
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Masks a sequence by using a mask value to skip timesteps.
#'
#' For each timestep in the input tensor (dimension #1 in the tensor), if all
#' values in the input tensor at that timestep are equal to `mask_value`, then
#' the timestep will be masked (skipped) in all downstream layers (as long as
#' they support masking). If any downstream layer does not support masking yet
#' receives such an input mask, an exception will be raised.
#'
#' @inheritParams layer_dense
#'
#' @param mask_value Float. Timesteps for which all input values equal
#' `mask_value` will be masked.
#'
#' @family core layers
#'
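#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): skip timesteps
#' # that consist entirely of zeros before feeding sequences to an LSTM.
#' model <- keras_model_sequential() %>%
#'   layer_masking(mask_value = 0, input_shape = c(10, 16)) %>%
#'   layer_lstm(units = 32)
#' }
#'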
#' @export
layer_masking <- function(object, mask_value = 0.0, input_shape = NULL,
                          batch_input_shape = NULL, batch_size = NULL, dtype = NULL,
                          name = NULL, trainable = NULL, weights = NULL) {
  create_layer(keras$layers$Masking, object, list(
    mask_value = mask_value,
    input_shape = normalize_shape(input_shape),
    batch_input_shape = normalize_shape(batch_input_shape),
    batch_size = as_nullable_integer(batch_size),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  ))
}
#' Flattens an input
#'
#' Flattens the input. Does not affect the batch size.
#'
#' @inheritParams layer_activation
#'
#' @param data_format A string, one of `channels_last` (default) or
#' `channels_first`. The ordering of the dimensions in the inputs. The purpose
#' of this argument is to preserve weight ordering when switching a model from
#' one data format to another. `channels_last` corresponds to inputs with
#' shape `(batch, ..., channels)` while `channels_first` corresponds to inputs
#' with shape `(batch, channels, ...)`. It defaults to the `image_data_format`
#' value found in your Keras config file at `~/.keras/keras.json`. If you
#' never set it, then it will be "channels_last".
#'
#' @family core layers
#'
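#' @examples
#' \dontrun{
#' # A minimal sketch (assumes a working Keras installation): flatten
#' # (batch, 3, 32) inputs into (batch, 96) vectors.
#' model <- keras_model_sequential() %>%
#'   layer_flatten(input_shape = c(3, 32))
#' }
#'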
#' @export
layer_flatten <- function(object, data_format = NULL, input_shape = NULL, dtype = NULL,
                          name = NULL, trainable = NULL, weights = NULL) {
  args <- list(
    input_shape = normalize_shape(input_shape),
    dtype = dtype,
    name = name,
    trainable = trainable,
    weights = weights
  )
  # the data_format argument is only supported by Keras >= 2.1.6
  if (keras_version() >= "2.2.0") {
    args$data_format <- data_format
  } else if (keras_version() >= "2.1.6") {
    if (is.null(data_format))
      data_format <- "channels_last"
    args$data_format <- data_format
  }
  create_layer(keras$layers$Flatten, object, args)
}
# Coerce numeric values to integer (pass anything else through)
as_integer <- function(x) {
  if (is.numeric(x))
    as.integer(x)
  else
    x
}

# Coerce to a Python tuple of integers (or pass through scalars/NULL)
as_integer_tuple <- function(x, force_tuple = FALSE) {
  if (is.null(x))
    x
  else if (is.list(x) || force_tuple)
    tuple(as.list(as.integer(x)))
  else
    as.integer(x)
}

as_nullable_integer <- function(x) {
  if (is.null(x))
    x
  else
    as.integer(x)
}

# Convert a 1-based R layer index to a 0-based Python index
as_layer_index <- function(x) {
  x <- as_nullable_integer(x)
  if (!is.null(x))
    x <- x - 1L
  x
}

# Helper function to normalize paths
normalize_path <- function(path) {
  if (is.null(path))
    NULL
  else
    normalizePath(path.expand(path), mustWork = FALSE)
}

# Helper function to coerce shape arguments to tuple
normalize_shape <- function(shape) {
  # reflect NULL back
  if (is.null(shape))
    return(shape)
  # if it's a list or a numeric vector then convert to integer
  # (NULL values within the shape, e.g. a free batch dimension, are preserved)
  if (is.list(shape) || is.numeric(shape)) {
    shape <- lapply(shape, function(value) {
      if (!is.null(value))
        as.integer(value)
      else
        NULL
    })
  }
  # coerce to tuple so it's iterable
  tuple(shape)
}
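
# A quick illustration of normalize_shape() (assumes an initialized
# reticulate/Python session):
#   normalize_shape(c(10, 32))       # -> Python tuple (10, 32)
#   normalize_shape(list(NULL, 32))  # -> (None, 32), i.e. a free batch dim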
#' Create a Keras Layer
#'
#' @param layer_class Python layer class or R6 class of type KerasLayer
#' @param object Object to compose layer with. This is either a
#' [keras_model_sequential()] to add the layer to, or another Layer which
#' this layer will call.
#' @param args List of arguments to layer constructor function
#'
#' @return A Keras layer
#'
#' @note The `object` parameter can be missing, in which case the
#' layer is created without a connection to an existing graph.
#'
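#' @examples
#' \dontrun{
#' # A minimal sketch: wrap a built-in Keras layer class directly. Here
#' # `keras` is assumed to be the Keras Python module (e.g. obtained via
#' # reticulate::import("keras")).
#' model <- keras_model_sequential()
#' create_layer(keras$layers$Dropout, model, list(rate = 0.5))
#' }
#'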
#' @export
create_layer <- function(layer_class, object, args = list()) {
  # remove kwargs that are NULL (in R, assigning NULL to a list element
  # removes it, so these self-assignments drop any unset common arguments)
  args$input_shape <- args$input_shape
  args$batch_input_shape <- args$batch_input_shape
  args$batch_size <- args$batch_size
  args$dtype <- args$dtype
  args$name <- args$name
  args$trainable <- args$trainable
  args$weights <- args$weights
  # convert custom constraints
  constraint_args <- grepl("^.*_constraint$", names(args))
  constraint_args <- names(args)[constraint_args]
  for (arg in constraint_args)
    args[[arg]] <- as_constraint(args[[arg]])
  # if this is an R6 class then create a Python wrapper for it
  if (inherits(layer_class, "R6ClassGenerator")) {
    # common layer parameters (e.g. "input_shape") need to be passed to the
    # Python Layer constructor rather than the R6 constructor. Here we
    # extract and set aside any of those arguments we find and set them to
    # NULL within the args list which will be passed to the R6 layer
    common_arg_names <- c("input_shape", "batch_input_shape", "batch_size",
                          "dtype", "name", "trainable", "weights")
    py_wrapper_args <- args[common_arg_names]
    py_wrapper_args[sapply(py_wrapper_args, is.null)] <- NULL
    for (arg in names(py_wrapper_args))
      args[[arg]] <- NULL
    # create the R6 layer
    r6_layer <- do.call(layer_class$new, args)
    # create the python wrapper (passing the extracted py_wrapper_args)
    python_path <- system.file("python", package = "keras")
    tools <- import_from_path("kerastools", path = python_path)
    py_wrapper_args$r_build <- r6_layer$build
    py_wrapper_args$r_call <- reticulate::py_func(r6_layer$call)
    py_wrapper_args$r_compute_output_shape <- r6_layer$compute_output_shape
    layer <- do.call(tools$layer$RLayer, py_wrapper_args)
    # set back reference in R layer
    r6_layer$.set_wrapper(layer)
  } else {
    # create layer from class
    layer <- do.call(layer_class, args)
  }
  # compose if we have an object to compose with
  if (missing(object) || is.null(object))
    layer
  else
    invisible(compose_layer(object, layer))
}
# Helper function to compose a layer with an object of type Model or Layer
compose_layer <- function(object, layer, ...) {
  UseMethod("compose_layer")
}

compose_layer.default <- function(object, layer, ...) {
  stop_with_invalid_layer()
}

compose_layer.keras.models.Sequential <- function(object, layer, ...) {
  if (length(list(...)) > 0)
    warning("arguments passed via ellipsis will be ignored")
  object$add(layer)
  object
}

compose_layer.keras.engine.sequential.Sequential <- compose_layer.keras.models.Sequential

compose_layer.python.builtin.object <- function(object, layer, ...) {
  if (is.function(layer))
    layer(object, ...)
  else
    stop_with_invalid_layer()
}

compose_layer.list <- function(object, layer, ...) {
  layer(object, ...)
}

compose_layer.numeric <- function(object, layer, ...) {
  if (!(tensorflow::tf_version() >= "1.14"))
    stop_with_invalid_layer()
  if (is.function(layer))
    layer(object, ...)
  else
    stop_with_invalid_layer()
}

stop_with_invalid_layer <- function() {
  stop("Invalid input to layer function (must be a model or a tensor)",
       call. = FALSE)
}