HessianMLP {NeuralSens}	R Documentation
Function for evaluating the sensitivities of the input variables in an MLP model
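The deractfunc and der2actfunc arguments below, together with the function name, suggest that these sensitivities are second-order partial derivatives. A hedged reading (an interpretation, not a formal definition taken from this page) is that, for each sample x_n, output y_k and pair of inputs (x_i, x_j), the function evaluates

% Assumed Hessian-type sensitivity, evaluated at every training sample x_n
s_{ij}^{k}(x_n) = \left. \frac{\partial^2 y_k}{\partial x_i \, \partial x_j} \right|_{x = x_n}

and then aggregates these values into the sensitivity metrics it returns.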
Usage

HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  ...
)

## Default S3 method:
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  trData,
  actfunc = NULL,
  deractfunc = NULL,
  der2actfunc = NULL,
  preProc = NULL,
  terms = NULL,
  output_name = NULL,
  ...
)

## S3 method for class 'train'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  ...
)

## S3 method for class 'H2OMultinomialModel'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  ...
)

## S3 method for class 'H2ORegressionModel'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  ...
)

## S3 method for class 'list'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  trData,
  actfunc,
  ...
)

## S3 method for class 'mlp'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  trData,
  preProc = NULL,
  terms = NULL,
  ...
)

## S3 method for class 'nn'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  preProc = NULL,
  terms = NULL,
  ...
)

## S3 method for class 'nnet'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  trData,
  preProc = NULL,
  terms = NULL,
  ...
)

## S3 method for class 'nnetar'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  ...
)

## S3 method for class 'numeric'
HessianMLP(
  MLP.fit,
  .returnSens = TRUE,
  plot = TRUE,
  .rawSens = FALSE,
  sens_origin_layer = 1,
  sens_end_layer = "last",
  sens_origin_input = TRUE,
  sens_end_input = FALSE,
  trData,
  actfunc = NULL,
  preProc = NULL,
  terms = NULL,
  ...
)
Arguments

MLP.fit: fitted neural network model.

.returnSens: DEPRECATED.

plot: logical; whether to plot the sensitivity analysis. Default is TRUE.

.rawSens: DEPRECATED.

sens_origin_layer: numeric; layer of neurons with respect to which the derivatives are calculated. The input layer is specified by 1 (default).

sens_end_layer: numeric or "last"; layer of neurons whose response is derived. "last" (default) selects the output layer.

sens_origin_input: logical; if TRUE (default), derivatives are calculated with respect to the inputs of the sens_origin_layer layer; if FALSE, with respect to its output.

sens_end_input: logical; if TRUE, the derivatives calculated are those of the input of the sens_end_layer layer; if FALSE (default), those of its output.

...: additional arguments passed to or from other methods.

trData: data.frame with the training data used to evaluate the sensitivities of the model.

actfunc: character vector with the activation function of each layer of neurons.

deractfunc: character vector with the derivative of the activation function of each layer of neurons.

der2actfunc: character vector with the second derivative of the activation function of each layer of neurons.

preProc: preProcess structure applied to the training data. See also caret::preProcess.

terms: function applied to the training data to create factors. See also caret::train.

output_name: character; name of the output variable in trData.
Details

If an input of class factor is used with a package that requires the input data as a matrix, the dummy variables must be created before training the neural network. Afterwards, the training data must be passed to the function through the trData argument (see the sketch below).
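A minimal sketch of that workflow, assuming nnet as the training package; the objects df, dummies, trData and mod are illustrative names, not taken from this page:

## Hypothetical example: dummy-code a factor input before training,
## then pass the same dummy-coded data through trData
df <- data.frame(y    = rnorm(100),
                 x1   = rnorm(100),
                 fact = factor(sample(c("a", "b", "c"), 100, replace = TRUE)))
# Create the dummy variables for the factor column
dummies <- model.matrix(~ fact - 1, data = df)
trData  <- cbind(df[, c("y", "x1")], as.data.frame(dummies))
# Train the neural network on the dummy-coded data
mod <- nnet::nnet(y ~ ., data = trData, size = 3, linout = TRUE,
                  maxit = 100, trace = FALSE)
# Give the same dummy-coded data to HessianMLP
NeuralSens::HessianMLP(mod, trData = trData, plot = FALSE)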
Value

SensMLP object with the sensitivity metrics and sensitivities of the MLP model passed to the function.
When plot = TRUE, the function also shows:

Plot 1: colorful plot with the classification of the classes in a 2D map
Plot 2: b/w plot with the probability of the chosen class in a 2D map
Plot 3: plot with the predictions of the data provided
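A short usage sketch, reusing the nnetmod and nntrData objects built in the Examples below; whether the returned object has print/plot methods is an assumption, not something stated on this page:

## Hypothetical: keep the returned sensitivities instead of only plotting them
hess <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE)
hess          # print the sensitivity metrics
# plot(hess)  # assumed plot method for the returned object; uncomment if available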
Examples

## Load data -------------------------------------------------------------------
data("DAILY_DEMAND_TR")
fdata <- DAILY_DEMAND_TR
## Parameters of the NNET ------------------------------------------------------
hidden_neurons <- 5
iters <- 100
decay <- 0.1
################################################################################
######################### REGRESSION NNET #####################################
################################################################################
## Regression dataframe --------------------------------------------------------
# Scale the data
fdata.Reg.tr <- fdata[,2:ncol(fdata)]
fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10
fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000
# Normalize the data for some models
preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale"))
nntrData <- predict(preProc, fdata.Reg.tr)
## TRAIN nnet NNET --------------------------------------------------------
# Create a formula to train NNET
form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ")
form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ "))
set.seed(150)
nnetmod <- nnet::nnet(form,
data = nntrData,
linout = TRUE,  # nnet uses 'linout' for linear output units
size = hidden_neurons,
decay = decay,
maxit = iters)
# Try HessianMLP
NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE)
# Try HessianMLP to calculate sensitivities with respect to the output of the hidden neurons
NeuralSens::HessianMLP(nnetmod, trData = nntrData,
sens_origin_layer = 2,
sens_end_layer = "last",
sens_origin_input = FALSE,
sens_end_input = FALSE)
## Train caret NNET ------------------------------------------------------------
# Create trainControl
ctrl_tune <- caret::trainControl(method = "boot",
savePredictions = FALSE,
summaryFunction = caret::defaultSummary)
set.seed(150) #For replication
caretmod <- caret::train(form = DEM~.,
data = fdata.Reg.tr,
method = "nnet",
linout = TRUE,
tuneGrid = data.frame(size = 3,
decay = decay),
maxit = iters,
preProcess = c("center","scale"),
trControl = ctrl_tune,
metric = "RMSE")
# Try HessianMLP
NeuralSens::HessianMLP(caretmod)
## Train h2o NNET --------------------------------------------------------------
# Create a cluster with 4 available cores
h2o::h2o.init(ip = "localhost",
nthreads = 4)
# Reset the cluster
h2o::h2o.removeAll()
fdata_h2o <- h2o::as.h2o(x = fdata.Reg.tr, destination_frame = "fdata_h2o")
set.seed(150)
h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)],
y = names(fdata.Reg.tr)[1],
distribution = "AUTO",
training_frame = fdata_h2o,
standardize = TRUE,
activation = "Tanh",
hidden = c(hidden_neurons),
stopping_rounds = 0,
epochs = iters,
seed = 150,
model_id = "nnet_h2o",
adaptive_rate = FALSE,
rate_decay = decay,
export_weights_and_biases = TRUE)
# Try HessianMLP
NeuralSens::HessianMLP(h2omod)
# Turn off the cluster
h2o::h2o.shutdown(prompt = FALSE)
rm(fdata_h2o)
## Train RSNNS NNET ------------------------------------------------------------
# Normalize data using RSNNS algorithms
trData <- as.data.frame(RSNNS::normalizeData(fdata.Reg.tr))
names(trData) <- names(fdata.Reg.tr)
set.seed(150)
RSNNSmod <- RSNNS::mlp(x = trData[,2:ncol(trData)],
y = trData[,1],
size = hidden_neurons,
linOut = TRUE,
learnFuncParams=c(decay),
maxit=iters)
# Try HessianMLP
NeuralSens::HessianMLP(RSNNSmod, trData = trData, output_name = "DEM")
## USE DEFAULT METHOD ----------------------------------------------------------
NeuralSens::HessianMLP(caretmod$finalModel$wts,
trData = fdata.Reg.tr,
mlpstr = caretmod$finalModel$n,
coefnames = caretmod$coefnames,
actfunc = c("linear","sigmoid","linear"),
output_name = "DEM")
################################################################################
######################### CLASSIFICATION NNET #################################
################################################################################
## Classification dataframe ----------------------------------------------------
# Scale the data
fdata.Reg.cl <- fdata[,2:ncol(fdata)]
fdata.Reg.cl[,2:3] <- fdata.Reg.cl[,2:3]/10
fdata.Reg.cl[,1] <- fdata.Reg.cl[,1]/1000
# Normalize the data for some models
preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale"))
nntrData <- predict(preProc, fdata.Reg.cl)
# Factorize the output
fdata.Reg.cl$DEM <- factor(round(fdata.Reg.cl$DEM, digits = 1))
# Normalize the data for some models
preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale"))
nntrData <- predict(preProc, fdata.Reg.cl)
## Train caret NNET ------------------------------------------------------------
# Create trainControl
ctrl_tune <- caret::trainControl(method = "boot",
savePredictions = FALSE,
summaryFunction = caret::defaultSummary)
set.seed(150) #For replication
caretmod <- caret::train(form = DEM~.,
data = fdata.Reg.cl,
method = "nnet",
linout = FALSE,
tuneGrid = data.frame(size = hidden_neurons,
decay = decay),
maxit = iters,
preProcess = c("center","scale"),
trControl = ctrl_tune,
metric = "Accuracy")
# Try HessianMLP
NeuralSens::HessianMLP(caretmod)
## Train h2o NNET --------------------------------------------------------------
# Create local cluster with 4 available cores
h2o::h2o.init(ip = "localhost",
nthreads = 4)
# Reset the cluster
h2o::h2o.removeAll()
fdata_h2o <- h2o::as.h2o(x = fdata.Reg.cl, destination_frame = "fdata_h2o")
set.seed(150)
h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)],
y = names(fdata.Reg.cl)[1],
distribution = "AUTO",
training_frame = fdata_h2o,
standardize = TRUE,
activation = "Tanh",
hidden = c(hidden_neurons),
stopping_rounds = 0,
epochs = iters,
seed = 150,
model_id = "nnet_h2o",
adaptive_rate = FALSE,
rate_decay = decay,
export_weights_and_biases = TRUE)
# Try HessianMLP
NeuralSens::HessianMLP(h2omod)
# Turn off the cluster
h2o::h2o.shutdown(prompt = FALSE)
rm(fdata_h2o)
## TRAIN nnet NNET --------------------------------------------------------
# Create a formula to train NNET
form <- paste(names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)], collapse = " + ")
form <- formula(paste(names(fdata.Reg.cl)[1], form, sep = " ~ "))
set.seed(150)
nnetmod <- nnet::nnet(form,
data = nntrData,
size = hidden_neurons,
decay = decay,
maxit = iters)
# Try HessianMLP
NeuralSens::HessianMLP(nnetmod, trData = nntrData)