R/RLearner_regr_nnet.R

Defines the functions makeRLearner.regr.nnet, trainLearner.regr.nnet and predictLearner.regr.nnet.

#' @export
makeRLearner.regr.nnet = function() {
  makeRLearnerRegr(
    cl = "regr.nnet",
    package = "nnet",
    par.set = makeParamSet(
      makeIntegerLearnerParam(id = "size", default = 3L, lower = 0L),
      # FIXME: nnet() seems to have no default for size; if it were 3, the par.vals entry below would be redundant
      makeIntegerLearnerParam(id = "maxit", default = 100L, lower = 1L),
      # linout = TRUE is hardcoded for regression; 'entropy', 'softmax' and 'censored' apply only to classification
      makeLogicalLearnerParam(id = "skip", default = FALSE),
      makeNumericLearnerParam(id = "rang", default = 0.7),
      makeNumericLearnerParam(id = "decay", default = 0, lower = 0),
      makeLogicalLearnerParam(id = "Hess", default = FALSE),
      makeLogicalLearnerParam(id = "trace", default = TRUE, tunable = FALSE),
      makeIntegerLearnerParam(id = "MaxNWts", default = 1000L, lower = 1L, tunable = FALSE),
      makeNumericLearnerParam(id = "abstol", default = 1.0e-4),
      makeNumericLearnerParam(id = "reltol", default = 1.0e-8)
    ),
    par.vals = list(size = 3L),
    properties = c("numerics", "factors", "weights"),
    name = "Neural Network",
    short.name = "nnet",
    note = "`size` has been set to `3` by default.",
    callees = "nnet"
  )
}
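
The parameter set above only declares the tunable hyperparameters; concrete values are supplied through the usual mlr interface. A minimal sketch, assuming the mlr package is attached (size, decay and trace are the parameters declared above):

library(mlr)
lrn = makeLearner("regr.nnet", size = 5L, decay = 0.1, trace = FALSE)
getHyperPars(lrn)  # named list with size, decay and trace as set above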

#' @export
trainLearner.regr.nnet = function(.learner, .task, .subset, .weights = NULL, ...) {
  f = getTaskFormula(.task)
  d = getTaskData(.task, .subset)
  # linout = TRUE forces a linear output unit, as required for regression
  if (is.null(.weights)) {
    nnet::nnet(f, data = d, linout = TRUE, ...)
  } else {
    nnet::nnet(f, data = d, linout = TRUE, weights = .weights, ...)
  }
}
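
Because linout = TRUE is hardcoded, the wrapper reduces to an ordinary nnet() call with a linear output unit. A standalone sketch of the equivalent direct call on the built-in cars data (the dataset and hyperparameter values here are illustrative, not part of the learner):

fit = nnet::nnet(dist ~ speed, data = cars, size = 3L, linout = TRUE, trace = FALSE)
head(predict(fit, newdata = cars))  # fitted values as an n x 1 matrix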

#' @export
predictLearner.regr.nnet = function(.learner, .model, .newdata, ...) {
  # predict.nnet returns an n x 1 matrix for regression; drop it to a plain numeric vector
  predict(.model$learner.model, newdata = .newdata, ...)[, 1L]
}
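
Putting the three functions together, the learner behaves like any other mlr regression learner. A minimal end-to-end sketch on the Boston housing task bh.task that ships with mlr (hyperparameter values are illustrative):

library(mlr)
lrn = makeLearner("regr.nnet", size = 5L, maxit = 200L, trace = FALSE)
mod = train(lrn, bh.task)
pred = predict(mod, task = bh.task)
performance(pred, measures = mse)  # mean squared error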
