#' Bayesian Optimization, Best Result Set
#'
#' \code{optimize.bayesian.best} gets the best result issued from \code{optimize.bayesian}.
#'
#' Please see vignette for demos: \code{vignette("optimize.bayesian", package = "Laurae2")} or \code{help_me("optimize.bayesian")}.
#'
#' @param optimized Type: List returned from \code{optimize.bayesian}. The object to look for the best result.
#'
#' @return A list where \code{x} is the list of the best parameter set, where \code{y} is the loss value at the \code{x} parameters, and where \code{iter} is the best iteration number.
#' @export
#'
#' @examples
#' \dontrun{
#' library(xgboost)
#' library(mlrMBO)
#'
#' # Load demo data
#' data(EuStockMarkets)
#'
#' # Transform dataset to "quantiles"
#' for (i in 1:4) {
#' EuStockMarkets[, i] <- (ecdf(EuStockMarkets[, i]))(EuStockMarkets[, i])
#' }
#'
#' # Create datasets: 1500 observations for training, 360 for testing
#' # Features are:
#' # -- Deutscher Aktienindex (DAX),
#' # -- Swiss Market Index (SMI),
#' # -- and Cotation Assistee en Continu (CAC)
#' # Label is Financial Times Stock Exchange 100 Index (FTSE)
#' dtrain <- xgb.DMatrix(EuStockMarkets[1:1500, 1:3], label = EuStockMarkets[1:1500, 4])
#' dval <- xgb.DMatrix(EuStockMarkets[1501:1860, 1:3], label = EuStockMarkets[1501:1860, 4])
#'
#' # Create watchlist for monitoring metric
#' watchlist <- list(train = dtrain, eval = dval)
#'
#' # Our loss function to optimize: minimize RMSE
#' xgboost_optimization <- function(x) {
#'
#' # Train the model
#' gc(verbose = FALSE)
#' set.seed(1)
#' model <- xgb.train(params = list(max_depth = x[1],
#' subsample = x[2],
#' tree_method = x[3],
#' eta = 0.2,
#' nthread = 1,
#' objective = "reg:linear",
#' eval_metric = "rmse"),
#' data = dtrain, # Warn: Access using parent environment
#' nrounds = 9999999,
#' watchlist = watchlist, # Warn: Access using parent environment
#' early_stopping_rounds = 5,
#' verbose = 0)
#' score <- model$best_score
#' rm(model)
#' return(score)
#'
#' }
#'
#' # The parameters: max_depth in [1, 15], subsample in [0.1, 1], and tree_method in {exact, hist}
#' my_parameters <- makeParamSet(
#' makeIntegerParam(id = "max_depth", lower = 1, upper = 15),
#' makeNumericParam(id = "subsample", lower = 0.1, upper = 1),
#' makeDiscreteParam(id = "tree_method", values = c("exact", "hist"))
#' )
#'
#' # Perform optimization
#' optimization <- optimize.bayesian(loss_func = xgboost_optimization,
#' param_set = my_parameters,
#' seed = 1,
#' maximize = FALSE,
#' initialization = 10,
#' max_evaluations = 25,
#' time_budget = 30,
#' verbose = TRUE)
#'
#' # What are the best parameters?
#' optimize.bayesian.best(optimization)
#' }
optimize.bayesian.best <- function(optimized) {
  # Pull the best parameter set, its loss value, and the iteration at which
  # it was found out of an optimize.bayesian result object. Exact [[-indexing
  # avoids the partial-name matching that $ performs on lists.
  list(x = optimized[["x"]],
       y = optimized[["y"]],
       iter = optimized[["best.ind"]])
}