knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>",
  eval = FALSE
)
library(mlr3)
library(mlr3learners.lightgbm)
library(paradox)
library(mlbench)

Load the dataset

The PimaIndiansDiabetes2 data from the mlbench package serves as the example task. The helper backend_preprocessing() recodes the binary target so that the positive class "pos" becomes "1", which is why the task below declares positive = "1".

data("PimaIndiansDiabetes2")
dataset = data.table::as.data.table(PimaIndiansDiabetes2)
target_col = "diabetes"

dataset = backend_preprocessing(
  datatable = dataset,
  target_col = target_col,
  task_type = "class:binary",
  positive = "pos"
)
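
As a quick sanity check, the recoded target distribution can be inspected; after backend_preprocessing(), the target column should only contain the levels "0" and "1":

# tabulate the recoded binary target
table(dataset[[target_col]])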

task = mlr3::TaskClassif$new(
  id = "pima",
  backend = dataset,
  target = target_col,
  positive = "1"
)
# create a 70/30 train/test split
set.seed(17)
split = list(
  train_index = sample(seq_len(task$nrow), size = floor(0.7 * task$nrow))
)
split$test_index = setdiff(seq_len(task$nrow), split$train_index)
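
Alternatively, mlr3's built-in holdout resampling yields an equivalent split; a minimal sketch (the manual index split above is kept for reproducibility with the seed):

# equivalent split via mlr3's holdout resampling
holdout = mlr3::rsmp("holdout", ratio = 0.7)
holdout$instantiate(task)
# holdout$train_set(1) and holdout$test_set(1) then take the roles of
# split$train_index and split$test_index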

Early stopping using the internal lightgbm implementation

LightGBM's native early stopping halts boosting as soon as the validation metric (here the AUC) has not improved for early_stopping_round consecutive iterations, so training can finish well before num_iterations is reached.

learner = mlr3::lrn("classif.lightgbm", objective = "binary")
# define parameters
learner$param_set$values = mlr3misc::insert_named(
  learner$param_set$values,
  list(
    "early_stopping_round" = 10,
    "learning_rate" = 0.1,
    "seed" = 17L,
    "metric" = "auc",
    "num_iterations" = 100
  )
)

system.time(
  learner$train(task, row_ids = split$train_index)
)

# number of boosting iterations actually performed before early stopping
learner$model$current_iter()
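
To see how the early-stopped model generalizes, it can be scored on the held-out rows. A minimal sketch, assuming the learner predicts probabilities (which the AUC measure requires):

# score the early-stopped model on the test set
pred = learner$predict(task, row_ids = split$test_index)
pred$score(mlr3::msr("classif.auc"))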

Early stopping using the mlr3tuning implementation

Instead of relying on lightgbm's internal mechanism, num_iterations can be treated as a tuning parameter: a design-points tuner walks through the candidate values in order, and a stagnation terminator stops the tuning once the cross-validated AUC has not improved for 10 evaluations.

learner = mlr3::lrn("classif.lightgbm")

# define parameters
learner$param_set$values = mlr3misc::insert_named(
  learner$param_set$values,
  list(
    "objective" = "binary",
    "nrounds_by_cv" = FALSE,
    "learning_rate" = 0.1,
    "seed" = 17L,
    "metric" = "auc",
    "num_threads" = 1
  )
)

# define num_iterations as the tuning parameter; it is an integer,
# so ParamInt is the appropriate parameter class
tune_ps = ParamSet$new(list(
  ParamInt$new("num_iterations", lower = 1L, upper = 100L)
))

# generate a grid design with one design point per candidate value
design = paradox::generate_design_grid(
  tune_ps,
  param_resolutions = c(num_iterations = 100)
)
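
The resulting design is a data.table with one row per candidate value of num_iterations, which can be inspected before tuning:

# the grid contains the candidate values 1, 2, ..., 100
head(design$data)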

# Create the resampling strategy and the measure
resampling = mlr3::rsmp("cv", folds = 5)
measure = mlr3::msr("classif.auc")

# Create the tuner, which evaluates the design points one by one
tuner = mlr3tuning::tnr("design_points", design = design$data, batch_size = 1)

# Create the terminator: stop when the best AUC has stagnated for 10 batches
terminator = mlr3tuning::term("stagnation", iters = 10)

# Create the AutoTuner
at = mlr3tuning::AutoTuner$new(
  learner = learner,
  resampling = resampling,
  measures = measure,
  tune_ps = tune_ps,
  terminator = terminator,
  tuner = tuner
)
at

# Train the AutoTuner; the resampling iterations run in parallel via future
future::plan("multisession")
set.seed(17)

system.time(
  at$train(task, row_ids = split$train_index)
)
future::plan("sequential")

# number of boosting iterations of the finally selected model
at$learner$model$current_iter()
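
Finally, the tuning outcome can be inspected and the tuned model scored on the held-out rows. A sketch against the mlr3tuning API used here; at$tuning_result is assumed to hold the selected configuration:

# best configuration found during tuning
at$tuning_result

# score the tuned model on the test set
pred = at$predict(task, row_ids = split$test_index)
pred$score(measure)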

