# tests/testthat/test_feature_weights.R

library(xgboost)

# NOTE(review): context() is deprecated in testthat 3e; kept for
# compatibility with the suite's existing edition.
context("feature weights")

# Thread count shared by DMatrix construction and training below;
# kept small so the test runs quickly on CI.
n_threads <- 2

test_that("training with feature weights works", {
  # Synthetic regression problem: y is the row sum, so every column is
  # equally informative; any importance skew must come from the weights.
  nrows <- 1000
  ncols <- 9
  set.seed(2022)
  x <- matrix(rnorm(nrows * ncols), nrow = nrows)
  y <- rowSums(x)
  # Ascending per-column sampling weights (1..ncols): under
  # colsample_bynode, column ncols should be offered to splits far more
  # often than column 1.
  weights <- seq_len(ncols)

  # Train with the given tree_method and assert the importance ordering
  # induced by the feature weights.
  test <- function(tm) {
    feat_names <- paste0("f", seq_len(ncols))
    xy <- xgb.DMatrix(
      data = x, label = y, feature_weights = weights, nthread = n_threads
    )
    params <- list(
      colsample_bynode = 0.4, tree_method = tm, nthread = n_threads
    )
    model <- xgb.train(params = params, data = xy, nrounds = 32)
    importance <- xgb.importance(model = model, feature_names = feat_names)
    # One row per feature, four columns (Feature, Gain, Cover, Frequency).
    expect_equal(dim(importance), c(ncols, 4))
    # data.table syntax: order rows by feature name so row 1 is f1 and
    # row 9 is f9 (single-digit names sort lexicographically == numerically).
    importance <- importance[order(importance$Feature)]
    # The heaviest-weighted feature must be split on more often than the
    # lightest-weighted one.
    expect_lt(importance[1, Frequency], importance[9, Frequency])
  }

  for (tm in c("hist", "approx", "exact")) {
    test(tm)
  }
})

# Try the xgboost package in your browser.
#
# Any scripts or data that you upload to this service are public.
#
# xgboost documentation built on May 29, 2024, 5:11 a.m.