# tests/testthat/test_select_definitions.R

library(cvms)
context("select_definitions()")


test_that("select_definitions() works with output from cross-validation", {

  testthat::skip_on_cran()

  # Create data ####

  xpectr::set_test_seed(1)
  dat <- participant.scores %>%
    groupdata2::fold(k = 3, cat_col = "diagnosis")
  mdata <- musicians %>%
    groupdata2::fold(k = 3, cat_col = "Class")
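  # groupdata2::fold() adds a ".folds" grouping column to each dataset,
  # with folds balanced by the specified cat_col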

  cv_gauss <- suppressMessages(
    cross_validate_fn(
      data = dat,
      formulas = "score ~ diagnosis + (1|session)",
      model_fn = model_functions("lmer"),
      predict_fn = predict_functions("lmer"),
      hyperparameters = list("REML" = FALSE),
      fold_cols = ".folds",
      metrics = list("all" = TRUE),
      type = "gaussian"
    ))

  cv_binom <- suppressMessages(
    cross_validate_fn(
      data = dat,
      formulas = "diagnosis ~ score + (1|session)",
      model_fn = model_functions("glmer_binomial"),
      predict_fn = predict_functions("glmer_binomial"),
      fold_cols = ".folds",
      metrics = list("all" = TRUE),
      type = "binomial"
    ))

  cv_multinom <- suppressMessages(
    cross_validate_fn(
      data = mdata,
      formulas = "Class ~ Height + Bass + Guitar + Keys + Vocals + (1|Drums)",
      model_fn = model_functions("svm_multinomial"),
      predict_fn = predict_functions("svm_multinomial"),
      hyperparameters = list("kernel" = "linear", "cost" = 10),
      fold_cols = ".folds",
      metrics = list("all" = TRUE),
      type = "multinomial"
    ))

  # Test select_definitions() ####
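  # select_definitions() should return one row per model formula with the
  # Dependent, Fixed and Random terms, plus a column per hyperparameter
  # (REML for the gaussian fit; kernel and cost for the multinomial fit),
  # as checked by the expectations below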

  gaussian_definitions <- select_definitions(cv_gauss)
  binomial_definitions <- select_definitions(cv_binom)
  multinomial_definitions <- select_definitions(cv_multinom)

  ## Testing 'gaussian_definitions'                                         ####
  ## Initially generated by xpectr
  xpectr::set_test_seed(42)
  # Testing class
  expect_equal(
    class(gaussian_definitions),
    c("tbl_df", "tbl", "data.frame"),
    fixed = TRUE)
  # Testing column values
  expect_equal(
    gaussian_definitions[["Dependent"]],
    "score",
    fixed = TRUE)
  expect_equal(
    gaussian_definitions[["Fixed"]],
    "diagnosis",
    fixed = TRUE)
  expect_equal(
    gaussian_definitions[["Random"]],
    "(1|session)",
    fixed = TRUE)
  expect_equal(
    gaussian_definitions[["REML"]],
    FALSE)
  # Testing column names
  expect_equal(
    names(gaussian_definitions),
    c("Dependent", "Fixed", "Random", "REML"),
    fixed = TRUE)
  # Testing column classes
  expect_equal(
    xpectr::element_classes(gaussian_definitions),
    c("character", "character", "character", "logical"),
    fixed = TRUE)
  # Testing column types
  expect_equal(
    xpectr::element_types(gaussian_definitions),
    c("character", "character", "character", "logical"),
    fixed = TRUE)
  # Testing dimensions
  expect_equal(
    dim(gaussian_definitions),
    c(1L, 4L))
  # Testing group keys
  expect_equal(
    colnames(dplyr::group_keys(gaussian_definitions)),
    character(0),
    fixed = TRUE)
  ## Finished testing 'gaussian_definitions'                                ####

  ## Testing 'binomial_definitions'                                         ####
  ## Initially generated by xpectr
  xpectr::set_test_seed(42)
  # Testing class
  expect_equal(
    class(binomial_definitions),
    c("tbl_df", "tbl", "data.frame"),
    fixed = TRUE)
  # Testing column values
  expect_equal(
    binomial_definitions[["Dependent"]],
    "diagnosis",
    fixed = TRUE)
  expect_equal(
    binomial_definitions[["Fixed"]],
    "score",
    fixed = TRUE)
  expect_equal(
    binomial_definitions[["Random"]],
    "(1|session)",
    fixed = TRUE)
  # Testing column names
  expect_equal(
    names(binomial_definitions),
    c("Dependent", "Fixed", "Random"),
    fixed = TRUE)
  # Testing column classes
  expect_equal(
    xpectr::element_classes(binomial_definitions),
    c("character", "character", "character"),
    fixed = TRUE)
  # Testing column types
  expect_equal(
    xpectr::element_types(binomial_definitions),
    c("character", "character", "character"),
    fixed = TRUE)
  # Testing dimensions
  expect_equal(
    dim(binomial_definitions),
    c(1L, 3L))
  # Testing group keys
  expect_equal(
    colnames(dplyr::group_keys(binomial_definitions)),
    character(0),
    fixed = TRUE)
  ## Finished testing 'binomial_definitions'                                ####

  ## Testing 'multinomial_definitions'                                      ####
  ## Initially generated by xpectr
  xpectr::set_test_seed(42)
  # Testing class
  expect_equal(
    class(multinomial_definitions),
    c("tbl_df", "tbl", "data.frame"),
    fixed = TRUE)
  # Testing column values
  expect_equal(
    multinomial_definitions[["Dependent"]],
    "Class",
    fixed = TRUE)
  expect_equal(
    multinomial_definitions[["Fixed"]],
    "Height+Bass+Guitar+Keys+Vocals",
    fixed = TRUE)
  expect_equal(
    multinomial_definitions[["Random"]],
    "(1|Drums)",
    fixed = TRUE)
  expect_equal(
    multinomial_definitions[["kernel"]],
    "linear",
    fixed = TRUE)
  expect_equal(
    multinomial_definitions[["cost"]],
    10,
    tolerance = 1e-4)
  # Testing column names
  expect_equal(
    names(multinomial_definitions),
    c("Dependent", "Fixed", "Random", "kernel", "cost"),
    fixed = TRUE)
  # Testing column classes
  expect_equal(
    xpectr::element_classes(multinomial_definitions),
    c("character", "character", "character", "character", "numeric"),
    fixed = TRUE)
  # Testing column types
  expect_equal(
    xpectr::element_types(multinomial_definitions),
    c("character", "character", "character", "character", "double"),
    fixed = TRUE)
  # Testing dimensions
  expect_equal(
    dim(multinomial_definitions),
    c(1L, 5L))
  # Testing group keys
  expect_equal(
    colnames(dplyr::group_keys(multinomial_definitions)),
    character(0),
    fixed = TRUE)
  ## Finished testing 'multinomial_definitions'                             ####

})
