R/PEC_binom_bound.R

#' Predictive Expectation Criterion: Binomial Model
#'
#' @description \loadmathjax{} Implements the predictive expectation criterion in the binomial model using the upper bound on the \mjseqn{\ell_{1}} Wasserstein distance of order one.
#'
#' @usage PEC_binom_bound(
#'   n,
#'   alpha_1,
#'   beta_1,
#'   alpha_2,
#'   beta_2,
#'   alpha_D,
#'   beta_D,
#'   v,
#'   plot = FALSE
#' )
#'
#' @param n The candidate sample sizes. Must be a vector of positive integers arranged in ascending order.
#' @param alpha_1,beta_1 The parameters of the first beta prior. Must be non-negative values.
#' @param alpha_2,beta_2 The parameters of the second beta prior. Must be non-negative values.
#' @param alpha_D,beta_D The parameters of the design beta prior. Must be positive values.
#' @param v A constant used to determine the optimal sample size. Must be a value in \mjseqn{(0, 1)}.
#' @param plot Logical. If \code{TRUE}, a plot of the predictive expectation as a function of the sample size is produced.
#'
#' @details Non-informative improper priors can be used for the first and second beta priors, whereas the design beta prior must be proper. \cr
#'
#' If the first and second beta priors are equal, the function stops with an error.
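#'
#' In terms of the arguments, the bound computed by the function and returned in \code{e_n} is
#' \mjsdeqn{e(n) = \frac{|\beta_1 - \beta_2| (n \alpha_D + \alpha_2 (\alpha_D + \beta_D)) + |\alpha_1 - \alpha_2| (n \beta_D + \beta_2 (\alpha_D + \beta_D))}{(\alpha_D + \beta_D) (n + \alpha_1 + \beta_1) (n + \alpha_2 + \beta_2)}.}
#' The optimal threshold is \mjseqn{t = v e(n_{\max})}, where \mjseqn{n_{\max}} is the smallest candidate sample size at which \mjseqn{e(n)} is decreasing, and the optimal sample size is the smallest \mjseqn{n \geq n_{\max}} such that \mjseqn{e(n) < t}.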
#'
#' @return A list with the following components: \cr
#'
#' \item{e_n}{The predictive expectation evaluated at each sample size in \code{n}.}
#' \item{t_opt}{The optimal threshold.}
#' \item{n_opt}{The optimal sample size.}
#'
#' @author Michele Cianfriglia \email{michele.cianfriglia@@uniroma1.it}
#'
#' @references Cianfriglia, M., Padellini, T., and Brutti, P. (2023). Wasserstein consensus for Bayesian sample size determination.
#'
#' @seealso [PEC_binom()]
#'
#' @examples
#' # Parameters of the first beta prior
#' prior_1 <- c(51, 42)
#'
#' # Parameters of the second beta prior
#' prior_2 <- c(55, 29)
#'
#' # Parameters of the design beta prior
#' prior_D <- c(23, 15)
#'
#' output <- PEC_binom_bound(n = 1:1000,
#'                           alpha_1 = prior_1[1], beta_1 = prior_1[2],
#'                           alpha_2 = prior_2[1], beta_2 = prior_2[2],
#'                           alpha_D = prior_D[1], beta_D = prior_D[2],
#'                           v = 0.1)
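#'
#' # Inspect the optimal threshold and the optimal sample size
#' output$t_opt
#' output$n_opt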
#' @export
#'
#' @importFrom crayon italic
#' @importFrom graphics abline legend par
#' @importFrom mathjaxr preview_rd

PEC_binom_bound <- function(n, alpha_1, beta_1, alpha_2, beta_2, alpha_D, beta_D, v, plot = FALSE) {

  check_PEC_binom_bound(n = n, alpha_1 = alpha_1, beta_1 = beta_1, alpha_2 = alpha_2, beta_2 = beta_2, alpha_D = alpha_D, beta_D = beta_D, v = v, plot = plot)

  # The first and second beta priors must differ, otherwise the predictive expectation is identically zero.
  if (alpha_1 == alpha_2 && beta_1 == beta_2) {
    stop(paste0("The predictive expectation is zero for the selected parameters. Please choose (", italic("alpha_1"), ", ", italic("beta_1"), ") different from (", italic("alpha_2"), ", ", italic("beta_2"), ")."), call. = FALSE)
  }

  # Coefficients of the quadratic a * n^2 + b * n + c that determines the sign of the
  # derivative of the predictive expectation.
  a <- alpha_D * abs(beta_1 - beta_2) + beta_D * abs(alpha_1 - alpha_2)
  b <- 2 * (alpha_D + beta_D) * (alpha_2 * abs(beta_1 - beta_2) + beta_2 * abs(alpha_1 - alpha_2))
  c <- 0.5 * b * (alpha_1 + beta_1 + alpha_2 + beta_2) - a * (alpha_1 + beta_1) * (alpha_2 + beta_2)

  # Upper bound of the predictive expectation and its derivative with respect to n.
  e_n <- (abs(beta_1 - beta_2) * (n * alpha_D + alpha_2 * (alpha_D + beta_D)) + abs(alpha_1 - alpha_2) * (n * beta_D + beta_2 * (alpha_D + beta_D))) / ((alpha_D + beta_D) * (n + alpha_1 + beta_1) * (n + alpha_2 + beta_2))
  e_n_der <- -(a * n^2 + b * n + c) / ((alpha_D + beta_D) * (n + alpha_1 + beta_1)^2 * (n + alpha_2 + beta_2)^2)

  # Sample sizes at which the predictive expectation is decreasing.
  negative <- which(e_n_der < 0)

  if (length(negative) == 0) {
    stop(paste0("The predictive expectation never decreases over the selected sample sizes. Please consider larger values of ", italic("n"), "."), call. = FALSE)
  }

  # The optimal threshold is the fraction v of the predictive expectation evaluated at the
  # first candidate sample size at which the predictive expectation is decreasing.
  n_max <- min(negative)
  t_opt <- v * e_n[n_max]

  # The optimal sample size is the smallest candidate sample size, from n_max onwards, at
  # which the predictive expectation falls below the optimal threshold.
  n_opt <- n[which.max(e_n[n_max:length(n)] < t_opt) + n_max - 1]

  if (n_opt == 1) {
    warning("The optimal sample size is equal to one. The selected sample size is probably too small.", call. = FALSE)
  }

  if (plot) {
    oldpar <- par(pty = "s")
    plot(x = n, y = e_n, type = "l", lwd = 2, xlab = "n", ylab = "predictive expectation")
    abline(h = t_opt, lwd = 2, col = "red")
    abline(v = n_opt, lwd = 2, col = "blue")
    legend("top", c(paste("optimal threshold =", formatC(x = t_opt, format = "e", digits = 2)), paste("optimal sample size =", n_opt)), lty = c(1, 1), lwd = c(2, 2), col = c("red", "blue"), bty = "n", inset = c(0, -0.25), xpd = TRUE)
    par(oldpar)
  }

  return(list("e_n" = e_n, "t_opt" = t_opt, "n_opt" = n_opt))

}