R/runGradientDescent.R

#' runGradientDescent
#'
#' @keywords internal
#'
#' @description Runs gradient descent (or stochastic gradient descent) for
#' the specified number of epochs, adapting the step size after every epoch
#' with a bold driver heuristic
#'
#' @param L the left factor matrix to be optimized
#' @param R the right factor matrix to be optimized
#' @param lambda constant controlling the extent of regularization
#' @param epochs the number of epochs to run
#' @param gamma the initial step size
#' @param blockNr the number of the block, used only for logging
#' @param is,js row and column indices passed on to \code{gdepoch}
#' @param D the data matrix that the factorization approximates
#' @param r currently unused by this function
#'
#' @return a list containing the matrices L and R as adjusted by the
#' gradient descent
runGradientDescent <- function(L, R, lambda, epochs, gamma = 0.01, blockNr,
                               is, js, D, r) {
  # flog.debug(paste("Calculating the gradient descent for block", blockNr))
  LR <- list(L = L, R = R)

  loss_result <- loss(LR$L, LR$R, lambda, D = D)

  curLoss <- loss_result$loss
  error_matrix <- loss_result$error_matrix

  for (epoch in seq_len(epochs)) {
    # flog.debug(paste("Calculating gradient descent epoch", epoch, "of",
    #                 epochs, "for block", blockNr))
    LR <- gdepoch(LR$L, LR$R, lambda, gamma,
      is = is, js = js, D = D,
      error_matrix = error_matrix
    )

    ## bold driver step size update: halve gamma if the loss increased,
    ## otherwise grow it slightly (see the check after the loss computation)
    oldLoss <- curLoss

    loss_result <- loss(LR$L, LR$R, lambda, D = D)

    curLoss <- loss_result$loss
    error_matrix <- loss_result$error_matrix

    if (is.infinite(curLoss)) {
      flog.error("The gradient descent diverged.")
      flog.error("This indicates that the step size is too big.")
      flog.error("Try lowering gamma to fix this.")
      stop("The gradient descent diverged")
    }
    
    if (oldLoss < curLoss) {
      gamma <- gamma / 2
    } else {
      gamma <- gamma * 1.05
    }
  }
  return(LR)
}
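
A minimal usage sketch follows (not part of the package source). It assumes that D is a partially observed data block with NAs marking missing entries, that is and js hold the row and column indices of the observed entries, that L and R are random initial factor matrices of rank r, and that the internal loss and gdepoch functions skip the NA entries of D.

library(BEclear)

set.seed(42)
n <- 6; m <- 4; r <- 2
D <- matrix(runif(n * m), nrow = n)   # toy data block
D[sample(length(D), 5)] <- NA         # simulate missing entries
known <- which(!is.na(D), arr.ind = TRUE)

result <- BEclear:::runGradientDescent(
  L = matrix(rnorm(n * r), nrow = n), # random left factors (n x r)
  R = matrix(rnorm(r * m), nrow = r), # random right factors (r x m)
  lambda = 1, epochs = 50, gamma = 0.01, blockNr = 1,
  is = known[, "row"], js = known[, "col"], D = D, r = r
)

D_hat <- result$L %*% result$R        # completed estimate of the block

The product of the returned factors then serves as a completed version of the block, from which the masked entries can be read off.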
