# Generated by using Rcpp::compileAttributes() -> do not edit by hand
# Generator token: 10BE3573-1514-4C36-9D1C-5A225CD40393
#' Conduct Gradient Descent on a given model
#'
#' This function applies the Gradient Descent algorithm to a specific model.
#'
#' @param model [\code{Model}]\cr
#' Pointer of the model we want to fit.
#' @param param_start [\code{numeric}]\cr
#' The initial parameter.
#' @param learning_rate [\code{numeric(1)}]\cr
#' Step size of the gradient updates.
#' @param iters [\code{integer(1)}]\cr
#' Number of iterations.
#' @param trace [\code{logical(1)}]\cr
#' Flag to specify whether to print the progress or not.
#' @param warnings [\code{logical(1)}]\cr
#' Flag to specify whether to print warnings or not.
#' @return [\code{list}] List containing the parameter, the last update, the
#'   cumulative updates, and the current MSE of the parameter.
NULL
#' Conduct Momentum on a given model
#'
#' This function applies the Momentum algorithm to a specific model.
#'
#' @param model [\code{Model}]\cr
#' Pointer of the model we want to fit.
#' @param param_start [\code{numeric}]\cr
#' The initial parameter.
#' @param learning_rate [\code{numeric(1)}]\cr
#' Step size of the gradient updates.
#' @param momentum [\code{numeric(1)}]\cr
#' Momentum term, fraction of how much of the previous gradient we add.
#' @param iters [\code{integer(1)}]\cr
#' Number of iterations.
#' @param trace [\code{logical(1)}]\cr
#' Flag to specify whether to print the progress or not.
#' @param warnings [\code{logical(1)}]\cr
#' Flag to specify whether to print warnings or not.
#' @return [\code{list}] List containing the parameter, the last update, the
#'   cumulative updates, and the current MSE of the parameter.
NULL
# Auto-generated Rcpp binding: forwards `test` to the registered native
# routine `_distributedLearning_doSomethingWithTest` and returns its result.
# The backtick form passes the native symbol object directly (registered via
# useDynLib), so do not rewrite this as a string-based .Call lookup.
# NOTE(review): the C++ side is not visible here — see src/ for the actual
# semantics of what is done with `test`.
doSomethingWithTest <- function(test) {
.Call(`_distributedLearning_doSomethingWithTest`, test)
}
# Auto-generated Rcpp binding: forwards `a` and `b` to the registered native
# routine `_distributedLearning_getTest` and returns its result. As with the
# other bindings in this generated file, the native symbol object is passed
# directly; keep the call byte-identical and regenerate via
# Rcpp::compileAttributes() instead of editing by hand.
# NOTE(review): return value/type is determined by the C++ implementation,
# which is not visible in this file.
getTest <- function(a, b) {
.Call(`_distributedLearning_getTest`, a, b)
}
# NOTE(review): the two lines below are boilerplate scraped from a package
# documentation website and are not R code; they are commented out here so the
# file parses. They should likely be removed entirely.
# Add the following code to your website.
# For more information on customizing the embed code, read Embedding Snippets.