
RMoE: LASSO Regularized Mixture of Experts Models

An R toolbox to run the algorithms and reproduce the results presented in the following paper:

Estimation and Feature Selection in Mixtures of Generalized Linear Experts Models, by Tuyen Huynh and Faicel Chamroukhi, arXiv:1907.06994, July 2019. Please cite the paper and the toolbox when using the code.
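
In these models, the tuning parameters Lambda and Gamma (used throughout the examples below) control two separate LASSO penalties: Lambda acts on the expert (regression) coefficients and Gamma on the gating-network coefficients. Schematically, in a simplified form of the paper's penalized log-likelihood (see the paper for the exact notation):

$$\max_{\theta}\; L(\theta) \;-\; \lambda \sum_{k=1}^{K} \lVert \beta_k \rVert_1 \;-\; \gamma \sum_{k=1}^{K-1} \lVert w_k \rVert_1$$

where L(θ) is the observed-data log-likelihood, β_k are the coefficients of expert k, and w_k are the gating coefficients.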

This package has three main functions: GaussRMoE, LogisticRMoE, and PoissonRMoE, which fit regularized mixture-of-experts models with Gaussian, logistic, and Poisson experts, respectively.

Installation

You can install the RMoE package from GitHub with:

# install.packages("devtools")
devtools::install_github("fchamroukhi/HDME")

To also build the vignettes, which contain usage examples, install with the command below instead:

# install.packages("devtools")
devtools::install_github("fchamroukhi/HDME", 
                         build_opts = c("--no-resave-data", "--no-manual"), 
                         build_vignettes = TRUE)

Use the following command to display vignettes:

browseVignettes("RMoE")

Usage

library(RMoE)

Gaussian Regularized Mixture-of-Experts

# Application to a simulated data set

data("gaussian")
X <- as.matrix(gaussian[, -8])
y <- gaussian$V8

K <- 2 # Number of experts
Lambda <- 5
Gamma <- 5
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

grmoe <- GaussRMoE(Xm = X, Ym = y, K = K, Lambda = Lambda, 
                   Gamma = Gamma, option = opt, verbose = TRUE)

grmoe$plot()
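
Besides plot(), you can inspect the returned object to see which coefficients were shrunk to zero. The generic str() call below is safe on any version; the accessor name param is only a guess for illustration, so check the str() output for what your installed version actually exposes:

str(grmoe, max.level = 1)
# Hypothetical accessor (verify against the str() output above):
# grmoe$param
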
# Application to a real data set

data("housing")
X <- as.matrix(housing[, -15])
y <- housing$V15

K <- 2 # Number of experts
Lambda <- 42
Gamma <- 10
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

grmoe <- GaussRMoE(Xm = X, Ym = y, K = K, Lambda = Lambda, 
                   Gamma = Gamma, option = opt, verbose = TRUE)

grmoe$plot()
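
The values Lambda = 42 and Gamma = 10 above are illustrative; in practice you would compare fits over a small grid of penalty values. A minimal sketch, assuming the fitted object exposes a model-selection criterion (the field name BIC below is hypothetical; check str() on a fitted object):

grid <- expand.grid(Lambda = c(10, 20, 42), Gamma = c(5, 10))
fits <- lapply(seq_len(nrow(grid)), function(i) {
  GaussRMoE(Xm = X, Ym = y, K = K, Lambda = grid$Lambda[i],
            Gamma = grid$Gamma[i], option = opt, verbose = FALSE)
})
# Keep the fit minimizing the criterion, e.g.:
# best <- fits[[which.min(sapply(fits, function(f) f$BIC))]]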

Logistic Regularized Mixture-of-Experts

# Application to a simulated data set

data("logistic")
X <- as.matrix(logistic[, -8])
y <- logistic$V8

K <- 2 # Number of experts
Lambda <- 3
Gamma <- 3
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

lrmoe <- LogisticRMoE(Xmat = X, Ymat = y, K = K, Lambda = Lambda,
                      Gamma = Gamma, option = opt, verbose = TRUE)

lrmoe$plot()
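
Since the experts here are logistic regressions, the response must be binary. A quick sanity check to run on your own data before fitting:

table(y)  # should show exactly two levels
stopifnot(length(unique(y)) == 2)
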
# Application to a real data set

data("ionosphere")
X <- as.matrix(ionosphere[, -35])
y <- ionosphere$V35

K <- 2 # Number of experts
Lambda <- 3
Gamma <- 3
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

lrmoe <- LogisticRMoE(Xmat = X, Ymat = y, K = K, Lambda = Lambda,
                      Gamma = Gamma, option = opt, verbose = TRUE)

lrmoe$plot()

Poisson Regularized Mixture-of-Experts

# Application to a simulated data set

data("poisson")
X <- as.matrix(poisson[, -8])
y <- poisson$V8

K <- 2 # Number of experts
Lambda <- 20
Gamma <- 10
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

prmoe <- PoissonRMoE(Xmat = X, Ymat = y, K = K, Lambda = Lambda,
                     Gamma = Gamma, option = opt, verbose = TRUE)

prmoe$plot()
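
The Poisson model expects a nonnegative integer (count) response; a quick check to run on your own data before fitting:

summary(y)
stopifnot(all(y >= 0), all(y == round(y)))
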
# Application to a real data set

data("cleveland")
X <- as.matrix(cleveland[, -15])
y <- cleveland$V15

K <- 2 # Number of experts
Lambda <- 10
Gamma <- 4
opt <- FALSE # opt = FALSE: proximal Newton; opt = TRUE: proximal Newton-type

prmoe <- PoissonRMoE(Xmat = X, Ymat = y, K = K, Lambda = Lambda,
                     Gamma = Gamma, option = opt, verbose = TRUE)

prmoe$plot()


