# inst/doc/unifiedml.R

## ----fig.width=7--------------------------------------------------------------
library(unifiedml) # this package
library(randomForest)
library(e1071)
library(glmnet)

# ------------------------------------------------------------
# REGRESSION EXAMPLES
# ------------------------------------------------------------

cat("\n=== REGRESSION EXAMPLES ===\n\n")

# Example 1: Synthetic data (numeric y -> automatic regression)

# FIX: seed the RNG so the synthetic data -- and therefore the fit,
# summary, and CV scores -- are reproducible, consistent with
# Example 2 below which already calls set.seed().
set.seed(123)
X <- matrix(rnorm(100), ncol = 4)            # 25 rows x 4 predictors
y <- 2 * X[, 1] - 1.5 * X[, 2] + rnorm(25)   # numeric -> regression

# Wrap glmnet in the unified Model interface; a numeric response
# triggers automatic task detection (regression).
mod <- Model$new(glmnet::glmnet)
mod$fit(X, y, alpha = 0, lambda = 0.1)       # ridge fit at a fixed lambda
mod$print()
mod$summary(h = 0.01)
print(head(mod$predict(X)))
(cv_scores <- cross_val_score(mod, X, y, cv = 5))  # auto-uses RMSE
mean(cv_scores)  # Average RMSE


# Example 2: Random Forest Regression
cat("\n2. Random Forest Regression - Auto-detected: Regression\n")
# randomForest regression on the Boston housing data: medv (the last
# column) is the numeric response; every other column is a predictor.
set.seed(123)
boston <- MASS::Boston
X <- boston[, setdiff(names(boston), "medv")]
y <- boston$medv
mod2 <- Model$new(randomForest::randomForest)  # No task parameter!
mod2$fit(X, y, ntree = 50)
mod2$print()
cat("\n")
mod2$summary(h = 0.01)
cv2 <- cross_val_score(mod2, X, y, cv = 5L)
print(cv2)
print(head(mod2$predict(X)))

# ------------------------------------------------------------
# CLASSIFICATION EXAMPLES
# ------------------------------------------------------------

cat("\n\n=== CLASSIFICATION EXAMPLES ===\n\n")

# Example: Iris dataset (factor y -> automatic classification)
data(iris)

# Binary classification with factor
cat("3. Binary Classification with Factor Response\n")
# Keep two species only, then drop the unused third factor level so
# the response is a clean two-level factor (-> binary classification).
iris_binary <- subset(iris, Species %in% c("setosa", "versicolor"))
X_binary <- iris_binary[, 1:4]
y_binary <- droplevels(iris_binary$Species)

mod4 <- Model$new(randomForest::randomForest)  # No task parameter!
mod4$fit(X_binary, y_binary, ntree = 50)
mod4$print()
print(head(mod4$predict(X_binary)))
print(head(mod4$predict(X_binary, type = "prob")))
cv4 <- cross_val_score(mod4, X_binary, y_binary, cv = 5L)  # Auto-uses accuracy
print(cv4)
cat("\nMean Accuracy:", mean(cv4), "\n")


# nnet binary classification: class labels need predict type = "class".
mod4 <- Model$new(nnet::nnet)
mod4$fit(X_binary, y_binary, size = 50, trace = FALSE)
print(head(mod4$predict(X_binary, type = "class")))
# BUG FIX: `cv` is an argument of cross_val_score() itself (see the
# earlier examples), not a fit parameter -- it was previously buried
# inside fit_params, so the requested fold count was silently ignored.
(cv4 <- cross_val_score(mod4, X_binary, y_binary, cv = 5L,
                        fit_params = list(size = 50L, type = "class",
                                          trace = FALSE)))
cat("\nMean Accuracy:", mean(cv4), "\n")


# SVM on the two-class problem with default kernel settings.
mod4 <- Model$new(e1071::svm)  # No task parameter!
mod4$fit(X_binary, y_binary)
mod4$print()
print(head(mod4$predict(X_binary)))
cv4 <- cross_val_score(mod4, X_binary, y_binary, cv = 5L)  # Auto-uses accuracy
print(cv4)
cat("\nMean Accuracy:", mean(cv4), "\n")


# Multi-class classification
cat("4. Multi-class Classification\n")
X_multi <- iris[, 1:4]
y_multi <- iris$Species  # factor with 3 levels -> multi-class classification

mod4 <- Model$new(randomForest::randomForest)  # No task parameter!
mod4$fit(X_multi, y_multi, ntree = 50)
mod4$print()
print(head(mod4$predict(X_multi)))
# BUG FIX: predict probabilities on the multi-class features, not on
# X_binary -- a copy-paste leftover from the binary example above.
print(head(mod4$predict(X_multi, type = "prob")))
(cv4 <- cross_val_score(mod4, X_multi, y_multi, cv = 5L))  # Auto-uses accuracy
cat("\nMean Accuracy:", mean(cv4), "\n")


# nnet multi-class classification: class labels need type = "class".
mod4 <- Model$new(nnet::nnet)
mod4$fit(X_multi, y_multi, size = 50, trace = FALSE)
print(head(mod4$predict(X_multi, type = "class")))
# BUG FIX: `cv` is an argument of cross_val_score() itself (see the
# earlier examples), not a fit parameter -- it was previously buried
# inside fit_params, so the requested fold count was silently ignored.
(cv4 <- cross_val_score(mod4, X_multi, y_multi, cv = 5L,
                        fit_params = list(size = 50L, type = "class",
                                          trace = FALSE)))
cat("\nMean Accuracy:", mean(cv4), "\n")


# SVM with a radial (RBF) kernel on the 3-class iris problem.
mod4 <- Model$new(e1071::svm)  # No task parameter!
mod4$fit(X_multi, y_multi, kernel = "radial")
mod4$print()
print(head(mod4$predict(X_multi)))
cv4 <- cross_val_score(mod4, X_multi, y_multi, cv = 5L)  # Auto-uses accuracy
print(cv4)
cat("\nMean Accuracy:", mean(cv4), "\n")


# SVM with a polynomial kernel; probability = TRUE asks e1071::svm to
# also fit the auxiliary model needed for class-probability estimates.
mod4 <- Model$new(e1071::svm)  # No task parameter!
mod4$fit(X_multi, y_multi, kernel = "polynomial", probability = TRUE)
mod4$print()
print(head(mod4$predict(X_multi)))
cv4 <- cross_val_score(mod4, X_multi, y_multi, cv = 5L)  # Auto-uses accuracy
print(cv4)
cat("\nMean Accuracy:", mean(cv4), "\n")

# Try the unifiedml package in your browser
#
# Any scripts or data that you put into this service are public.
#
# unifiedml documentation built on May 5, 2026, 9:06 a.m.