#'@title Ordinary Least Squares regression
#'@description Estimates the coefficients of a linear model using Ordinary Least Squares (OLS) regression.
#'@usage ols(data,y,x,alpha=0.025,verbose=TRUE)
#'@param data dataset used to estimate the coefficients
#'@param y name of the dependent variable
#'@param x name or a vector of names of the independent variables
#'@param alpha per-tail significance level for the confidence intervals; the default 0.025 gives 95% intervals
#'@param verbose logical; if TRUE, the full coefficient table is printed
#'@return coefficients of the linear model, or a table with the coefficients, standard errors, t-values, p-values and confidence intervals
#'@export ols
#'@examples
#'df = data.frame("hours"=c(1, 2, 4, 5, 5, 6, 6, 7, 8, 10, 11, 11, 12, 12, 14),
#'"score"=c(64, 66, 76, 73, 74, 81, 83, 82, 80, 88, 84, 82, 91, 93, 89))
#'ols(df,"score","hours")
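#'# For reference, the estimates can be compared with stats::lm
#'# (illustrative check, not part of the original example):
#'coef(stats::lm(score ~ hours, data = df))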
ols<-function(data,y,x,alpha=0.025,verbose=TRUE){
n<-nrow(data)
r<-length(x)
df<-n-r-1
Y<-as.matrix(data[y])
X<-as.matrix(subset(data,select=x))
Z<-cbind(matrix(1,nrow=n,ncol=1),X)
colnames(Z)[1]<-"intercept"
y_mean<-mean(Y)
RYY<-crossprod(Y-y_mean,Y-y_mean) # total sum of squares of the dependent variable
ZZ<-crossprod(Z,Z)
B<-round(solve(ZZ)%*%crossprod(Z,Y),4) # OLS estimate: (Z'Z)^-1 Z'Y
#The OLS estimates are unbiased; their standard errors are obtained from the residual variance computed below
#residual sum of squares
RSS<-crossprod((Y-Z%*%B),(Y-Z%*%B))
SSreg<-RYY-RSS
#proportion of the variance of Y explained by X
R_square<-1-(RSS/(RYY))
#similar to R^2, used to compare models with different numbers of predictors
R_ajust<-1-((RSS/df)/(RYY/(n-1)))
variance<-as.numeric(RSS/df)
#standard errors of the coefficients
sd_dv<-round(sqrt(variance*diag(solve(ZZ))),4)
F_stat<-round((SSreg/r)/(RSS/df),2) # F-test of H0: all slope coefficients are 0; a large F suggests at least one coefficient is non-zero, i.e. a relation between X and Y
prob_F=stats::pf(F_stat,r,df,lower.tail = F) # if prob<0.05 reject H0
t<-round((B/sd_dv),4)
prob_t<-round(stats::pt(abs(t),df,lower.tail = F)*2,6)
#confidence interval
tt<-matrix(c(stats::qt(alpha,df),stats::qt(1-alpha,df)),ncol = 1)
margin<-apply(t(sd_dv),2,function(x)x*tt)# margin of error
CI<-round(apply(margin,1,function(x)B+x),4)# confidence interval (B0 B1 B2...)
coeff_table<-cbind(B,sd_dv,t,prob_t,CI)
colnames(coeff_table)<-c("coefficients","standard error","t value",
"p(>|t|)",paste("[",alpha),paste(1-alpha,"]"))
if(verbose){
cat("\n",
"Model: OLS R-squared: ",round(R_square,2),"\n",
"Df: ",df," R.S-ajusted: ",round(R_ajust,2),"\n",
"F-statistic: ",F_stat," prob(F-statistic):",prob_F,"\n",
"Resudual standard error:", round(sqrt(variance),2),"\n",
"=====================================================================",
"\n"
)
print(coeff_table)
}
return(B)
}
#'@title k_fold_cross
#'@description k_fold_cross splits the dataset into k parts, and uses k-1 parts to train the model and the remaining part to test the model.
#'@usage k_fold_cross(data,k)
#'@export k_fold_cross
#'@import dplyr
#'@return a tibble with two list-columns, train and test; each element contains the row indices and the corresponding subset of the data
#'@param data dataset which will be used for K-Fold Cross Validation
#'@param k integer, number of folds
#'@examples
#'df = data.frame("hours"=c(1, 2, 4, 5, 5, 6, 6, 7, 8, 10, 11, 11, 12, 12, 14),
#'"score"=c(64, 66, 76, 73, 74, 81, 83, 82, 80, 88, 84, 82, 91, 93, 89))
#'k_fold_cross(df,k=2)
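#'# The returned tibble can be inspected fold by fold
#'# (illustrative sketch of the structure):
#'folds <- k_fold_cross(df, k = 2)
#'folds$train[[1]]$number  # row indices used for training in fold 1
#'folds$train[[1]]$data    # the corresponding training data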
k_fold_cross<-function(data,k){
n<-nrow(data)
aux<-1:n # row indices used to assign observations to the folds
train<-list()
test<-list()
for(i in 1:k){
t<-sample(aux,size=round(n/k))
l<-aux[!aux %in% t]
test_set<-data[t,]
train_set<-data[l,]
train[[i]]<-list(number=l,data=train_set)#sublist
test[[i]]<-list(number=t,data=test_set)
}
return(dplyr::tibble(train=train,test=test))
}
# table$train[[i]]$number holds the row indices and table$train[[i]]$data the corresponding data frame (likewise for table$test)
#'@title K-Fold Cross Validation for OLS
#'@description ols_KCV performs K-Fold Cross Validation for ordinary least squares regression.
#'@usage ols_KCV(data,k,y,x)
#'@param data full dataset which will be used for KCV
#'@param k integer, number of folds into which the dataset is split
#'@param y dependent variable
#'@param x independent variables
#'@return the root mean squared error (RMSE) obtained by K-Fold Cross Validation
#'@export ols_KCV
#'@examples
#'df<-mtcars
#'ols_KCV(df,5,"hp",c("mpg","qsec","disp"))
ols_KCV<-function(data,k,y,x){
table<-k_fold_cross(data,k)
MSE<-c()
for(i in 1:k){
train_data<-table$train[[i]][[2]]
test_data<-table$test[[i]][[2]]
test_value<-as.matrix(test_data[[y]]) # Yi
variable_value<-as.matrix(cbind(1,subset(test_data,select=x)))
coeff<-ols(train_data,y,x,verbose=FALSE)# fit on the training fold and keep the coefficients
y_pred<-variable_value%*%coeff #Z%*%B
RSS<-crossprod(test_value-y_pred,test_value-y_pred)
MSE<-c(MSE,RSS/nrow(test_data))# mean squared error on the test fold
}
return(sqrt(mean(MSE)))
}
#'@title Ridge regression
#'@description ridge function estimates the coefficients for a linear model using Ridge regression.
#'@return a matrix with the coefficients for each lambda
#'@usage ridge(data,y,x,lambda)
#'@param data name of the dataset
#'@param y name of the dependent variable
#'@param x name or a vector of names of the independent variables
#'@param lambda a numeric value or a numeric vector of ridge penalty parameters applied to the squared coefficients
#'@export ridge
#'@examples
#'ridge(mtcars,"hp",c("mpg","qsec","disp"),c(0.01,0.1))
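#'# Larger penalties shrink the coefficients towards zero
#'# (illustrative sketch):
#'ridge(mtcars, "hp", c("mpg", "qsec", "disp"), c(0, 1, 10, 100))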
ridge<-function(data,y,x,lambda){
n<-nrow(data)
r<-length(x)
df<-n-r-1
Y<-as.matrix(data[y])
RYY<-stats::var(Y)
X<-as.matrix(subset(data,select=x))
std<-apply(X,2,stats::sd)
Z<-cbind(matrix(1,nrow=n,ncol=1),X)
colnames(Z)[1]<-"intercept"
ZZ<-crossprod(Z,Z)
B<-sapply(lambda,function(x)solve(ZZ+x*diag(r+1))%*%crossprod(Z,Y))# closed-form ridge solution (Z'Z + lambda*I)^-1 Z'Y for each lambda
colnames(B)<-lambda
rownames(B)<-c("intercept",x)
return(B)
}
#'@export l_CV
#'@title K-Fold Cross validation for L1/L2 regression
#'@description the function performs K-Fold Cross Validation for Ridge/Lasso regression
#'to help choose the lambda value that minimises the prediction error
#'@usage l_CV(data,y,x,lambda,k,mode=2,binary=FALSE,step=1000,bound=0.5,fista=TRUE,tol=10^-7)
#'@return the lambda values that minimize the MSE
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda a number or a vector of lambda-value to be evaluated in the regression
#'@param mode 1: ridge regression; 2: lasso regression
#'@param k integer, number of folds into which the dataset is split
#'@param binary logical, if TRUE, the dependent variable is binary
#'@param step maximum number of steps
#'@param fista logical, if TRUE, the FISTA algorithm is used
#'@param tol tolerance for convergence, it is 10^-7 by default
#'@param bound threshold for binary dependent variable
#'@examples
#'l_CV(mtcars,"hp",c("mpg","qsec","disp"),c(0.01,0.1),k=5,mode=2)
l_CV<-function(data,y,x,lambda,k,mode=2,binary=FALSE,step=1000,bound=0.5,fista=TRUE,tol=10^-7){
table<-k_fold_cross(data,k)
MSE<-matrix(0,ncol=length(lambda),nrow=k)
for(i in 1:k){
train_data<-table$train[[i]][[2]]
test_data<-table$test[[i]][[2]]
test_value<-as.matrix(as.data.frame(test_data)[[y]]) # Yi
variable_value<-as.matrix(cbind(1,subset(test_data,select=x)))
if(mode==1){
coeff<-ridge(train_data,y,x,lambda)# Each column has coefficients for lambda=i
}
if(mode==2){
if(binary){
if(fista){
coeff<-lasso_fista(train_data,y,x,lambda,max_step=step,type="Binomial",image=FALSE)[[1]]
}else{coeff<-lasso_ista(train_data,y,x,lambda,max_step=step,type="Binomial",image=FALSE)[[1]]}
}
else{
if(fista){
coeff<-lasso_fista(train_data,y,x,lambda,max_step=step,image=FALSE)[[1]]
}else{coeff<-lasso_ista(train_data,y,x,lambda,max_step=step,image=FALSE)[[1]]}
}
}
if(binary){
#train_mean <- colMeans(train_data[, x])
#train_sd <- apply(train_data[, x], 2, sd)
#scaled_test <- scale(test_data[, x], center = train_mean, scale = train_sd)
variable_value <- as.matrix(cbind(1, test_data[, x]))
pre <- apply(coeff, 2, function(coef) 1 / (1 + exp(-variable_value %*% coef)))
y_pred <- ifelse(pre > bound, 1, 0)
## calculate 1- accuracy
PE <- apply(y_pred, 2, function(pred) sum(pred != test_data[[y]]) / nrow(test_data))
}
else{
y_pred<-apply(coeff,2,function(x)variable_value%*%x)
#Each column is the predicted value for lambda i
PE<-apply(y_pred,2,function(x)sqrt(crossprod(test_value-x,test_value-x)/nrow(y_pred)))# RMSE in each fold
}
MSE[i,]<-(PE)
}
means<-colMeans(MSE)
return(lambda[which(means==min(means))])
}
s_threshold<-function(lambda,beta){
return(sign(beta)*max((abs(beta)-lambda),0))
# soft-thresholding operator:
# if beta >  lambda: beta - lambda
# if beta < -lambda: beta + lambda
# if |beta| <= lambda: 0
}
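# Illustrative sketch of the soft-thresholding operator with lambda = 0.5:
# s_threshold(0.5, 1.2)   # 0.7
# s_threshold(0.5, -0.3)  # 0
# s_threshold(0.5, -0.9)  # -0.4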
#'lasso_ista
#'@export lasso_ista
#'@title Lasso regression with fixed step size using the ISTA algorithm
#'@description the function carries out Lasso regression with a fixed step size using the ISTA algorithm.
#'@usage lasso_ista(data,y,x,lambda,max_step=10000,type="Gaussian",image=TRUE,tol=10^-7,ini=0.5)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda a vector of lambda-value to be evaluated in the regression
#'@param max_step maximum number of steps
#'@param type type of response variable; by default it is 'Gaussian' for a continuous response and can be set to 'Binomial' for a binary response
#'@param image logical, if TRUE, the evolution of the errors for each lambda value will be plotted
#'@param tol tolerance for convergence, it is 10^-7 by default
#'@param ini initial value for the coefficients
#'@examples
#'library("glmnet")
#'data("QuickStartExample")
#'test<-as.data.frame(cbind(QuickStartExample$y,QuickStartExample$x))
#'lasso_ista(test,"V1",colnames(test)[2:21],lambda=0.1,image=TRUE,max_step=1000)
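#'# The first element of the returned list is the coefficient matrix,
#'# one column per lambda value (illustrative sketch):
#'fit <- lasso_ista(test, "V1", colnames(test)[2:21], lambda = c(0.05, 0.1),
#'image = FALSE, max_step = 1000)
#'fit[[1]]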
lasso_ista<-function(data,y,x,lambda,max_step=10000,type="Gaussian",image=TRUE,tol=10^-7,ini=0.5){
error<-c()
cont<-c()
lamb<-matrix(nrow=length(x)+1,ncol=length(lambda))
race<-1
Y<-as.matrix(data[y])
X<-as.matrix(subset(data,select=x))
D<-cbind(intercept=1,X)
DD<- crossprod(D,D)
h<-matrix(ini,nrow=ncol(D),ncol=1)# works when the values are not large;
# for the mtcars dataset it converges, but slowly
L<-max(eigen(DD)$values) #Lipschitz constant
race<-1/(L)
if(type=="Gaussian"){
for(i in 1:length(lambda)){
alpha<-lambda[i]
for(k in 1:max_step){
h_old <- h
h <- h - race*t(D) %*% (D%*%h - Y)
h <- as.matrix(apply(h,1,function(x)s_threshold(alpha*race,x)),nrow=ncol(D))
if (k == 1 || k %% (max_step%/%10) == 0 || k == max_step){
residuals <- Y - D %*% h
mse_k <- mean(residuals^2)
error<-c(error,mse_k)
cont<-c(cont,k)
lamb[,i]<-h # keep the latest coefficients in case of early stopping
if(mse_k < tol){
break
}
}
lamb[,i]<-h
}
}
}
if(type=="Binomial"){
for(i in 1:length(lambda)){
alpha<-lambda[i]
for(k in 1:max_step){
h_old <- h
P<-1/(1+exp(-(D%*%h)))## new
h <- h-race*((1/nrow(D))*(t(D)%*%(P-Y)))## modified
h <- as.matrix(apply(h,1,function(x)s_threshold(alpha*race,x)),nrow=ncol(D))
if (k == 1 || k %% (max_step%/%10) == 0 || k == max_step){
logistic_loss <- -(t(Y)%*%log(pmax(P,1e-15))+t(1-Y)%*%log(pmax(1-P,1e-15)))+alpha*sum(abs(h))
current_error <- logistic_loss/(nrow(D))
error <- c(error, current_error)
cont<-c(cont,k)
lamb[,i]<-h
if(current_error < tol){
break
}
}
}
}
}
if(image){
plot(x=cont,y=error,xlab="Iteration",ylab="MSE",main="MSE evolution (ISTA)")}
colnames(lamb)<-lambda
rownames(lamb)<-c("intercept",x)
return(list(lamb,error,cont))
}
#'lasso_fista
#'@export lasso_fista
#'@title Lasso regression with fixed step size using the FISTA algorithm
#'@description the function carries out Lasso regression with a fixed step size using the FISTA algorithm.
#'@usage lasso_fista(data,y,x,lambda,max_step=10000,type="Gaussian",image=TRUE,ini=0.5,tol=10^-7)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda a vector of lambda-value to be evaluated in the regression
#'@param max_step maximum number of steps
#'@param type type of response variable; by default it is 'Gaussian' for a continuous response and can be set to 'Binomial' for a binary response
#'@param ini initial value for the coefficients
#'@param tol tolerance for convergence, it is 10^-7 by default
#'@param image logical, if TRUE, the evolution of the errors for each lambda value will be plotted
#'@examples
#'library("glmnet")
#'data("QuickStartExample")
#'test<-as.data.frame(cbind(QuickStartExample$y,QuickStartExample$x))
#'lasso_fista(test,"V1",colnames(test)[2:21],lambda=0.1,image=TRUE,max_step=1000)
lasso_fista <- function(data, y, x, lambda, max_step = 10000, type = "Gaussian",image=TRUE,ini=0.5,tol=10^-7) {
error <- c()
cont <- c()
lamb <- matrix(nrow = length(x) + 1, ncol = length(lambda))
race <- 1
Y <- as.matrix(data[y])
X <- as.matrix(subset(data, select = x))
D <- cbind(intercept = 1, X)
DD <- crossprod(D, D)
h <- matrix(ini, nrow = ncol(D), ncol = 1) # initial coefficient values
L <- max(eigen(DD)$values) # Lipschitz constant
race <- 1 / (L)
if (type == "Gaussian") {
for (i in 1:length(lambda)) {
alpha <- lambda[i]
z <- h# auxiliary FISTA variable
t_old <- 1
for (k in 1:max_step) {
h_old <- h
h <- z - race * t(D) %*% (D %*% z - Y)
h <- as.matrix(apply(h, 1, function(x) s_threshold(alpha * race, x)), nrow = ncol(D))
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h + ((t_old - 1) / t_new) * (h - h_old)
t_old <- t_new
# error
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
residuals <- Y - D %*% h
mse_k <- mean(residuals^2)
error<-c(error,mse_k)
cont<-c(cont,k)
lamb[, i] <- h # keep the latest coefficients in case of early stopping
if(mse_k < tol){
break
}
}
lamb[, i] <- h
}
}
}
if (type == "Binomial") {
for (i in 1:length(lambda)) {
alpha <- lambda[i]
h_old <- h
z <- h
t_old <- 1
for (k in 1:max_step) {
h_old <- h
P <- 1 / (1 + exp(-(D %*% z))) # logistic probability
h <- z - race * ((1 / nrow(D)) * (t(D) %*% (P - Y)))
h <- as.matrix(apply(h, 1, function(x) s_threshold(alpha * race, x)), nrow = ncol(D))
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h + ((t_old - 1) / t_new) * (h - h_old)
t_old <- t_new
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
logistic_loss <- -(t(Y) %*% log(pmax(P,1e-15)) + t(1 - Y) %*% log(pmax(1 - P,1e-15))) + alpha * sum(abs(h))
current_error <- logistic_loss / nrow(D)
error <- c(error, current_error)
lamb[, i] <- h
cont<-c(cont,k)
if(current_error < tol){
break
}
}
}
}
}
if(image){
plot(x = cont, error, xlab = "Iteration", ylab = "MSE", main = "MSE Evolution (FISTA)")}
colnames(lamb) <- lambda
rownames(lamb) <- c("intercept", x)
return(list(lamb,error,cont))
}
#'softmax
#'@export softmax
#'@title Softmax function for multinomial response variable
#'@description the function calculates the softmax function for the multinomial response variable
#'@usage softmax(num)
#'@return A numeric matrix of the same shape as num in which every entry lies between 0 and 1 and each row sums to 1.
#'@param num A numeric matrix (one row per observation)
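#'@examples
#'# Each row of the result is a probability distribution that sums to 1
#'# (illustrative sketch):
#'softmax(matrix(c(1, 2, 3, 1, 1, 1), nrow = 2, byrow = TRUE))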
softmax <- function(num) {
exp_X <- exp(num - apply(num, 1, max))
return(exp_X / rowSums(exp_X))
}
#'lasso_multi
#'@export lasso_multi
#'@title Lasso logistic regression for multinomial response variable with fixed step
#'@description the function performs L1-regularised classification for a multinomial response variable using the ISTA/FISTA algorithm
#'@usage lasso_multi(data,y,x,lambda,max_step=10000,image=FALSE,fista=TRUE)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda a number or a vector of lambda-value to be evaluated in the regression
#'@param max_step maximum number of steps
#'@param image logical, if TRUE, plots the evolution of the errors for each lambda value
#'@param fista fista=TRUE: use the FISTA algorithm for the multiclass logistic regression; fista=FALSE: use the ISTA algorithm
#'@examples
#'library(glmnet)
#'data("MultinomialExample")
#'x<-MultinomialExample$x
#'y<-MultinomialExample$y
#'mult<-as.data.frame(cbind(x,y))
#'lasso_multi(mult,y="y",x=colnames(mult)[-31],max_step = 1000,lambda=0.01,image=TRUE,fista=TRUE)
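#'# The first element of the returned list contains one coefficient matrix
#'# per lambda value (illustrative sketch):
#'fit <- lasso_multi(mult, y = "y", x = colnames(mult)[-31], max_step = 1000,
#'lambda = 0.01, image = FALSE, fista = TRUE)
#'fit[[1]][[1]]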
lasso_multi <- function(data, y, x, lambda, max_step = 10000, image = FALSE,fista=TRUE) {
error <- vector("list", length = length(lambda))
count <- vector("list", length = length(lambda))
lamb <- matrix(nrow = length(x) + 1, ncol = length(lambda))
l <- list()
Y <- as.matrix(stats::model.matrix(~ factor(data[[y]]) - 1)) # One-Hot encoding
colnames(Y) <- levels(factor(data[[y]])) # label the classes (columns follow the factor-level order)
# Predictor matrix with intercept
X <- as.matrix(subset(data, select = x))
n <- nrow(data)
D <- cbind(intercept = 1, X) # intercept + predictors
DD <- crossprod(D, D) # Gram matrix
L <- max(eigen(DD)$values) # Calculate Lipschitz constant
learning_rate <- 1 / L # Step size
if(fista){
for (i in 1:length(lambda)) {
alpha <- lambda[i]
t_old <- 1
h <- matrix(0.5, nrow = ncol(D), ncol = ncol(Y)) # initial coefficients
z <- h # Auxiliary variable for FISTA
h_old <- h
for (k in 1:max_step) {
P <- softmax(D %*% z) # estimated class probabilities at the extrapolated point z
# gradient of the multinomial log-likelihood
grad <- (1 / n) * (t(D) %*% (P - Y))
h <- z - learning_rate * grad # proximal gradient step from z
# Soft-thresholding
h <- apply(h, 2, function(col) as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * learning_rate, x))))
# FISTA
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h + ((t_old - 1) / t_new) * (h - h_old)
t_old <- t_new
# Loss function with lasso penalty
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
logistic_loss <- -(1 / n) * sum(Y * log(pmax(P,1e-15))) + alpha * sum(abs(z))
current_error <- logistic_loss
error[[i]] <- c(error[[i]], current_error)
count[[i]] <- c(count[[i]], k)
}
# convergence check (optional)
# if (norm(h - h_old, "F") / max(1, norm(h_old, "F")) < tol) {
# break
# }
h_old <- h
}
rownames(h) <- c("intercept", x)
l[[i]] <- h
}
}else{
for (i in 1:length(lambda)) {
alpha <- lambda[i]
h <- matrix(0.5, nrow = ncol(D), ncol = ncol(Y)) # initial coefficients
h_old <- h
for (k in 1:max_step) {
P <- softmax(D %*% h) # estimated class probabilities
# gradient of the multinomial log-likelihood
grad <- (1 / n) * (t(D) %*% (P - Y))
h <- h - learning_rate * grad # gradient step
# Soft-thresholding
h <- apply(h, 2, function(col) as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * learning_rate, x))))
# Loss function with lasso penalty
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
logistic_loss <- -(1 / n) * sum(Y*log(pmax(P,1e-15))) + alpha * sum(abs(h))
#logistic_loss <- -(1 / n) * sum(crossprod(Y,log(pmax(P,1e-15)))) + alpha * sum(abs(h))
current_error <- logistic_loss
error[[i]] <- c(error[[i]], current_error)
count[[i]] <- c(count[[i]], k)
}
h_old <- h
# convergence check (optional)
# if (norm(h - h_old, "F") / max(1, norm(h_old, "F")) < tol) {
# break
# }
}
rownames(h) <- c("intercept", x)
l[[i]] <- h
}
}
# Plot loss if requested
if (image == TRUE) {
plot(NULL, xlim = c(1, max(sapply(count, max))), ylim = range(unlist(error)),
xlab = "Iteration", ylab = "Cross-Entropy Loss", main = "Loss Evolution (FISTA with LASSO)")
colors <- grDevices::rainbow(length(lambda))
for (i in 1:length(lambda)) {
graphics::lines(x = count[[i]], y = error[[i]], col = colors[i], lwd = 2)
}
graphics::legend("topright", legend = paste("Lambda =", lambda), col = colors, lwd = 2)
}
return(list(l,error,count))
}
#################################################################### lasso backtracking line search #################################################
# Objective used by the backtracking line search: 0.5*||Y - D h||^2 + lambda*||h||_1
g_func <- function(h, Y, D, lambda = 0) {
residual <- Y - D %*% h
val_smooth <- 0.5 * norm((residual),"2")^2
val_penalty <- lambda * sum(abs(h))
val_smooth + val_penalty
}
#'lasso_ista_back
#'@export lasso_ista_back
#'@title Lasso regression with backtracking line search using the ISTA algorithm
#'@description the function carries out Lasso regression using backtracking line search and the ISTA algorithm.
#'@usage lasso_ista_back(data,y,x,lambda,max_step=10000,tol=10^-7,
#'type="Gaussian",ini=0.5,image=TRUE)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda penalty parameter (a single numeric value) used in the regression
#'@param max_step maximum number of steps
#'@param ini initial value for the coefficients, default is 0.5
#'@param image logical, if TRUE, plots the evolution of the errors for each lambda value
#'@param tol tolerance for convergence, it is 10^-7 by default
#'@param type type of response variable; by default it is 'Gaussian' for a continuous response and can be set to 'Binomial' for a binary response
#'@examples
#'library("glmnet")
#'data("QuickStartExample")
#'test<-as.data.frame(cbind(QuickStartExample$y,QuickStartExample$x))
#'lasso_ista_back(test,"V1",colnames(test)[2:21],lambda=0.1,image=TRUE,type='Gaussian',max_step=100)
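#'# The first element of the returned list holds the estimated coefficients
#'# (illustrative sketch):
#'fit <- lasso_ista_back(test, "V1", colnames(test)[2:21], lambda = 0.1,
#'image = FALSE, max_step = 100)
#'fit[[1]]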
lasso_ista_back<-function(data,y,x,lambda,max_step=10000,tol=10^-7,type="Gaussian",ini=0.5,image=TRUE){
error<-c()
cont<-c()
Y<-as.matrix(data[y])
X<-as.matrix(subset(data,select=x))
lamb<-matrix(nrow=length(x)+1,ncol=length(lambda))
beta<-0.8
D<-cbind(intercept=1,X)
DD<-crossprod(D,D)
L <- max(eigen(DD)$values) # Calculate Lipschitz constant
h<-matrix(ini,nrow=ncol(D),ncol=1)
# h<-lsfit(data[x], data[y])$coefficients
if(type=="Gaussian"){
for(k in 1:max_step){
race<-4 / L
h_old <- h
grad<- t(D)%*%(D%*%h_old - Y)
h_new <- h_old - race*grad # provisional
h_new <- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,(h_new-h_old))[1]+ (1/(2*race)) * norm(h_new - h_old, "2")^2
#################### Backtracking line search ####################
while(left>right){
race<-beta*race
h_new<- h_old-race*grad
h_new<- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * norm(h_new - h_old, "2")^2
}
h<-h_new
if (k == 1 || k %% (max_step%/%10) == 0 || k == max_step){
residuals <- Y - D %*% h
mse_k <- mean(residuals^2)
error<-c(error,mse_k)
cont<-c(cont,k)
}
if(mse_k< tol){
break
}
}
}
if(type=="Binomial"){
for(k in 1:max_step){
race<-4 / L
h_old <- h
P<-1/(1+exp(-(D%*%h_old)))
grad<-((1/nrow(D))*(t(D)%*%(P-Y)))
h_new<- h_old-race*grad
h_new<- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * norm(h_new - h_old, "2")^2
while (left > right){
race<-race*beta
h_new<-h_old-race*grad
h_new<-as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * norm(h_new - h_old, "2")^2
}
h<-h_new
if(k==1||k%%(max_step%/%10)==0 || k==max_step){
logistic_loss <- -(t(Y)%*%log(pmax(P,1e-15))+t(1-Y)%*%log(pmax(1-P,1e-15)))+lambda*sum(abs(h))
current_error <- logistic_loss/(nrow(D))
error <- c(error, current_error)
cont<-c(cont,k)
}
if(current_error<tol){
break
}
}
}
if(image){
plot(x=cont,y=error,xlab="Iteration",ylab="MSE",main="MSE evolution")
}
return(list(h,error,cont))
}
#'lasso_fista_back
#'@export lasso_fista_back
#'@title Lasso regression with backtracking line search using the FISTA algorithm
#'@description the function carries out Lasso regression using backtracking line search and the FISTA algorithm.
#'@usage lasso_fista_back(data,y,x,lambda,max_step=10000,tol=10^-7,
#'type="Gaussian",ini=0.5,image=TRUE)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda penalty parameter (a single numeric value) used in the regression
#'@param max_step maximum number of steps
#'@param ini initial value for the coefficients, default is 0.5
#'@param image logical, if TRUE, plots the evolution of the errors for each lambda value
#'@param tol tolerance for convergence, it is 10^-7 by default
#'@param type type of response variable; by default it is 'Gaussian' for a continuous response and can be set to 'Binomial' for a binary response
#'@examples
#'library("glmnet")
#'data("QuickStartExample")
#'test<-as.data.frame(cbind(QuickStartExample$y,QuickStartExample$x))
#'lasso_fista_back(test,"V1",colnames(test)[2:21],lambda=0.1,image=TRUE,type='Gaussian',max_step=1000)
lasso_fista_back<-function(data,y,x,lambda,max_step=10000,tol=10^-7,type="Gaussian",ini=0.5,image=TRUE){
error<-c()
cont<-c()
Y<-as.matrix(data[y])
X<-as.matrix(subset(data,select=x))
lamb<-matrix(nrow=length(x)+1,ncol=length(lambda))
beta<-0.8
D<-cbind(intercept=1,X)
DD<-crossprod(D,D)
L <- max(eigen(DD)$values)
h<-matrix(ini,nrow=ncol(D),ncol=1)
# h<-lsfit(data[x], data[y])$coefficients
z<-h
t_old<-1
if(type=="Gaussian"){
for(k in 1:max_step){
race<-4 / L
h_old <- h
grad<- t(D)%*%(D%*%z - Y)
h_new <- z - race*grad # provisional
h_new <- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,(h_new-h_old))[1]+ (1/(2*race)) * sum((h_new-h_old)^2)
#################### Backtracking line search ####################
while(left>right){
race<-beta*race # reduce step size
h_new<- z-race*grad
h_new<- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * sum((h_new-h_old)^2)
}
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h_new + ((t_old - 1) / t_new) * (h_new - h_old)
t_old <- t_new
h<-h_new
if (k == 1 || k %% (max_step%/%10) == 0 || k == max_step){
residuals <- Y - D %*% h
mse_k <- mean(residuals^2)
error<-c(error,mse_k)
cont<-c(cont,k)
}
if(mse_k< tol){
break
}
}
}
if(type=="Binomial"){
for(k in 1:max_step){
race<-4 / L
h_old <- h
P<-1/(1+exp(-(D%*%z)))
grad<-((1/nrow(D))*(t(D)%*%(P-Y)))
h_new <- z-race*grad
h_new <- as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * sum((h_new-h_old)^2)
while (left > right){
race<-beta*race
h_new <- z-race*grad
h_new<-as.matrix(apply(h_new,1,function(x)s_threshold(lambda*race,x)),nrow=ncol(D))
left<-g_func(h_new, Y, D, lambda)
right<-g_func(h_old, Y, D, lambda) + crossprod(grad,h_new-h_old)[1]+ (1/(2*race)) * sum((h_new-h_old)^2)
}
h<-h_new
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h + ((t_old - 1) / t_new) * (h - h_old)
t_old <- t_new
if(k==1||k%%(max_step%/%10)==0 || k==max_step){
logistic_loss <- -(t(Y)%*%log(pmax(P,1e-15))+t(1-Y)%*%log(pmax(1-P,1e-15)))+lambda*sum(abs(h))
current_error <- logistic_loss/(nrow(D))
error <- c(error, current_error)
cont<-c(cont,k)
}
if(current_error<tol){
break
}
}
}
if(image){
plot(x=cont,y=error,xlab="Iteration",ylab="MSE",main="MSE evolution")
}
return(list(h,error,cont))
}
#'lasso_multi_back
#'@export lasso_multi_back
#'@title Lasso regression with backtracking line search for a multinomial response variable
#'@description the function carries out Lasso regression for a multinomial response using backtracking line search and the FISTA/ISTA algorithm.
#'@usage lasso_multi_back(data,y,x,lambda,max_step=10000,image=FALSE,fista=TRUE,tol=10^-7,ini=0)
#' @return A list containing:
#' \itemize{
#' \item{\code{coefficients}: A matrix where each column represents the estimated regression coefficients for a different lambda value.}
#' \item{\code{error_evolution}: A numeric vector tracking the error at certain step.}
#' \item{\code{num_steps}: An integer vector indicating the number of steps in which errors are calculated.}
#' }
#'@param data name of the dataset
#'@param y name of the dependent variables
#'@param x name of the independent variable
#'@param lambda a vector of lambda-value to be evaluated in the regression
#'@param max_step maximum number of steps
#'@param image logical, if TRUE, plots the evolution of the errors for each lambda value
#'@param fista fista=TRUE: use the FISTA algorithm for the multiclass logistic regression; fista=FALSE: use the ISTA algorithm
#'@param tol tolerance for the convergence
#'@param ini initial value for the coefficients, default is 0
#'@examples
#'library(glmnet)
#'data("MultinomialExample")
#'x<-MultinomialExample$x
#'y<-MultinomialExample$y
#'mult<-as.data.frame(cbind(x,y))
#'lasso_multi_back(mult,y="y",x=colnames(mult)[-31],max_step = 1000,lambda=0.01,image=TRUE,fista=TRUE,ini=0)
lasso_multi_back <- function(data, y, x, lambda, max_step = 10000, image = FALSE,fista=TRUE,tol=10^-7,ini=0) {
error <- vector("list", length = length(lambda))
count <- vector("list", length = length(lambda))
lamb <- matrix(nrow = length(x) + 1, ncol = length(lambda))
l <- list()
beta<-0.8
Y <- as.matrix(stats::model.matrix(~ factor(data[[y]]) - 1)) # One-Hot encoding
colnames(Y) <- levels(factor(data[[y]])) # label the classes (columns follow the factor-level order)
# Predictor matrix with intercept
X <- as.matrix(subset(data, select = x))
n <- nrow(data)
D <- cbind(intercept = 1, X) # intercept + predictors
DD <- crossprod(D, D) # Gram matrix
if(fista){
for (i in 1:length(lambda)) {
L <- max(eigen(DD)$values) # Calculate Lipschitz constant
alpha <- lambda[i]
t_old <- 1
h <- matrix(ini, nrow = ncol(D), ncol = ncol(Y)) # initial coefficients
z <- h # Auxiliary variable for FISTA
for (k in 1:max_step) {
race <- 1 / L
h_old <- h
P_z <- softmax(D %*% z)
grad <- (1 / n) * t(D) %*% (P_z - Y)
h_new <- z - race * grad
h_new <- apply(h_new, 2, function(col) {
as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * race, x)))
})
left <- g_func(h_new, Y, D, alpha)
right <- g_func(h_old, Y, D, alpha) + sum(grad * (h_new - h_old)) + (1/(2*race)) * sum((h_new - h_old)^2)
while (left > right) {
race <- race * beta
h_new <- z - race * grad
h_new <- apply(h_new, 2, function(col) {
as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * race, x)))
})
left <- g_func(h_new, Y, D, alpha)
right <- g_func(h_old, Y, D, alpha) + sum(grad * (h_new - h_old)) + (1/(2*race)) * sum((h_new - h_old)^2)
}
t_new <- (1 + sqrt(1 + 4 * t_old^2)) / 2
z <- h_new + ((t_old - 1) / t_new) * (h_new - h_old)
t_old <- t_new
h <- h_new
#P_current <- softmax(D %*% h)
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
logistic_loss <- -(1/n) * sum(Y * log(pmax(P_z, 1e-15))) + alpha * sum(abs(h))
current_error <- logistic_loss
error[[i]] <- c(error[[i]], current_error)
count[[i]] <- c(count[[i]], k)
}
if (current_error < tol) break
}
rownames(h) <- c("intercept", x)
l[[i]] <- h
}
}else{
for (i in 1:length(lambda)) {
L <- 0.25*max(eigen(DD)$values)
alpha <- lambda[i]
h <- matrix(ini, nrow = ncol(D), ncol = ncol(Y)) # initial coefficients
h_old <- h
for (k in 1:max_step) {
race <- 4 / L
h_old <- h # current iterate; the gradient step starts from here
P <- softmax(D %*% h_old) # estimated class probabilities
# gradient of the multinomial log-likelihood
grad <- (1 / n) * (t(D) %*% (P - Y))
h_new <- h_old - race * grad # provisional gradient step
# Soft-thresholding
h_new <- apply(h_new, 2, function(col) as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * race, x))))
left<-g_func(h_new,Y,D,alpha)
right<- g_func(h_old,Y,D,alpha)+crossprod(grad,h_new-h_old)[1]+(1/(2*race))*sum((h_new-h_old)^2)
while (left>right) {
race<-race*beta
h_new<-h_old-race*grad
h_new<-apply(h_new, 2, function(col) as.matrix(apply(as.matrix(col), 1, function(x) s_threshold(alpha * race, x))))
left<-g_func(h_new,Y,D,alpha)
right<-g_func(h_old,Y,D,alpha)+crossprod(grad,h_new-h_old)[1]+(1/(2*race))*sum((h_new-h_old)^2)
}
h <- h_new
# Loss function with lasso penalty
if (k == 1 || k %% (max_step %/% 10) == 0 || k == max_step) {
logistic_loss <- -(1 / n) * sum(Y*log(pmax(P,1e-15))) + alpha * sum(abs(h))
#logistic_loss <- -(1 / n) * sum(crossprod(Y,log(pmax(P,1e-15)))) + alpha * sum(abs(h))
current_error <- logistic_loss
error[[i]] <- c(error[[i]], current_error)
count[[i]] <- c(count[[i]], k)
}
if (current_error < tol) {
break
}
}
rownames(h) <- c("intercept", x)
l[[i]] <- h
}
}
# Print loss evolution
# Plot loss if requested
if (image == TRUE) {
plot(NULL, xlim = c(1, max(sapply(count, max))), ylim = range(unlist(error)),
xlab = "Iteration", ylab = "Cross-Entropy Loss", main = "Loss Evolution (FISTA with LASSO)")
colors <- grDevices::rainbow(length(lambda))
for (i in 1:length(lambda)) {
graphics::lines(x = count[[i]], y = error[[i]], col = colors[i], lwd = 2)
}
graphics::legend("topright", legend = paste("Lambda =", lambda), col = colors, lwd = 2)
}
return(list(l,error,count))
}
#'delete_rect
#'@export delete_rect
#'@title rectangular hole in image
#'@import EBImage
#'@description creates a rectangular hole in the image with the specified dimensions
#'@usage delete_rect(image,i,j,width,height)
#'@return a 3D array with pixels in the hole set to -100 and the rest of the image pixels unchanged
#'@param image image to be modified; it has to be a 3D array produced by the readImage function from the EBImage package
#'@param i row index of the upper left corner of the rectangle
#'@param j column index of the upper left corner of the rectangle
#'@param width width of the rectangle
#'@param height height of the rectangle
#'@examples
#'image<-EBImage::readImage(system.file("extdata", "bird.jpg", package = "ProxReg"))
#'image_noise<-delete_rect(image,160,160,20,20)
#'image_noise<-EBImage::Image(image_noise,colormode = "Color")
#'EBImage::display(image_noise)
delete_rect<-function(image,i,j,width,height){
arr_hole=EBImage::imageData(image)
for(k in i:(i+height)){
for(l in j:(j+width)){
arr_hole[k,l,]<- -100
}
}
return(arr_hole)
}
# Extract the h x h patch (all channels) centred at pixel (i, j)
get_patch<-function(image,i,j,h){
image_data<-EBImage::imageData(image)
start_i<-(i-h%/%2)
start_j<-(j-h%/%2)
return(image_data[start_i:(start_i+h-1),start_j:(start_j+h-1),])
}
# Write an h x h patch of values (all channels) centred at pixel (i, j) back into the image
fill_patch<-function(image,i,j,h,valores){
image_data<-EBImage::imageData(image)
start_i<-as.numeric(i-h%/%2)
start_j<-as.numeric(j-h%/%2)
image_data[start_i:(start_i+h-1),start_j:(start_j+h-1),]<-array(valores, dim = c(h, h, 3)) # array of patch values
return(image_data)
}
# Build a dictionary of fully observed h x h patches, each flattened into one column
get_diccionary <- function(image, h, stride = 1) {
image_data <- EBImage::imageData(image)
diccionari <- list()
for (i in seq(1, dim(image_data)[1] - h, stride)) {
for (j in seq(1, dim(image_data)[2] - h, stride)) {
patch <- as.vector(get_patch(image_data, i + h %/% 2, j + h %/% 2, h))
patch_noise <- any(patch == -100) # TRUE if the patch contains at least one noisy pixel
patch_clean <- any(patch != -100) # TRUE if the patch contains at least one observed pixel
if (!patch_noise && patch_clean) { # keep only fully observed patches
diccionari[[length(diccionari) + 1]] <- patch # Append patch to the list
}
}
}
# Convert list to a matrix (each patch as a column)
if (length(diccionari) > 0) {
diccionari <- do.call(cbind, diccionari)}
return(diccionari)
}
######################################
# Return the coordinates of noisy pixels (-100) that have at least one observed neighbour
get_boundaries_pixels<-function(image){
image_data<-EBImage::imageData(image)
channel <- matrix(EBImage::imageData(image)[,,1], nrow = dim(EBImage::imageData(image))[1], ncol = dim(EBImage::imageData(image))[2])
coords<-list()
for(i in 1:(dim(channel)[1])){
for(j in 1:(dim(channel)[2])){
pixel<- channel[i,j]
if(pixel== -100){ # if the pixel is noisy
if(channel[i-1,j]!=-100 || channel[i+1,j]!=-100 || channel[i,j-1]!=-100 || channel[i,j+1]!=-100){
coords<-c(coords,list(c(i,j)))
}
}
}
}
return(coords)
}
# Among the boundary pixels, select the one whose patch contains the most observed pixels
priority_pixel<-function(image,h,boundaries){
image_data<-EBImage::imageData(image)
priority<- -1
priority_pixel<-c(1,1)
priority_patch<-NA
for(pixel in boundaries){
p<- as.vector(get_patch(image,pixel[1],pixel[2],h)) # boundaries is a list of coordinate pairs
num_pixel_complete<-sum(p!=-100)
if(num_pixel_complete>priority){
priority<-num_pixel_complete
priority_pixel<-pixel
priority_patch<-p
}
}
return(list(pixel=priority_pixel,patch=priority_patch))
}
#'inpainting
#'@export inpainting
#'@title image recovery using Lasso regression
#'@description predicts the missing pixels in an image using Lasso regression and fills the hole in the image
#'@usage inpainting(image,h,stride,i,j,width,height,lambda=0.1,max_iter=50000,
#'fista=TRUE, verbose=TRUE,ini=0,glmnet=TRUE,noise=TRUE)
#'@return a 3D array with the hole filled by pixels predicted by Lasso regression
#'@import EBImage
#'@param image image to be modified; it has to be a 3D array produced by the readImage function from the EBImage package
#'@param h size of the patch
#'@param stride stride for the patch
#'@param i row index of the upper left corner of the rectangle
#'@param j column index of the upper left corner of the rectangle
#'@param width width of the rectangle
#'@param height height of the rectangle
#'@param lambda a penalized parameter for the Lasso regression, it is 0.1 by default
#'@param max_iter maximum number of iterations, it is 50000 by default
#'@param fista fista=TRUE: use the FISTA algorithm for the pixel prediction
#'@param verbose print the iteration number and the size of the boundary
#'@param ini initial value for the coefficients, default is 0
#'@param glmnet use glmnet package for the Lasso regression
#'@param noise display the image with the hole, it is TRUE by default
#'@import glmnet
#'@import stats
#'@examples
#'test_img <- EBImage::readImage(system.file("extdata", "bird.jpg", package = "ProxReg"))
#' image_repaired <- inpainting(
#' test_img, h = 10, stride = 6, i = 160, j = 160, width = 20, height = 20,
#' lambda = 0.001, max_iter = 1000, verbose = TRUE, glmnet = TRUE,noise=TRUE)
#'RGB_repaired<-EBImage::Image(image_repaired,colormode = "Color")
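#'# Optionally display the repaired image (illustrative):
#'EBImage::display(RGB_repaired)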
inpainting<-function(image,h,stride,i,j,width,height,lambda=0.1,max_iter=50000,fista=TRUE, verbose=TRUE,ini=0,glmnet=TRUE,noise=TRUE){
# Extract the image data
image_hsv <- EBImage::imageData(image)
# Create a hole in the image
image_hole_hsv <- delete_rect(image_hsv, i = i, j = j, width = width, height = height)
if(noise){
image_disp<-EBImage::Image(image_hole_hsv,colormode = "Color")
EBImage::display(image_disp)
}
image_hole_hsv[is.na(image_hole_hsv)] <- mean(image_hole_hsv[!is.na(image_hole_hsv)], na.rm = TRUE)
X <- get_diccionary(image_hole_hsv, h, stride)
boundaries <- get_boundaries_pixels(image_hole_hsv)
iter <- 0
while (length(boundaries) > 0) {
# Priority pixel selection
priority_info <- priority_pixel(image_hole_hsv, h, boundaries)
pixeles <- priority_info[1]
Y <- as.vector(unlist(priority_info$patch)) # pixel values of the selected patch
# Training and test indices
train_i <- which(Y != -100)
test_i <- which(Y == -100)
if (length(train_i) == 0) {
warning("No training data available, skipping iteration.")
break
}
train_target <- Y[train_i]
data<-data.frame(cbind((X[train_i,]),as.matrix(Y[train_i],ncol=1))) ## 225 300 , 225
##
if(glmnet){
train_data <- (as.matrix(X[train_i,]))
train_target <- Y[train_i]
lasso_model <- glmnet::glmnet(train_data, train_target, alpha = 1, lambda = lambda)
test_data <- (as.matrix(X[test_i,]))
Y[test_i] <- stats::predict(lasso_model, test_data)
}
else{
if(fista){
coef <- lasso_fista(data,y=colnames(data)[ncol(data)],x=colnames(data)[1:(ncol(data)-1)],lambda=lambda,max_step=max_iter,image=FALSE,ini=ini)
}else{
coef <- lasso_ista(data,y=colnames(data)[ncol(data)],x=colnames(data)[1:(ncol(data)-1)],lambda=lambda,max_step=max_iter,image=FALSE,ini=ini)
}
Y[test_i] <- abs(cbind(1,(X[test_i,]))%*%coef[[1]])
}
## prediction
Y[test_i] <- pmin(pmax(Y[test_i], 0), 1)
# Fill the patch
image_hole_hsv <- fill_patch(image_hole_hsv, as.numeric(pixeles$pixel[1]),as.numeric(pixeles$pixel[2]), h, Y)
# Update boundaries
boundaries <- get_boundaries_pixels(image_hole_hsv)
# Verbose logging
if (verbose) {
message("Iteration:", iter, "\n")
message("Boundary size:", length(boundaries), "\n")
}
iter <- iter + 1
}
result<-image_hole_hsv
repaired<-EBImage::Image(result,colormode = "Color")
EBImage::display(repaired)
return(result)
}