```r
# class prior: proportion of training observations in each class
prior <- function(y){
  # accept labels as a plain vector or a single-column matrix
  y <- as.vector(y)
  levels <- sort(unique(y))
  k <- length(levels)
  prior <- rep(0, k)
  for(i in 1:k){
    prior[i] <- sum(y == levels[i]) / length(y)
  }
  return(prior)
}
# fit a Gaussian naive Bayes model: class priors plus per-class feature
# means and standard deviations
naive_bayes <- function(X, y){
  y <- as.vector(y)
  d <- dim(X)[2]
  levels <- sort(unique(y))
  k <- length(levels)

  # prior: proportion of observations in each class
  prior <- rep(0, k)
  for(i in 1:k){
    prior[i] <- sum(y == levels[i]) / length(y)
  }

  # summaries: k x d x 2 array holding, for every class, the mean and
  # standard deviation of each feature
  classes <- array(rep(1, d * k * 2), dim = c(k, d, 2))
  for(i in 1:k){
    X_k <- X[which(y == levels[i]), , drop = FALSE]
    classes[i, , 1] <- apply(X_k, 2, mean)
    classes[i, , 2] <- apply(X_k, 2, sd)
  }
  nb <- list(X = X, y = y, levels = levels, summaries = classes, prior = prior)
  class(nb) <- "naive_bayes"
  return(nb)
}
```
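As a quick illustration (not part of the package itself), the model can be fitted to a small simulated two-class dataset; the data below are made up purely for demonstration:

```r
set.seed(1)
# two Gaussian classes in two dimensions (illustrative data only)
X <- rbind(matrix(rnorm(40, mean = 0), ncol = 2),
           matrix(rnorm(40, mean = 3), ncol = 2))
y <- matrix(rep(c(0, 1), each = 20), ncol = 1)

fit <- naive_bayes(X, y)
fit$prior      # estimated class priors, 0.5 and 0.5 here
fit$summaries  # k x d x 2 array of per-class feature means and sds
```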

Predict
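For a new observation $x$, prediction maximises the (unnormalised) log-posterior implied by the Gaussian naive Bayes assumption that features are conditionally independent given the class:

$$\hat{y} = \arg\max_{c}\left[\log \pi_c + \sum_{j=1}^{d} \log \mathcal{N}\!\left(x_j \mid \mu_{cj}, \sigma_{cj}^2\right)\right],$$

where $\pi_c$ is the class prior and $\mu_{cj}$, $\sigma_{cj}$ are the per-class feature means and standard deviations stored in `summaries`.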

```r
predict.naive_bayes <- function(obj, x_new){
  prior <- obj$prior
  summaries <- obj$summaries
  n <- dim(x_new)[1]
  d <- dim(x_new)[2]
  k <- dim(summaries)[1]
  # log-posterior (up to an additive constant) for every observation and class
  result <- matrix(rep(0, n * k), nrow = n)
  for(obs in 1:n){
    for(cl in 1:k){
      post <- log(prior[cl])
      for(feat in 1:d){
        mu <- summaries[cl, feat, 1]
        sigma <- summaries[cl, feat, 2]
        # Gaussian log-likelihood of the feature given the class
        post <- post + dnorm(x_new[obs, feat], mu, sigma, log = TRUE)
      }
      result[obs, cl] <- post
    }
  }
  # pick the class with the largest log-posterior and map back to its label
  pred <- apply(result, 1, which.max)
  return(obj$levels[pred])
}
```
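Continuing the illustrative example above, predictions for new points can be checked against the known class structure:

```r
x_new <- rbind(c(0, 0), c(3, 3))
predict(fit, x_new)  # expected: first point assigned to class 0, second to class 1
```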

