# Vignette setup: collapse source and output in rendered chunks, and
# prefix printed output with "#>" (standard knitr vignette convention).
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
# Load the package whose functions (glm_grad, class_log) are demonstrated below.
library(bis557)
Question 1
# Question 1, part (a): the linear-model Hessian is well-conditioned.
# Draw a small random design matrix and report its condition number,
# i.e. the ratio of the largest to the smallest singular value.
set.seed(2020)
X <- matrix(rnorm(10), ncol = 2)

# Condition number of X from its singular values.
svals <- svd(X)$d
max(svals) / min(svals)
# Question 1, part (b): the logistic variation is NOT well-conditioned.
# An extreme coefficient (beta[2] = 100) pushes the fitted probabilities
# mu toward 0/1, which makes the weighted Hessian t(X) %*% D %*% X
# ill-conditioned even though X itself is well-conditioned.
# NOTE(review): the logistic Hessian weights are usually mu * (1 - mu);
# this chunk uses mu alone — confirm that is intentional.
X <- cbind(rep(1, 5), X)           # prepend an intercept column
beta <- c(0.1, 100, 1)
mu <- 1 / (1 + exp(-X %*% beta))   # fitted probabilities
D <- diag(x = as.vector(mu), nrow = 5, ncol = 5)
H <- t(X) %*% D %*% X

# Condition number of the Hessian from its singular values.
svals <- svd(H)$d
max(svals) / min(svals)
Question 2
First-order solution to the GLM maximum-likelihood problem: constant step size
# Question 2: first-order (gradient-based) solution to the GLM
# maximum-likelihood problem with a CONSTANT step size.
# Simulate n observations on p predictors (intercept included) with a
# binary response, then fit with the package's gradient routine.
set.seed(20)
n <- 1000
p <- 5
X <- cbind(1, matrix(rnorm(n * (p - 1)), ncol = p - 1))
Y <- sample(c(0, 1), replace = TRUE, size = n)
glm_grad(X, Y, update = FALSE)  # update = FALSE: constant step size
First-order solution to the GLM maximum-likelihood problem: adaptive step size
# Question 2 (continued): the same fit with an ADAPTIVE step size,
# followed by a reference fit from R's built-in glm() for comparison.
glm_grad(X, Y, update = TRUE)  # update = TRUE: adaptive step size

library(MASS)  # NOTE(review): MASS appears unused here — glm() is in stats
df <- as.data.frame(cbind(X, Y))
# NOTE(review): Y is binary (0/1), so binomial(link = "logit") would be
# the natural reference family; confirm poisson(link = "log") matches
# the model glm_grad() actually fits.
glm(Y ~ . - 1, data = df, family = poisson(link = "log"))
Here we can see that the fit obtained with a constant step size is slightly better.
Question 3
A classification model that generalizes logistic regression to accommodate three classes
# Question 3: exercise the 3-class generalization of logistic regression
# on simulated data (100 observations, 5 predictors, labels in {1, 2, 3}).
X <- matrix(rnorm(500), ncol = 5)
y <- sample(c(1, 2, 3), size = 100, replace = TRUE)  # TRUE, not T
class_log(X, y, class = 3)
Add the following code to your website.
For more information on customizing the embed code, read Embedding Snippets.