# sandbox/DEPRECATED/sim_pro_dat2.R

sim_pro_dat2 <- function(N = 1000,
                        number.timepoints = 4,
                        #--------Pass to sim_dat_ord()
                        number.anchor.categories = 4,
                        reg.formula = formula( ~ Time + Group + Time*Group),
                        Beta.anchor = 0,
                        thresholds = c(0.2, 0.4, 0.6, 0.8),
                        polychor.struc = 'ar1',
                        polychor.value = 0.4,
                        number.of.anchor.groups = 5,
                        #------- Generate PRO Score (Y_comp)
                        Beta.PGIS.delta = 1,
                        Beta.PGIS.bl = 1,
                        corr = 'ar1',
                        cor.value = 0.8,
                        var.values = 2,
                        #-------- Pass to sim_val_var()
                        n.val = 5, # number of validators
                        n.cat = c(3, NA, NA, NA, 5),
                        cor.val.ref = c(0.5, 0.6, 0.8, 0.2, 0.3)){



  # Simulate CLMM data
  out <- sim_dat_ord(N = N,
                     number.groups = number.anchor.categories,
                     number.timepoints = number.timepoints,
                     reg.formula = reg.formula,
                     Beta = Beta.anchor,
                     thresholds = thresholds,
                     corr = polychor.struc,
                     cor.value = polychor.value)


  # Create the anchor groups:
  dat <- out$dat
  dat <- dat[, c('USUBJID', 'Time', 'Y_comp')]
  # Dropping Group - there was no Group effect here; later go back and redo with a treatment arm
  colnames(dat) <- c('USUBJID', 'Time', 'PGIS')

  # FIX this part - note this doesn't work if your subject IDs are ordered this way
  # Pull the baseline (Time_1) PGIS and merge it back onto every row to compute change from baseline
  tmp1 <- dat[dat$Time == 'Time_1', c('USUBJID', 'PGIS')]
  colnames(tmp1) <- c('USUBJID', 'PGIS_bl')
  dat <- merge(x = dat, y = tmp1, by = 'USUBJID', all.x = TRUE)
  dat$PGIS_delta <- dat$PGIS - dat$PGIS_bl

  av <- dat$PGIS_delta

  # Number of anchor groups: collapse the PGIS change score into anchor categories
  if (number.of.anchor.groups == 5) {

    ag <- ifelse(av >= 2, 2,
                 ifelse(av == 1, 1,
                        ifelse(av == 0, 0,
                               ifelse(av == -1, -1,
                                      ifelse(av <= -2, -2, NA)))))

  } else if (number.of.anchor.groups == 3) {

    ag <- ifelse(av >= 1, 1,
                 ifelse(av == 0, 0,
                        ifelse(av <= -1, -1, NA)))

  } else {

    ag <- NA

  }

  dat$ag <- as.vector(ag)
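
  # Illustrative example (hypothetical values, not generated output): a subject
  # observed with PGIS = 1, 2, 3, 2 at Time_1 through Time_4 gets PGIS_bl = 1 on
  # every row, PGIS_delta = 0, 1, 2, 1, and (with 5 anchor groups) ag = 0, 1, 2, 1;
  # change scores beyond +/-2 are collapsed into the +/-2 anchor groups.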

  # This isn't it:
  # X <- model.matrix( ~ ag, data = dat)
  # Beta <- matrix(0, nrow = ncol(X), dimnames = list(colnames(X), 'param'))
  # Beta['ag', ] <- 1

  # 5.6.21: Looks good!
  # Mean model: anchor group x time interaction, with the anchor-group effect
  # growing over time (0 at Time_1, 0.25 at Time_2, 0.5 at Time_3, 1.0 at Time_4)
  X <- model.matrix( ~ ag*Time, data = dat)
  Beta <- matrix(0, nrow = ncol(X), dimnames = list(colnames(X), 'param'))
  Beta[grepl('Time_2', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 0.25
  Beta[grepl('Time_3', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 0.5
  Beta[grepl('Time_4', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 1.0

  # 5.6.21 - this seems to work fine as well;
  # this is required for the COA34 R package, but not for right now!
  # X <- model.matrix( ~ PGIS_bl + ag*Time, data = dat)
  # Beta <- matrix(0, nrow = ncol(X), dimnames = list(colnames(X), 'param'))
  # Beta['PGIS_bl', ] <- 1
  # Beta[grepl('Time_2', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 0.25
  # Beta[grepl('Time_3', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 0.5
  # Beta[grepl('Time_4', rownames(Beta)) & grepl('ag', rownames(Beta)), ] <- 1.0

  # Matrix multiply to get the linear predictor for each row:
  XB <- X %*% Beta
  dat$XB <- as.vector(XB)
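
  # Implied mean structure (follows directly from Beta above): for a subject with
  # ag = 2, the expected Y_comp is 0 at Time_1, 2 * 0.25 = 0.5 at Time_2,
  # 2 * 0.5 = 1.0 at Time_3, and 2 * 1.0 = 2.0 at Time_4, so the separation
  # between anchor groups widens over time.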


#-------------------------------------------------------------------------------------------------

  if (corr == 'ind') {

    cor.mat <- diag(1, nrow = number.timepoints, ncol = number.timepoints)

  }# end independent structure


  if (corr == 'cs') {

    if (is.null(cor.value)) { cor.value <- 0.4 }
    cor.mat <- matrix(cor.value, nrow = number.timepoints, ncol = number.timepoints)
    diag(cor.mat) <- 1

  } # end Compound Symmetry correlation


  if (corr == 'ar1') {

    if (is.null(cor.value)) { cor.value <- 0.8 }

    cor.mat <- diag(1, nrow = number.timepoints, ncol = number.timepoints)
    for (i in 1:number.timepoints) {
      for (j in 1:i) {

        cor.mat[i , j] <- cor.value^(i -j) # AR1
        cor.mat[j, i] <- cor.mat[i, j]

      }
    }

  }# end exponential decay correlations
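
  # Example: with number.timepoints = 4 and cor.value = 0.8, the AR(1) loop above
  # produces the correlation matrix
  #   1.000  0.800  0.640  0.512
  #   0.800  1.000  0.800  0.640
  #   0.640  0.800  1.000  0.800
  #   0.512  0.640  0.800  1.000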



  # Default variance value at the last timepoint is 2.
  # Either adjust that single value (the rest are filled in automatically via an
  # even spacing from 1) OR pass the full vector of per-timepoint variances.
  if (length(var.values) == 1) {

    var.values <- seq(1, var.values, length.out = number.timepoints)

  } else {

    if (length(var.values) != number.timepoints) stop('Length of var.values does not equal number of timepoints')

  }

  # Variance-covariance matrix:
  var.mat <- diag(sqrt(var.values), nrow = number.timepoints, ncol = number.timepoints)
  sigma <- var.mat %*% cor.mat %*% var.mat
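
  # Example with the defaults (number.timepoints = 4, var.values = 2):
  # var.values expands to approximately c(1, 1.33, 1.67, 2), the diagonal of
  # sigma equals those variances, and each off-diagonal element is
  # sqrt(var_i * var_j) * cor.mat[i, j].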


  # Simulate the errors:
  error <- MASS::mvrnorm(n = N, mu = rep(0, number.timepoints), Sigma = sigma)
  colnames(error) <- unique(dat$Time)

  # Associate errors with the correct XB to create correct Y for each subject
  dat$error <- NA

  for (tt in unique(dat$Time)) {

    dat$error[which(dat$Time == tt)] <- error[, tt, drop = T]

  }


  # Complete-data outcome: Y = XB + error
  dat$Y_comp <- as.vector(dat$XB + dat$error)


#-------------------------------------------------------------------------------
# Validator Variables
#source("C:/Users/ciaconangelo/Documents/RESEARCH_NEW_LAPTOP/R_CODE_Long_Mixed_Models/sim_val_var.R")

  out2 <- sim_val_var(dat = dat,
                      n.val = n.val,
                      n.cat = n.cat,
                      cor.val.ref = cor.val.ref)

  dat <- out2$dat

  return(list('dat' = dat,
              'Beta' = Beta,
              'sigma' = sigma,
              'out.clmm' = out,
              'out.val' = out2))

}
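
# Minimal usage sketch (kept as a comment so sourcing this file has no side
# effects; assumes the helper functions sim_dat_ord() and sim_val_var() from
# this package are available in the session, e.g. via devtools::load_all()):
#
#   sim <- sim_pro_dat2(N = 500, number.timepoints = 4)
#   str(sim$dat)          # long-format data: PGIS, anchor group (ag), Y_comp, validator variables
#   sim$Beta              # coefficients used to generate Y_comp
#   round(sim$sigma, 2)   # error variance-covariance matrix across timepoints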