#' Define state-space for spatial capture-recapture models
#'
#' @description Returns a list containing a matrix or array of grid
#' coordinates and an \code{Extent} object from the \code{raster} package,
#' which together define the state-space.
#'
#' @param X Either a matrix or array representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param buff The distance (in m or km) by which the traps should be
#' buffered, given as an integer. This is typically 3 times the sigma parameter.
#' @param res The grid cell resolution for the state-space.
#' @return
#' \itemize{
#' \item{\code{grid}}{ A matrix or array of grid coordinates.}
#' \item{\code{ext}}{ An \code{Extent} object (or a list of \code{Extent}
#' objects when traps are clustered) from the \code{raster} package.}
#' }
#' @note A matrix of grid coordinates is returned when the trap coordinates
#' are provided as a matrix (i.e., 2D); an array is returned when the trap
#' coordinates are provided as a 3D array.
#' @details This function supports spatial capture-recapture analysis by
#' creating two outputs that are used to define the state-space. If a habitat
#' mask is not used, then only the \code{Extent} object from the \code{raster}
#' package is needed under a uniform state-space.
#' The matrix or array object can be used to develop a habitat mask in a uniform
#' state-space or as a discretized state-space.
#' @author Daniel Eacker
#' @seealso \code{\link[raster:extent]{raster::extent}}
#' @importFrom raster extent coordinates raster rasterFromXYZ ncell proj4string
#' @importFrom sf st_crs
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # create state-space
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # make plot of grid and trap locations
#' par(mfrow=c(1,1))
#' plot(Grid$grid, pch=19)
#' points(traps, col="blue",pch=20)
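#'
#' # a brief sketch of the clustered-trap case: the same layout is shifted to
#' # three illustrative site centers to build a 3D array (sites in dimension 3)
#' offsets <- cbind(c(0, 10000, 20000), c(0, 0, 10000))
#' traps3d <- array(NA, dim = c(nrow(traps), 2, 3))
#' for(g in 1:3) traps3d[,,g] <- traps + matrix(offsets[g,], nrow(traps), 2, byrow = TRUE)
#'
#' # here grid_classic() returns an array of grid coordinates and a list of extents
#' Grid3d = grid_classic(X = traps3d, crs_ = mycrs, buff = 3*mysigma, res = 100)
#' str(Grid3d$grid)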
#' @name grid_classic
#' @export
grid_classic <- function(X, crs_, buff, res){
# need to determine if X is 2 or 3 dimensional (stop if not)
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
if(res >= max(buff)){
stop("Resolution should not be equal to or greater than buffer")
}
# for dim of length 2
if(length(dim(X))==2){
# get new extent
ext <- raster::extent(c(apply(X, 2, min)-max(buff),apply(X, 2, max)+
max(buff))[c(1,3,2,4)])
rast <- raster::raster(ext = ext, crs = crs_, res = res)
gridOut <- raster::coordinates(rast)
}else
# for dim of length 3 (assumes different sites are in dimension 3)
if(length(dim(X))==3){
ext_initial <- apply(X, 3, function(x) raster::extent(c(apply(x, 2, min)-
max(buff),apply(x, 2, max)+max(buff))[c(1,3,2,4)]))
centroids <- lapply(ext_initial, function(x) cbind(mean(c(x[2],x[1]),na.rm=TRUE),
mean(c(x[4],x[3]),na.rm=TRUE)))
x_lengths <- unlist(lapply(ext_initial, function(x) x[2]-x[1]))
x_index <- which(x_lengths==max(x_lengths))[1]
y_lengths <- unlist(lapply(ext_initial, function(x) x[4]-x[3]))
y_index <- which(y_lengths==max(y_lengths))[1]
ext_standard <- raster::extent(c(-x_lengths[x_index]/2,x_lengths[x_index]/2,
-y_lengths[y_index]/2,y_lengths[y_index]/2))
coords <- raster::coordinates(raster::raster(ext = ext_standard, crs = crs_,
res = res))
rast <- lapply(centroids, function(x) raster::rasterFromXYZ(
cbind(coords[,1]+x[,1],coords[,2]+x[,2]),crs = crs_))
gridOut <- lapply(rast, raster::coordinates)
ext <- lapply(rast, function(x) raster::extent(x))
gridOut <- array(unlist(gridOut),dim=c(raster::ncell(rast[[1]]),
dim(X)[2:3])) # convert to array
}
return(list(grid = gridOut, ext = ext))
} # End 'grid_classic' function
#' Simulate basic spatial capture-recapture data
#'
#' @description Returns a list of simulated data including the encounter
#' history, binary sex indicator, activity centers, and site identifier.
#'
#' @param X Either a matrix or array object representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param ext An \code{Extent} object from the \code{raster} package. This is
#' returned from \code{\link{grid_classic}}.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param N Simulated total abundance as an integer.
#' @param sigma_ The scaling parameter(s) of the bivariate normal kernel, in
#' meters or kilometers, given as a numeric vector of length 1 or 2 (use
#' length 2 for sex-specific sigma).
#' @param prop_sex The proportion of females or males as a numeric value. This
#' will depend upon the indicator coding scheme used (e.g., if females = 1 and
#' males = 0, then this is the proportion of females in the simulation). Must
#' be a numeric value between 0 and 1. Note that 0 or 1 can be used if a
#' non-sex-specific sigma is desired.
#' @param K The number of sampling occasions desired as an integer.
#' @param base_encounter The baseline encounter probability or rate as a numeric
#' value. Note that a probability is given for a \code{"binomial"} observation
#' distribution while a rate is given for a \code{"poisson"} distribution.
#' @param enc_dist Either \code{"binomial"} or \code{"poisson"}. Default is
#' \code{"binomial"}.
#' @param hab_mask Either \code{FALSE} (the default) or a matrix or array
#' output from the \code{\link{mask_polygon}} or \code{\link{mask_raster}} functions.
#' @param setSeed The random number generator seed as an integer used to
#' obtain repeatable data simulations. Default is 500.
#' @return
#' \itemize{
#' \item{\code{y}}{ A matrix or array of encounter histories.}
#' \item{\code{sex}}{ A vector or matrix of 0's and 1's for sex identification.}
#' \item{\code{s}}{ A matrix of simulated activity centers.}
#' \item{\code{site}}{ A vector for the site identifier.}
#' }
#' @note The \code{site} identifier is only returned when a 3-dimensional
#' trap array is provided.
#' @author Daniel Eacker
#' @details This function supports spatial capture-recapture (SCR) analysis by
#' allowing for easy simulation of the data components used by \code{nimble} in
#' Bayesian SCR models. Note that the encounter histories \code{y} are sorted
#' so that detected individuals come first, followed by non-detected individuals.
#' @seealso \code{\link{grid_classic}}
#' @importFrom stats runif rbinom rpois
#' @importFrom scales rescale
#' @importFrom sf st_cast st_sfc st_multipoint st_distance
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # Create grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # simulate SCR data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = c(300),
#' prop_sex = 1, N = 200, K = 4, base_encounter = 0.25, enc_dist = "binomial",
#' hab_mask = FALSE, setSeed = 50)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(Grid$grid, pch=20,ylab="Northing",xlab="Easting")
#' points(traps, col="blue",pch=20)
#' points(data3d$s,col="red",pch = 20)
#' points(data3d$s[which(apply(data3d$y,1,sum)!=0),],col="green",pch = 20)
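#'
#' # a short sketch of the Poisson encounter model; here base_encounter is a
#' # rate rather than a probability (values below are illustrative)
#' data_pois = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = 300,
#' prop_sex = 1, N = 200, K = 4, base_encounter = 0.15, enc_dist = "poisson",
#' hab_mask = FALSE, setSeed = 50)
#'
#' # number of individuals detected at least once
#' sum(apply(data_pois$y, 1, sum) != 0)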
#' @name sim_classic
#' @export
sim_classic <- function(X, ext, crs_, N, sigma_, prop_sex, K, base_encounter,
enc_dist = "binomial", hab_mask = FALSE, setSeed = 500){
if(is.null(setSeed)==FALSE){
set.seed(setSeed)
}
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
if(length(sigma_)>2 | length(sigma_)<1){
stop("Sigma can only be of length 1 or 2")
}
if(enc_dist != "poisson" & enc_dist != "binomial"){
stop("Encounter model has to be either binomial or poisson")
}
# for dim of length 2
if(length(dim(X))==2){
xlim <- ext[1:2] # create x limits for state-space
ylim <- ext[3:4] # create y limits for state-space
if(isFALSE(hab_mask)){
sx <- stats::runif(N,xlim[1],xlim[2])
sy <- stats::runif(N,ylim[1],ylim[2])
s <- cbind(sx,sy)
} else
if(isFALSE(hab_mask)==FALSE){
sx <- stats::runif(N,xlim[1],xlim[2])
sx.rescale <- scales::rescale(sx, to = c(0,dim(hab_mask)[2]), from=xlim)
sy <- stats::runif(N,ylim[1],ylim[2])
sy.rescale <- scales::rescale(sy, to = c(0,dim(hab_mask)[1]), from=ylim)
pOK <- numeric(N)
for(i in 1:N){
pOK[i] <- hab_mask[(trunc(sy.rescale[i])+1),(trunc(sx.rescale[i])+1)]
while(pOK[i]==0){
sx[i] <- stats::runif(1,xlim[1],xlim[2])
sx.rescale[i] <- scales::rescale(sx[i], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy[i] <- stats::runif(1,ylim[1],ylim[2])
sy.rescale[i] <- scales::rescale(sy[i], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK[i] <- hab_mask[(trunc(sy.rescale[i])+1),(trunc(sx.rescale[i])+1)]
}
}
s <- cbind(sx,sy)
}
# set prop_sex to 1 if you only want to simulate one sex
sex <- stats::rbinom(N, 1, prop_sex)
sex_ <- sex + 1 # sex indicator for sigma
if(length(sigma_) == 1){ # make sigma length 2 if only 1 given
sigma_ <- c(sigma_,sigma_)
}
# compute distance matrix for traps
Dmat <- sf::st_distance(sf::st_cast(sf::st_sfc(sf::st_multipoint(s),
crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(X),
crs = crs_),"POINT"))
# convert manually to matrix
Dmat <- matrix(as.numeric(Dmat), nrow=nrow(Dmat), ncol=ncol(Dmat))
# get detection prob matrix
prob <- base_encounter*exp(-Dmat^2/(2*sigma_[sex_]^2))
Y3d <- array(NA, dim=c(N,nrow(X),K)) # encounter data
if(enc_dist == "binomial"){
for(i in 1:N){
for(j in 1:nrow(X)){
Y3d[i,j,1:K] <- stats::rbinom(K,1,prob[i,j])
}}
}else
if(enc_dist == "poisson"){
for(i in 1:N){
for(j in 1:nrow(X)){
Y3d[i,j,1:K] <- stats::rpois(K,prob[i,j])
}}
}
# organize simulated activity centers and sex so detected individuals come first
s <- s[c(which(apply(Y3d,1,sum)!=0),which(apply(Y3d,1,sum)==0)),]
sex[which(apply(Y3d,1,sum)==0)] <- NA
sex <- sex[c(which(apply(Y3d,1,sum)!=0),which(apply(Y3d,1,sum)==0))]
# organize encounter histories with detected individuals first, then all-zero histories
Y3d <- Y3d[c(which(apply(Y3d,1,sum)!=0),which(apply(Y3d,1,sum)==0)),,]
}else
# for dim of length 3 (assumes different sites are in dimension 3)
if(length(dim(X))==3){
if(length(sigma_) == 1){ # make sigma length 2 if only 1 given
sigma_ <- c(sigma_,sigma_)
}
site <- sort(rep(1:dim(X)[3],N/dim(X)[3]))
if(length(site) < N){ # check if site variable too short
N <- length(site)
warning("N was reduced from requested number of simulated individuals")
}
sex <- stats::rbinom(N, 1, prop_sex)
sex_ <- sex + 1 # indicator for sigma
xlim <-sapply(ext, function(x) x[1:2]) # create x limits for state-space
ylim <- sapply(ext, function(x) x[3:4]) # create y limits for state-space
if(isFALSE(hab_mask)){
sx <- stats::runif(N,xlim[1,site],xlim[2,site])
sy <- stats::runif(N,ylim[1,site],ylim[2,site])
s <- cbind(sx,sy)
} else
if(isFALSE(hab_mask)==FALSE){
pOK <- numeric(N)
s <- matrix(NA, nrow=N, ncol=2)
for(i in 1:N){
s[i,1] <- runif(1,xlim[1,site[i]],xlim[2,site[i]])
sx.rescale <- scales::rescale(s[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim[,site[i]])
s[i,2] <- runif(1,ylim[1,site[i]],ylim[2,site[i]])
sy.rescale <- scales::rescale(s[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim[,site[i]])
pOK[i] <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),
site[i]]
while(pOK[i]==0){
s[i,1] <- runif(1,xlim[1,site[i]],xlim[2,site[i]])
sx.rescale <- scales::rescale(s[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim[,site[i]])
s[i,2] <- runif(1,ylim[1,site[i]],ylim[2,site[i]])
sy.rescale <- scales::rescale(s[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim[,site[i]])
pOK[i] <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),
site[i]]
}
}
}
Y4d <- array(NA, dim=c(N,dim(X)[1],K)) # encounter data
prob <- list()
# estimate probability matrices by site to save run time
for(g in 1:dim(X)[3]){
Dmat <- sf::st_distance(sf::st_cast(sf::st_sfc(
sf::st_multipoint(s[site==g,]), crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(X[,,g]), crs = crs_),
"POINT"))
# compute distance matrix for traps and convert manually to matrix
Dmat <- matrix(as.numeric(Dmat), nrow=nrow(Dmat), ncol=ncol(Dmat))
# get detection prob matrix
prob[[g]] <- base_encounter*exp(-Dmat^2/(2*sigma_[sex_[site==g]]^2))
}
prob = do.call(rbind, prob)
for(i in 1:N){
if(enc_dist == "binomial"){
for(j in 1:dim(X)[1]){
Y4d[i,j,1:K] <- stats::rbinom(K,1,prob[i,j])
}
}else
if(enc_dist == "poisson"){
for(j in 1:dim(X)[1]){
Y4d[i,j,1:K] <- stats::rpois(K,prob[i,j])
}
}
}
} # end 3D loop
if(length(dim(X))==2){
dataList <- list(y=Y3d,sex=sex,s=s)
}else
if(length(dim(X))==3){
# organize data elements s, site, sex, and y
site <- site[c(which(apply(Y4d,1,sum)!=0),which(apply(Y4d,1,sum)==0))]
sex <- sex[c(which(apply(Y4d,1,sum)!=0),which(apply(Y4d,1,sum)==0))]
sex[(length(which(apply(Y4d,1,sum)!=0))+1):N] <- NA
s <- s[c(which(apply(Y4d,1,sum)!=0),which(apply(Y4d,1,sum)==0)),1:2]
Y4d <- Y4d[c(which(apply(Y4d,1,sum)!=0),which(apply(Y4d,1,sum)==0)),,]
dataList <- list(y=Y4d,sex=sex,site=site,s=s)
}
return(dataList)
} # End 'sim_classic' function
#' Retrieve nimbleCode for spatial capture-recapture models
#'
#' @description Creates model code using the \code{\link[nimble]{nimbleCode}} function.
#'
#' @param dim_y An integer of either 2 or 3 that defines the dimensional
#' format of the encounter history data.
#' @param enc_dist Either \code{"binomial"} or \code{"poisson"}. Default is
#' \code{"binomial"}.
#' @param sex_sigma A logical value indicating whether the scaling parameter
#' ('sigma') is sex-specific. Default is \code{FALSE}.
#' @param hab_mask A logical value indicating whether a habitat mask will be
#' used. Default is \code{FALSE}.
#' @param trapsClustered A logical value indicating if traps are clustered in
#' arrays across the sampling area. Default is \code{FALSE}.
#' @return A \code{nimbleCode} object from the \code{nimble} package.
#' @details This function provides templates that could be copied and easily
#' modified to include further model complexity such as covariates explaining
#' detection probability. The models include different encounter probability
#' distributions, sex-specific scaling parameters, and habitat masking.
#' @author Daniel Eacker
#' @examples
#' # get model for 2D encounter data, binomial encounter distribution,
#' # non-sex-specific scaling parameter, and no habitat mask
#' scr_model = get_classic(dim_y = 2,enc_dist = "binomial",sex_sigma = FALSE,
#' hab_mask = FALSE,trapsClustered = FALSE)
#'
#' # inspect model
#' scr_model
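#'
#' # a further sketch: Poisson encounter model with a sex-specific sigma
#' # (2D encounter data, no habitat mask, unclustered traps)
#' scr_model_sex = get_classic(dim_y = 2, enc_dist = "poisson", sex_sigma = TRUE,
#' hab_mask = FALSE, trapsClustered = FALSE)
#'
#' # inspect model
#' scr_model_sex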
#' @name get_classic
#' @export
get_classic <- function(dim_y, enc_dist = "binomial",sex_sigma = FALSE,
hab_mask = FALSE,trapsClustered = FALSE){
M <- J <- s <- X <- p0 <- sigma <- n0 <- z <- A <- lam0 <- K <- sex <-
nSites <- site <- pixelWidth <- psi <- prop.habitat <- NULL
if(dim_y !=2 & dim_y!=3){
stop("dim_y must be either 2 or 3")
}
if(enc_dist != "poisson" & enc_dist != "binomial"){
stop("Encounter distribution has to be either binomial or poisson")
}
if(trapsClustered == FALSE){
if(isFALSE(hab_mask)){ # determine if hab_mask is included
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for detected individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma^2))
}
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # End 3D models
} else
if(hab_mask==TRUE){
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # i individuals
# use zeros trick for detected individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
}
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower, x_upper)
s[i,2] ~ dunif(y_lower, y_upper)
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1)] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1])^2 + (s[i,2]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # End 3D model
} # end models with hab_mask
}else # trapsClustered
if(trapsClustered){
if(isFALSE(hab_mask)){ # determine if hab_mask is included
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
}
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
}
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # site-specific baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
}
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # site-specific baseline encounter rate
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # site-specific baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # end 3D model
} else
if(hab_mask==TRUE){
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
}
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # site-specific baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
}
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # site-specific baseline encounter rate
} # g sites
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # site-specific baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
sigma.pixel[1] <- sigma[1] / pixelWidth # scaled for habitat mask
sigma.pixel[2] <- sigma[2] / pixelWidth # scaled for habitat mask
psi ~ dunif(0, 1) # inclusion prob
for(i in 1:M){
z[i]~dbern(psim[i])
# adjust psi for the proportion of available habitat at each site
psim[i] <- (1-(1-psi)^prop.habitat[site[i]])
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
s[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOK[i] <- hab_mask[(trunc(s[i,2])+1),(trunc(s[i,1])+1),site[i]] # habitat check
OK[i] ~ dbern(pOK[i]) # OK[i] <- 1, the ones trick
dist[i,1:J] <- sqrt((s[i,1]-X[1:J,1,site[i]])^2 + (s[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma.pixel[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # end 3D model
} # end models with hab_mask
} # end trapsClustered
} # End function 'get_classic'
#' Generate starting locations for activity centers
#'
#' @description Generate a matrix of initial starting locations, possibly
#' accounting for a habitat mask.
#'
#' @param y Either a matrix or array of encounter history data, possibly
#' from \code{sim_classic()}.
#' @param M An integer of the total augmented population size (i.e., detected
#' and augmented individuals).
#' @param X Either a matrix or array representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param ext An \code{Extent} object from the \code{raster} package. This is
#' returned from \code{\link{grid_classic}}.
#' @param site Either \code{NULL} (if a 2D trap array is used) or a vector of
#' integers denoting which trap array an individual (either detected or
#' augmented) belongs to. Note that \code{site} is provided from
#' \code{\link{sim_classic}} when a 3D trap array is used. However, this
#' \code{site} variable must be correctly augmented based on the total
#' augmented population size (i.e., \code{M}).
#' @param hab_mask Either \code{FALSE} (the default) or a matrix or array output
#' from \code{\link{mask_polygon}} or \code{\link{mask_raster}} functions.
#' @param all_random Logical. If \code{TRUE}, then encounter data \code{y} are
#' ignored and all initial activity center starting locations are randomly chosen.
#' If \code{FALSE} (the default), then initial values will be the mean capture
#' location for detected individuals and random locations for augmented individuals.
#' @return A matrix of initial activity center coordinates with \code{M} rows
#' and 2 columns.
#' @details This function generates initial activity center locations based
#' on encounter histories, augmented population size, state-space buffer,
#' and potentially a habitat mask. Note that mean trap detection locations
#' are used for detected individuals while initial values are randomly drawn
#' for augmented individuals. Also, a habitat check will be conducted for
#' all locations when a habitat mask is included.
#' @author Daniel Eacker
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # Create grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # simulate SCR data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs,
#' sigma_ = mysigma, prop_sex = 1, N = 200, K = 4, base_encounter = 0.25,
#' enc_dist = "binomial", hab_mask = FALSE, setSeed = 50)
#'
#' # generate initial activity center coordinates for 2D trap array without
#' # habitat mask
#' s.st3d = initialize_classic(y=data3d$y, M=500, X=traps, ext = Grid$ext,
#' hab_mask = FALSE, all_random = FALSE)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(Grid$grid, pch=20,ylab="Northing",xlab="Easting")
#' points(traps, col="blue",pch=20)
#' points(s.st3d, col="red",pch=20)
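#'
#' # a sketch with all_random = TRUE: encounter data are ignored and all
#' # starting locations are drawn uniformly within the state-space extent
#' s.rand = initialize_classic(y = data3d$y, M = 500, X = traps, ext = Grid$ext,
#' hab_mask = FALSE, all_random = TRUE)
#' points(s.rand, col="purple", pch=1)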
#' @name initialize_classic
#' @export
initialize_classic <- function(y, M, X, ext, site = NULL, hab_mask = FALSE, all_random = FALSE){
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
if(isFALSE(all_random)){
# for dim of length 2
if(length(dim(X))==2){
n0 <- length(which(apply(y,1,sum)!=0))
s.st <- matrix(NA, nrow=M, ncol=2)
# create x limits for state-space
xlim <- ext[1:2]
# create y limits for state-space
ylim <- ext[3:4]
for(i in 1:n0){
temp.y <- y[i,,]
temp.X <- X[which(apply(temp.y,1,sum)!=0),]
ntimes <- apply(temp.y,1,sum)[which(apply(temp.y,1,sum)!=0)]
df <- data.frame(temp.X,ntimes)
temp.X2 <- df[rep(seq_len(nrow(df)), df$ntimes),1:2]
if(isFALSE(hab_mask)){ # if no habitat mask used
if(length(ntimes)==1){
s.st[i,1]<-temp.X[1]
s.st[i,2]<-temp.X[2]
}else
if(length(ntimes)>1){
s.st[i,1]<-mean(temp.X2[,1],na.rm=TRUE)
s.st[i,2]<-mean(temp.X2[,2],na.rm=TRUE)
}
}else
if(isFALSE(hab_mask)==FALSE){ # if habitat mask used
if(length(ntimes)==1){
s.st[i,1]<-temp.X[1]
s.st[i,2]<-temp.X[2]
}else
if(length(ntimes)>1){
s.st[i,1]<-mean(temp.X2[,1],na.rm=TRUE)
s.st[i,2]<-mean(temp.X2[,2],na.rm=TRUE)
}
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
# if their mean activity center is not in suitable habitat,
# then randomly sample
while(pOK==0){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
}
}# end habitat check
} # end detected individuals
for(i in (n0+1):M){
if(isFALSE(hab_mask)){ # check for habitat mask for augmented individuals
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
} else
if(isFALSE(hab_mask)==FALSE){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
while(pOK==0){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
}
} # end habitat check
} # augmented individuals
} else
if(length(dim(X))==3){
if(length(dim(X))==3 & length(site)!=M){
stop("Include 'site' variable with length M")
}
n0 <- length(which(apply(y,1,sum)!=0))
s.st <- matrix(NA, nrow=M, ncol=2)
for(i in 1:n0){
# create x limits for state-space
xlim <- ext[[site[i]]][1:2]
# create y limits for state-space
ylim <- ext[[site[i]]][3:4]
temp.y <- y[i,,]
temp.X <- X[which(apply(temp.y,1,sum)!=0),,site[i]]
ntimes <- apply(temp.y,1,sum)[which(apply(temp.y,1,sum)!=0)]
df <- data.frame(temp.X,ntimes)
temp.X2 = df[rep(seq_len(nrow(df)), df$ntimes),1:2]
if(isFALSE(hab_mask)){ # if no habitat mask used
if(length(ntimes)==1){
s.st[i,1]<-temp.X[1]
s.st[i,2]<-temp.X[2]
}else
if(length(ntimes)>1){
s.st[i,1]<-mean(temp.X2[,1],na.rm=TRUE)
s.st[i,2]<-mean(temp.X2[,2],na.rm=TRUE)
}
}else
if(isFALSE(hab_mask)==FALSE){ # if habitat mask used
if(length(ntimes)==1){
s.st[i,1]<-temp.X[1]
s.st[i,2]<-temp.X[2]
}else
if(length(ntimes)>1){
s.st[i,1]<-mean(temp.X2[,1],na.rm=TRUE)
s.st[i,2]<-mean(temp.X2[,2],na.rm=TRUE)
}
sx.rescale <- scales::rescale(s.st[i,1],
to = c(0,dim(hab_mask)[2]), from=xlim)
sy.rescale <- scales::rescale(s.st[i,2],
to = c(0,dim(hab_mask)[1]), from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),
site[i]]
# if their mean activity center is not in suitable habitat,
#then randomly sample
while(pOK==0){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1],
to = c(0,dim(hab_mask)[2]), from=xlim)
sy.rescale <- scales::rescale(s.st[i,2],
to = c(0,dim(hab_mask)[1]), from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),
site[i]]
}
} # end habitat check
} # end detected individuals
for(i in (n0+1):M){
# create x limits for state-space
xlim <- ext[[site[i]]][1:2]
# create y limits for state-space
ylim <- ext[[site[i]]][3:4]
if(isFALSE(hab_mask)){ # check for habitat mask for augmented individuals
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
} else
if(isFALSE(hab_mask)==FALSE){
s.st[i,1] <-runif(1,xlim[1],xlim[2])
s.st[i,2] <-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
while(pOK==0){
s.st[i,1] <-runif(1,xlim[1],xlim[2])
s.st[i,2] <-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
}
} # end habitat check
} # end augmented individuals
} # end 3D initialize
} else # end all_random = FALSE
if(isFALSE(all_random) == FALSE){
# for dim of length 2
if(length(dim(X))==2){
s.st <- matrix(NA, nrow=M, ncol=2)
# create x limits for state-space
xlim <- ext[1:2]
# create y limits for state-space
ylim <- ext[3:4]
for(i in 1:M){
if(isFALSE(hab_mask)){ # check for habitat mask for augmented individuals
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
} else
if(isFALSE(hab_mask)==FALSE){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
while(pOK==0){
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
}
} # end habitat check
} # augmented individuals
} else
if(length(dim(X))==3){
if(length(dim(X))==3 & length(site)!=M){
stop("Include 'site' variable with length M")
}
s.st <- matrix(NA, nrow=M, ncol=2)
for(i in 1:M){
# create x limits for state-space
xlim <- ext[[site[i]]][1:2]
# create y limits for state-space
ylim <- ext[[site[i]]][3:4]
if(isFALSE(hab_mask)){ # check for habitat mask for augmented individuals
s.st[i,1]<-runif(1,xlim[1],xlim[2])
s.st[i,2]<-runif(1,ylim[1],ylim[2])
} else
if(isFALSE(hab_mask)==FALSE){
s.st[i,1] <-runif(1,xlim[1],xlim[2])
s.st[i,2] <-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
while(pOK==0){
s.st[i,1] <-runif(1,xlim[1],xlim[2])
s.st[i,2] <-runif(1,ylim[1],ylim[2])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
}
} # end habitat check
} # end augmented individuals
} # end 3D initialize
} # end all_random = TRUE
return(s.st)
} # end function 'initialize_classic'
#' Rescale trap coordinates, grid extent, and starting activity
#' center coordinates
#'
#' @description Rescale inputs to prepare data for habitat mask to be used.
#'
#' @param X Either a matrix or array representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param ext An \code{Extent} object from the \code{raster} package. This is
#' returned from \code{\link{grid_classic}}.
#' @param s.st A matrix of starting activity center coordinates. This is
#' returned from \code{\link{initialize_classic}}.
#' @param site Either \code{NULL} (if a 2D trap array is used) or a vector of
#' integers denoting which trap array an individual (either detected or
#' augmented) belongs to. Note that \code{site} is provided from
#' \code{\link{sim_classic}} when a 3D trap array is used. However, this
#' \code{site} variable must be correctly augmented based on the total
#' augmented population size (i.e., \code{M}).
#' @param hab_mask A matrix or array output from \code{\link{mask_polygon}} or
#' \code{\link{mask_raster}} functions.
#' @return
#' \itemize{
#' \item{\code{X}}{ Rescaled trap coordinates}
#' \item{\code{ext}}{ Rescaled \code{Extent} object from the \code{raster} package.}
#' \item{\code{s.st}}{ A matrix of rescaled starting activity center coordinates.}
#' }
#' @details This function is only meant to be used when habitat masking is
#' incorporated into the model. The function properly rescales inputs based on
#' the dimensions of the habitat mask. Note that the \code{pixelWidth} needs to
#' be included as an input in the model after inputs are rescaled to correctly
#' estimate the scaling parameter (i.e., 'sigma').
#' @author Daniel Eacker
#' @importFrom sf st_polygon
#' @seealso \code{\link{mask_polygon}}, \code{\link{mask_raster}}
#' @examples
#'# simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # create grid
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # create polygon to use as a mask
#' library(sf)
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1765,-1765,1730,-1650,1600,1650,
#' 0,1350,-800,1700,-1850,1000,-1765,-1765),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(Grid$grid, pch=20)
#' points(traps, col="blue",pch=20)
#' plot(poly, add=TRUE)
#'
#' # create habitat mask
#' hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # simulate SCR data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs,
#' sigma_ = mysigma, prop_sex = 1, N = 200, K = 4, base_encounter = 0.25,
#' enc_dist = "binomial", hab_mask = hab_mask, setSeed = 50)
#'
#' # generate initial activity center coordinates for 2D trap array without
#' #habitat mask
#' s.st3d = initialize_classic(y=data3d$y, M=500, X=traps, ext=Grid$ext,
#' hab_mask = hab_mask)
#'
#' # build rescaled constants list for 2D trap array
#' constList = rescale_classic(X = traps, ext = Grid$ext, s.st = s.st3d,
#' site = NULL, hab_mask = hab_mask)
#'
#' # inspect list
#' str(constList)
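#'
#' # a minimal sketch of the transform applied internally by rescale_classic():
#' # x-coordinates are mapped from the state-space extent to pixel units of the
#' # habitat mask (y-coordinates use dim(hab_mask)[1] and the y extent)
#' x_pix = scales::rescale(traps[,1], to = c(0, dim(hab_mask)[2]),
#'  from = Grid$ext[1:2])
#' head(cbind(constList$X[,1], x_pix)) # should match the rescaled trap x-coordinates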
#' @name rescale_classic
#' @export
rescale_classic <- function(X, ext, s.st, site, hab_mask){
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
if(is.null(hab_mask)){
stop("Must include habitat mask for rescaling of inputs")
}
rescale_list <- list(X=X,ext=ext,s.st=s.st)
# for dim of length 2
if(length(dim(X))==2){
rescale_list$X[,1] <- scales::rescale(X[,1], to = c(0,dim(hab_mask)[2]),
from=ext[1:2])
rescale_list$X[,2] <- scales::rescale(X[,2], to = c(0,dim(hab_mask)[1]),
from=ext[3:4])
rescale_list$s.st[,1] <- scales::rescale(s.st[,1],
to = c(0,dim(hab_mask)[2]), from=ext[1:2])
rescale_list$s.st[,2] <- scales::rescale(s.st[,2],
to = c(0,dim(hab_mask)[1]), from=ext[3:4])
rescale_list$ext[1:2] <- scales::rescale(ext[1:2],
to = c(0,dim(hab_mask)[2]), from=ext[1:2])
rescale_list$ext[3:4] <- scales::rescale(ext[3:4],
to = c(0,dim(hab_mask)[1]), from=ext[3:4])
}else
if(length(dim(X))==3){
for(g in 1:dim(X)[3]){ # loop over trap arrays
rescale_list$X[,1,g] <- scales::rescale(X[,1,g],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$X[,2,g] <- scales::rescale(X[,2,g],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
rescale_list$s.st[which(site==g),1] <- scales::rescale(s.st[which(site==g),1],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$s.st[which(site==g),2] <- scales::rescale(s.st[which(site==g),2],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
# check that starting coordinates are still in bounds
rescale_list$s.st[which(site==g),1] <-
ifelse(rescale_list$s.st[which(site==g),1]>=dim(hab_mask)[2],
rescale_list$s.st[which(site==g),1]-1,rescale_list$s.st[which(site==g),1])
rescale_list$s.st[which(site==g),2] <-
ifelse(rescale_list$s.st[which(site==g),2]>=dim(hab_mask)[1],
rescale_list$s.st[which(site==g),2]-1,rescale_list$s.st[which(site==g),2])
rescale_list$ext[[g]][1:2] <- scales::rescale(ext[[g]][1:2],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$ext[[g]][3:4] <- scales::rescale(ext[[g]][3:4],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
}
}
return(rescale_list)
} # end function 'rescale_classic'
#' Create habitat mask matrix or array from polygon
#'
#' @description Creates a matrix or array to use as a habitat mask to account for unsuitable
#' habitat.
#'
#' @param poly A polygon created using the \code{sf} package of class
#' \code{"sfc_POLYGON"}
#' @param grid A matrix or array object of the state-space grid. This
#' is returned from \code{\link{grid_classic}}.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param prev_mask Either \code{NULL} or a previously created habitat mask
#' matrix or array from \code{\link{mask_polygon}} or \code{\link{mask_raster}}.
#' This allows for habitat masks to be combined to account for different
#' spatial features.
#' @return A matrix or array of 0's and 1's denoting unsuitable and suitable
#' habitat respectively.
#' @details This function creates a habitat matrix or array depending upon
#' whether a 2D (former) or 3D (latter) trap array is used. This matrix can be
#' directly included as data in Bayesian SCR models run using \code{nimble}.
#' @author Daniel Eacker
#' @seealso \code{\link{mask_raster}}
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # create state-space grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # create polygon to use as a mask
#' library(sf)
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1765,-1765,1730,-1650,1600,1650,
#' 0,1350,-800,1700,-1850,1000,-1765,-1765),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#' # make simple plot
#' par(mfrow=c(1,2))
#' plot(Grid$grid, pch=20)
#' points(traps, col="blue",pch=20)
#' plot(poly, add=TRUE)
#'
#' # create habitat mask from polygon
#' hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # make simple plot
#' library(raster)
#' plot(raster(apply(hab_mask,2,rev)))
#'
#' # make simple plot
#' poly2 = st_sfc(st_polygon(x=list(matrix(c(-1365,-1365,1730,-1650,1500,1550,
#' 0,1350,-800,1700,-1850,1000,-1365,-1365),ncol=2, byrow=TRUE))), crs = mycrs)
#' plot(poly2, add=TRUE)
#'
#' # mask second polygon, building on previous habitat mask
#' hab_mask2 = mask_polygon(poly = poly2, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = hab_mask)
#'
#' # make simple plot
#' plot(Grid$grid, pch=20)
#' points(traps, col="blue",pch=20)
#' plot(poly, add=TRUE)
#' plot(poly2, add=TRUE)
#' plot(raster(apply(hab_mask2,2,rev)))
#'
#' # create an array of traps, as an approach where individuals will only be
#' # detected at one of the trap arrays (e.g., Furnas et al. 2018)
#' Xarray = array(NA, dim=c(nrow(traps),2,2))
#' Xarray[,,1]=traps
#' Xarray[,,2]=traps+4000 # shift trapping grid to new locations
#'
#' # Example of using habitat mask with 3D trap array (need polygon that
#' # masks both trapping extents)
#' GridX = grid_classic(X = Xarray, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(GridX$grid[,,1],xlim=c(-1600,6000),ylim=c(-1600,6000),col="darkgrey",
#' pch=20,ylab="Northing",xlab="Easting")
#' points(Xarray[,,1],col="blue",pch=20)
#' points(GridX$grid[,,2],pch=20,col="darkgrey")
#' points(Xarray[,,2],col="blue",pch=20)
#'
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1660,-1900,5730,-1050,5470,5150,
#' 0,6050,-1800,5700,-1660,-1900),ncol=2, byrow=TRUE))), crs = mycrs)
#' plot(poly, add=TRUE)
#'
#' # get 3D habitat mask array for 3D grid
#' hab_mask = mask_polygon(poly = poly, grid = GridX$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # make simple plot
#' par(mfrow=c(1,2))
#' apply(hab_mask,3,function(x) plot(raster(apply(x,2,rev))))
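#'
#' # a quick check (assuming the 100-m resolution used above): number of
#' # suitable cells and suitable-habitat area (m^2) for each trap array
#' apply(hab_mask, 3, sum)
#' apply(hab_mask, 3, sum)*100^2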
#' @name mask_polygon
#' @export
mask_polygon <- function(poly, grid, crs_, prev_mask){
# need to determine if X is 2 or 3 dimensional (stop if not)
if(length(dim(grid))!=2 & length(dim(grid))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
# for dim of length 2
if(length(dim(grid))==2){
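# flag grid points that fall inside the polygon (1 = suitable, 0 = unsuitable)
# and reshape to match the row/column layout of the state-space raster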
grid_pts <- sf::st_cast(sf::st_sfc(sf::st_multipoint(grid), crs = crs_),
"POINT")
habitat_mask <- apply(matrix(ifelse(is.na(as.numeric(sf::st_intersects(grid_pts,
poly))),0,as.numeric(sf::st_intersects(grid_pts,poly))),
nrow=dim(raster::rasterFromXYZ(grid,crs=crs_))[1],
ncol=dim(raster::rasterFromXYZ(grid,crs=crs_))[2], byrow=TRUE),2,rev)
# to combine with a previous habitat mask
if(is.null(prev_mask)==FALSE){
# Check to see if dimensions of previous and current habitat
if(all(dim(habitat_mask) == dim(prev_mask))==FALSE){
stop("Dimension of previous habitat matrix does not equal
dimension of current one")
}
habitat_mask <- habitat_mask * prev_mask
}
}else
# for dim of length 3
if(length(dim(grid))==3){
grid_pts <- apply(grid, 3, function(x) sf::st_cast(sf::st_sfc(
sf::st_multipoint(x), crs = crs_),"POINT"))
habitat_mask <- lapply(grid_pts, function(x) apply(matrix(ifelse(
is.na(as.numeric(sf::st_intersects(x,poly))),0,
as.numeric(sf::st_intersects(x,poly))),nrow=dim(raster::rasterFromXYZ(
grid[,,1],crs=crs_))[1], ncol=dim(raster::rasterFromXYZ(grid[,,1],
crs=crs_))[2], byrow=TRUE),2,rev))
habitat_mask <- array(unlist(habitat_mask),dim=c(dim(habitat_mask[[1]]),
dim(grid)[3])) # convert to array
# to combine with a previous habitat mask
if(is.null(prev_mask)==FALSE){
# Check to see if dimensions of previous and current habitat
if(all(dim(habitat_mask) == dim(prev_mask))==FALSE){
stop("Dimension of previous habitat matrix does not equal
dimension of current one")
}
habitat_mask <- habitat_mask * prev_mask
}
}
return(habitat_mask)
} # End function 'mask_polygon'
#' Create habitat mask matrix or array from raster
#'
#' @description Creates a matrix or array to use as a habitat mask to account for unsuitable
#' habitat.
#'
#' @param rast A raster layer created using the \code{raster} package of class
#' \code{"RasterLayer"}
#' @param FUN A function that defines the criteria for suitable habitat.
#' @param grid A matrix or array object of the state-space grid. This is
#' returned from \code{\link{grid_classic}}.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param prev_mask Either \code{NULL} or a previously created habitat mask
#' matrix or array from \code{\link{mask_polygon}} or \code{\link{mask_raster}}.
#' This allows for habitat masks to be combined to account for different spatial
#' features.
#' @return A matrix or array of 0's and 1's denoting unsuitable and suitable
#' habitat respectively.
#' @details This function creates a habitat matrix or array depending upon
#' whether a 2D (former) or 3D (latter) trap array is used. This matrix can be
#' directly included as data in Bayesian SCR models run using \code{nimble}.
#' @author Daniel Eacker
#' @importFrom sp CRS proj4string
#' @importFrom sf st_intersects
#' @importFrom raster proj4string extract
#' @importFrom methods as
#' @seealso \code{\link{mask_polygon}}
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # create state-space grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # run previous code used for mask_polygon() to create raster for example
#' library(sf)
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1665,-1665,1730,-1650,1600,1650,
#' 0,1350,-800,1700,-1850,1000,-1665,-1665),ncol=2, byrow=TRUE))), crs = mycrs)
#' hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # create raster for demonstration purposes
#' library(raster)
#' rast <- raster(nrow=dim(hab_mask)[1], ncol=dim(hab_mask)[2],ext=Grid$ext,
#' crs=mycrs)
#' rast[] = apply(hab_mask,2,rev)
#'
#' # create habitat mask using raster
#' hab_mask_r = mask_raster(rast = rast, FUN = function(x){x==1},
#' grid = Grid$grid, crs_ = mycrs, prev_mask = NULL)
#'
#' # make simple plot
#' # returns identical results as input rast (but this was just an example raster)
#' plot(raster(apply(hab_mask_r,2,rev)))
#'
#' # create an array of traps, as an approach where individuals will only be
#' # detected at one of the trap arrays (e.g., Furnas et al. 2018)
#' Xarray = array(NA, dim=c(nrow(traps),2,2))
#' Xarray[,,1]=traps
#' Xarray[,,2]=traps+4000 # shift trapping grid to new locations
#'
#' # create grid and extent for 3D trap array
#' GridX = grid_classic(X = Xarray, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(GridX$grid[,,1],xlim=c(-1600,6000),ylim=c(-1600,6000),col="darkgrey",
#' pch=20,ylab="Northing",xlab="Easting")
#' points(Xarray[,,1],col="blue",pch=20)
#' points(GridX$grid[,,2],pch=20,col="darkgrey")
#' points(Xarray[,,2],col="blue",pch=20)
#'
#' # create polygon to use as a mask and convert to raster
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1660,-1900,5730,-1050,5470,5650,
#' 0,6050,-1800,5700,-1660,-1900),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#' # add polygon to plot
#' plot(poly, add=TRUE)
#'
#' # make raster from polygon
#' rast = raster(xmn=-2000, xmx=6000, ymn=-2000, ymx=6500,res=100,crs=mycrs)
#' rast[]=st_intersects(st_cast(st_sfc(st_multipoint(coordinates(rast)),
#' crs = mycrs),"POINT"),poly,sparse=FALSE)
#'
#' # make simple plot of raster
#' plot(rast)
#'
#' # get 3D habitat mask array for 3D grid
#' hab_mask = mask_raster(rast = rast, FUN = function(x){x==1},grid = GridX$grid,
#' crs_ = mycrs, prev_mask = NULL)
#' # make plot
#' par(mfrow=c(1,2))
#' apply(hab_mask,3,function(x) plot(raster(apply(x,2,rev))))
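#'
#' # FUN can encode any suitability rule; for a hypothetical continuous
#' # covariate raster (e.g., 'elev_rast', not created here), a threshold rule
#' # could be supplied as:
#' # mask_elev = mask_raster(rast = elev_rast, FUN = function(x){x < 1500},
#' #  grid = GridX$grid, crs_ = mycrs, prev_mask = NULL)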
#' @name mask_raster
#' @export
mask_raster <- function(rast, FUN, grid, crs_, prev_mask){
# need to determine if X is 2 or 3 dimensional (stop if not)
if(length(dim(grid))!=2 & length(dim(grid))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
if(identical(raster::proj4string(rast),sf::st_crs(crs_)$proj4string)==FALSE){
stop("crs of raster layer must be the same as crs_")
}
# for dim of length 2
if(length(dim(grid))==2){
grid_pts <- sf::st_cast(sf::st_sfc(sf::st_multipoint(grid), crs = crs_),
"POINT")
vals <- raster::extract(rast,methods::as(grid_pts,"Spatial"))
rast_ind <- FUN(vals)
habitat_mask <- apply(matrix(as.numeric(rast_ind),nrow=dim(
raster::rasterFromXYZ(grid,crs=crs_))[1],
ncol=dim(raster::rasterFromXYZ(grid,crs=crs_))[2], byrow=TRUE),2,rev)
# to combine with a previous habitat mask
if(is.null(prev_mask)==FALSE){
# Check to see if dimensions of previous and current habitat
if(all(dim(habitat_mask) == dim(prev_mask))==FALSE){
stop("Dimension of previous habitat matrix does not equal dimension
of current one")
}
habitat_mask <- habitat_mask * prev_mask
}
}else
# for dim of length 3
if(length(dim(grid))==3){
grid_pts <- apply(grid, 3, function(x) sf::st_cast(sf::st_sfc(
sf::st_multipoint(x), crs = crs_),"POINT"))
vals <- lapply(grid_pts,function(x) raster::extract(rast,
methods::as(x,"Spatial")))
rast_ind <- lapply(vals, function(x) FUN(x))
habitat_mask <- lapply(rast_ind, function(x)
apply(matrix(as.numeric(x),nrow=dim(raster::rasterFromXYZ(grid[,,1],
crs=crs_))[1],
ncol=dim(raster::rasterFromXYZ(grid[,,1],crs=crs_))[2], byrow=TRUE),
2,rev))
habitat_mask <- array(unlist(habitat_mask),dim=c(dim(habitat_mask[[1]]),
dim(grid)[3])) # convert to array
# to combine with a previous habitat mask
if(is.null(prev_mask)==FALSE){
# Check to see if dimensions of previous and current habitat
if(all(dim(habitat_mask) == dim(prev_mask))==FALSE){
stop("Dimension of previous habitat matrix does not equal dimension
of current one")
}
habitat_mask <- habitat_mask * prev_mask
}
}
return(habitat_mask)
} # End function 'mask_raster'
#' Run spatial capture-recapture models in nimble using
#' parallel processing
#'
#' @description A wrapper function to conduct Markov Chain Monte Carlo (MCMC) sampling using
#' nimble.
#'
#' @param model The \code{nimbleCode} used to define the model in the \code{nimble}
#' package, possibly generated from \code{\link{get_classic}}.
#' @param data A list of data inputs needed to run SCR models in \code{nimble}.
#' @param constants A list of constants needed to run SCR models in \code{nimble}.
#' @param inits Starting values for stochastic parameters to begin MCMC sampling.
#' @param params A vector of character strings that define the parameters to
#' trace in the MCMC simulation.
#' @param dimensions A named list of dimensions for variables. Only needed for variables
#' used with empty indices in model code that are not provided in constants or data.
#' @param nimFun A list of nimbleFunction(s) to be used in the model when using
#' \code{parallel = TRUE}.
#' @param niter An integer value of the total number of MCMC iterations to run
#' per chain.
#' @param nburnin An integer value of the number of MCMC iterations to discard
#' as 'burnin'.
#' @param thin An integer value of the amount of thinning of the chain. For
#' example, \code{thin=2} would retain every other MCMC sample.
#' @param nchains An integer value for the number of MCMC chains to run.
#' @param parallel A logical value indicating whether MCMC chains should be run
#' in parallel. Default is \code{FALSE}.
#' @param RNGseed An integer value specifying the random number generator seed
#' used in parallel processing. When \code{parallel = FALSE}, it is passed to
#' \code{setSeed} in the \code{runMCMC} function in \code{nimble}.
#' @param s_alias A character value used to identify the latent activity center
#' coordinates used in the model. Default is \code{"s"}. Note that the length
#' of \code{s_alias} must be either 1 (e.g., \code{"s"}) or 2
#' (e.g., \code{c("s","su")}).
#' @return A list of MCMC samples for each parameter traced with length equal to
#' the number of chains run.
#' @details This function provides a wrapper to easily run Bayesian SCR models
#' using \code{nimble}.
#' @importFrom tictoc tic toc
#' @importFrom graphics hist par abline lines
#' @importFrom parallel parLapply makeCluster stopCluster clusterSetRNGStream
#' @import nimble
#' @author Daniel Eacker
#' @examples
#'\dontrun{
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#' pixelWidth = 100 # store pixelWidth
#'
#' # create grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma,
#' res = pixelWidth)
#'
#' # create polygon to use as a mask
#' library(sf)
#' poly = st_sfc(st_polygon(x=list(matrix(c(-1765,-1765,1730,-1650,1600,1650,
#' 0,1350,-800,1700,-1850,1000,-1765,-1765),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#' # create habitat mask
#' hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # simulate data for uniform state-space and habitat mask
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ =
#' mysigma, prop_sex = 1, N = 200, K = 4, base_encounter = 0.15,
#' enc_dist = "binomial",hab_mask = hab_mask, setSeed = 50)
#'
#' # get initial activity center starting values
#' s.st3d = initialize_classic(y=data3d$y, M=500, X=traps, ext=Grid$ext,
#' hab_mask = hab_mask)
#'
#' # rescale inputs
#' rescale_list = rescale_classic(X = traps, ext = Grid$ext, s.st = s.st3d,
#' site = NULL, hab_mask = hab_mask)
#'
#' # store rescaled extent
#' ext = rescale_list$ext
#'
#' # prepare data
#' data = list(y=data3d$y)
#' # remove augmented records
#' data$y = data$y[which(apply(data$y, 1, sum)!=0),,]
#' data$y = apply(data$y, c(1,2), sum) # convert to 2D by summing over occasions
#'
#' # add rescaled traps
#' data$X = rescale_list$X
#'
#' # prepare constants
#' constants = list(M = 500,n0 = nrow(data$y),J=dim(data$y)[2],
#' K=dim(data3d$y)[3],x_lower = ext[1], x_upper = ext[2],
#' y_lower = ext[3], y_upper = ext[4],sigma_upper = 1000,
#' A = sum(hab_mask)*(pixelWidth)^2,pixelWidth=pixelWidth)
#'
#' # add z and zeros vector data for latent inclusion indicator
#' data$z = c(rep(1,constants$n0),rep(NA,constants$M - constants$n0))
#' data$zeros = c(rep(NA,constants$n0),rep(0,constants$M - constants$n0))
#'
#' # add hab_mask and OK for habitat check
#' data$hab_mask = hab_mask
#' data$OK = rep(1,constants$M)
#'
#' # get initial activity center starting values
#' s.st3d = rescale_list$s.st
#'
#' # define all initial values
#' inits = list(sigma = runif(1, 250, 350), s = s.st3d,psi=runif(1,0.2,0.3),
#' p0 = runif(1, 0.05, 0.15), pOK = data$OK,
#' z=c(rep(NA,constants$n0),rep(0,constants$M-constants$n0)))
#'
#' # parameters to monitor
#' params = c("sigma","psi","p0","N","D")
#'
#' # get model
#' scr_model = get_classic(dim_y = 2, enc_dist = "binomial",sex_sigma = FALSE,
#' hab_mask=TRUE)
#'
#' # run model
#' library(tictoc)
#' tic() # track time elapsed
#' out = run_classic(model = scr_model, data=data, constants=constants,
#' inits=inits, params = params, niter = 1000, nburnin=500,
#' thin=1, nchains=2, parallel=TRUE, RNGseed = 500)
#' toc()
#'
#' # summarize output
#' samples = do.call(rbind, out)
#' par(mfrow=c(1,1))
#' hist(samples[,which(dimnames(out[[1]])[[2]]=="N")], xlab = "Abundance",
#' xlim = c(0,500), main="")
#' abline(v=200, col="red") # add line for simulated abundance
#'
#' # not run
#' #nimSummary(out)
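#'
#' # hypothetical sketch: for a spatial mark-resight model with both marked
#' # ("s") and unmarked ("su") activity centers, block samplers can be
#' # assigned to both sets of coordinates via s_alias, e.g.:
#' # out_smr = run_classic(model = smr_model, data = smr_data,
#' #  constants = smr_constants, inits = smr_inits, params = params,
#' #  niter = 1000, nburnin = 500, thin = 1, nchains = 2, parallel = TRUE,
#' #  RNGseed = 500, s_alias = c("s","su"))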
#'}
#' @name run_classic
#' @export
run_classic <- function(model, data, constants, inits, params, dimensions = NULL,
nimFun = NULL, niter = 1000, nburnin=100, thin=1, nchains=1,
parallel=FALSE, RNGseed=NULL,s_alias="s"){
# for parallel processing
if(parallel == FALSE){
SCRmodelR <- nimble::nimbleModel(code=model,data=data,constants=constants,
inits=inits,check=FALSE,calculate=TRUE,dimensions=dimensions)
SCRmodelR$initializeInfo()
# compile model to C++#
SCRmodelC <- nimble::compileNimble(SCRmodelR) # compile code
# MCMC configurations
mcmcspec<-nimble::configureMCMC(SCRmodelR, monitors=params)
# block updating for individual activity centers
if(length(s_alias)!=1 & length(s_alias)!=2){
stop("s_alias must be either a scalar or vector of length 2")
}
if(length(s_alias)==1 && s_alias == "s"){
mcmcspec$removeSamplers(s_alias, print = FALSE)
for(i in 1:constants$M){
snew = paste(s_alias,"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
}else
if(length(s_alias)==1 && s_alias == "su"){
# use AF_slice sampler to reduce autocorrelation and cross-correlation among parameters
for(i in 1:constants$m){
snew = paste(s_alias,"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
mcmcspec$removeSamplers(c("psiu","lam0","sigma"))
mcmcspec$addSampler(target = c("psiu","lam0","sigma"), type = "AF_slice", silent = TRUE)
}else
if(length(s_alias)==2){ # for spatial mark-resight model
mcmcspec$removeSamplers(s_alias[which(s_alias=="s")], print = FALSE)
for(i in 1:constants$M){
snew = paste(s_alias[which(s_alias=="s")],"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
mcmcspec$removeSamplers(s_alias[which(s_alias=="su")], print = FALSE)
for(i in 1:constants$m){
snew = paste(s_alias[which(s_alias=="su")],"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
}
scrMCMC <- nimble::buildMCMC(mcmcspec)
CscrMCMC <- nimble::compileNimble(scrMCMC, project = SCRmodelR,
resetFunctions = TRUE)
results <- nimble::runMCMC(CscrMCMC, niter = niter, nburnin=nburnin,
thin=thin, nchains=nchains, setSeed=RNGseed)
return(results)
}else
if(parallel == TRUE){
run_parallel <- function(seed,data,model,constants,dimensions,
inits, params,
nimFun,niter,nburnin,thin){
# load any custom functions
if(is.null(nimFun)==FALSE){
for(i in 1:length(nimFun)){
nimFun[[i]]
assign(names(nimFun)[i],nimFun[[i]], envir = .GlobalEnv)
}
}
SCRmodelR <- nimble::nimbleModel(code=model,data=data,
constants=constants,inits=inits,check=FALSE,
calculate=TRUE,dimensions=dimensions)
SCRmodelR$initializeInfo()
# compile model to C++#
SCRmodelC <- nimble::compileNimble(SCRmodelR) # compile code
# MCMC configurations
mcmcspec<-nimble::configureMCMC(SCRmodelR, monitors=params)
# block updating for individual activity centers
if(length(s_alias)!=1 & length(s_alias)!=2){
stop("s_alias must be either a scalar or vector of length 2")
}
if(length(s_alias)==1 && s_alias == "s"){
mcmcspec$removeSamplers(s_alias, print = FALSE)
for(i in 1:constants$M){
snew = paste(s_alias,"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
}else
if(length(s_alias)==1 && s_alias == "su"){
# use AF_slice sampler to reduce autocorrelation and cross-correlation among parameters
for(i in 1:constants$m){
snew = paste(s_alias,"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
mcmcspec$removeSamplers(c("psiu","lam0","sigma"))
mcmcspec$addSampler(target = c("psiu","lam0","sigma"), type = "AF_slice", silent = TRUE)
}else
if(length(s_alias)==2){ # for spatial mark-resight model
mcmcspec$removeSamplers(s_alias[which(s_alias=="s")], print = FALSE)
for(i in 1:constants$M){
snew = paste(s_alias[which(s_alias=="s")],"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
mcmcspec$removeSamplers(s_alias[which(s_alias=="su")], print = FALSE)
for(i in 1:constants$m){
snew = paste(s_alias[which(s_alias=="su")],"[",i,","," 1:2","]",sep="")
mcmcspec$addSampler(target = snew, type = 'RW_block', silent = TRUE)
}
}
scrMCMC <- nimble::buildMCMC(mcmcspec)
CscrMCMC <- nimble::compileNimble(scrMCMC, project = SCRmodelR,
resetFunctions = TRUE)
results <- nimble::runMCMC(CscrMCMC, niter = niter, nburnin= nburnin,
thin=thin,setSeed = seed)
return(results)
} # end parallel processing function
if(nchains < 2){
stop("Must have at least 2 chains to run parallel processing")
}
this_cluster <- parallel::makeCluster(nchains)
parallel::clusterEvalQ(this_cluster, library("nimble"))
if(is.null(RNGseed)==FALSE){
parallel::clusterSetRNGStream(cl = this_cluster, RNGseed)
}
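# run one chain per worker; the chain index (X) is passed to run_parallel as
# 'seed' and supplied to setSeed in runMCMC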
chain_output <- parallel::parLapply(cl = this_cluster, fun = run_parallel,
X=1:nchains,model = model, data=data,constants=constants,
dimensions=dimensions,inits=inits,params = params,
nimFun=nimFun,niter = niter,nburnin=nburnin, thin=thin)
parallel::stopCluster(this_cluster)
return(chain_output)
}
} # End function 'run_classic'
#' Summarize MCMC chain output from nimble
#'
#' @description Summarizes lists of MCMC chain output from nimble
#'
#' @param d A list of MCMC samples from each chain returned from
#' \code{\link{run_classic}}.
#' @param trace A logical value indicating whether or not traces of MCMC samples
#' should be plotted. Default is \code{FALSE}.
#' @param plot_all A logical value indicating whether or not all parameters
#' should be included in plots. This assumes that some parameters
#' are excluded in the summary table (i.e., \code{exclude != NULL}).
#' Default is \code{FALSE}.
#' @param exclude Either \code{NULL} or a scalar or vector containing
#' parameter(s) to exclude from summary. Note that high dimensional parameters
#' (e.g., \code{s[1, 1, 1]}) can be excluded by calling \code{exclude =
#' "s"}. Default is \code{NULL}.
#' @param digits An integer value indicating how many digits the output should
#' be rounded to.
#' @return A dataframe of summary statistics for MCMC samples.
#' @details This function summarizes Bayesian SCR models run using \code{nimble}
#' including mean and quantiles of samples, as well as effective sample size and
#' Rhat statistics. Note that \code{f0} is the proportion of samples that are
#' greater than zero. Also, at least 2 chains must be run to use this function.
#' @author Daniel Eacker
#' @importFrom stringr str_extract str_locate
#' @importFrom coda effectiveSize mcmc.list as.mcmc gelman.diag
#' @importFrom stats na.omit sd quantile
#' @importFrom crayon red
#' @importFrom viridisLite viridis
#' @examples
#'\dontrun{
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#' pixelWidth = 100 # store pixelWidth
#'
#' # create grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma,
#' res = pixelWidth)
#'
#' # simulate encounter data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = mysigma,
#' prop_sex = 1, N = 200, K = 4, base_encounter = 0.15, enc_dist = "binomial",
#' hab_mask = FALSE, setSeed = 200)
#'
#' # prepare data
#' data = list(y=data3d$y)
#' data$y = data$y[which(apply(data$y, 1, sum)!=0),,] # remove augmented records
#' data$y = apply(data$y, c(1,2), sum) # convert to 2D by summing over occasions
#' data$X = traps/1000 # rescale to kilometers
#' ext = as.vector(Grid$ext)/1000 # rescale to kilometers
#'
#' # prepare constants
#' constants = list(M = 500,n0 = nrow(data$y),J=dim(data$y)[2],
#' K=dim(data3d$y)[3],x_lower = ext[1], x_upper = ext[2],
#' y_lower = ext[3], y_upper = ext[4],sigma_upper = 1,
#' A = prod(ext[2]-ext[1],ext[4]-ext[3]))
#'
#' # add z and zeros vector data for latent inclusion indicator
#' data$z = c(rep(1,constants$n0),rep(NA,constants$M - constants$n0))
#' data$zeros = c(rep(NA,constants$n0),rep(0,constants$M - constants$n0))
#'
#' # get initial activity center starting values
#' s.st3d = initialize_classic(y=data3d$y, M=500, X=traps, ext=Grid$ext,
#' hab_mask=FALSE)
#'
#' # define all initial values
#' inits = list(sigma = runif(1, 0.250, 0.350), s = s.st3d/1000,
#' psi=runif(1,0.2,0.3), p0 = runif(1, 0.05, 0.15),
#' z=c(rep(NA,constants$n0),rep(0,constants$M-constants$n0)))
#'
#' # parameters to monitor
#' params = c("sigma","psi","p0","N","D")
#'
#' # get model
#' scr_model = get_classic(dim_y = 2, enc_dist = "binomial",sex_sigma = FALSE)
#'
#' # run model
#' library(tictoc)
#' tic() # track time elapsed
#' out = run_classic(model = scr_model, data=data, constants=constants,
#' inits=inits, params = params,niter = 5000, nburnin=1000,
#' thin=1, nchains=2, parallel=TRUE, RNGseed = 500)
#' toc()
#'
#' # summarize output
#' samples = do.call(rbind, out)
#' par(mfrow=c(1,1))
#' hist(samples[,which(dimnames(out[[1]])[[2]]=="N")], xlab = "Abundance",
#' xlim = c(0,500), main="")
#' abline(v=200, col="red") # add line for simulated abundance
#'
#' # summarize output
#' nimSummary(out, trace=TRUE, plot_all=TRUE)
#'}
#' @name nimSummary
#' @export
nimSummary <- function(d, trace=FALSE, plot_all=FALSE, exclude = NULL,
digits=3){
if(is.null(exclude)==FALSE){
tmp1 <- ifelse(is.na(as.numeric(stringr::str_extract(
attributes(d[[1]])$dimnames[[2]],"[1-9]+"))),
attributes(d[[1]])$dimnames[[2]],
substr(attributes(d[[1]])$dimnames[[2]],1,
as.numeric(stringr::str_locate(attributes(d[[1]])$dimnames[[2]],
"\\[")[, 1])-1))
d.remove <- lapply(d, function(x) which(tmp1 %in% exclude))
d2 <- lapply(d, function(x) x[,-d.remove[[1]]])
}else
if(is.null(exclude)){
if(dim(d[[1]])[2]>100){
param_length_check <- readline(cat(crayon::red(paste(
"Do you really want to
build a summary table for",dim(d[[1]])[2],"parameters? (1 = Yes or 0 = No);
note that this will take forever! \n",
"Recommend selecting 0 and setting exclude"))))
if(param_length_check=="0"){stop("Exiting function...",call. = FALSE)}
}
d2 <- d
d.remove <- 0
}
if((length(attributes(d[[1]])$dimnames[[2]])-length(d.remove[[1]])==1)){
d3 <- as.data.frame(do.call(c, d2))
Means <- mean(d3[,1], na.rm=TRUE)
SDs <- stats::sd(d3[,1], na.rm=TRUE)
q2.5 <- stats::quantile(d3[,1], 0.025, na.rm=TRUE)
q50 <- stats::quantile(d3[,1], 0.50, na.rm=TRUE)
q97.5 <- stats::quantile(d3[,1], 0.975, na.rm=TRUE)
over.zero <- round(mean(d3[,1]>0,na.rm=TRUE),2)
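# effective sample size and the Gelman-Rubin Rhat diagnostic require at least
# 2 chains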
n.eff <- coda::effectiveSize(mcmc.list(lapply(d2, coda::as.mcmc)))
Rhat <- round(coda::gelman.diag(coda::mcmc.list(lapply(d2, coda::as.mcmc)),
multivariate = FALSE)[[1]][,1],3)
}else
if((length(attributes(d[[1]])$dimnames[[2]])-length(d.remove[[1]])>1)){
d3 <- do.call(rbind,d2)
Means <- apply(d3, 2,function(x) mean(x,na.rm=TRUE))
SDs <- apply(d3, 2,function(x) stats::sd(x,na.rm=TRUE))
q2.5 <- apply(d3, 2,function(x) stats::quantile(x, 0.025,na.rm=TRUE))
q50 <- apply(d3, 2,function(x) stats::quantile(x, 0.50,na.rm=TRUE))
q97.5 <- apply(d3, 2,function(x) stats::quantile(x, 0.975,na.rm=TRUE))
over.zero <- round(apply(d3, 2, function(x) mean(x>0,na.rm=TRUE)),2)
n.eff <- coda::effectiveSize(coda::mcmc.list(lapply(d2, coda::as.mcmc)))
Rhat <- round(coda::gelman.diag(coda::mcmc.list(lapply(d2, coda::as.mcmc)),
multivariate = FALSE)[[1]][,1],3)
}
if(trace){
if((length(attributes(d[[1]])$dimnames[[2]])-length(d.remove[[1]])>1)){
par(mfrow=c(3,2))
g <- 1 # set g index
if(plot_all){
d2 <- d
d3 <- do.call(rbind, d2)
}
plot.seq <- seq(3,3000,3)
cols <- rev(viridisLite::viridis(100)[seq(1,100,length.out=length(d2))])
for(i in 1:3){ # plot first 3 variables to start out
plot(1:dim(d2[[1]])[1],d2[[1]][,i],col=cols[1],xlab="iteration",
ylab=colnames(d3)[i],type="l",ylim=range(do.call(rbind,
lapply(d2,function(x) apply(x, 2, range)))[,i]))
for(j in 2:length(d2)){
lines(1:dim(d2[[1]])[1],d2[[j]][,i],xlab="iteration",
ylab=colnames(d3)[i],type="l",col=cols[j])
}
hist(d3[,i],main="",xlab=colnames(d3)[i])
}
if(interactive() & ncol(d3) > 3){
answer <- readline(cat(crayon::red("Plot next set of parameters?\n
(1 = Yes or 0 = No) ")))
while(answer == "1"){
upper_index <- ifelse(plot.seq[g+1] > ncol(d3), ncol(d3),
plot.seq[g+1])
for(i in (plot.seq[g]+1):upper_index){
plot(1:dim(d2[[1]])[1],d2[[1]][,i],col=cols[1],xlab="iteration",
ylab=colnames(d3)[i],type="l",ylim=range(do.call(rbind,
lapply(d2,function(x) apply(x, 2, range)))[,i]))
for(j in 2:length(d2)){
lines(1:dim(d2[[1]])[1],d2[[j]][,i],xlab="iteration",
ylab=colnames(d3)[i],type="l",col=cols[j])
}
hist(d3[,i],main="",xlab=colnames(d3)[i])
} # end i loop
g <- g + 1
if(plot.seq[g+1] <= ncol(d3)){
answer <- readline(cat(crayon::red("Plot next set of parameters?\n
(1 = Yes or 0 = No) ")))
}else
if(plot.seq[g+1] > ncol(d3)){
break
}
}
} # is interactive
}else
if((length(attributes(d[[1]])$dimnames[[2]])-length(d.remove[[1]])==1)){
cols <- rev(viridisLite::viridis(100)[seq(1,100,length.out=length(d2))])
par(mfrow=c(1,2))
plot(1:length(d2[[1]]),d2[[1]],xlab="iteration",ylab=colnames(d3)[1],
type="l",ylim=range(d3[,1]),col=cols[1])
for(j in 2:length(d2)){
lines(1:length(d2[[j]]),d2[[j]],xlab="iteration",
ylab=colnames(d3)[1],type="l",col=cols[j])
}
hist(d3[,1],main="",xlab=attributes(d[[1]])$dimnames[[2]][-d.remove[[1]]])
}
on.exit(par(mfrow=c(1,1))) # reset graphical frame
} # end if trace
tmp.frame = data.frame(post.mean=Means,post.sd=SDs,q2.5=q2.5,q50=q50,
q97.5=q97.5,f0=over.zero,n.eff=n.eff,Rhat=Rhat)
if(nrow(tmp.frame)==1){
row.names(tmp.frame) = attributes(d[[1]])$dimnames[[2]][-d.remove[[1]]]
}
return(round(tmp.frame, digits=digits))
} # End function 'nimSummary'
#' Generate realized density surface from MCMC output
#'
#' @description Streamlined construction of realized density surface from posterior
#' samples of latent indicator variable (\code{z}) and activity center
#' coordinates (\code{s})
#'
#' @param samples Either a matrix (for a single MCMC chain) or list of posterior
#' samples from multiple chains from MCMC sampling; possibly returned from
#' \code{\link{run_classic}}.
#' @param grid A matrix or array object of the state-space grid. This is
#' returned from \code{\link{grid_classic}}.
#' @param ext An \code{Extent} object from the \code{raster} package. This is
#' returned from \code{\link{grid_classic}}. Note that \code{ext} is only
#' used when the state-space grid is a 3-dimensional array.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param site A site identifier variable for detected and augmented
#' individuals, used as a constant in model runs.
#' @param hab_mask Either \code{FALSE} (the default) or a matrix or array
#' output from \code{\link{mask_polygon}}
#' or \code{\link{mask_raster}} functions.
#' @param discrete Logical. Either \code{FALSE} (the default) to indicate that the
#' MCMC output is from a 'classic' or continuous SCR model or \code{TRUE} that
#' the MCMC output is from a 'discrete' SCR model.
#' @param s_alias A character value used to identify the latent activity center
#' coordinates used in the model. Default is \code{"s"}.
#' @param z_alias A character value used to identify the latent inclusion
#' indicator used in the model. Default is \code{"z"}.
#' @importFrom sp SpatialPoints
#' @importFrom raster cellFromXY
#' @author Daniel Eacker
#' @return a \code{raster} object or a list of \code{raster} objects if
#' state-space grid is an array.
#' @details This function automates the construction of realized density
#' surfaces from MCMC samples.
#' @examples
#' \dontrun{
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#'
#' # add some random noise to locations
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#' pixelWidth = 100 # store pixelWidth
#'
#' # create grid and extent
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma,
#' res = pixelWidth)
#'
#' # simulate encounter data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = mysigma,
#' prop_sex = 1, N = 200, K = 4, base_encounter = 0.15, enc_dist = "binomial",
#' hab_mask = FALSE, setSeed = 200)
#'
#' # prepare data
#' data = list(y=data3d$y)
#' data$y = data$y[which(apply(data$y, 1, sum)!=0),,] # remove augmented records
#' data$y = apply(data$y, c(1,2), sum) # convert to 2D by summing over occasions
#' data$X = traps # traps kept in meters here
#' ext = as.vector(Grid$ext) # extent kept in meters here
#'
#' # prepare constants
#' constants = list(M = 500,n0 = nrow(data$y),J=dim(data$y)[2],
#' K=dim(data3d$y)[3],x_lower = ext[1], x_upper = ext[2],
#' y_lower = ext[3], y_upper = ext[4],sigma_upper = 1000,
#' A = prod(ext[2]-ext[1],ext[4]-ext[3]))
#'
#' # add z and zeros vector data for latent inclusion indicator
#' data$z = c(rep(1,constants$n0),rep(NA,constants$M - constants$n0))
#' data$zeros = c(rep(NA,constants$n0),rep(0,constants$M - constants$n0))
#'
#' # get initial activity center starting values
#' s.st3d = initialize_classic(y=data3d$y, M=500, X=traps, ext=Grid$ext,
#' hab_mask=FALSE)
#'
#' # define all initial values
#' inits = list(sigma = runif(1, 250, 350), s = s.st3d,psi=runif(1,0.2,0.3),
#' p0 = runif(1, 0.05, 0.15),
#' z=c(rep(NA,constants$n0),rep(0,constants$M-constants$n0)))
#'
#' # parameters to monitor
#' params = c("sigma","psi","p0","N","D","s","z")
#'
#' # get model
#' scr_model = get_classic(dim_y = 2, enc_dist = "binomial",sex_sigma = FALSE,
#' trapsClustered = FALSE)
#'
#' # run model
#' library(tictoc)
#' tic() # track time elapsed
#' out = run_classic(model = scr_model, data=data, constants=constants,
#' inits=inits, params = params, niter = 5000, nburnin=1000,
#' thin=1, nchains=2, parallel=TRUE, RNGseed = 500)
#' toc()
#'
#' # summarize output (exclude lengthy parameters "s" and "z")
#' nimSummary(out, exclude = c("s","z"), trace = TRUE)
#'
#' library(tictoc)
#' tic() # track time
#' r = realized_density(samples = out, grid = Grid$grid,
#' ext = Grid$ext, crs_ = mycrs, site = NULL, hab_mask = FALSE,
#' discrete = FALSE)
#' toc()
#'
#' # load viridis color palette library
#' library(viridis)
#' library(raster)
#'
#' # make simple raster plot
#' label = expression("Realized density (activity centers/100 m"^2*")")
#' plot(r, col=viridis(100),main=label)
#'}
#' @name realized_density
#' @export
realized_density <- function(samples, grid, ext, crs_, site, hab_mask = FALSE,
discrete = FALSE, s_alias = "s", z_alias = "z"){
if (discrete) {
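# for discrete SCR output, the 's' samples are pixel indices into the
# state-space grid; convert them to x,y coordinates so the continuous
# machinery below can be reused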
smat <- list()
for (g in 1:length(samples)) {
zlen <- samples[[g]][, grep(paste0(z_alias, "\\["),
colnames(samples[[g]]))]
stemp <- samples[[g]][, grep(paste0(s_alias, "\\["),
colnames(samples[[g]]))]
smat[[g]] <- matrix(NA, nrow = nrow(samples[[g]]),
ncol = dim(zlen)[2] * 2)
for (j in 1:dim(samples[[g]])[1]) {
if (length(dim(grid)) == 2) {
smat[[g]][j,1:dim(zlen)[2]] <- grid[stemp[j,], 1]
smat[[g]][j,(dim(zlen)[2] + 1):ncol(smat[[g]])] <- grid[stemp[j,], 2]
}
else if (length(dim(grid)) == 3) {
for(i in 1:dim(grid)[3]){
smat[[g]][j,1:dim(zlen)[2]][site==i] <- grid[stemp[j,site==i], 1, i]
smat[[g]][j,(dim(zlen)[2] + 1):ncol(smat[[g]])][site==i] <-
grid[stemp[j,site==i], 2, i]
}
}
}
samples[[g]] <- cbind(zlen, smat[[g]])
dimnames(samples[[g]])[[2]][(dim(zlen)[2] + 1):dim(samples[[g]])[2]] <-
c(paste(s_alias,"[", 1:dim(zlen)[2], ",", " 1", "]", sep = ""),
paste(s_alias, "[", 1:dim(zlen)[2], ",", " 2", "]", sep = ""))
}
if (length(samples) > 1) {
samples <- do.call(rbind, samples)
}
n.iter <- dim(samples)[1]
if (length(dim(grid)) != 2 & length(dim(grid)) != 3) {
stop("State-space grid must be only 2 or 3 dimensions")
}
if (length(dim(grid)) == 2) {
z <- samples[, grep(paste0(z_alias, "\\["), colnames(samples))]
z_vec <- as.vector(z)
sx <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,1:dim(z)[2]]
sx_vec <- as.vector(sx)[z_vec == 1]
sy <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,(dim(z)[2] + 1):(dim(z)[2] * 2)]
sy_vec <- as.vector(sy)[z_vec == 1]
ac_pts <- sp::SpatialPoints(cbind(sx_vec, sy_vec),
proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
r <- raster::rasterFromXYZ(grid, crs = crs_)
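# tabulate activity centers per raster cell and divide by the number of
# retained MCMC iterations to get the posterior mean number per cell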
tab <- table(raster::cellFromXY(r, ac_pts))
r[as.numeric(names(tab))] <- tab/n.iter
}
else if (length(dim(grid)) == 3) {
if(isFALSE(hab_mask)){
r <- apply(grid, 3, function(x) raster::rasterFromXYZ(x,
crs = crs_))
}else
if(isFALSE(hab_mask)==FALSE){
r=list()
for (g in 1:dim(grid)[3]) {
r[[g]] <- raster::raster(nrows=dim(hab_mask)[1],ncols=dim(hab_mask)[2],
ext=ext[[g]], crs = crs_)
}
}
z <- samples[, grep(paste0(z_alias, "\\["), colnames(samples))]
sx <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,1:dim(z)[2]]
sy <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,(dim(z)[2] + 1):(dim(z)[2] * 2)]
for (g in 1:dim(grid)[3]) {
z_site <- z[, site == g]
z_vec <- as.vector(z_site)
sx_site <- sx[, site == g]
sx_vec <- as.vector(sx_site)[z_vec == 1]
sy_site <- sy[, site == g]
sy_vec <- as.vector(sy_site)[z_vec == 1]
ac_pts <- sp::SpatialPoints(cbind(sx_vec, sy_vec),
proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
tab <- table(raster::cellFromXY(r[[g]], ac_pts))
r[[g]][as.numeric(names(tab))] <- tab/n.iter
}
} # end 3D
}else # if not discrete
if(isFALSE(discrete)){
if (length(samples) > 1) {
samples <- do.call(rbind, samples)
}
n.iter <- dim(samples)[1]
if (length(dim(grid)) != 2 & length(dim(grid)) != 3) {
stop("State-space grid must be only 2 or 3 dimensions")
}
if (length(dim(grid)) == 2) {
z <- samples[, grep(paste0(z_alias, "\\["), colnames(samples))]
z_vec <- as.vector(z)
sx <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,
1:dim(z)[2]]
sx_vec <- as.vector(sx)[z_vec == 1]
sy <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,
(dim(z)[2] + 1):(dim(z)[2] * 2)]
sy_vec <- as.vector(sy)[z_vec == 1]
if (isFALSE(hab_mask)) {
ac_pts <- sp::SpatialPoints(cbind(sx_vec, sy_vec),
proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
r <- raster::rasterFromXYZ(grid, crs = crs_)
tab <- table(raster::cellFromXY(r, ac_pts))
r[as.numeric(names(tab))] <- tab/n.iter
}
else if (isFALSE(hab_mask) == FALSE) {
r <- raster::rasterFromXYZ(grid, crs = crs_)
rescale.sx <- scales::rescale(sx_vec, to = raster::extent(r)[1:2],
from = c(0, dim(hab_mask)[2]))
rescale.sy <- scales::rescale(sy_vec, to = raster::extent(r)[3:4],
from = c(0, dim(hab_mask)[1]))
ac_pts <- sp::SpatialPoints(cbind(rescale.sx, rescale.sy),
proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
tab <- table(raster::cellFromXY(r, ac_pts))
r[as.numeric(names(tab))] <- tab/n.iter
}
}
else if (length(dim(grid)) == 3) {
if (isFALSE(hab_mask)) {
r <- apply(grid, 3, function(x) raster::rasterFromXYZ(x,
crs = crs_))
z <- samples[, grep(paste0(z_alias, "\\["), colnames(samples))]
sx <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,
1:dim(z)[2]]
sy <- samples[, grep(paste0(s_alias, "\\["), colnames(samples))][,
(dim(z)[2] + 1):(dim(z)[2] * 2)]
for (g in 1:dim(grid)[3]) {
z_site <- z[, site == g]
z_vec <- as.vector(z_site)
sx_site <- sx[, site == g]
sx_vec <- as.vector(sx_site)[z_vec == 1]
sy_site <- sy[, site == g]
sy_vec <- as.vector(sy_site)[z_vec == 1]
ac_pts <- sp::SpatialPoints(cbind(sx_vec, sy_vec),
proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
tab <- table(raster::cellFromXY(r[[g]], ac_pts))
r[[g]][as.numeric(names(tab))] <- tab/n.iter
}
}
else if (isFALSE(hab_mask) == FALSE) {
r=list()
for (g in 1:dim(grid)[3]) {
r[[g]] <- raster::raster(nrows=dim(hab_mask)[1],ncols=dim(hab_mask)[2],
ext=ext[[g]], crs = crs_)
}
for (g in 1:dim(grid)[3]) {
z <- samples[, grep(paste0(z_alias, "\\["), colnames(samples))]
z_site <- z[, site == g]
z_vec <- as.vector(z_site)
sx <- samples[, grep(paste0(s_alias, "\\["),
colnames(samples))][, 1:dim(z)[2]]
sx_site <- sx[, site == g]
sx_vec <- as.vector(sx_site)[z_vec == 1]
sy <- samples[, grep(paste0(s_alias, "\\["),
colnames(samples))][, (dim(z)[2] + 1):(dim(z)[2] *
2)]
sy_site <- sy[, site == g]
sy_vec <- as.vector(sy_site)[z_vec == 1]
rescale.sx <- scales::rescale(sx_vec, to = raster::extent(r[[g]])[1:2],
from = c(0, dim(hab_mask)[2]))
rescale.sy <- scales::rescale(sy_vec, to = raster::extent(r[[g]])[3:4],
from = c(0, dim(hab_mask)[1]))
ac_pts <- sp::SpatialPoints(cbind(rescale.sx,
rescale.sy), proj4string = sp::CRS(sf::st_crs(crs_)$proj4string))
tab <- table(raster::cellFromXY(r[[g]], ac_pts))
r[[g]][as.numeric(names(tab))] <- tab/n.iter
}
}
}
}
return(r)
} # End function 'realized_density'
#' Efficiently edit rows of model code generated from nimble
#'
#' @description Allows for efficient editing of model code produced by the
#' \code{nimbleCode()} function.
#'
#' @param model The \code{nimbleCode} used to define the model in the \code{nimble}
#' package, possibly generated from the \code{\link{get_classic}} function.
#' @param append_code Either \code{NULL} or model code produced from
#' \code{nimbleCode()} or \code{\link{get_classic}} function. Note that if
#' \code{remove_line = NULL}, then code will be appended just after existing model
#' code; otherwise specify the lines to insert new code into by setting
#' \code{append_line}.
#' @param append_line Either \code{NULL} or an integer scalar or vector defining
#' the positions at which to insert new lines of code in the model. If multiple
#' lines of new code are to be inserted at the same position, repeat that
#' position (e.g., use \code{rep(44,3)} if the new code has 3 lines to insert,
#' not counting \code{"{"} and \code{"}"}). Line positions refer to the original
#' model given to \code{model}.
#' @param remove_line Either \code{NULL} or an integer scalar or vector defining
#' which lines of code to remove from the original model. Set to \code{NULL}
#' when only appending to, and not replacing, code in the previous model
#' (i.e., \code{model}).
#' @param write Logical. If \code{TRUE}, then a text file called "new_model.txt"
#' is written to the working directory. Otherwise, the model is written to a
#' temporary file and then deleted. Default is \code{FALSE}.
#' @return A model description that can be run using \code{\link{run_classic}}.
#' @author Daniel Eacker
#' @importFrom R.utils insert
#' @examples
#' # get model
#' scr_model = get_classic(dim_y = 2, enc_dist = "binomial",sex_sigma = TRUE,
#' hab_mask=TRUE,trapsClustered = TRUE)
#'
#' # create new nimbleCode to use for replacement in 'scr_model'
#' p0_prior = nimble::nimbleCode({
#' p0[g] ~ dbeta(1,1)
#' })
#'
#' # replace line 3 of old model code with 'p0_prior'
#' new_model = customize_model(model = scr_model, append_code = p0_prior,
#' append_line = 3, remove_line = 3)
#'
#' # inspect new model code
#' new_model
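#'
#' # a minimal sketch: append a hypothetical derived node to the end of the
#' # model (with append_line = NULL and remove_line = NULL, the new code is
#' # added after the existing model code)
#' extra_node = nimble::nimbleCode({
#' D_per_km2 <- D*1e6 # assumes D is in activity centers per m^2
#' })
#' appended_model = customize_model(model = scr_model, append_code = extra_node,
#'  append_line = NULL, remove_line = NULL)
#' appended_model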
#' @name customize_model
#' @export
customize_model <- function(model,append_code = NULL,append_line = NULL,
remove_line = NULL,write = FALSE){
# read in main model
model_list <- as.list(model)
file_list <- list()
length_lines <- numeric(length(model_list))
for(i in 1:length(model_list)){
tmodel <- strsplit(as.character(model[i]),"\n")[[1]]
file_list[[i]] <- tempfile(fileext = ".txt")
writeLines(tmodel, file_list[[i]])
length_lines[i] <- length(readLines(file_list[[i]],encoding="UTF-8"))
}
main_model <- character(sum(length_lines))
for(i in 1:length(model_list)){
main_model[which(main_model=="")[1]:ifelse(length_lines[i]==1,(which(main_model=="")[1]),
(which(main_model=="")[1])+length_lines[i]-1)] =
readLines(file_list[[i]],encoding="UTF-8")
}
main_model[length(main_model)+1] <- "}"
main_model[c(1,length(main_model))] <- c("nimble::nimbleCode({","})")
unlink(unlist(file_list)) # unlink temp files
# check for removal of lines and do this first
if(isFALSE(is.null(remove_line))){
remove_model <- main_model[-remove_line]
}
# just append new code after old code
if(isFALSE(is.null(append_code)) & is.null(append_line)){
model_list <- as.list(append_code)
file_list <- list()
length_lines <- numeric(length(model_list))
for(i in 1:length(model_list)){
tmodel <- strsplit(as.character(append_code[i]),"\n")[[1]]
file_list[[i]] <- tempfile(fileext = ".txt")
writeLines(tmodel, file_list[[i]])
length_lines[i] <- length(readLines(file_list[[i]],encoding="UTF-8"))
}
append_model <- character(sum(length_lines))
for(i in 1:length(model_list)){
append_model[which(append_model=="")[1]:ifelse(length_lines[i]==1,(which(append_model=="")[1]),
(which(append_model=="")[1])+length_lines[i]-1)] =
readLines(file_list[[i]],encoding="UTF-8")
}
append_model[length(append_model)+1] <- "}"
unlink(unlist(file_list))
if(is.null(remove_line)){
updated_model <- c("nimble::nimbleCode({",
main_model[-c(1,length(main_model))],
append_model[-c(1,length(append_model))],
"})")
}else
if(isFALSE(is.null(remove_line))){
updated_model <- c("nimble::nimbleCode({",
remove_model[-c(1,length(remove_model))],
append_model[-c(1,length(append_model))],
"})")
} # check for line removal
}else # add new code to specific index position in character vector
if(isFALSE(is.null(append_code)) & isFALSE(is.null(append_line))){
model_list <- as.list(append_code)
file_list <- list()
length_lines <- numeric(length(model_list))
for(i in 1:length(model_list)){
tmodel <- strsplit(as.character(append_code[i]),"\n")[[1]]
file_list[[i]] <- tempfile(fileext = ".txt")
writeLines(tmodel, file_list[[i]])
length_lines[i] <- length(readLines(file_list[[i]],encoding="UTF-8"))
}
append_model <- character(sum(length_lines))
for(i in 1:length(model_list)){
append_model[which(append_model=="")[1]:ifelse(length_lines[i]==1,(which(append_model=="")[1]),
(which(append_model=="")[1])+length_lines[i]-1)] =
readLines(file_list[[i]],encoding="UTF-8")
}
append_model[length(append_model)+1] <- "}"
append_model[c(1,length(append_model))] <- c("nimble::nimbleCode({","})")
unlink(unlist(file_list))
# check if length(append_line) == length(append_model[-c(1,length(append_model))])
if(length(append_line) != length(append_model[-c(1,length(append_model))])){
stop(paste("length of append_line must equal lines of append_code minus outside braces, i.e.,","{","}"))
}
if(is.null(remove_line)){
updated_model <- character(length(main_model)+length(append_model)-2)
updated_model[append_line] <- append_model[-c(1,length(append_model))]
updated_model[(1:(length(updated_model)-1))[-c(1,append_line)]] <- main_model[-c(1,length(main_model))]
updated_model[c(1,length(updated_model))] <- c("nimble::nimbleCode({","})")
}else
if(isFALSE(is.null(remove_line))){
updated_model <- character(length(main_model))
updated_model[(1:length(main_model))[-remove_line]] <- remove_model
if(min(append_line) <= length(main_model)){
updated_model <- R.utils::insert(updated_model, ats = append_line,
append_model[-c(1,length(append_model))])
updated_model <- updated_model[which(updated_model!="")]
}else
if(min(append_line) > length(main_model)){
updated_model <- c(updated_model[-length(updated_model)],
rep("",min(append_line) - length(main_model)),"})")
updated_model <- R.utils::insert(updated_model, ats = append_line,
append_model[-c(1,length(append_model))])
updated_model <- updated_model[which(updated_model!="")]
}
} # check for line removal
} else # end append_code and append_line = TRUE
# if nothing is done to code, just return original code
if(is.null(remove_line) & is.null(append_code) & is.null(append_line)){
updated_model <- main_model
warning("Returning same model code as input into function")
}else
# if only removal of lines
if(isFALSE(is.null(remove_line)) & is.null(append_code) & is.null(append_line)){
updated_model <- remove_model
}
# if write to file
if(write){
local.path <- paste0(getwd(),"/new_model.txt")
writeLines(updated_model,local.path,useBytes = FALSE)
}
txtPath1 <- tempfile(fileext = ".txt")
writeLines(updated_model,txtPath1,useBytes = FALSE)
on.exit(unlink(txtPath1), add = TRUE) # clean up temporary file on exit
return(source(txtPath1)$value)
} # End function 'customize_model'
#' Retrieve nimbleCode for spatial count models
#'
#' @description Creates model code using the \code{\link[nimble]{nimbleCode}} function.
#'
#' @param occ_specific Logical. If \code{FALSE}, the encounter rate will
#' not include an occasion-specific loop in the detection function; otherwise,
#' the model will include a for loop for occasions (K) in the detection function.
#' Default is \code{FALSE}.
#' @param hab_mask A logical value indicating whether a habitat mask will be
#' used. Default is \code{FALSE}.
#' @param trapsClustered A logical value indicating if traps are clustered in
#' arrays across the sampling area.
#' @return Model code created from \code{\link[nimble]{nimbleCode}}.
#' @details This function provides templates for unmarked spatial count models
#' that can be easily modified to include further model complexity such as
#' covariates explaining detection probability. Options include an
#' occasion-specific detection loop, habitat masking, and clustered trap arrays.
#' @author Daniel Eacker
#' @examples
#' # get spatial count model with non-occasion-specific detection
#' # function, single scaling parameter, no habitat mask, and no clustering
#' unmarked_model = get_unmarked(occ_specific=FALSE,hab_mask = FALSE,
#' trapsClustered = FALSE)
#'
#' # inspect model
#' unmarked_model
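#'
#' # a further illustrative sketch: the same template with an occasion-specific
#' # detection loop, a habitat mask, and clustered trap arrays
#' unmarked_model_k = get_unmarked(occ_specific = TRUE, hab_mask = TRUE,
#' trapsClustered = TRUE)
#'
#' # inspect model
#' unmarked_model_k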
#' @name get_unmarked
#' @export
get_unmarked <- function(occ_specific = FALSE,
hab_mask = FALSE,trapsClustered = FALSE){
m <- J <- su <- X <- sigma <- n0 <- zu <- A <- lam0 <- K <-
nSites <- site <- pixelWidth <- psiu <- prop.habitat <- site_indexL <-
site_indexU <- NULL
if(trapsClustered == FALSE){
if(isFALSE(hab_mask)){ # determine if hab_mask is included
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower, x_upper)
su[i,2] ~ dunif(y_lower, y_upper)
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1])^2 + (su[i,2]-X[1:J,2])^2)
lamu[i,1:J] <- lam0*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else # End occ_specific = FALSE
if(isFALSE(occ_specific)==FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower, x_upper)
su[i,2] ~ dunif(y_lower, y_upper)
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1])^2 + (su[i,2]-X[1:J,2])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j,1:K])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
}
} else # End no habitat mask
if(hab_mask==TRUE){
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower, x_upper)
su[i,2] ~ dunif(y_lower, y_upper)
pOKu[i] <- hab_mask[(trunc(su[i,2])+1),(trunc(su[i,1])+1)] # habitat check
OKu[i] ~ dbern(pOKu[i]) # OKu[i] <- 1, the ones trick
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1])^2 + (su[i,2]-X[1:J,2])^2)
lamu[i,1:J] <- lam0*exp(-distu[i,1:J]^2/(2*sigma.pixel^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else
if(isFALSE(occ_specific)==FALSE){
scrcode <- nimble::nimbleCode({
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower, x_upper)
su[i,2] ~ dunif(y_lower, y_upper)
pOKu[i] <- hab_mask[(trunc(su[i,2])+1),(trunc(su[i,1])+1)] # habitat check
OKu[i] ~ dbern(pOKu[i]) # OKu[i] <- 1, the ones trick
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1])^2 + (su[i,2]-X[1:J,2])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0*exp(-distu[i,1:J]^2/(2*sigma.pixel^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j,1:K])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} # end occasion-specific
} # end models with hab_mask
}else # end trapsClustered == FALSE
if(trapsClustered){
if(isFALSE(hab_mask)){ # determine if hab_mask is included
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
}
sigma ~ dunif(0, sigma_upper) # scaling parameter
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
su[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1,site[i]])^2 + (su[i,2]-X[1:J,2,site[i]])^2)
lamu[i,1:J] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else # end occasion-specific == FALSE
if(isFALSE(occ_specific)==FALSE){ # occasion-specific models
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
psiu ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # site-specific baseline encounter rate
} # g sites
for(i in 1:m){
zu[i]~dbern(psiu)
su[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
su[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1,site[i]])^2 + (su[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j,1:K])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} # end occasion-specific models
} else # end hab_mask == FALSE
if(hab_mask==TRUE){
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
}
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psiu ~ dunif(0, 1) # inclusion prob
for(i in 1:m){
zu[i]~dbern(psiuu[i])
# adjust psiu for the proportion of available habitat at each site
psiuu[i] <- (1-(1-psiu)^prop.habitat[site[i]])
su[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
su[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOKu[i] <- hab_mask[(trunc(su[i,2])+1),(trunc(su[i,1])+1),site[i]] # habitat check
OKu[i] ~ dbern(pOKu[i]) # OKu[i] <- 1, the ones trick
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1,site[i]])^2 + (su[i,2]-X[1:J,2,site[i]])^2)
lamu[i,1:J] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma.pixel^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else # End isFALSE(occ_specific)
if(isFALSE(occ_specific)==FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
sigma.pixel <- sigma / pixelWidth # scaled for habitat mask
psiu ~ dunif(0, 1) # inclusion prob
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # site-specific baseline encounter rate
} # g sites
for(i in 1:m){
zu[i]~dbern(psiuu[i])
# adjust psiu for the proportion of available habitat at each site
psiuu[i] <- (1-(1-psiu)^prop.habitat[site[i]])
su[i,1] ~ dunif(x_lower[site[i]], x_upper[site[i]])
su[i,2] ~ dunif(y_lower[site[i]], y_upper[site[i]])
pOKu[i] <- hab_mask[(trunc(su[i,2])+1),(trunc(su[i,1])+1),site[i]] # habitat check
OKu[i] ~ dbern(pOKu[i]) # OKu[i] <- 1, the ones trick
distu[i,1:J] <- sqrt((su[i,1]-X[1:J,1,site[i]])^2 + (su[i,2]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma.pixel^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j,1:K])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} # end isFALSE(occ_specific)==FALSE
} # end models with hab_mask
} # end trapsClustered
} # End function 'get_unmarked'
#' Discretize traps and initial activity centers
#'
#' @description Discretize the state-space grid, traps and initial activity center
#' locations to prepare for use in a discrete SCR model.
#'
#' @param X Either a matrix or array representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param grid A matrix or array object of the state-space grid. This is
#' returned from \code{\link{grid_classic}}.
#' @param s.st A matrix of starting activity center coordinates. This is
#' returned from \code{\link{initialize_classic}}.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param site Either \code{NULL} (if a 2D trap array is used) or a vector of
#' integers denoting which trap array an individual (either detected or
#' augmented) belongs to. Note that \code{site} is provided from
#' \code{\link{sim_classic}} when a 3D trap array is used. However, this
#' \code{site} variable must be correctly augmented based on the total
#' augmented population size (i.e., \code{M}).
#' @param hab_mask Either \code{NULL} (the default) or a matrix or array output
#' from \code{\link{mask_polygon}} or \code{\link{mask_raster}} functions.
#' @return
#' \itemize{
#' \item{\code{grid}}{ Grid coordinates for the state-space.}
#' \item{\code{nPix}}{ Number of state-space pixels.}
#' \item{\code{X}}{ Discretized traps as a matrix or array.}
#' \item{\code{s.st}}{ Indices for initial activity center locations.}
#' }
#' @note The vector \code{s.st} returned from the function indexes the
#' rows of \code{grid}.
#' @details This function prepares the state-space grid, trap coordinates and
#' initial activity center coordinates for use in a discrete spatial capture-recapture
#' model. Note that when a habitat mask is used, the number of rows in the state-space
#' grid coordinate matrix will be reduced to the suitable pixels, and this object will
#' need to be adjusted accordingly before being used in the model.
#' @importFrom purrr map
#' @author Daniel Eacker
#' @examples
#' # simulate a single trap array with random positional noise
#' x <- seq(-800, 800, length.out = 5)
#' y <- seq(-800, 800, length.out = 5)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#' set.seed(200)
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'
#' # create state-space
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = 100)
#'
#' # simulate data for uniform state-space and habitat mask
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = mysigma,
#' prop_sex = 0.7, N = 200, K = 4, base_encounter = 0.15, enc_dist = "binomial",
#' hab_mask = FALSE, setSeed = 100)
#'
#' # total augmented population size
#' M = 400
#'
#' # get initial activity center starting values
#' s.st = initialize_classic(y=data3d$y, M=M, X=traps, ext=Grid$ext,
#' hab_mask = FALSE)
#'
#' # get discretized traps and initial activity center grid indices
#' d_list <- discretize_classic(X = traps, grid=Grid$grid,
#' s.st = s.st, crs_= mycrs,hab_mask=NULL)
#'
#' str(d_list)
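#'
#' # illustrative check (a sketch, not part of the original workflow): the
#' # discretized traps should now sit on state-space grid cells, and s.st
#' # indexes rows of the returned grid
#' head(d_list$X)
#' head(d_list$grid[d_list$s.st, ])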
#' @name discretize_classic
#' @export
discretize_classic <- function(X, grid, s.st,
crs_,site=NULL,hab_mask=NULL){
# need to determine if X is 2 or 3 dimensional (stop if not)
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
# for dim of length 2
if(length(dim(X))==2){
if(is.null(hab_mask)){
# 1) discretize traps (snap to grid)
X_dist <- sf::st_distance(sf::st_cast(sf::st_sfc(sf::st_multipoint(X),crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(grid),crs = crs_),"POINT"))
# snap each trap to its nearest grid cell (take the first cell in case of ties)
grid_index <- unlist(lapply(apply(X_dist, 1, function(x) which(x==min(x))),function(x) x[1]))
X_discrete <- grid[grid_index,]
# 2) discrete starting activity center locations
s.st_dist <- sf::st_distance(sf::st_cast(sf::st_sfc(sf::st_multipoint(s.st),crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(grid),crs = crs_),"POINT"))
# get the cell indices; take the first matching grid cell in case of ties
s.st_discrete <- unlist(lapply(apply(s.st_dist, 1, function(x) which(x==min(x))),function(x) x[1]))
nPix <- nrow(grid) # get number of pixels
}else
# with habitat mask
if(isFALSE(is.null(hab_mask))){
# first remove grid rows that are unsuitable
hab_vec <- as.vector(t(apply(hab_mask,2,rev)))
grid <- grid[which(hab_vec==1),]
X_dist <- sf::st_distance(sf::st_cast(sf::st_sfc(sf::st_multipoint(X),crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(grid),crs = crs_),"POINT"))
# snap each trap to its nearest grid cell (take the first cell in case of ties)
grid_index <- unlist(lapply(apply(X_dist, 1, function(x) which(x==min(x))),function(x) x[1]))
X_discrete <- grid[grid_index,]
# 2) discrete starting activity center locations
s.st_dist <- sf::st_distance(sf::st_cast(sf::st_sfc(sf::st_multipoint(s.st),crs = crs_),"POINT"),
sf::st_cast(sf::st_sfc(sf::st_multipoint(grid),crs = crs_),"POINT"))
# get the cell indices; take the first matching grid cell in case of ties
s.st_discrete <- unlist(lapply(apply(s.st_dist, 1, function(x) which(x==min(x))),function(x) x[1]))
nPix <- nrow(grid) # get number of pixels
} # end hab_mask
}else # end dimension 2
# for dim of length 3 (assumes different sites are in dimension 3)
if(length(dim(X))==3){
if(length(site)!=nrow(s.st)){
stop("Include 'site' variable with length equal to number of rows of s.st")
}
if(is.null(hab_mask)){
multi2singlePoint <- function(x)(sf::st_cast(sf::st_sfc(sf::st_multipoint(x),crs = crs_),"POINT"))
# first get discretized trap coordinates
Xlist <- purrr::map(apply(X, 3, function(x) list(x)[[1]], simplify = FALSE),multi2singlePoint)
gridlist <- purrr::map(apply(grid, 3, function(x) list(x)[[1]], simplify = FALSE),multi2singlePoint)
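# pairwise trap-to-grid-cell distances for each site, stacked into a
# J x nPix x nSites array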
X_dist <- array(mapply(sf::st_distance, Xlist, gridlist),dim=c(length(Xlist[[1]]),
length(gridlist[[1]]),length(Xlist)))
X_dist2 <- apply(X_dist,c(1,3),function(x) which(x==min(x))[1])
Xlist_index <- lapply(apply(X_dist2, 2,function(x) list(x)),function(x) x[[1]])
gridlist_index <- lapply(gridlist, sf::st_coordinates)
X_discrete <- array(mapply(function(x,y){y[x,]},Xlist_index,gridlist_index),
dim=c(length(Xlist[[1]]),2,length(Xlist)))
# now discretize starting activity center locations
s.st_dist <- array(mapply(sf::st_distance, list(multi2singlePoint(s.st)), gridlist),
dim=c(nrow(s.st),length(gridlist[[1]]),length(Xlist)))
s.st_dist2 <- apply(s.st_dist,c(1,3),function(x) which(x==min(x))[1])
s.st_discrete <- numeric(nrow(s.st))
for(i in 1:nrow(s.st)){
s.st_discrete[i] <- s.st_dist2[i, site[i]] # nearest grid cell for individual i within its site
}
# convert grid list to array
grid <- array(NA,dim = c(max(unlist(lapply(gridlist_index, nrow))),2,dim(X)[3]))
for(i in 1:dim(X)[3]){
grid[1:unlist(lapply(gridlist_index, nrow))[i],1:2,i] = gridlist_index[[i]]
}
nPix <- unlist(lapply(gridlist_index, nrow)) # get number of pixels
}else
# with habitat mask
if(isFALSE(is.null(hab_mask))){
hab_vec <- apply(hab_mask, 3, function(x) as.vector(t(apply(x,2,rev))))
hab_vec <- apply(hab_vec, 2, function(x) list(x)[[1]], simplify = FALSE)
gridlist1 <- apply(grid, 3, function(x) list(x)[[1]], simplify = FALSE)
gridlist2 <- mapply(function(x,y){y[which(x==1),]},hab_vec,gridlist1,SIMPLIFY=FALSE)
multi2singlePoint <- function(x)(sf::st_cast(sf::st_sfc(sf::st_multipoint(x),crs = crs_),"POINT"))
# first get discretized trap coordinates
Xlist <- purrr::map(apply(X, 3, function(x) list(x)[[1]], simplify = FALSE),multi2singlePoint)
gridlist <- lapply(gridlist2, multi2singlePoint)
X_dist <- mapply(sf::st_distance, Xlist, gridlist, SIMPLIFY=FALSE)
Xlist_index <- lapply(X_dist,function(x) apply(x, 1,function(y) which(y==min(y))[1]))
gridlist_index <- lapply(gridlist, sf::st_coordinates)
X_discrete <- array(mapply(function(x,y){y[x,]},Xlist_index,gridlist_index),
dim=c(length(Xlist[[1]]),2,length(Xlist)))
# now discretize starting activity center locations
s.st_dist <- mapply(sf::st_distance, list(multi2singlePoint(s.st)), gridlist, SIMPLIFY=FALSE)
s.st_dist2 <- lapply(s.st_dist, function(x) apply(x, 1, function(y) which(y==min(y))[1]))
s.st_discrete <- numeric(nrow(s.st))
for(i in 1:nrow(s.st)){
s.st_discrete[i] <- s.st_dist2[site[i]][[1]][i]
}
# convert grid list to array
grid <- array(NA,dim = c(max(unlist(lapply(gridlist_index, nrow))),2,dim(X)[3]))
for(i in 1:dim(X)[3]){
grid[1:unlist(lapply(gridlist_index, nrow))[i],1:2,i] = gridlist_index[[i]]
}
nPix <- unlist(lapply(gridlist_index, nrow)) # get number of pixels
} # end habitat mask
} # end dimension 3
return(list(grid=grid,nPix=nPix,X=X_discrete,s.st=s.st_discrete))
} # End 'discretize_classic' function
#' Retrieve nimbleCode for discrete spatial capture-recapture models
#'
#' @description Creates model code using the \code{\link[nimble]{nimbleCode}}
#' function.
#'
#' @param type Specifies the type of discrete model for either \code{"marked"}
#' (the default) or \code{"unmarked"} data sources.
#' @param dim_y Either \code{NULL} (the default) or an integer of either 2 or 3
#' that defines whether the encounter history data are in 2D or 3D format. Note
#' that this input is not used when \code{type = "unmarked"}.
#' @param occ_specific Logical. If \code{FALSE}, the encounter rate will
#' not include an occasion-specific loop in the detection function; otherwise,
#' the model will include a for loop for occasions (K) in the detection function.
#' Default is \code{FALSE}. Only applied when \code{type = "unmarked"}.
#' @param enc_dist Either \code{"binomial"} or \code{"poisson"}. Default is
#' \code{"binomial"}.
#' @param sex_sigma A logical value indicating whether the scaling parameter
#' ('sigma') is sex-specific.
#' @param trapsClustered A logical value indicating if traps are clustered in
#' arrays across the sampling area.
#' @return A \code{nimbleCode} object from the \code{nimble} package.
#' @details This function provides templates that could be copied and easily
#' modified to include further model complexity such as covariates explaining
#' detection probability. These discrete models include different encounter
#' probability distributions and sex-specific scaling parameters for marked and
#' unmarked data sets.
#' @author Daniel Eacker
#' @examples
#' # get discrete model for 2D marked encounter data, binomial encounter
#' # distribution, non-sex-specific scaling parameter, and no clustering of traps
#' discrete_model_m = get_discrete(type="marked", dim_y = 2,enc_dist = "binomial",
#' sex_sigma = FALSE, trapsClustered = FALSE)
#'
#' # inspect model
#' discrete_model_m
#'
#' # get discrete model for unmarked encounter data, binomial encounter
#' # distribution, non-sex-specific scaling parameter, and no clustering of traps
#' discrete_model_u = get_discrete(type="unmarked",occ_specific = FALSE,
#' enc_dist = "binomial",sex_sigma = FALSE, trapsClustered = FALSE)
#'
#' # inspect model
#' discrete_model_u
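#'
#' # a further illustrative sketch: a marked model with 3D encounter data,
#' # Poisson encounter distribution, and sex-specific scaling parameters; the
#' # template can then be inspected line-by-line with print_model()
#' discrete_model_m3d = get_discrete(type="marked", dim_y = 3, enc_dist = "poisson",
#' sex_sigma = TRUE, trapsClustered = FALSE)
#'
#' # inspect model
#' discrete_model_m3d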
#' @name get_discrete
#' @export
get_discrete <- function(type = "marked", dim_y = NULL, occ_specific = FALSE,
enc_dist = "binomial",sex_sigma = FALSE, trapsClustered = FALSE){
M <- J <- s <- X <- p0 <- sigma <- n0 <- z <-
A <- lam0 <- K <- sex <-
nSites <- site <- pixelWidth <- psi <- prop.habitat <-
EN <- ED <- alpha0 <- mu <- x0g <- y0g <- probs <-
grid <- nPix <- pixArea <- x0gu <- y0gu <-
m <- su <- s <- zu <- psiu <-
site_indexL <- site_indexU <- NULL
if(is.null(dim_y)==FALSE){
if(dim_y !=2 & dim_y!=3){
stop("dim_y must be either 2 or 3 or NULL")
}
}
if(type == "marked" & is.null(dim_y)){
stop("dim_y must be provided as either 2 or 3 when type = 'marked'")
}
if(enc_dist != "poisson" & enc_dist != "binomial"){
stop("Encounter distribution has to be either binomial or poisson")
}
if(type=="marked"){
if(trapsClustered == FALSE){
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for detected individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # i individuals
} # j traps
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # i individuals
} # j traps
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
p[i,1:J] <- p0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # i individuals
} # j traps
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
lam[i,1:J] <- lam0*K*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # i individuals
} # j traps
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
p0 ~ dunif(0,1) # baseline encounter probability
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma^2))
}
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
p0 ~ dunif(0,1) # baseline encounter probability
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
for(k in 1:K){
p[i,1:J,k] <- p0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psi <- EN/M # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[1:nPix])
x0g[i] <- grid[s[i],1] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1])^2 + (y0g[i]-X[1:J,2])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
}
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # End 3D models
}else # trapsClustered
if(trapsClustered){
if(dim_y == 2){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
p[i,1:J] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i individuals
# use zeros trick for individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dbin(p[i,j],K)
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J])^K)*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
lam[i,1:J] <- lam0[site[i]]*K*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
y[i,j] ~ dpois(lam[i,j])
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J])*z[i])
} # individuals
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} else # End 2D models
if(dim_y == 3){
if(enc_dist == "binomial" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
}
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == FALSE){
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
}else
if(enc_dist == "binomial" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
p0[g] ~ dunif(0,1) # baseline encounter probability
} # g sites
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
for(k in 1:K){
p[i,1:J,k] <- p0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i marked individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dbin(p[i,j,k],1)
} # k occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dbern((1 - prod(1 - p[i,1:J,1:K]))*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
} else
if(enc_dist == "poisson" & sex_sigma == TRUE){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psi <- EN/M # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
psi_sex ~ dunif(0,1) # probability sex = 1
sigma[1] ~ dunif(0, sigma_upper) # scaling parameter, sex = 0
sigma[2] ~ dunif(0, sigma_upper) # scaling parameter, sex = 1
for(i in 1:M){
z[i]~dbern(psi)
sex[i] ~ dbern(psi_sex)
sx[i] <- sex[i] + 1
s[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0g[i] <- grid[s[i],1,site[i]] # x-coordinate of state-space grid
y0g[i] <- grid[s[i],2,site[i]] # y-coordinate of state-space grid
dist[i,1:J] <- sqrt((x0g[i]-X[1:J,1,site[i]])^2 + (y0g[i]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lam[i,1:J,k] <- lam0[site[i]]*exp(-dist[i,1:J]^2/(2*sigma[sx[i]]^2))
} # k occasions
} # i individuals
# use zeros trick for marked individuals to speed up the computation
for(i in 1:n0){
for(j in 1:J){
for(k in 1:K){
y[i,j,k] ~ dpois(lam[i,j,k])
} # occasions
} # j traps
} # i individuals
for(i in (n0+1):M){
zeros[i] ~ dpois(sum(lam[i,1:J,1:K])*z[i])
} # i individuals
N <- sum(z[1:M])
D <- N/A
})
}
return(scrcode)
} # end 3D model
} # end trapsClustered
}else # end marked models
if(type == "unmarked"){
if(trapsClustered == FALSE){
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psiu <- EN/m # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:m){
zu[i]~dbern(psiu)
su[i] ~ dcat(probs[1:nPix])
x0gu[i] <- grid[su[i],1] # x-coordinate of state-space grid
y0gu[i] <- grid[su[i],2] # y-coordinate of state-space grid
distu[i,1:J] <- sqrt((x0gu[i]-X[1:J,1])^2 + (y0gu[i]-X[1:J,2])^2)
lamu[i,1:J] <- lam0*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else # End occ_specific = FALSE
if(isFALSE(occ_specific)==FALSE){
scrcode <- nimble::nimbleCode({
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
}
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix]) # expected abundance
psiu <- EN/m # derived inclusion prob
lam0 ~ dunif(0,lam0_upper) # baseline encounter rate
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:m){
zu[i]~dbern(psiu)
su[i] ~ dcat(probs[1:nPix])
x0gu[i] <- grid[su[i],1] # x-coordinate of state-space grid
y0gu[i] <- grid[su[i],2] # y-coordinate of state-space grid
distu[i,1:J] <- sqrt((x0gu[i]-X[1:J,1])^2 + (y0gu[i]-X[1:J,2])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(j in 1:J){
bigLambda[j] <- sum(lamu[1:m,j,1:K])
for(k in 1:K){
n[j,k] ~ dpois(bigLambda[j])
} # k occasions
} # j traps
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} # end occasion-specific
}else # end trapsClustered == FALSE
if(trapsClustered){
if(isFALSE(occ_specific)){
scrcode <- nimble::nimbleCode({
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psiu <- EN/m # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
sigma ~ dunif(0, sigma_upper) # scaling parameter
for(i in 1:m){
zu[i]~dbern(psiu)
su[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0gu[i] <- grid[su[i],1,site[i]] # x-coordinate of state-space grid
y0gu[i] <- grid[su[i],2,site[i]] # y-coordinate of state-space grid
distu[i,1:J] <- sqrt((x0gu[i]-X[1:J,1,site[i]])^2 + (y0gu[i]-X[1:J,2,site[i]])^2)
lamu[i,1:J] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} else # end occasion-specific == FALSE
if(isFALSE(occ_specific)==FALSE){ # occasion-specific models
scrcode <- nimble::nimbleCode({
sigma ~ dunif(0, sigma_upper) # scaling parameter
alpha0 ~ dunif(-20,20) # uniform prior on log-density intercept
EN <- sum(mu[1:nPix])
psiu <- EN/m # derived inclusion prob
for(j in 1:nPix){
probs[j] <- mu[j]/EN
mu[j] <- exp(alpha0) * pixArea
} # j pixels
for(g in 1:nSites){
lam0[g] ~ dunif(0,lam0_upper) # baseline encounter rate
} # g sites
for(i in 1:m){
zu[i]~dbern(psiu)
su[i] ~ dcat(probs[npixSite[site[i],1]:npixSite[site[i],2]])
x0gu[i] <- grid[su[i],1,site[i]] # x-coordinate of state-space grid
y0gu[i] <- grid[su[i],2,site[i]] # y-coordinate of state-space grid
distu[i,1:J] <- sqrt((x0gu[i]-X[1:J,1,site[i]])^2 + (y0gu[i]-X[1:J,2,site[i]])^2)
for(k in 1:K){
lamu[i,1:J,k] <- lam0[site[i]]*exp(-distu[i,1:J]^2/(2*sigma^2))*zu[i]
} # k occasions
} # i individuals
# unmarked spatial count model likelihood
for(g in 1:nSites){
for(j in 1:J){
bigLambda[j,g] <- sum(lamu[site_indexL[g]:site_indexU[g],j,1:K])
for(k in 1:K){
n[j,k,g] ~ dpois(bigLambda[j,g])
} # k occasions
} # j traps
} # g sites
N <- sum(zu[1:m])
D <- N/A
})
return(scrcode)
} # end occasion-specific models
} # end trapsClustered
} # end unmarked models
} # End function 'get_discrete'
#' Display row indices for model used in 'nimble'
#'
#' @description Prints a model created with \code{nimbleCode()} to show row indices.
#'
#' @param model The \code{nimbleCode()} used to define model in \code{nimble} package,
#' possibly generated from \code{\link{get_classic}} function.
#' @return A character vector with each element representing a line of the model. This
#' function is meant to help with using \code{\link{customize_model}}.
#' @author Daniel Eacker
#' @examples
#' # get model
#' scr_model = get_classic(dim_y = 2, enc_dist = "binomial",sex_sigma = TRUE,
#' hab_mask=TRUE,trapsClustered = TRUE)
#'
#' print_model(scr_model)
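#'
#' # illustrative sketch: the returned character vector can be searched with
#' # base R to find the line indices of a parameter before editing the model
#' grep("sigma", print_model(scr_model))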
#' @name print_model
#' @export
print_model <- function(model){
# read in main model
model_list <- as.list(model)
file_list <- list()
length_lines <- numeric(length(model_list))
for(i in 1:length(model_list)){
tmodel <- strsplit(as.character(model[i]),"\n")[[1]]
file_list[[i]] <- tempfile(fileext = ".txt")
writeLines(tmodel, file_list[[i]])
length_lines[i] <- length(readLines(file_list[[i]],encoding="UTF-8"))
}
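# stitch the per-element text back together into a single character vector,
# one element per line of model code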
main_model <- character(sum(length_lines))
for(i in 1:length(model_list)){
main_model[which(main_model=="")[1]:ifelse(length_lines[i]==1,(which(main_model=="")[1]),
(which(main_model=="")[1])+length_lines[i]-1)] =
readLines(file_list[[i]],encoding="UTF-8")
}
main_model[length(main_model)+1] <- "}"
main_model[c(1,length(main_model))] <- c("nimble::nimbleCode({","})")
unlink(unlist(file_list)) # unlink temp files
return(main_model)
} # End function 'print_model'
#' Run discrete spatial capture-recapture models in 'nimble' using
#' parallel processing
#'
#' @description A wrapper function to conduct Markov Chain Monte Carlo (MCMC) sampling using
#' 'nimble'.
#'
#' @param model The \code{nimbleCode} used to define model in \code{nimble} package,
#' possibly generated from \code{\link{get_classic}}.
#' @param data A list of data inputs needed to run SCR models in \code{nimble}.
#' @param constants A list of constants needed to run SCR models in \code{nimble}.
#' @param inits Starting values for stochastic parameters to begin MCMC sampling.
#' @param params A vector of character strings that define the parameters to
#' trace in the MCMC simulation.
#' @param dimensions A named list of dimensions for variables. Only needed for variables
#' used with empty indices in model code that are not provided in constants or data.
#' @param nimFun A list of nimbleFunction(s) to be used in the model when using
#' \code{parallel = TRUE}.
#' @param niter An integer value of the total number of MCMC iterations to run
#' per chain.
#' @param nburnin An integer value of the number of MCMC iterations to discard
#' as 'burnin'.
#' @param thin An integer value of the amount of thinning of the chain. For
#' example, \code{thin=2} would retain every other MCMC sample.
#' @param nchains An integer value for the number of MCMC chains to run.
#' @param parallel A logical value indicating whether MCMC chains should be run
#' in parallel processing. Default is \code{FALSE}.
#' @param RNGseed An integer value specifying the random number generator seed
#' used in parallel processing. When \code{parallel = FALSE}, it is passed to
#' \code{setSeed} in the \code{runMCMC} function in 'nimble'.
#' This ensures that the MCMC samples will be the same during each run using the
#' same data, etc. Default is \code{NULL}.
#' @return A list of MCMC samples for each parameter traced with length equal to
#' the number of chains run.
#' @details This function provides a wrapper to easily run Bayesian SCR models
#' using \code{nimble}.
#' @importFrom tictoc tic toc
#' @importFrom graphics hist par abline lines
#' @importFrom parallel parLapply makeCluster stopCluster clusterSetRNGStream
#' @import nimble
#' @author Daniel Eacker
#' @examples
#'\dontrun{
#'# simulate a single trap array with random positional noise
#'x <- seq(-800, 800, length.out = 5)
#'y <- seq(-800, 800, length.out = 5)
#'traps <- as.matrix(expand.grid(x = x, y = y))
#'set.seed(200)
#'traps <- traps + runif(prod(dim(traps)),-20,20)
#'
#'mysigma = c(220, 300) # simulate sex-specific scaling parameter
#'mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'pixelWidth = 100 # store pixelWidth or grid resolution
#'
#'# create state-space
#'Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3*mysigma, res = pixelWidth)
#'
#'# create polygon for mask
#'library(sf)
#'poly = st_sfc(st_polygon(x=list(matrix(c(-1765,-1765,1730,-1650,1600,1650,0,1350,
#' -800,1700,-1850,1000,-1765,-1765),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#'# create habitat mask
#'hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#'prev_mask = NULL)
#'
#'# simulate data for uniform state-space and habitat mask
#'data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = mysigma,
#' prop_sex = 0.6, N = 200, K = 4, base_encounter = 0.15, enc_dist = "binomial",
#' hab_mask = hab_mask, setSeed = 100)
#'
#'# total augmented population size
#'M = 400
#'
#'# get initial activity center starting values
#'s.st = initialize_classic(y=data3d$y, M=M, X=traps, ext=Grid$ext,
#' hab_mask = hab_mask)
#'
#'# convert traps and starting locations to discrete format
#'d_list = discretize_classic(X=traps, grid = Grid$grid, s.st = s.st,
#' crs_ = mycrs, hab_mask = hab_mask)
#'
#'# inspect discrete data list
#'str(d_list)
#'
#'# prepare data
#'data = list(y=data3d$y)
#'data$y = data$y[which(apply(data$y, 1, sum)!=0),,] # remove augmented records
#'# convert to 2D by summing encounters over occasions
#'data$y = apply(data$y, c(1,2), sum)
#'
#'# add discretized traps
#'data$X = d_list$X/1000 # convert units to km
#'
#'# add grid
#'data$grid = d_list$grid/1000 # convert units to km
#'
#'# prepare constants (note get density in activity center/100 m2 rather
#'# than activity centers/m2)
#'constants = list(M = M,n0 = nrow(data$y),J=dim(data$y)[2], K=dim(data3d$y)[3],
#'nPix=d_list$nPix,pixArea = 0.1^2,
#'sigma_upper = 1, A = (sum(hab_mask)*(pixelWidth/100)^2))
#'
#'# augment sex
#'data$sex = c(data3d$sex,rep(NA,constants$M-length(data3d$sex)))
#'
#'# add z and zeros vector data for latent inclusion indicator
#'data$z = c(rep(1,constants$n0),rep(NA,constants$M - constants$n0))
#'data$zeros = c(rep(NA,constants$n0),rep(0,constants$M - constants$n0))
#'
#'# define all initial values
#'inits = list(sigma = runif(2, 0.250, 0.350), s = d_list$s.st,alpha0 = 3,
#' p0 = runif(1, 0.05, 0.15),psi_sex = runif(1,0.5, 0.7),
#' sex=c(rep(NA,constants$n0),rbinom(constants$M-constants$n0,1,0.6)),
#' z=c(rep(NA,constants$n0),rep(0,constants$M-constants$n0)))
#'
#'# parameters to monitor
#'params = c("sigma","psi","p0","N","D","EN","psi_sex","alpha0","s","z")
#'
#'
#'# get model
#'discrete_model = get_discrete(type="marked",dim_y = 2, enc_dist = "binomial",
#' sex_sigma = TRUE,trapsClustered=FALSE)
#'
#'# run model
#'library(tictoc)
#'tic() # track time elapsed
#'out = run_discrete(model = discrete_model, data=data, constants=constants,
#' inits=inits, params = params,niter = 10000, nburnin=1000,
#' thin=1, nchains=2, parallel=TRUE, RNGseed = 500)
#'toc()
#'
#'nimSummary(out, exclude = c("s","z"))
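#'
#'# optional sketch (assumes the 'coda' package is installed): convert the
#'# per-chain output to an mcmc.list for standard convergence diagnostics
#'post = coda::mcmc.list(lapply(out, coda::mcmc))
#'summary(post[, c("N","D")])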
#'}
#' @name run_discrete
#' @export
run_discrete <- function(model, data, constants, inits, params, dimensions = NULL,
nimFun = NULL, niter = 1000, nburnin=100, thin=1, nchains=1,
parallel=FALSE, RNGseed=NULL){
# for parallel processing
if(parallel == FALSE){
SCRmodelR <- nimble::nimbleModel(code=model,data=data,constants=constants,
inits=inits,check=FALSE,calculate=TRUE,dimensions = dimensions)
SCRmodelR$initializeInfo()
# compile model to C++#
SCRmodelC <- nimble::compileNimble(SCRmodelR) # compile code
# MCMC configurations
mcmcspec<-nimble::configureMCMC(SCRmodelR, monitors=params)
scrMCMC <- nimble::buildMCMC(mcmcspec)
CscrMCMC <- nimble::compileNimble(scrMCMC, project = SCRmodelR,
resetFunctions = TRUE)
results <- nimble::runMCMC(CscrMCMC, niter = niter, nburnin=nburnin,
thin=thin, nchains=nchains, setSeed=RNGseed) # add setSeed
return(results)
}else
if(parallel == TRUE){
run_parallel <- function(seed,data,model,constants,dimensions,
nimFun,inits, params, niter,nburnin,thin){
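# worker run on each cluster node: rebuilds and compiles the model and MCMC
# locally, then samples a single chain with its own RNG seed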
# load any custom functions
if(is.null(nimFun)==FALSE){
for(i in 1:length(nimFun)){
nimFun[[i]]
assign(names(nimFun)[i],nimFun[[i]], envir = .GlobalEnv)
}
}
SCRmodelR <- nimble::nimbleModel(code=model,data=data,
constants=constants,inits=inits,check=FALSE,calculate=TRUE,
dimensions = dimensions)
SCRmodelR$initializeInfo()
# compile model to C++#
SCRmodelC <- nimble::compileNimble(SCRmodelR) # compile code
# MCMC configurations
mcmcspec<-nimble::configureMCMC(SCRmodelR, monitors=params)
scrMCMC <- nimble::buildMCMC(mcmcspec)
CscrMCMC <- nimble::compileNimble(scrMCMC, project = SCRmodelR,
resetFunctions = TRUE)
results <- nimble::runMCMC(CscrMCMC, niter = niter, nburnin= nburnin,
thin=thin,setSeed = seed)
return(results)
} # end parallel processing function
if(nchains < 2){
stop("Must have at least 2 chains to run parallel processing")
}
this_cluster <- parallel::makeCluster(nchains)
parallel::clusterEvalQ(this_cluster, library("nimble"))
if(is.null(RNGseed)==FALSE){
parallel::clusterSetRNGStream(cl = this_cluster, RNGseed)
}
chain_output <- parallel::parLapply(cl = this_cluster, fun = run_parallel,
X=1:nchains,model = model, data=data,constants=constants,
nimFun=nimFun, dimensions = dimensions,inits=inits,
params = params, niter = niter,nburnin=nburnin, thin=thin)
parallel::stopCluster(this_cluster)
return(chain_output)
}
} # End function 'run_discrete'
#' Prepare classic SCR data components for local modeling approach
#'
#' @description A function to scale up from individual-level state-space to
#' study-area level state-space using classic SCR data components.
#'
#' @param y Either a matrix or array of encounter history data, possibly
#' from \code{sim_classic()}.
#' @param grid_ind A matrix representing an individual state-space grid. This
#' is returned from \code{\link{grid_classic}}.
#' @param X Either a matrix or array representing the coordinates of traps in
#' UTMs. An array is used when traps are clustered over a survey area.
#' @param crs_ The UTM coordinate reference system (EPSG code) used for your
#' location provided as an integer (e.g., 32608 for WGS 84/UTM Zone 8N).
#' @param sigma_ The scaling parameter of the bivariate normal kernel either
#' in meters or kilometers as an integer. Note that if \code{sigma_} is
#' sex-specific, the maximum value will be used.
#' @param s.st A matrix of starting activity center coordinates. This is
#' returned from \code{\link{initialize_classic}}.
#' @param site Either \code{NULL} (if a 2D trap array is used) or a vector of
#' integers denoting which trap array an individual (either detected or
#' augmented) belongs to. Note that \code{site} is provided from
#' \code{\link{sim_classic}} when a 3D trap array is used. However, this
#' \code{site} variable must be correctly augmented based on the total
#' augmented population size (i.e., \code{M}).
#' @param hab_mask Either \code{FALSE} (the default) or a matrix or array output
#' from \code{\link{mask_polygon}} or \code{\link{mask_raster}} functions.
#' @return A list of data components needed for classic SCR models in a local
#' approach. Specifically, the function returns:
#' \itemize{
#' \item{\code{y}}{ Individual-specific encounter history that only considers traps
#' within a distance of 9 times \code{sigma_}.}
#' \item{\code{X}}{ Individual-specific trap array that only provides coordinates
#' for traps within a distance of 9 times \code{sigma_}.}
#' \item{\code{grid}}{ A matrix or array of state-space grid coordinates that
#' encompasses all individual state-space grids.}
#' \item{\code{prop_habitat}}{ The proportion of suitable habitat for each individual.
#' Note that this is only returned when a habitat mask is used.}
#' \item{\code{ext_mat}}{ A matrix of individual state-space grid extents.}
#' \item{\code{ext}}{ An \code{Extent} object from the \code{raster} package for the
#' scaled-up state-space grid.}
#' \item{\code{Jind}}{ A vector with the number of traps each individual is exposed to.}
#' \item{\code{s.st}}{ A matrix of starting activity center coordinates.}
#' }
#' @details This function converts classic SCR data into the format used for
#' local evaluation of the individual state-space: each individual's trap
#' coordinates and encounter history are restricted to traps within 9 times
#' \code{sigma_}, and a scaled-up state-space grid is built to encompass all
#' individual state-space grids.
#' @importFrom sf st_buffer st_point st_multipoint st_sfc st_cast st_intersects
#' @importFrom raster coordinates rasterFromXYZ extent res
#' @importFrom scales rescale
#' @author Daniel Eacker
#' @examples
#'\dontrun{
#'# simulate a single trap array with random positional noise
#'x <- seq(-1600, 1600, length.out = 6)
#'y <- seq(-1600, 1600, length.out = 6)
#'traps <- as.matrix(expand.grid(x = x, y = y))
#'# add some random noise to locations
#'traps <- traps + runif(prod(dim(traps)),-20,20)
#'mysigma = 300 # simulate sigma of 300 m
#'mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#'pixelWidth = 100 # grid resolution
#'
#'# Create initial grid and extent (use a slightly bigger buffer to match
#'# scaled-up state-space below)
#'Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3.7*mysigma, res = pixelWidth)
#'
#'# Simulated abundance
#'Nsim = 250
#'
#'# simulate SCR data
#'data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs,
#'sigma_ = mysigma, prop_sex = 1, N = Nsim, K = 4, base_encounter = 0.15,
#'enc_dist = "binomial", hab_mask = FALSE, setSeed = 50)
#'
#'# generate initial activity center coordinates for 2D trap array without
#'# habitat mask
#'s.st = initialize_classic(y=data3d$y, M=500, X=traps, ext = Grid$ext,
#'hab_mask = FALSE, all_random = FALSE)
#'
#'# now use grid_classic to create an individual-level state-space (with origin 0, 0)
#'Grid_ind = grid_classic(X = matrix(c(0,0),nrow=1), crs_ = mycrs,
#' buff = 3*mysigma, res = 100)
#'
#'# now localize the data components created above
#'local_list = localize_classic(y = data3d$y, grid_ind = Grid_ind$grid, X=traps,
#' crs_ = mycrs, sigma_ = mysigma, s.st = s.st,
#' site = NULL, hab_mask = FALSE)
#'# inspect local list
#'str(local_list)
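#'
#'# optional sanity checks (a sketch; assumes the return structure
#'# documented above): how many traps is each individual exposed to,
#'# and does the scaled-up grid cover the trap array?
#'summary(local_list$Jind)
#'plot(local_list$grid, pch = 20)
#'points(traps, col = "blue", pch = 20)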
#'}
#' @name localize_classic
#' @export
# note: abind::abind is used below for the 3D trap array case
localize_classic <- function(y, grid_ind, X, crs_, sigma_,
s.st,site=NULL,hab_mask = FALSE){
# need to determine if X is 2 or 3 dimensional (stop if not)
if(length(dim(X))!=2 & length(dim(X))!=3){
stop("Trapping grid must be only 2 or 3 dimensions")
}
# for dim of length 2
if(length(dim(X))==2){
n0 <- length(which(apply(y, 1, function(x) sum(x, na.rm=TRUE))!=0))
y_array <- array(NA, dim=c(n0,dim(y)[2],dim(y)[3]))
X_array <- array(NA, dim=c(nrow(s.st),dim(X)[1],dim(X)[2]))
ext_mat <- matrix(NA, nrow = nrow(s.st), ncol=4)
Jind <- numeric(nrow(s.st)) # number of traps each individual is exposed to
if(isFALSE(hab_mask)){ # no habitat mask
# first need to build up from individual state-space
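# for each detected individual: center the individual-level grid on its
# starting activity center, then keep only traps within a square buffer of
# 9*sigma around that center; these traps define the individual's local
# trap set and trimmed encounter history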
for(i in 1:n0){
# first get trap indices
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,]
y_array[i,1:length(Xint),] <- y[i,Xint,]
} # end individual loop for detected individuals
# now deal with augmentation
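# augmented (undetected) individuals are assigned starting centroids from a
# coarse 3 x 3 grid centered on the trap-array centroid and spaced by
# max(sigma_)*expand_factor; candidate centroids whose 9*sigma square buffer
# does not intersect any trap are dropped before the remaining centroids are
# recycled across the augmented individuals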
centroid <- matrix(apply(X, 2, mean),nrow=1)
expand_factor <- diff(raster::extent(raster::rasterFromXYZ(grid_ind))[1:2])/max(sigma_)
x_coord <- seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + centroid[1]
y_coord <- seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + centroid[2]
xy_ss <- as.matrix(expand.grid(x_coord,y_coord))
# check to see if augmented coordinates are in trap range
poly_check <- sf::st_buffer(sf::st_cast(sf::st_sfc(sf::st_multipoint(xy_ss),
crs = crs_),"POINT"),dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xcheck <- as.numeric(sf::st_intersects(poly_check,
sf::st_sfc(sf::st_multipoint(X), crs = crs_)))
# remove augmented coordinates that are out of trap range
xy_ss <- xy_ss[Xcheck == 1,]
# remake grid
grid_out <- raster::rasterFromXYZ(xy_ss,crs= crs_)
raster::res(grid_out) <- raster::res(raster::rasterFromXYZ(grid_ind,crs= crs_))
# calculate the global extent of the state-space
ext <- raster::extent(grid_out)
aug_mat <- do.call(rbind,
replicate(n = ceiling((nrow(s.st) - n0)/nrow(xy_ss)), xy_ss, simplify = FALSE))
# replicate layers of individual state-space centroids
for(i in (n0+1):nrow(s.st)){
s.st[i,] <- aug_mat[i-n0,]
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,]
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
}
} else
if(isFALSE(hab_mask)==FALSE){ # habitat mask
# now deal with augmentation
centroid <- matrix(apply(X, 2, mean),nrow=1)
# get coordinates for augmented state-space centroid
expand_factor <- diff(raster::extent(raster::rasterFromXYZ(grid_ind))[1:2])/max(sigma_)
x_coord <- seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + centroid[1]
y_coord <- seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + centroid[2]
xy_ss <- as.matrix(expand.grid(x_coord,y_coord))
# check to see if augmented coordinates are in trap range
poly_check <- sf::st_buffer(sf::st_cast(sf::st_sfc(sf::st_multipoint(xy_ss),
crs = crs_),"POINT"),dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xcheck <- as.numeric(sf::st_intersects(poly_check,
sf::st_sfc(sf::st_multipoint(X), crs = crs_)))
# remove augmented coordinates that are out of trap range
xy_ss <- xy_ss[Xcheck == 1,]
# remake grid
grid_out <- raster::rasterFromXYZ(xy_ss,crs=crs_)
raster::res(grid_out) <- raster::res(raster::rasterFromXYZ(grid_ind,crs=crs_))
# calculate the global extent of the state-space
ext <- raster::extent(grid_out)
# create x limits for state-space
xlim <- ext[1:2]
# create y limits for state-space
ylim <- ext[3:4]
# get proportion of available habitat for each individual
prop_habitat <- numeric(nrow(s.st))
# first need to build up from individual state-space
for(i in 1:n0){
# first get trap indices
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
grid_i_rescale_x <- scales::rescale(grid_i[,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
grid_i_rescale_y <- scales::rescale(grid_i[,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
grid_i_rescale <- cbind(grid_i_rescale_x,grid_i_rescale_y)
prop_habitat[i] <- mean(apply(grid_i_rescale, 1, function(x)
hab_mask[(trunc(x[2])+1),(trunc(x[1])+1)]),na.rm=TRUE)
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,]
y_array[i,1:length(Xint),] <- y[i,Xint,]
} # end individual loop for detected individuals
aug_mat <- do.call(rbind,
replicate(n = ceiling((nrow(s.st) - n0)/nrow(xy_ss)), xy_ss, simplify = FALSE))
for(i in (n0+1):nrow(s.st)){
s.st[i,] <- aug_mat[i-n0,]
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,]
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
grid_i_rescale_x <- scales::rescale(grid_i[,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
grid_i_rescale_y <- scales::rescale(grid_i[,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
grid_i_rescale <- cbind(grid_i_rescale_x,grid_i_rescale_y)
prop_habitat[i] <- mean(apply(grid_i_rescale, 1, function(x)
hab_mask[(trunc(x[2])+1),(trunc(x[1])+1)]),na.rm=TRUE)
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
while(pOK==0){
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim)
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim)
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1)]
} # end hab check
} # end augmentation
} # end habitat mask
} # end dim length 2
# for dim of length 3
if(length(dim(X))==3){
n0 <- length(which(apply(y, 1, function(x) sum(x, na.rm=TRUE))!=0))
y_array <- array(NA, dim=c(n0,dim(y)[2],dim(y)[3]))
X_array <- array(NA, dim=c(nrow(s.st),dim(X)[1],dim(X)[2]))
ext_mat <- matrix(NA, nrow = nrow(s.st), ncol=4)
Jind <- numeric(nrow(s.st)) # number of traps each individual is exposed to
if(isFALSE(hab_mask)){ # no habitat mask
# first need to build up from individual state-space
for(i in 1:n0){
# first get trap indices
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X[,,site[i]]), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,,site[i]]
y_array[i,1:length(Xint),] <- y[i,Xint,]
} # end individual loop for detected individuals
# now deal with augmentation
centroid <- matrix(apply(X, c(2,3), mean),nrow=2)
# get coordinates for augmented state-space centroid
expand_factor <- diff(raster::extent(raster::rasterFromXYZ(grid_ind))[1:2])/max(sigma_)
x_coord <- apply(centroid, 2, function(x) seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + x[1])
y_coord <- apply(centroid, 2, function(x) seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + x[2])
xy_ss <- list() # create empty list to hold coarse state-space coordinates for each site
grid_out <- list() # create empty list to hold fine-scale state-space coordinates for each site
for(i in 1:dim(X)[3]){
xy_ss[[i]] <- as.matrix(expand.grid(x_coord[,i],y_coord[,i]))
# check to see if augmented coordinates are in trap range
poly_check <- sf::st_buffer(sf::st_cast(sf::st_sfc(sf::st_multipoint(xy_ss[[i]]),
crs = crs_),"POINT"),dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xcheck <- as.numeric(sf::st_intersects(poly_check,
sf::st_sfc(sf::st_multipoint(X[,,i]), crs = crs_)))
# remove augmented coordinates that are out of trap range
xy_ss[[i]] <- xy_ss[[i]][Xcheck == 1,]
grid_out[[i]] <- raster::rasterFromXYZ(xy_ss[[i]],crs= crs_)
raster::res(grid_out[[i]]) <- raster::res(
raster::rasterFromXYZ(grid_ind,crs= crs_))
}
# calculate the global extent of the state-space
ext <- lapply(grid_out, function(x) raster::extent(x))
# remake grid
grid_out <- array(unlist(lapply(grid_out, raster::coordinates)),
dim=c(dim(raster::coordinates(grid_out[[1]])),dim(X)[3]))
n_vec <- as.vector(table(site[(n0+1):nrow(s.st)]))
aug_mat <- list() # create empty list to hold augmentation coordinates
for(i in 1:dim(X)[3]){
aug_mat[[i]] <- do.call(rbind, replicate(xy_ss[[i]],
n = ceiling(n_vec[i]/nrow(xy_ss[[i]])), simplify = FALSE))
aug_mat[[i]] <- aug_mat[[i]][1:n_vec[i],]
}
aug_all <- do.call(rbind, aug_mat)
for(i in 1:dim(X)[3]){
aug_all[which(site[(n0+1):nrow(s.st)]==i),1:2] <- aug_mat[[i]]
}
# replicate layers of individual state-space centroids
X_array = abind::abind(X_array, array(NA, dim=c((nrow(aug_all)+n0-nrow(s.st)),
dim(X)[1],dim(X)[2])),along=1)
Jind <- c(Jind, numeric(nrow(aug_all)+n0-nrow(s.st)))
ext_mat <- rbind(ext_mat, matrix(NA, nrow = (nrow(aug_all)+n0-nrow(s.st)), ncol=4))
s.st = rbind(s.st, matrix(NA,nrow=(nrow(aug_all)+n0-nrow(s.st)), ncol=2))
for(i in (n0+1):nrow(s.st)){
s.st[i,] <- aug_all[i-n0,]
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X[,,site[i]]), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,,site[i]]
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
}
}else # end no habitat mask
if(isFALSE(hab_mask)==FALSE){ # habitat mask
# now deal with augmentation
centroid <- matrix(apply(X, c(2,3), mean),nrow=2)
# get coordinates for augmented state-space centroid
expand_factor <- diff(raster::extent(raster::rasterFromXYZ(grid_ind))[1:2])/max(sigma_)
x_coord <- apply(centroid, 2, function(x) seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + x[1])
y_coord <- apply(centroid, 2, function(x) seq(-max(sigma_)*expand_factor,max(sigma_)*expand_factor,
max(sigma_)*expand_factor) + x[2])
xy_ss <- list() # create empty list to hold coarse state-space coordinates for each site
# create empty list to hold fine-scale state-space coordinates for each site
grid_out <- list()
for(i in 1:dim(X)[3]){
xy_ss[[i]] <- as.matrix(expand.grid(x_coord[,i],y_coord[,i]))
# check to see if augmented coordinates are in trap range
poly_check <- sf::st_buffer(sf::st_cast(sf::st_sfc(sf::st_multipoint(xy_ss[[i]]),
crs = crs_),"POINT"),dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xcheck <- as.numeric(sf::st_intersects(poly_check,
sf::st_sfc(sf::st_multipoint(X[,,i]), crs = crs_)))
# remove augmented coordinates that are out of trap range
xy_ss[[i]] <- xy_ss[[i]][Xcheck == 1,]
grid_out[[i]] <- raster::rasterFromXYZ(xy_ss[[i]],crs= crs_)
raster::res(grid_out[[i]]) <- raster::res(
raster::rasterFromXYZ(grid_ind,crs= crs_))
}
# calculate the global extent of the state-space
ext <- lapply(grid_out, function(x) raster::extent(x))
# remake grid
grid_out <- array(unlist(lapply(grid_out, raster::coordinates)),
dim=c(dim(raster::coordinates(grid_out[[1]])),dim(X)[3]))
# create x limits for state-space
xlim <- lapply(ext, function(x) x[1:2])
# create y limits for state-space
ylim <- lapply(ext, function(x) x[3:4])
# get proportion of available habitat for each individual
prop_habitat <- numeric(nrow(s.st))
# first need to build up from individual state-space
for(i in 1:n0){
# first get trap indices
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
grid_i_rescale_x <- scales::rescale(grid_i[,1], to = c(0,dim(hab_mask)[2]),
from=xlim[[site[i]]])
grid_i_rescale_y <- scales::rescale(grid_i[,2], to = c(0,dim(hab_mask)[1]),
from=ylim[[site[i]]])
grid_i_rescale <- cbind(grid_i_rescale_x,grid_i_rescale_y)
prop_habitat[i] <- mean(apply(grid_i_rescale, 1, function(x)
hab_mask[(trunc(x[2])+1),(trunc(x[1])+1),site[i]]),na.rm=TRUE)
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X[,,site[i]]), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,,site[i]]
y_array[i,1:length(Xint),] <- y[i,Xint,]
} # end individual loop for detected individuals
n_vec <- as.vector(table(site[(n0+1):nrow(s.st)]))
aug_mat <- list() # create empty list to hold augmentation coordinates
for(i in 1:dim(X)[3]){
aug_mat[[i]] <- do.call(rbind, replicate(xy_ss[[i]],
n = ceiling(n_vec[i]/nrow(xy_ss[[i]])), simplify = FALSE))
aug_mat[[i]] <- aug_mat[[i]][1:n_vec[i],]
}
aug_all <- do.call(rbind, aug_mat)
for(i in 1:dim(X)[3]){
aug_all[which(site[(n0+1):nrow(s.st)]==i),1:2] <- aug_mat[[i]]
}
for(i in (n0+1):nrow(s.st)){
s.st[i,] <- aug_all[i-n0,]
poly_buff <- sf::st_buffer(sf::st_sfc(sf::st_point(s.st[i,]), crs = crs_),
dist = max(sigma_)*9, endCapStyle = "SQUARE")
Xint <- unlist(sf::st_intersects(poly_buff,
sf::st_cast(sf::st_sfc(sf::st_multipoint(X[,,site[i]]), crs = crs_),"POINT")))
Jind[i] <- length(Xint)
X_array[i,1:length(Xint),] <- X[Xint,,site[i]]
grid_i <- cbind(grid_ind[,1]+s.st[i,1],grid_ind[,2]+s.st[i,2])
grid_i_rescale_x <- scales::rescale(grid_i[,1], to = c(0,dim(hab_mask)[2]),
from=xlim[[site[i]]])
grid_i_rescale_y <- scales::rescale(grid_i[,2], to = c(0,dim(hab_mask)[1]),
from=ylim[[site[i]]])
grid_i_rescale <- cbind(grid_i_rescale_x,grid_i_rescale_y)
prop_habitat[i] <- mean(apply(grid_i_rescale, 1, function(x)
hab_mask[(trunc(x[2])+1),(trunc(x[1])+1),site[i]]),na.rm=TRUE)
ext_mat[i,] <- as.vector(raster::extent(raster::rasterFromXYZ(grid_i,crs= crs_)))
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim[[site[i]]])
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim[[site[i]]])
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
while(pOK==0){
s.st[i,1] <- runif(1,ext_mat[i,1],ext_mat[i,2])
s.st[i,2] <- runif(1,ext_mat[i,3],ext_mat[i,4])
sx.rescale <- scales::rescale(s.st[i,1], to = c(0,dim(hab_mask)[2]),
from=xlim[[site[i]]])
sy.rescale <- scales::rescale(s.st[i,2], to = c(0,dim(hab_mask)[1]),
from=ylim[[site[i]]])
pOK <- hab_mask[(trunc(sy.rescale)+1),(trunc(sx.rescale)+1),site[i]]
} # end hab check
} # end augmentation
} # end hab mask
} # end dimension 3
if(length(dim(X))==2){
if(isFALSE(hab_mask)){ # no habitat mask
return(list(y=y_array,X=X_array,grid = raster::coordinates(grid_out),
ext_mat=ext_mat,ext=ext,Jind=Jind,s.st=s.st))
} else
if(isFALSE(hab_mask)==FALSE){ # habitat mask
return(list(y=y_array,X=X_array,grid = raster::coordinates(grid_out),
prop_habitat=prop_habitat,ext_mat=ext_mat,
ext=ext,Jind=Jind,s.st=s.st))
}
}else # end dim 2
if(length(dim(X))==3){
if(isFALSE(hab_mask)){ # no habitat mask
return(list(y=y_array,X=X_array,grid = grid_out,
ext_mat=ext_mat,ext=ext,Jind=Jind,s.st=s.st))
} else
if(isFALSE(hab_mask)==FALSE){ # habitat mask
return(list(y=y_array,X=X_array,grid = grid_out,prop_habitat=prop_habitat,
ext_mat=ext_mat,ext=ext,Jind=Jind,s.st=s.st))
}
} # end dim 3
} # end function 'localize_classic'
#' Rescale trap coordinates, individual-level grid extent,
#' and starting activity center coordinates for local SCR approach
#'
#' @description Rescales inputs so that a habitat mask can be used with the
#' local SCR approach.
#'
#' @param X An array representing the coordinates of traps in
#' UTMs for each individual. This is returned from \code{\link{localize_classic}}.
#' @param ext An \code{Extent} object from the \code{raster} package. This is
#' returned from \code{\link{localize_classic}}.
#' @param ext_mat A matrix of individual state-space grid extents returned from
#' \code{\link{localize_classic}}.
#' @param s.st A matrix of starting activity center coordinates. This is
#' returned from \code{\link{localize_classic}}.
#' @param site Either \code{NULL} (if a 2D trap array is used) or a vector of
#' integers denoting which trap array an individual (either detected or
#' augmented) belongs to. Note that \code{site} is provided from
#' \code{\link{sim_classic}} when a 3D trap array is used. However, this
#' \code{site} variable must be correctly augmented based on the total
#' augmented population size (i.e., \code{M}).
#' @param hab_mask A matrix or array output from \code{\link{mask_polygon}} or
#' \code{\link{mask_raster}} functions.
#' @return
#' \itemize{
#' \item{\code{X}}{ Rescaled trap coordinates.}
#' \item{\code{ext_mat}}{ Rescaled matrix of individual state-space grid extents.}
#' \item{\code{s.st}}{ A matrix of rescaled starting activity center coordinates.}
#' }
#' @details This function is only meant to be used when habitat masking is
#' incorporated into a local SCR model. The function properly rescales inputs for
#' this SCR approach based on the habitat mask. Note that \code{pixelWidth} needs
#' to be included as an input in the model after inputs are rescaled to correctly
#' estimate the scaling parameter (i.e., 'sigma').
#' @author Daniel Eacker
#' @importFrom scales rescale
#' @seealso \code{\link{mask_polygon}}, \code{\link{mask_raster}}
#' @examples
#'\dontrun{
#' # simulate a single trap array with random positional noise
#' x <- seq(-1600, 1600, length.out = 6)
#' y <- seq(-1600, 1600, length.out = 6)
#' traps <- as.matrix(expand.grid(x = x, y = y))
#' # add some random noise to locations
#' set.seed(100)
#' traps <- traps + runif(prod(dim(traps)),-20,20)
#' mysigma = 300 # simulate sigma of 300 m
#' mycrs = 32608 # EPSG for WGS 84 / UTM zone 8N
#' pixelWidth = 100 # grid resolution
#'
#' # Simulated abundance
#' Nsim = 250
#'
#' # Create initial grid and extent (use a slightly bigger buffer to match
#' # scaled-up state-space below)
#' Grid = grid_classic(X = traps, crs_ = mycrs, buff = 3.7*mysigma, res = pixelWidth)
#'
#' # create polygon to use as a mask
#' library(sf)
#' poly = st_sfc(st_polygon(x=list(matrix(c(-2465,-2465,2530,-2550,2650,2550,
#' 0,2550,-800,2500,-2350,2300,-2465,-2465),ncol=2, byrow=TRUE))), crs = mycrs)
#'
#' # make simple plot
#' par(mfrow=c(1,1))
#' plot(Grid$grid, pch=20)
#' points(traps, col="blue",pch=20)
#' plot(poly, add=TRUE)
#'
#' # create habitat mask from polygon
#' hab_mask = mask_polygon(poly = poly, grid = Grid$grid, crs_ = mycrs,
#' prev_mask = NULL)
#'
#' # simulate SCR data
#' data3d = sim_classic(X = traps, ext = Grid$ext, crs_ = mycrs, sigma_ = mysigma,
#' prop_sex = 1, N = Nsim, K = 4, base_encounter = 0.15,
#' enc_dist = "binomial", hab_mask = hab_mask, setSeed = 100)
#'
#' # generate initial activity center coordinates for 2D trap array with
#' # habitat mask
#' s.st = initialize_classic(y=data3d$y, M=500, X=traps, ext = Grid$ext,
#' hab_mask = hab_mask)
#'
#' # now use grid_classic to create an individual-level state-space (with origin 0, 0)
#' Grid_ind = grid_classic(X = matrix(c(0,0),nrow=1), crs_ = mycrs,
#' buff = 3*mysigma, res = 100)
#'
#' # now localize the data components created above
#' local_list = localize_classic(y = data3d$y, grid_ind = Grid_ind$grid, X=traps,
#' crs_ = mycrs, sigma_ = mysigma, s.st = s.st,
#' hab_mask = hab_mask)
#'
#' # rescale inputs
#' rescale_list = rescale_local(X = local_list$X, ext = local_list$ext,
#' ext_mat = local_list$ext_mat,
#' s.st = local_list$s.st, hab_mask = hab_mask)
#'
#' # inspect rescaled list
#' str(rescale_list)
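#'
#' # optional hedged check (assumes the return structure documented above):
#' # rescaled activity centers should now be in habitat-mask pixel units,
#' # i.e., x in [0, ncol(hab_mask)] and y in [0, nrow(hab_mask)]; per the
#' # details, pixelWidth would also be passed to the model so 'sigma' can be
#' # reported on the original scale
#' apply(rescale_list$s.st, 2, range)
#' dim(hab_mask)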
#'}
#' @name rescale_local
#' @export
rescale_local <- function(X, ext, ext_mat, s.st, site = NULL, hab_mask){
if(is.null(hab_mask)){
stop("Must include habitat mask for rescaling of inputs")
}
rescale_list <- list(X=X,ext_mat=ext_mat,s.st=s.st)
# for non-clustered traps
if(!inherits(ext, "list")){
rescale_list$X[,,1] <- scales::rescale(X[,,1], to = c(0,dim(hab_mask)[2]),
from=ext[1:2])
rescale_list$X[,,2] <- scales::rescale(X[,,2], to = c(0,dim(hab_mask)[1]),
from=ext[3:4])
rescale_list$s.st[,1] <- scales::rescale(s.st[,1],
to = c(0,dim(hab_mask)[2]), from=ext[1:2])
rescale_list$s.st[,2] <- scales::rescale(s.st[,2],
to = c(0,dim(hab_mask)[1]), from=ext[3:4])
rescale_list$ext_mat[,1:2] <- scales::rescale(ext_mat[,1:2],
to = c(0,dim(hab_mask)[2]), from=ext[1:2])
rescale_list$ext_mat[,3:4] <- scales::rescale(ext_mat[,3:4],
to = c(0,dim(hab_mask)[1]), from=ext[3:4])
}else
if(inherits(ext, "list")){ # for clustered traps
for(g in 1:dim(X)[3]){ # loop over trap arrays
rescale_list$X[which(site==g),,1] <- scales::rescale(X[which(site==g),,1],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$X[which(site==g),,2] <- scales::rescale(X[which(site==g),,2],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
rescale_list$s.st[which(site==g),1] <- scales::rescale(s.st[which(site==g),1],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$s.st[which(site==g),2] <- scales::rescale(s.st[which(site==g),2],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
# check that starting coordinates are still in bounds
rescale_list$s.st[which(site==g),1] <-
ifelse(rescale_list$s.st[which(site==g),1]>=dim(hab_mask)[2],
rescale_list$s.st[which(site==g),1]-1,rescale_list$s.st[which(site==g),1])
rescale_list$s.st[which(site==g),2] <-
ifelse(rescale_list$s.st[which(site==g),2]>=dim(hab_mask)[1],
rescale_list$s.st[which(site==g),2]-1,rescale_list$s.st[which(site==g),2])
rescale_list$ext_mat[which(site==g),1:2] <-
scales::rescale(ext_mat[which(site==g),1:2],
to = c(0,dim(hab_mask)[2]), from=ext[[g]][1:2])
rescale_list$ext_mat[which(site==g),3:4] <-
scales::rescale(ext_mat[which(site==g),3:4],
to = c(0,dim(hab_mask)[1]), from=ext[[g]][3:4])
}
}
return(rescale_list)
} # end function 'rescale_local'