knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
library(terra)
library(tidyterra)
library(PaGAn)
library(ggplot2)
library(fmesher)
library(INLA)
library(inlabru)
library(sf)
library(rgbif)
library(here)
source(file.path(here(), "R", "priorityMesh.R"))
source(file.path(here(), "R", "gbifDownload.R"))
# source("https://raw.githubusercontent.com/joechip90/PaGAn/master/R/priorityMesh.R")
# source("https://raw.githubusercontent.com/joechip90/PaGAn/master/R/gbifDownload.R")
# Define a boundary to work within (in latitude and longitude)
minLong <- 2.5
maxLong <- 15
minLat <- 56
maxLat <- 65
cutBox <- st_sfc(st_polygon(list(matrix(c(
  minLong, minLat,
  minLong, maxLat,
  maxLong, maxLat,
  maxLong, minLat,
  minLong, minLat
), ncol = 2, byrow = TRUE))), crs = gbifDefaultCRS())
# Set up a temporary directory to hold the downloaded files
workLoc <- tempdir()
options(timeout = max(300, getOption("timeout")))
# Location of a vector map of Norway (from the DIVA-GIS website)
norwayVectorLoc <- "https://biogeo.ucdavis.edu/data/diva/adm/NOR_adm.zip"
tempNorwayVectorLoc <- file.path(workLoc, "norwayVector.zip")
if(file.exists(tempNorwayVectorLoc)) {
  unlink(tempNorwayVectorLoc)
}
if(download.file(norwayVectorLoc, tempNorwayVectorLoc, mode = "wb", method = "libcurl") != 0) {
  stop("unable to download Norway data from server")
}
# Unzip the Norway vector data
tempNorwayVectorDir <- file.path(workLoc, "norwayVector")
if(dir.exists(tempNorwayVectorDir)) {
  unlink(tempNorwayVectorDir, recursive = TRUE)
}
dir.create(tempNorwayVectorDir)
unzip(tempNorwayVectorLoc, exdir = tempNorwayVectorDir)
# Read the national boundary, reproject it, and clip it to the bounding box
norwayVector <- st_intersection(
  st_transform(st_read(tempNorwayVectorDir, "NOR_adm0"), gbifDefaultCRS()),
  cutBox
)
ggplot(norwayVector) + geom_sf()
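# Not part of the original workflow: the download-and-unzip pattern here is
# repeated for the WorldClim data below, so it can be factored into a small
# helper.  A minimal sketch (fetchZip is an illustrative name, not a PaGAn
# function):
fetchZip <- function(url, name, workDir = workLoc) {
  # Download the archive, replacing any previous copy
  zipLoc <- file.path(workDir, paste0(name, ".zip"))
  if(file.exists(zipLoc)) unlink(zipLoc)
  if(download.file(url, zipLoc, mode = "wb", method = "libcurl") != 0) {
    stop("unable to download ", url)
  }
  # Extract it into a fresh directory and return that directory's path
  outDir <- file.path(workDir, name)
  if(dir.exists(outDir)) unlink(outDir, recursive = TRUE)
  dir.create(outDir)
  unzip(zipLoc, exdir = outDir)
  outDir
}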
# Specify the location of the WorldClim environmental covariate data
worldClimDataLoc <- "https://biogeo.ucdavis.edu/data/worldclim/v2.1/base/wc2.1_10m_bio.zip"
tempWorldClimLoc <- file.path(workLoc, "worldClim.zip")
if(file.exists(tempWorldClimLoc)) {
  unlink(tempWorldClimLoc)
}
if(download.file(worldClimDataLoc, tempWorldClimLoc, mode = "wb", method = "libcurl") != 0) {
  stop("unable to download WorldClim data from server")
}
# Unzip the WorldClim data
tempWorldClimDir <- file.path(workLoc, "worldClim")
if(dir.exists(tempWorldClimDir)) {
  unlink(tempWorldClimDir, recursive = TRUE)
}
dir.create(tempWorldClimDir)
unzip(tempWorldClimLoc, exdir = tempWorldClimDir)
# Retrieve the locations of the annual mean temperature (BIO1) and annual
# precipitation (BIO12) files
tempGIFLoc <- file.path(tempWorldClimDir, dir(tempWorldClimDir)[grepl("_1\\.tif$", dir(tempWorldClimDir), perl = TRUE)])
precGIFLoc <- file.path(tempWorldClimDir, dir(tempWorldClimDir)[grepl("_12\\.tif$", dir(tempWorldClimDir), perl = TRUE)])
envCovars <- rast(list(temp = rast(tempGIFLoc), prec = rast(precGIFLoc)))
# Make sure that the coordinate reference system of the administrative data is
# the same as that of the covariate data
norwayVector <- st_transform(norwayVector, crs = st_crs(envCovars))
# Crop the covariate data to the administrative data
envCovars <- crop(envCovars, norwayVector, mask = TRUE)
# Centre and scale the covariates for easier model fitting
envCovars$temp <- (envCovars$temp - mean(values(envCovars$temp), na.rm = TRUE)) / sd(values(envCovars$temp), na.rm = TRUE)
envCovars$prec <- (envCovars$prec - mean(values(envCovars$prec), na.rm = TRUE)) / sd(values(envCovars$prec), na.rm = TRUE)
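# Not part of the original workflow: the two scaling lines above discard the
# centring constants, which you may later want for scaling new covariate values
# or relating effect sizes back to raw units.  A minimal alternative sketch
# that stores them first (scaleConstants is an illustrative name).  Running it
# on already-scaled layers is harmless, since their mean is ~0 and sd ~1:
scaleConstants <- sapply(names(envCovars), function(layerName) {
  vals <- values(envCovars[[layerName]])
  c(mean = mean(vals, na.rm = TRUE), sd = sd(vals, na.rm = TRUE))
})
# Scale each layer using the stored constants
for(layerName in names(envCovars)) {
  envCovars[[layerName]] <- (envCovars[[layerName]] - scaleConstants["mean", layerName]) /
    scaleConstants["sd", layerName]
}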
# Plot the temperature data
ggplot() +
  geom_spatraster(aes(fill = temp), data = envCovars) +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(255, 106, 106, maxColorValue = 255),
                      na.value = NA)
# Plot the precipitation data
ggplot() +
  geom_spatraster(aes(fill = prec), data = envCovars) +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(125, 158, 192, maxColorValue = 255),
                      na.value = NA)
# An example dragonfly species to use and model
speciesToUse <- c(
  "Aeshna grandis"   # Blågrønnlibelle / Brown hawker
)
# Set up a query to pass to the GBIF occurrence API.  This query asks for just
# the records associated with the species list for Norway after 1960
queryText <- jsonOccFormulation(species = speciesToUse, order = "Odonata",
                                COUNTRY = "NO", YEAR.min = 1960,
                                DECIMAL_LATITUDE.range = c(minLat, maxLat),
                                DECIMAL_LONGITUDE.range = c(minLong, maxLong),
                                BASIS_OF_RECORD = NULL,
                                user = Sys.getenv("GBIFUser"), email = Sys.getenv("GBIFEmail"))
# We can look at the "taxonFrame" attribute of the query to get a data frame of
# the results of the taxon lookup based on the taxonomic search criteria
knitr::kable(head(attr(queryText, "taxonFrame")), "html")
cat(queryText)
# Retrieve the occurrence data from the GBIF servers
occData <- gbifOccDownload(user = Sys.getenv("GBIFUser"), pwd = Sys.getenv("GBIFPass"),
                           email = Sys.getenv("GBIFEmail"), body = queryText, tmploc = workLoc)
# Ensure that the occurrence data share the same CRS as the covariates
occData <- st_transform(occData, st_crs(envCovars))
ggplot() +
  geom_sf(data = norwayVector) +
  geom_sf(data = occData, size = 0.5)
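# Not part of the original vignette: GBIF downloads can contain multiple
# records with identical coordinates (e.g. repeat visits to the same site).
# For a point-process model duplicates may be genuine observations, so whether
# to drop them is an analysis decision; a minimal sketch of removing exact
# coordinate duplicates (occDataUnique is an illustrative name):
occDataUnique <- occData[!duplicated(st_coordinates(occData)), ]
nrow(occData) - nrow(occDataUnique)   # number of duplicated locations removed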
# Construct a mesh over the study region using the priorityMesh() helper
# sourced above
effMesh <- priorityMesh(envCovars, 100, loc = st_as_sfc(occData),
                        cutoff = 0.2, max.edge = c(1, 2) * 2)
autoplot(effMesh, show.initialLocs = FALSE)
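# For comparison, not part of the original workflow: a mesh built directly with
# fmesher's fm_mesh_2d_inla(), without whatever node prioritisation
# priorityMesh() applies.  The argument values simply mirror those used above:
plainMesh <- fm_mesh_2d_inla(loc = st_as_sfc(occData),
                             max.edge = c(1, 2) * 2,
                             cutoff = 0.2)
ggplot() + gg(plainMesh)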
# We define a helper function that looks up the value of a covariate at an
# arbitrary location
findCovar <- function(where, data, layer) {
  # Extract the values
  outVal <- eval_spatial(data, where, layer = layer)
  # Fill in missing values
  if(any(is.na(outVal))) {
    outVal <- bru_fill_missing(data, where, outVal, layer = layer)
  }
  outVal
}
# Define the model formula: a second-order polynomial of temperature and
# precipitation plus an intercept
standardLikeli <- geometry ~ temp * findCovar(.data., envCovars, "temp") +
  prec * findCovar(.data., envCovars, "prec") +
  tempSq * findCovar(.data., envCovars, "temp")^2 +
  precSq * findCovar(.data., envCovars, "prec")^2 +
  Intercept
# Fit the model
standardSDM <- bru(
  ~ temp(1) + tempSq(1) + prec(1) + precSq(1) + Intercept(1),
  like(
    formula = standardLikeli,
    family = "cp",
    data = occData,
    samplers = norwayVector,
    domain = list(geometry = effMesh)
  ),
  options = list(control.inla = list(int.strategy = "eb"))
)
# Make a prediction using the model
standardSDMPred <- predict(standardSDM, fm_pixels(effMesh, mask = norwayVector),
                           as.formula(paste(as.character(standardLikeli)[c(1, 3)], collapse = " ")))
# Plot the mean prediction
ggplot() +
  gg(standardSDMPred, mapping = aes(fill = mean), geom = "tile") +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(171, 130, 255, maxColorValue = 255),
                      na.value = NA)
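# Not part of the original vignette: since bru() returns an object inheriting
# from INLA's fit class, the usual coefficient summaries are available and are
# worth checking before interpreting the maps:
standardSDM$summary.fixed   # posterior means, sds and quantiles of the fixed effects
# summary(standardSDM)      # fuller overview of the fitted model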
# Plot the uncertainty around the prediction (width of the 95% credible interval)
standardSDMPred$uncert <- standardSDMPred$q0.975 - standardSDMPred$q0.025
ggplot() +
  gg(standardSDMPred, mapping = aes(fill = uncert), geom = "tile") +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(219, 112, 147, maxColorValue = 255),
                      na.value = NA)
# Construct the latent model components: a Matérn random field defined over the mesh
maternField <- inla.spde2.matern(effMesh)
# Fit the model
spatialSDM <- bru(
  ~ temp(findCovar(.data., envCovars, "temp"), model = "linear") +
    prec(findCovar(.data., envCovars, "prec"), model = "linear") +
    tempSq(findCovar(.data., envCovars, "temp")^2, model = "linear") +
    precSq(findCovar(.data., envCovars, "prec")^2, model = "linear") +
    spatSmooth(geometry, model = maternField) +
    Intercept(1),
  like(
    formula = geometry ~ .,
    family = "cp",
    data = occData,
    samplers = norwayVector,
    domain = list(geometry = effMesh)
  ),
  options = list(control.inla = list(int.strategy = "eb"))
)
# Predict the intensity of points (using both the spatial random effects and the covariates)
spatialSDMPred_full <- predict(spatialSDM, fm_pixels(effMesh, mask = norwayVector),
                               ~ temp + tempSq + prec + precSq + spatSmooth + Intercept)
ggplot() +
  gg(spatialSDMPred_full, geom = "tile") +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(171, 130, 255, maxColorValue = 255),
                      na.value = NA)
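# Not part of the original vignette: mapping the spatial random field on its
# own shows the residual spatial structure that the climate covariates do not
# capture.  A minimal sketch mirroring the prediction call above
# (spatialSDMPred_field is an illustrative name):
spatialSDMPred_field <- predict(spatialSDM, fm_pixels(effMesh, mask = norwayVector),
                                ~ spatSmooth)
ggplot() +
  gg(spatialSDMPred_field, geom = "tile") +
  scale_fill_gradient2(low = rgb(125, 158, 192, maxColorValue = 255),
                       high = rgb(219, 112, 147, maxColorValue = 255),
                       na.value = NA)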
# Predict the intensity using the climate covariates alone (no spatial random effects)
spatialSDMPred_climate <- predict(spatialSDM, fm_pixels(effMesh, mask = norwayVector),
                                  ~ temp + tempSq + prec + precSq + Intercept)
ggplot() +
  gg(spatialSDMPred_climate, geom = "tile") +
  scale_fill_gradient(low = rgb(255, 255, 240, maxColorValue = 255),
                      high = rgb(171, 130, 255, maxColorValue = 255),
                      na.value = NA)
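# Not part of the original vignette: inlabru's deltaIC() gives a quick
# information-criterion comparison between fitted models.  Note that this needs
# the models to have been fitted with DIC/WAIC computation switched on (e.g.
# options = list(control.compute = list(dic = TRUE, waic = TRUE)) in the bru()
# calls above), so the line below is a sketch rather than something that will
# run unchanged:
# deltaIC(standardSDM, spatialSDM, criterion = "DIC")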
# Simulate a first data set: a simple linear response to x
fakeDataOne <- data.frame(
  x = 1:100,
  y = rnorm(100, 4.0 * 1:100 + 1.0, 1.0)
)
# Simulate a second data set with an additional observed covariate
fakeObs <- rnorm(100)
fakeDataTwo <- data.frame(
  x = 1:100,
  obs = fakeObs,
  y = rnorm(100, 4.0 * 1:100 + 1.0 + -2.0 * fakeObs, 1.0)
)
# Fit a joint model: the x and Intercept components are shared between the two
# likelihoods, while obs enters only the second
fakeModel <- bru(
  ~ Intercept(1) + x + obs,
  like(
    formula = y ~ x + Intercept,
    data = fakeDataOne
  ),
  like(
    formula = y ~ x + obs + Intercept,
    data = fakeDataTwo
  )
)
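# Not part of the original vignette: the point of the joint specification above
# is that the x and Intercept components are informed by both data sets, while
# obs is informed only by the second.  The shared estimates can be inspected in
# the usual way:
fakeModel$summary.fixed
# summary(fakeModel)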