## =============================================================================
#' Calculate Fetch Distances for a Dataset
#'
#' @description
#' Calculates the upwind distance (fetch) containing a given percentage of the
#' flux footprint for each record in a dataset, using an analytical footprint
#' model.
#'
#' @param data A data frame containing the meteorological variables required by
#' the chosen model: "ws", "ustar", and "zeta" for "KM01", or "l" for "H00".
#' @param height The tower (measurement) height in meters.
#' @param displacement The zero-plane displacement height in meters. Defaults
#' to 'metadata$displacement' in the global environment.
#' @param roughness_length The aerodynamic roughness length in meters. Defaults
#' to 'metadata$roughness_length' in the global environment.
#' @param percent The flux source area percentage(s) for which fetch distances
#' are calculated. The character value "peak" ("KM01") or "max" ("H00") can
#' also be included to request the distance of the footprint peak.
#' @param method A character string naming the footprint model, either "KM01"
#' (Kormann & Meixner 2001) or "H00" (Hsieh et al. 2000).
#'
#' @return A data frame of fetch distances in meters, one column per requested
#' percent, or a vector if only one value was requested.
#' @export
#'
#' @examples
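#' # A minimal sketch (not run): assumes 'flux' is a data frame with columns
#' # 'ws', 'ustar', and 'zeta', and that 'metadata' provides 'displacement'
#' # and 'roughness_length'.
#' \dontrun{
#' fetch <- calculate_fetch(flux, height = 3, percent = c(50, 80),
#' method = "KM01")
#' }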
calculate_fetch <- function(data, height, displacement = metadata$displacement,
roughness_length = metadata$roughness_length,
percent, method = c("KM01", "H00")) {
cat(paste0("\n", "Calculating fetch lengths", "\n"))
# Simplify variable names
z <- height
zd <- displacement
zo <- roughness_length
x <- rep(NA, nrow(data))
p <- sort(percent)
method <- match.arg(method)
pbar <- dplyr::progress_estimated(length(x))
if (method == "KM01") {
vars <- c("ws", "ustar", "zeta")
if (!all(vars %in% names(data))) {
stop("Missing ", paste0(
vars[!(vars %in% names(data))], collapse = ", "
), ".")
}
out <- data[, 0]
names <- dplyr::if_else(
!suppressWarnings(is.na(as.numeric(p))),
paste0("x", p, "p"),
paste0("x", p)
)
out[, names] <- NA
for (i in seq_along(x)) {
# Set model inputs - Kormann & Meixner 2001
ws <- data$ws[i]
ustar <- data$ustar[i]
zeta <- data$zeta[i]
# Execute model function
out[i, ] <- kormann_ftp_dist(ws, ustar, zeta, z, zd, p)
pbar$tick()$print()
}
# Set varnames and units
varnames(out) <- paste0(
"calculate_fetch(percent = ", names(out), ", method = ", method, ")"
)
tidyflux::units(out) <- rep("m", ncol(out))
} else if (method == "H00") {
if (!"l" %in% names(data)) stop("Missing l.")
out <- data[, 0]
if (is.numeric(p)) {
names <- paste0("x", sort(p * 100), "p")
} else names <- p
out[, names] <- NA
for (i in seq_along(x)) {
# Set model inputs - Hsieh 2000
l <- data$l[i]
# Execute model function
out[i, ] <- hsieh_ftp_dist(z, zd, zo, l, p)
pbar$tick()$print()
}
}
# Return vector if only one p was requested
if (ncol(out) < 2) out <- out[, 1]
return(out)
}
## =============================================================================
#' Calculate Fetch Distance Using Hsieh (2000) Model
#'
#' @description
#' Calculates the upwind distance containing a given fraction of the flux
#' footprint using the analytical model of Hsieh et al. (2000).
#'
#' @param z The tower (measurement) height in meters.
#' @param zd The zero-plane displacement height in meters.
#' @param zo The aerodynamic roughness length in meters.
#' @param l The Monin-Obukhov length in meters.
#' @param p The footprint fraction (0-1) for which the fetch distance is
#' calculated, or "max" to return the distance of the footprint peak.
#'
#' @return The fetch distance in meters, or NA if 'l' is missing.
#' @export
#'
#' @examples
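#' # Illustrative values: distance containing 80% of the footprint for a 3-m
#' # tower under moderately unstable conditions (l = -50 m)
#' hsieh_ftp_dist(z = 3, zd = 0.2, zo = 0.05, l = -50, p = 0.8)
#' # Distance of the footprint peak
#' hsieh_ftp_dist(z = 3, zd = 0.2, zo = 0.05, l = -50, p = "max")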
hsieh_ftp_dist <- function(z, zd, zo, l, p) {
# gap-fill l prior to using this model
if (!is.na(l)) {
# define and calculate footprint parameters
k <- 0.4
zm <- z - zd
zu <- zm * (log(zm / zo) - 1 + (zo / zm))
if (abs(zu / l) < 0.04) {
D <- 0.97
P <- 1
} else if (zu / l < 0) {
D <- 0.28
P <- 0.59
} else if (zu / l > 0) {
D <- 2.44
P <- 1.33
}
# footprint length-scale parameter, D * zu^P * |L|^(1 - P)
dp <- D * zu^P * abs(l)^(1 - P)
# calculate x if a flux fraction is given
if (is.numeric(p)) {
# define cumulative (crosswind-integrated) footprint function
f <- function(x) exp(-dp / (k^2 * x))
# iterate x until the fraction p is reached
for (i in 1:10000) {
fp <- f(i)
fx <- i - 1
if (fp > p) break
}
# calculate the distance of the footprint peak if "max" is requested
} else if (p == "max") fx <- dp / (2 * k^2)
} else {
fx <- NA
}
return(fx)
}
## =============================================================================
#' Calculate Fetch Distance Using Kormann & Meixner (2001) Model
#'
#' @description
#' Calculates the upwind distance containing a given percentage of the flux
#' footprint using the analytical model of Kormann & Meixner (2001).
#'
#' @param ws Wind speed in m s-1.
#' @param ustar Friction velocity in m s-1.
#' @param zeta The dimensionless stability parameter (z - zd) / L.
#' @param z The tower (measurement) height in meters.
#' @param zd The zero-plane displacement height in meters.
#' @param p A numeric vector of flux percentages (0-100); "peak" can also be
#' included to request the distance of the footprint peak.
#'
#' @return A vector of fetch distances in meters, or NA values if any
#' meteorological input is missing.
#' @export
#'
#' @examples
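#' # Illustrative values: distances containing 50% and 80% of the footprint,
#' # plus the distance of the footprint peak, for a 3-m tower under slightly
#' # unstable conditions
#' kormann_ftp_dist(
#' ws = 2.5, ustar = 0.3, zeta = -0.2, z = 3, zd = 0.2, p = c(50, 80, "peak")
#' )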
kormann_ftp_dist <- function(ws, ustar, zeta, z, zd, p) {
if (!is.na(ws) & !is.na(ustar) & !is.na(zeta)) {
# Calculate model parameters
zm <- z - zd # measurement height
k <- 0.4 # Von Karman constant
if (zeta > 0) {
# similarity relations
phi_m <- 1 + 5 * zeta
phi_c <- phi_m
# exponent of diffusivity power law
n <- 1 / phi_m
} else if (zeta <= 0) {
# similarity relations
phi_m <- (1 - 16 * zeta)^-0.25
phi_c <- (1 - 16 * zeta)^-0.5
eta <- (1 - 16 * zeta)^0.25
# exponent of the diffusivity power law
n <- (1 - 24 * zeta) / (1 - 16 * zeta)
}
# proportionality constant of the diffusivity power law (Eqs. 11 & 32)
key <- k * ustar * zm / (phi_c * zm^n)
# exponent of the wind speed power law
m <- ustar * phi_m / (k * ws)
# proportionality constant of the wind speed power law (Eqs. 11 & 31)
U <- ws / (zm^m)
# intermediate parameters
r <- 2 + m - n # shape factor
mu <- (1 + m) / r # constant
xi <- U * zm^r / (r^2 * key) # flux length scale
# Calculate x if flux percents are given
all_p <- p
p <- as.numeric(all_p[!suppressWarnings(is.na(as.numeric(all_p)))])
fx <- numeric(0) # allows "peak"-only requests to append below
if (length(p) > 0) {
# Define footprint model as a function of x (distance from tower)
f <- function(x) xi^mu * exp(-xi / x) / (x^(1 + mu) * gamma(mu))
p <- sort(p)
fx <- rep(NA, length(p))
x <- 1
while (x < 10000) {
fp <- try(integrate(f, 0, x)$value, silent = TRUE)
if (inherits(fp, "try-error")) {
# Certain (uncommon) meteorological conditions result in a non-finite
# integral of the footprint function: set these records to NA
fp <- NA
break
}
# Record the current distance for the next unmet percent, if reached
idx <- which(fp > (p / 100) & is.na(fx))[1]
if (!is.na(idx)) fx[idx] <- x
if (!anyNA(fx)) break # finished if all fetch lengths are found
x <- x + 1
}
}
if ("peak" %in% all_p) {
peak <- xi / (1 + mu)
fx <- c(fx, peak)
}
} else {
# Cannot get fetch lengths if meteorological parameters are absent
fx <- rep(NA, length(p))
}
return(fx)
}
fp_grid <- function(extent, d, n) {
if (missing(extent) & missing(d) & missing(n)) {
extent <- c(-500, 500, -500, 500)
dx <- 1
dy <- 1
nx <- 500
ny <- 500
} else if (!missing(extent)) {
if (!missing(d)) {
dx <- d
dy <- d
nx <- (extent[2] - extent[1]) / dx
ny <- (extent[4] - extent[3]) / dy
} else {
if (missing(n)) {
nx <- 500
ny <- 500
} else {
nx <- n
ny <- n
}
dx <- (extent[2] - extent[1]) / nx
dy <- (extent[4] - extent[3]) / ny
}
} else if (!missing(d) & !missing(n)) {
dx <- d
dy <- d
nx <- n
ny <- n
extent <- c(-nx * dx / 2, nx * dx / 2, -ny * dy / 2, ny * dy / 2)
} else if (!missing(d)) {
extent <- c(-500, 500, -500, 500)
dx <- d
dy <- d
nx <- (extent[2] - extent[1]) / dx
ny <- (extent[4] - extent[3]) / dy
} else if (!missing(n)) {
nx <- n
ny <- n
extent <- c(-500, 500, -500, 500)
dx <- (extent[2] - extent[1]) / nx
dy <- (extent[4] - extent[3]) / ny
}
# Put extent into more convenient vars
xmin <- extent[1]
xmax <- extent[2]
ymin <- extent[3]
ymax <- extent[4]
extent <- raster::extent(extent)
# Define physical domain in cartesian coordinates
x <- seq(xmin, xmax, length.out = nx + 1)
y <- -seq(ymin, ymax, length.out = ny + 1)
x_2d <- matrix(rep(x, each = ny + 1), nrow = ny + 1)
y_2d <- matrix(rep(y, nx + 1), nrow = ny + 1)
f_2d <- matrix(0, nrow = ny + 1, ncol = nx + 1)
list(x = x_2d, y = y_2d, f = f_2d)
}
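# Example: fp_grid(d = 1, n = 500) returns 501 x 501 coordinate and footprint
# matrices spanning -250 m to 250 m in each direction at 1-m resolution.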
fp_rotate <- function(a) {
r <- a * pi / 180 # degrees to radians
matrix(c(cos(r), sin(r), -sin(r), cos(r)), nrow = 2, ncol = 2)
}
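# Example: fp_rotate(90) %*% c(1, 0) returns c(0, 1), i.e. the unit x-vector
# rotated 90 degrees counterclockwise.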
## =============================================================================
#' Two-dimensional footprint models
#'
#' Create a two-dimensional matrix of footprint probabilities.
#'
#' @param timestamp A vector of class POSIXct
#' @param wd A numeric vector representing wind direction in degrees from North.
#' @param ustar A numeric vector representing friction velocity in units
#' m s-1.
#' @param l A numeric vector representing Monin-Obukhov length in units m.
#' @param v_sd A numeric vector representing the standard deviation of the
#' crosswind component.
#' @param ws A numeric vector representing wind speed in units m s-1.
#' @param fetch An integer specifying the maximum length in each direction to
#' calculate footprint probabilities. Defaults to 100 * (z - zd).
#' @param grid An integer specifying the number of grid cells to be used in
#' calculating footprint probabilities. Defaults to twice the fetch length
#' (i.e. a 1-m resolution).
#' @param model A character string naming the model to be used in footprint
#' calculations. Can be "KM01" for the Kormann & Meixner (2001) model or "H00"
#' for the Hsieh et al. (2000) model.
#' @param z The tower height in meters. Defaults to an attempt to retrieve the
#' value from the 'tower_height' variable in the 'metadata' list in the global
#' environment.
#' @param zd The zero-plane displacement height in meters. Defaults to an
#' attempt to retrieve the value from the 'displacement' variable in the
#' 'metadata' list in the global environment.
#' @param zo The aerodynamic roughness length in meters. Defaults to an attempt
#' to retrieve the value from the 'roughness_length' variable in the
#' 'metadata' list in the global environment.
#' @param weights An optional numeric vector of weights (e.g. fluxes) used to
#' scale each footprint before output.
#' @param percent_out Logical; currently unused.
#' @param raster_out Logical; if TRUE, each footprint is returned as a
#' georeferenced RasterLayer (requires 'coords').
#' @param coords A numeric vector giving the tower coordinates (northing,
#' easting) used to georeference the raster output.
#' @param clip_area An optional spatial object used to mask the raster output.
#' @param normalize Logical; if TRUE (default), each footprint is normalized so
#' that its probabilities sum to 1.
#'
#' @return A named list with one element per record, each containing the
#' footprint probability matrix (with its x and y distance grids) or a
#' RasterLayer if 'raster_out = TRUE'. Element names are the numeric
#' timestamps.
#' @export
#'
#' @examples
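#' # A minimal sketch (not run): a single half-hourly record with illustrative
#' # values; 'z', 'zd', and 'zo' are passed explicitly rather than read from
#' # 'metadata'.
#' \dontrun{
#' fp <- fp_calculate(
#' timestamp = as.POSIXct("2019-07-01 12:30", tz = "UTC"),
#' wd = 225, ustar = 0.3, l = -50, v_sd = 0.4, ws = 2.5,
#' z = 3, zd = 0.2, zo = 0.05
#' )
#' }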
fp_calculate <- function(timestamp, wd, ustar, l, v_sd, ws, weights = NULL,
fetch = (z - zd) * 100, grid = 2 * fetch,
model = c("KM01", "H00"), percent_out = FALSE,
z = metadata$tower_height, zd = metadata$displacement,
zo = metadata$roughness_length, raster_out = FALSE,
coords, clip_area = NULL, normalize = TRUE) {
model <- match.arg(model)
# Check inputs required for either model
if (!is.null(weights)) check_input(weights, "numeric_na")
if (any(missing(wd), missing(ustar), missing(l), missing(v_sd))) {
stop("All models require 'wd', 'ustar', 'l', and 'v_sd'.")
}
# Check inputs based on selected model
if (model == "KM01") {
# Requires z, zd, zo, l, wd, ustar, v_sd, ws
if (missing(ws)) stop("KM01 model requires 'ws'.")
lens <- c(length(ws), length(wd), length(ustar), length(l), length(v_sd))
} else if (model == "H00") {
# Requires z, zd, zo, l, wd, ustar, v_sd
lens <- c(length(wd), length(l), length(v_sd), length(ustar))
}
# Check input lengths
if (min(lens) == max(lens)) {
len <- lens[1]
} else stop("All variables must be of same length.", call. = FALSE)
# Create general spatial grid
dm <- fp_grid(d = 1, n = grid)
# Create empty list to hold footprints
out <- vector("list", length = len)
# Begin loop for calculating footprint at each input record
pbar <- dplyr::progress_estimated(len)
for (i in 1:len) {
# Check required variables
if (model == "KM01") {
vars <- c(ws[i], wd[i], ustar[i], l[i], v_sd[i])
} else if (model == "H00") {
vars <- c(wd[i], ustar[i], l[i], v_sd[i])
}
if (!is.null(weights)) vars <- c(vars, weights[i])
# All required variables must be present - if not return empty matrix
if (anyNA(vars)) {
fpp <- matrix(NA, nrow = grid + 1, ncol = grid + 1)
out[[i]] <- list(x = dm$x, y = dm$y, z = fpp)
names(out)[[i]] <- format(as.numeric(timestamp[i]), scientific = FALSE)
pbar$tick()$print()
next
}
# Construct matrix to hold calculated values at each grid point
phi <- matrix(0, nrow = grid + 1, ncol = grid + 1)
#browser()
# Wind direction and mathematical definitions of angles
theta <- ((360 - wd[i]) %% 360) * (pi / 180)
x_ <- dm$x * cos(theta) - dm$y * sin(theta)
y_ <- dm$x * sin(theta) + dm$y * cos(theta)
#fpd <- sqrt(dm$y^2 + dm$x^2)
#fpa <- (atan2(dm$x, dm$y) * 180 / pi) - wd[i]
#x_ <- cos(fpa / 57) * fpd
#y_ <- sin(fpa / 57) * fpd
#x_ <- dm$x
#y_ <- dm$y
# Subset downwind area of grid for calculating contributions
dw <- x_ > 0
# Execute chosen model
if (model == "KM01") {
# Kormann model: not valid if abs(zeta) > 3
if (!dplyr::between((z - zd) / l[i], -3, 3)) {
fpp <- matrix(NA, nrow = grid + 1, ncol = grid + 1)
out[[i]] <- list(x = dm$x, y = dm$y, z = fpp)
names(out)[[i]] <- format(as.numeric(timestamp[i]), scientific = FALSE)
pbar$tick()$print()
next
}
# Evaluate model across grid points
phi[dw] <- kormann_model(
x_, y_, ws[i], ustar[i], l[i], v_sd[i], z, zd, zo, grid
)
} else if (model == "H00") {
# Hsieh model
phi[dw] <- hsieh_model(
x_, y_, ustar[i], l[i], v_sd[i], z, zd, zo, grid
)
}
# Output footprint as a list of matrices
out[[i]] <- list(x = dm$x, y = dm$y, z = phi)
# Prepare for output differently depending on output type
if (raster_out) {
# Convert matrices to raster
out[[i]] <- fp_rasterize(out[[i]], coords, clip_area)
phi_sum <- raster::cellStats(out[[i]], sum)
# Multiply by weights if indicated
if (!is.null(weights)) {
out[[i]] <- raster::calc(out[[i]], function(x) x * weights[i])
phi_sum <- raster::cellStats(out[[i]], sum)
}
# Normalize *after* clipping
if (normalize) out[[i]] <- raster::calc(out[[i]], function(x) x / phi_sum)
# Include phi_sum as an attribute
attr(out[[i]], "phi_sum") <- phi_sum
} else {
phi_sum <- sum(sum(phi))
# Multiply by weights if indicated
if (!is.null(weights)) {
phi <- phi * weights[i]
phi_sum <- sum(sum(phi))
}
# Normalize integrated footprint to 1
if (normalize) out[[i]]$z <- phi / phi_sum else out[[i]]$z <- phi
# Include phi_sum in output list
out[[i]]$phi_sum <- phi_sum
}
# Set output name as the numeric timestamp
names(out)[[i]] <- format(as.numeric(timestamp[i]), scientific = FALSE)
pbar$tick()$print()
}
out
}
#' Average Two-Dimensional Footprints
#'
#' Average a list of footprints produced by fp_calculate() into a single
#' footprint climatology, optionally weighted by a flux or other variable.
#'
#' @param x A list of footprints as returned by fp_calculate().
#' @param weights An optional numeric vector, the same length as 'x', used to
#' weight each footprint in the average.
#' @param output A character string, either "matrix" (default) or "raster",
#' giving the class of the returned object.
#'
#' @return A list with elements 'x', 'y', and 'z' holding the averaged
#' footprint probabilities, or a RasterLayer if output = "raster".
#' @export
#'
#' @examples
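#' # A minimal sketch (not run): 'fps' is a list of footprints from
#' # fp_calculate() and 'fc' is an assumed flux vector of the same length
#' # used for weighting.
#' \dontrun{
#' avg <- fp_average(fps)
#' avg_weighted <- fp_average(fps, weights = fc)
#' }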
fp_average <- function(x, weights = NULL, output = c("matrix", "raster")) {
#browser()
class <- class(x)
if (class == "RasterLayer") {}
output <- match.arg(output)
# Get grid size from input object
grid <- length(x[[1]][[1]][, 1])
len <- length(x)
if (len >= 1) {
if (!is.null(weights)) {
avg_ftp <- list(
z = matrix(0, nrow = grid, ncol = grid),
fpw = matrix(0, nrow = grid, ncol = grid)
)
} else avg_ftp <- list(z = matrix(0, nrow = grid, ncol = grid))
#finput <- t(mapply(list, Ws, Wd, ustar, zol, sigma_v))
count <- 0
pbar <- dplyr::progress_estimated(len)
for (i in 1:len) {
fp <- x[[i]]
if (any(!is.na(fp$z))) {
if (!is.null(weights)) {
# If using weights, only include record if both fp and weight exist
if (!is.na(weights[i])) {
avg_ftp$z <- avg_ftp$z + fp$z
avg_ftp$fpw <- avg_ftp$fpw + fp$z * weights[i]
count <- count + 1
}
} else if (is.null(weights)) {
# Only need fp if not using weights
avg_ftp$z <- avg_ftp$z + fp$z
count <- count + 1
}
}
pbar$tick()$print()
}
}
avg_ftp$z <- avg_ftp$z / count
if (!is.null(weights)) {
# Similar weighting routine to Budishchev et al. (2014)
wsum <- sum(avg_ftp$fpw, na.rm = TRUE) # equals sum of weights
avg_ftp$fpw <- avg_ftp$fpw / wsum
#avg_ftp$fpw <- avg_ftp$fpw / count # this preserves the weights units
}
out <- list(
x = fp$x, # distance east, from last footprint used in avg
y = fp$y, # distance north, from last footprint used in avg
z = if (is.null(weights)) avg_ftp$z else avg_ftp$fpw # probability
)
if (output == "raster") {
out <- raster::raster(out)
}
out
}
kormann_model <- function(x, y, ws, ustar, l, v_sd, z, zd, zo, grid) {
# Not meant to be used by itself - use fp_calculate instead
# Construct helper matrices
h0 <- matrix(0, nrow = grid + 1, ncol = grid + 1)
h1 <- matrix(0, nrow = grid + 1, ncol = grid + 1)
h2 <- matrix(0, nrow = grid + 1, ncol = grid + 1)
h3 <- matrix(0, nrow = grid + 1, ncol = grid + 1)
## KORMANN MODEL - via Forbrich et al. (2011)
# Set model constants
zm <- z - zd # aerodynamic measurement height
k <- 0.4 # Von Karman constant
zmol <- zm / l
# limited to -3 to +3 zeta
# Dimensionless gradient functions of wind and temp profiles (Dyer 1974)
if (l > 0) {
phi <- 1 + 5 * zmol
phim <- 1 + 5 * zmol
phic <- 1 + 5 * zmol
m <- (1 + 5 * (zmol)) / (log(zm / zo) + 5 * zmol)
n <- 1 / (1 + 5 * zmol)
} else if (l < 0) {
zeta <- (1 - 16 * (zmol))^(-1 / 4)
phi <- 1 / (zeta^2)
phim <- (1 - 16 * (zmol))^(-1 / 4)
phic <- (1 - 16 * (zmol))^(-1 / 2)
m <- ustar / k / zeta / ws
n <- (1 - 24 * zmol) / (1 - 16 * zmol)
}
# Coefficients of diffusivity power law (Eqs. A.4-A.5)
# Many different ways to calculate U
#U <- (ustar * log((zm / zo) + (phim))) / (k * zm^m)
#U <- (ustar * phim) / (k * m * zm^m) # neftel
#kappa <- (k * ustar * zm) / (phic * zm^n)
kappa <- (k * ustar * zm) / (phi * zm^n)
U <- ws / zm^n
# Intermediate parameters (Eqs. A.6-A.7; p = m)
r <- 2 + m - n
mu <- (1 + m) / r
# Flux length scale (Eq. A.8)
xi <- (U * zm^r) / (r^2 * kappa)
# Neftel parameters
A <- 1 + mu
B <- (U * (zm^r)) / (r^2 * kappa)
C <- B^mu / gamma(mu)
D <- v_sd / U * gamma(1 / r) / gamma(mu) * (U / (r^2 * kappa))^(m / r)
E <- (r - m) / r
# Subset downwind area of grid for calculating contributions
dw <- x > 0
# Splitting Eq 2.1 into a few helpers as per OzFlux
h0[dw] <- x[dw]^E
h1[dw] <- (sqrt(2 * pi) * D * (h0[dw]))
h2[dw] <- exp(-(y[dw]^2) / (2 * (D * (h0[dw]))^2))
h3[dw] <- C * exp(-B / x[dw]) * (x[dw]^(-A))
# Combine helpers - two-dimensional footprint function (Eq. 2.1)
h2[dw] * h3[dw] / h1[dw]
}
hsieh_model <- function(x, y, ustar, l, v_sd, z, zd, zo, grid) {
# Not meant to be used by itself - use fp_calculate instead
# Construct helper matrices
f <- matrix(0, nrow = grid + 1, ncol = grid + 1)
sig <- matrix(0, nrow = grid + 1, ncol = grid + 1)
dy <- matrix(0, nrow = grid + 1, ncol = grid + 1)
## HSIEH MODEL - via Hsieh et al. (2000)
# Set model constants
zm <- z - zd # aerodynamic measurement height
k <- 0.4 # Von Karman constant
# define and calculate footprint parameters
zu <- zm * (log(zm / zo) - 1 + (zo / zm))
zl <- zu / l
if (abs(zl) < 0.04) {
d <- 0.97
p <- 1
} else if (zl < 0) {
d <- 0.28
p <- 0.59
} else if (zl > 0) {
d <- 2.44
p <- 1.33
}
dp <- d * zu^p * abs(l)^(1 - p)
# Subset downwind area of grid for calculating contributions
dw <- x > 0
# calculate cross-wind integrated footprint
f[dw] <- (dp / (k^2 * x[dw]^2)) * (exp(-dp / (k^2 * x[dw])))
# define and calculate 2D extension parameters
a1 <- 0.3
p1 <- 0.86
sig[dw] <- a1 * zo * (v_sd / ustar) * (x[dw] / zo) ^ p1
dy[dw] <- 1 / (sqrt(2 * pi) * sig[dw]) * exp(-0.5 * (y[dw] / sig[dw]) ^ 2)
# calculate 2D footprint adjusted by lateral diffusion
dy[dw] * f[dw]
}
fp_tidy <- function(x, weights = FALSE) {
rotate <- function(x) t(apply(x, 2, rev))
fpx <- x$x
fpy <- x$y
if (weights == FALSE) fpz <- x$z else fpz <- x$fpw
df <- data.frame(
x = numeric(length(fpx)),
y = numeric(length(fpy)),
z = numeric(length(fpz))
)
df$z <- as.vector(fpz)
df$x <- as.vector(fpx)
df$y <- as.vector(fpy)
t <- df[order(-df$z), ]
m <- xtabs(z ~ x + y, data = t)
f <- x
f$z <- rotate(rotate(rotate(unclass(m))))
f <- as.data.frame(as.table(f$z))
f$y <- as.numeric(as.character(f$y))
f$x <- as.numeric(as.character(f$x))
names(f)[3] <- "z"
f
}
fp_percent <- function(x, p, return = c("raster", "polygon")) {
return <- match.arg(return)
# Sort raster values from high to low
rs <- sort(as.vector(x), decreasing = TRUE)
# Calculate cumulative sums of footprint weights
cs <- cumsum(rs)
# Get the minimum value of cumulative sum less than p
rp <- min(rs[cs < p])
out <- x
# Set all other cells to NA
out[out < rp] <- NA
# Return a polygon if requested
if (return == "polygon") out <- fp_polygonize(out)
out
}
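# Example sketch (assumes 'fp' is a normalized footprint RasterLayer, e.g.
# from fp_calculate(..., raster_out = TRUE)):
# fp80 <- fp_percent(fp, p = 0.8) # cells making up the top 80% of the footprint
# fp80_poly <- fp_percent(fp, p = 0.8, return = "polygon")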
fp_is_empty <- function(fp) {
#browser()
class <- class(fp)
# If fp is a spatial object, it is not empty
if ("SpatialLinesDataFrame" %in% class) {
FALSE
} else if ("SpatialPolygons" %in% class) {
FALSE
} else if ("RasterLayer" %in% class) {
FALSE
} else if (length(fp) == 1) {
# If fp has length one, NA indicates it is empty
is.na(fp)
# If fp is a matrix, check contents
} else all(!is.finite(fp$z))
}
fp_rasterize <- function(x, coords, clip_area = NULL) {
#browser()
x$y <- x$y + coords[1] # add tower northing to grid
x$x <- x$x + coords[2] # add tower easting to grid
out <- fp_tidy(x)
out <- raster::rasterFromXYZ(
out, crs = sp::CRS("+proj=utm +zone=18 +ellps=WGS84")
)
# Mask to the clip area if one was supplied (may be passed as NULL)
if (!is.null(clip_area)) {
#out <- raster::crop(out, clip_area)
out <- raster::mask(out, clip_area)
}
out
}
fp_polygonize <- function(x) {
class <- class(x)
#browser()
if (fp_is_empty(x)) return(NA)
if (class == "RasterLayer") {
# Method for RasterLayer class footprint
# Set all raster values to 0 - they don't matter anymore
homog <- raster::calc(x, function(x) x * 0)
# Return polygon of the total area
raster::rasterToPolygons(homog, dissolve = TRUE)
} else if (class == "SpatialLinesDataFrame") {
# Method for SpatialLinesDataFrame class footprint
# Tidy ftp for creating polygon object
fort <- broom::tidy(x)
# Create spatial polygons object
poly <- sp::Polygon(fort[, 1:2])
polys <- sp::Polygons(list(poly), "poly")
sp::SpatialPolygons(
list(polys), proj4string = sp::CRS("+proj=utm +zone=18 +ellps=GRS80")
)
}
}
write_footprint <- function(x, path, matrix = "fpp", save_log_vals = FALSE,
tag = "fp") {
len <- length(x)
if (purrr::vec_depth(x) == 2) {
out <- x[[matrix]]
path_out <- paste0(path, paste0(tag, ".csv"))
write.table(out, path_out, sep = ",", row.names = FALSE, col.names = FALSE)
} else if (purrr::vec_depth(x) == 3) {
pbar <- dplyr::progress_estimated(len)
for (i in 1:len) {
out <- x[[i]][[matrix]]
if (save_log_vals) {
out <- log(out)
}
name_out <- paste0(tag, "_", names(x)[[i]], ".csv")
path_out <- paste0(path, name_out)
write.table(
out, path_out, sep = ",", row.names = FALSE, col.names = FALSE
)
pbar$tick()$print()
}
}
}
read_footprint <- function(path) {
files <- list.files(path)
len <- length(files)
out <- vector("list", len)
pbar <- dplyr::progress_estimated(len)
for (i in seq_along(files)) {
out[[i]] <- read.table(paste0(path, files[i]), sep = ",")
names(out)[[i]] <- stringr::str_replace(files[i], "fp_", "")
names(out)[[i]] <- stringr::str_replace(names(out)[[i]], "\\.csv", "")
pbar$tick()$print()
}
out
}
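# Example sketch (assumes footprints were saved with write_footprint() and
# that 'path' ends with a trailing slash, since file names are appended with
# paste0()):
# fps <- read_footprint("footprints/")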
spread_footprints <- function(x) {
len <- length(x[[1]]$fpp[1, ])
out <- NULL
for (i in seq_along(x)) {
fp <- as.data.frame(x[[i]]$fpp) %>%
tidyr::gather(var, val) %>%
dplyr::mutate(
x = rep(seq(1:len), times = len, each = 1),
y = rep(seq(1:len), times = 1, each = len)
) %>%
tidyr::unite("xy", x, y) %>%
dplyr::select(-var) %>%
tidyr::spread(xy, val) %>%
dplyr::mutate(timestamp = names(x)[[i]]) %>%
dplyr::select(timestamp, dplyr::everything())
out <- dplyr::bind_rows(out, fp)
}
out
}
project_footprint <- function(ftp, latitude, longitude, grid,
proj = c("longlat", "utm"), percents) {
# Probably going to deprecate this - much easier to work w/ rasters then
# convert to other spatial features if needed
## Create lat/long coordinates for each footprint grid box
proj <- match.arg(proj)
if (fp_is_empty(ftp)) return(NA)
fp_p <- percent_footprint(ftp)
footprint_c <- fp_p %>%
dplyr::mutate(
d = sqrt(x ^ 2 + y ^ 2),
# calculate theta, correct for quadrants
theta = ifelse(
x > 0 & y > 0, atan(y / x) * 180 / pi,
ifelse(
x > 0 & y < 0,
atan(y / x) * 180 / pi + 360, atan(y / x) * 180 / pi + 180
)
),
# convert theta to bearing using modulus
b = (450 - theta) %% 360,
# compute lat/long coordinates
lat = geosphere::destPoint(p = c(longitude, latitude), b = b, d = d)[, 2],
lon = geosphere::destPoint(p = c(longitude, latitude), b = b, d = d)[, 1]
) %>%
dplyr::select(x = lon, y = lat, z = z)
if (proj == "utm") {
x <- footprint_c$x
y <- footprint_c$y
z <- footprint_c$z
xy <- data.frame(ID = 1:length(x), X = x, Y = y, Z = z)
sp::coordinates(xy) <- c("X", "Y")
sp::proj4string(xy) <- sp::CRS("+proj=longlat +datum=WGS84")
xy_proj <- sp::spTransform(xy, sp::CRS("+proj=utm +zone=18 +ellps=WGS84"))
footprint_r <- raster::rasterize(
xy_proj,
raster::raster(
raster::extent(xy_proj), ncols = grid, nrows = grid,
sp::CRS("+proj=utm +zone=18 +ellps=WGS84")
),
"Z", fun = mean
)
}
# Convert footprint to projected raster
if (proj == "longlat") {
footprint_r <- raster::rasterize(
footprint_c[, 1:2],
raster::raster(
raster::extent(footprint_c[, 1:2]), ncol = grid, nrow = grid,
sp::CRS("+proj=longlat +datum=NAD83")
),
footprint_c[,3], fun = mean
)
}
# Convert raster to contour lines, return SpatialLinesDataFrame
raster::rasterToContour(footprint_r, levels = percents)
}
percent_footprint <- function(x, percent = 100, weights = FALSE) {
# Might deprecate this - way too clunky, and fp_percent should do the job
rotate <- function(x) t(apply(x, 2, rev))
fpx <- x$x
fpy <- x$y
fpz <- x$z
if (weights == TRUE) fpw <- x$fpw else fpw <- matrix(NA, nrow(fpx), ncol(fpx))
df <- data.frame(
x = numeric(length(fpx)),
y = numeric(length(fpy)),
z = numeric(length(fpz)),
zp = numeric(length(cumsum(fpz))),
w = numeric(length(fpw)),
wp = numeric(length(cumsum(fpw)))
)
df$z <- as.vector(fpz)
df$zp <- as.vector(df$zp)
df$x <- as.vector(fpx)
df$y <- as.vector(fpy)
df$w <- as.vector(df$w)
df$wp <- as.vector(df$wp)
t <- df[order(-df$z), ]
t$cumz <- cumsum(t$z) * 100
m <- xtabs(cumz ~ x + y, data = t, addNA = TRUE)
msub <- ifelse(m <= percent, m, NA)
f <- x
f$z <- rotate(rotate(rotate(unclass(msub))))
f <- as.data.frame(as.table(f$z))
f$y <- as.numeric(as.character(f$y))
f$x <- as.numeric(as.character(f$x))
names(f)[3] <- "z"
f
}
calculate_cover <- function(x, image, type) {
if (fp_is_empty(x)) return(NA)
fp_sub <- image %>%
raster::extract(x, df = TRUE, factors = TRUE)
tot <- nrow(fp_sub)
patch <- fp_sub %>%
tidyr::gather(key, value, -ID) %>%
dplyr::filter(value == rlang::as_name(type)) %>%
# Count number of cells in each patch type
dplyr::tally() %>%
dplyr::pull(n)
# Return proportion
patch / tot
}
calculate_overlap <- function(area, ftp) {
# 0-buffers to prevent topology issues
area <- rgeos::gBuffer(area, width = 0)
ftp <- rgeos::gBuffer(ftp, width = 0)
# calculate footprint area
ftp_area <- rgeos::gArea(ftp)
# create polygon of overlap
overlap <- raster::intersect(ftp, area)
# calculate overlap area
overlap_area <- rgeos::gArea(overlap)
# calculate proportion of ftp within site area
p_overlap <- overlap_area / ftp_area
return(p_overlap)
}
ndwi <- function(x) {
# Takes an RGB+NIR image and returns the normalized difference water index
num <- raster::subset(x, 2) - raster::subset(x, 4) # g - ni
den <- raster::subset(x, 2) + raster::subset(x, 4) # g + ni
num / den
}
ndvi <- function(x) {
# Takes an RGB+NIR image and returns the normalized difference vegetation index
num <- raster::subset(x, 4) - raster::subset(x, 1) # ni - r
den <- raster::subset(x, 4) + raster::subset(x, 1) # ni + r
num / den
}
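# Example sketch (assumes 'ortho' is a 4-band raster with bands ordered
# R, G, B, NIR):
# ortho <- raster::brick("ortho.tif") # hypothetical file
# ndwi(ortho)
# ndvi(ortho)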