```r
knitr::opts_chunk$set(collapse = TRUE, comment = "#>")

library(tidyverse)
library(sf)
library(lubridate)
library(WeStCOMS)
```
The WeStCOMS (v1 and v2) mesh files are in the `data/` directory as .gpkg and .nc files. Both formats include the element index, element depth, element coordinates, and element nodes. Any of the four files can be loaded with the `loadMesh()` function.
```r
if(.Platform$OS.type == "unix") {
  sep <- "/"
  westcoms.dir <- paste0("/media/archiver/common/sa01da-work/",
                         c("minch2/Archive/", "WeStCOMS2/Archive/"))
  mesh.f <- paste0("/home/sa04ts/FVCOM_meshes/",
                   c("WeStCOMS_mesh.gpkg", "WeStCOMS2_mesh.gpkg"))
} else {
  sep <- "\\"
  westcoms.dir <- paste0("D:\\hydroOut\\",
                         c("minch2\\Archive\\", "WeStCOMS2\\Archive\\"))
  mesh.f <- paste0("data\\",
                   c("WeStCOMS_mesh.gpkg", "WeStCOMS2_mesh.gpkg"))
}
mesh.sf <- map(mesh.f, loadMesh)
mesh.nc <- map(mesh.f, ~loadMesh(.x, type="nc"))
str(mesh.sf)
mesh.nc
```
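As a quick visual check that the meshes loaded correctly, the sf version can be reprojected and plotted. This is a minimal sketch, not part of the extraction workflow, and plotting the full mesh may be slow:

```r
# Plot the WeStCOMS v2 mesh elements in WGS84 (lon/lat) for a quick look
mesh.sf[[2]] %>%
  st_geometry() %>%
  st_transform(4326) %>%
  plot(border = "grey70")
```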
The trinodes specify the three nodes that make up each triangular element. Note that the coordinates are Ordnance Survey coordinates on the British National Grid (EPSG:27700).
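If your site coordinates are recorded as lon/lat (WGS84) rather than easting/northing, reproject them to the mesh CRS before joining. A minimal sketch with a made-up location:

```r
# Hypothetical site in WGS84 (lon, lat), reprojected to British National Grid
pt <- st_sfc(st_point(c(-5.5, 56.5)), crs = 4326)
st_transform(pt, crs = 27700)
```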
Let's take some example sites; the corresponding sampling dates are already included in the file. For efficiency, we need to add the element id and trinode information from the mesh file to each site. We'll assign any date before 2019-05-01 to WeStCOMS v1 and any date from then onward to WeStCOMS v2.
```r
sampling.df <- read_csv("data\\example_sites.csv") %>%
  mutate(grid=if_else(ymd(date) < "2019-05-01", 1, 2)) %>%
  group_by(grid) %>%
  group_split() %>%
  map2(.x=., .y=mesh.sf,
       ~st_as_sf(.x, coords=c("easting", "northing"), remove=F, crs=27700) %>%
         st_join(., .y %>%
                   select(-area) %>%
                   rename(depth.elem=depth, site.elem=i)) %>%
         filter(!is.na(site.elem)) %>%
         st_drop_geometry) %>%
  bind_rows %>%
  mutate(depth=pmin(depth, depth.elem))
sampling.df
```
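The day summaries below use `q90`; if that helper isn't already available in your session, a stand-in matching its apparent intent (the 90th percentile, an assumption here) would be:

```r
# Assumed helper: 90th percentile summary (skip if q90 is already defined)
q90 <- function(x) quantile(x, probs = 0.9, na.rm = TRUE)
```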
Next, we define the variables to extract, along with the functions used to summarise across hours within each day (`dayFn`) and across depths (`depthFn`). Leaving both summary arguments as `NULL` returns the full hourly output:

```r
var.df <- tibble(var=c("temp", "salinity", "short_wave", "u", "v", "ww",
                       "uwind_speed", "vwind_speed", "precip"),
                 dayFn=c(mean, mean, integrateShortWave, q90, q90, q90, q90, q90, sum),
                 depthFn=c(mean, mean, NA, mean, mean, mean, NA, NA, NA))

hour_depth.df <- sampling.df %>%
  full_join(extractHydroVars(sampling.df, westcoms.dir, var.df$var,
                             daySummaryFn=NULL, depthSummaryFn=NULL,
                             cores=7, progress=T))
```
To summarise within each day instead, supply `var.df$dayFn`:

```r
day_depth.df <- sampling.df %>%
  full_join(extractHydroVars(sampling.df, westcoms.dir, var.df$var,
                             daySummaryFn=var.df$dayFn, depthSummaryFn=NULL,
                             cores=7, progress=T))
```
And to summarise across depths as well, also supply `var.df$depthFn`:

```r
day_depRng.df <- sampling.df %>%
  full_join(extractHydroVars(sampling.df, westcoms.dir, var.df$var,
                             daySummaryFn=var.df$dayFn, depthSummaryFn=var.df$depthFn,
                             cores=7, progress=T))
```
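As a rough consistency check, the daily summaries can be recomputed by hand from the hourly output. This sketch assumes `hour_depth.df` has one row per hour per observation, an `obs.id` identifier, and a `temp` column; the result should match the daily mean temperatures in `day_depth.df`:

```r
# Recompute daily mean temperature from the hourly extraction
hour_depth.df %>%
  group_by(obs.id) %>%
  summarise(temp_daily = mean(temp, na.rm = TRUE))
```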
Rather than a single element per site, we can also buffer each site and join every mesh element that falls within the buffer:

```r
buffer_radius <- 1e3  # in meters

sampling.df <- read_csv("data\\example_sites.csv") %>%
  mutate(grid=if_else(ymd(date) < "2019-05-01", 1, 2)) %>%
  group_by(grid) %>%
  group_split() %>%
  map2(.x=., .y=mesh.sf,
       ~st_as_sf(.x, coords=c("easting", "northing"), remove=F, crs=27700) %>%
         st_buffer(dist=buffer_radius) %>%
         st_join(., .y %>%
                   select(-area) %>%
                   rename(depth.elem=depth, site.elem=i)) %>%
         filter(!is.na(site.elem)) %>%
         st_drop_geometry) %>%
  bind_rows %>%
  mutate(depth=pmin(depth, depth.elem))
sampling.df
```
This gives one row for each element captured within `buffer_radius` of each sample. Extraction then works as before, except that we set `regional=TRUE` to take the mean across elements for each observation.
```r
regional.df <- sampling.df %>%
  group_by(obs.id) %>%
  slice_head(n=1) %>%
  ungroup %>%
  full_join(extractHydroVars(sampling.df, westcoms.dir, var.df$var,
                             daySummaryFn=var.df$dayFn, depthSummaryFn=var.df$depthFn,
                             regional=T, cores=7, progress=T))
```
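Because the regional summary averages across the buffered elements, `regional.df` should collapse back to a single row per observation; a minimal check:

```r
# Each observation should now appear exactly once
stopifnot(!any(duplicated(regional.df$obs.id)))
```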