#' Extract hydrological response unit (HRU) specific climate time series from
#' nc-files.
#'
#' Extracts precipitation (tp) or temperature (tas_) data from climate raster
#' bricks (observed history (hist_obs), GCM-simulated history (hist_sim) or
#' GCM-simulated future (fut_sim)) and prepares a dataframe for later import
#' into RSMinerve (use readr::write_csv(., col_names = FALSE)). Precipitation
#' and temperature have to be extracted separately, i.e. the function has to be
#' called twice and the resulting tibble columns combined.
#'
#' @param climate_files List of either temperature or precipitation climate .nc
#' files to process (do not mix!). Make sure the file list time interval is
#' consistent with startY and endY.
#' @param catchmentName Name of catchment for which data should be extracted
#' @param temp_or_precip Either 'Temperature' or 'Precipitation'
#' @param elBands_shp Shapefile with hydrological response units. The column
#' containing the names of the hydrological response units must be \code{name}
#' and the column containing the average elevation of the elevation band must
#' be \code{Z}. Please make sure that the shape file is in UTM coordinates.
#' @param startY Starting year for which data should be extracted (data are
#' assumed to be available from the start of that year)
#' @param endY Ending year up to which data should be extracted (data are
#' assumed to be available until the end of that year)
#' @param obs_frequency Climate observation frequency ('hour', 'day', 'month')
#' @param climate_data_type String denoting the type of climate data. Either
#' 'hist_obs' (historical observations, i.e. CHELSA V21 high-resolution
#' climate data), 'hist_sim' (GCM model output data over the historical
#' period) or 'fut_sim' (future GCM simulations)
#' @param crs_in_use Proj4 string of the CRS in use, for example
#' '+proj=longlat +datum=WGS84' for EPSG 4326
#' @param output_file_dir Path to the output file directory (if left at the
#' default, no file is written)
#' @param gcm_model Name of the GCM model; used in the output file name for
#' GCM-derived data (default 0, i.e. not used)
#' @param gcm_scenario Name of the GCM scenario; used in the output file name
#' for GCM-derived data (default 0, i.e. not used)
#' @param tz Time zone information. Defaults to "UTC", which can be overridden.
#' @return Dataframe tibble with temperature in deg. C or precipitation in mm
#' (the Unit row of the RSMinerve header is set to 'C' or 'mm/d', respectively)
#' @family Pre-processing
#' @note This function currently can only read input files with full years of
#' data, that is, with data from January to December of a given year.
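#' A minimal usage sketch (not run) is given in the examples below; the file
#' paths, shapefile, catchment name and output directory are placeholders.
#' @examples
#' \dontrun{
#' # Hypothetical inputs: CHELSA precipitation .nc files and an elevation band
#' # shapefile with the columns 'name' and 'Z'.
#' climate_files <- base::list.files("./chelsa/tp/", pattern = ".nc$",
#'                                   full.names = TRUE)
#' el_bands <- sf::st_read("./gis/elevation_bands.shp")
#'
#' # Precipitation and temperature have to be extracted in two separate calls
#' # (for temperature, pass the temperature file list and 'Temperature').
#' precip_csv <- gen_HRU_Climate_CSV_RSMinerve(
#'   climate_files     = climate_files,
#'   catchmentName     = "DemoCatchment",
#'   temp_or_precip    = "Precipitation",
#'   elBands_shp       = el_bands,
#'   startY            = 1981,
#'   endY              = 1990,
#'   obs_frequency     = "day",
#'   climate_data_type = "hist_obs",
#'   crs_in_use        = "+proj=longlat +datum=WGS84",
#'   output_file_dir   = "./rsminerve_input/")
#'
#' # The returned tibble can also be written manually for RSMinerve import.
#' readr::write_csv(precip_csv, "./rsminerve_input/precip_demo.csv",
#'                  col_names = FALSE)
#' }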
#' @export
gen_HRU_Climate_CSV_RSMinerve <- function(climate_files,
                                          catchmentName,
                                          temp_or_precip,
                                          elBands_shp,
                                          startY,
                                          endY,
                                          obs_frequency,
                                          climate_data_type,
                                          crs_in_use,
                                          output_file_dir = 0,
                                          gcm_model = 0,
                                          gcm_scenario = 0,
                                          tz = "UTC") {
  . <- NULL
  # Ensure conforming crs
  crs_elBands <- sf::st_crs(elBands_shp)
  elBands_shp_latlon <- sf::st_transform(elBands_shp,
                                         crs = sf::st_crs(crs_in_use))
  # Generate date sequence in accordance with RSMinerve requirements
  dateElBands <- riversCentralAsia::generateSeqDates(startY, endY,
                                                     obs_frequency, tz)
  datesChar <- riversCentralAsia::posixct2rsminerveChar(dateElBands$date, tz) %>%
    dplyr::rename(Station = value)
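  # posixct2rsminerveChar() converts the POSIXct dates to the character format
  # expected by RSMinerve; the column is renamed to 'Station' so that it lines
  # up with the 'Station' header column assembled further below.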
  # Get names of elevation bands
  namesElBands <- elBands_shp$name
  # Trick to generate an empty dataframe with column names taken from a
  # character vector.
  dataElBands_df <- namesElBands %>%
    purrr::map_dfc(stats::setNames, object = base::list(base::logical()))
  use_exactextract <- TRUE
  # .nc-file extraction
  for (yrIDX in 1:base::length(climate_files)) {
    base::print(base::paste0('Processing File: ', climate_files[yrIDX]))
    histobs_data <- raster::brick(base::paste0(climate_files[yrIDX]))
    if (is.na(raster::crs(histobs_data))) {
      raster::crs(histobs_data) <- crs_in_use
    }
    # Test whether the fast exactextractr::exact_extract() works as expected by
    # comparing it to raster::extract() on the first file. If yes, continue
    # with exact_extract(); if not, fall back to extract().
    if (yrIDX == 1) {
      subbasin_data <- exactextractr::exact_extract(
        histobs_data,
        elBands_shp_latlon,
        'mean') %>% t() %>%
        tibble::as_tibble(., .name_repair = "unique") %>%
        dplyr::slice(1:base::nrow(dateElBands))
      test <- raster::extract(histobs_data,
                              elBands_shp_latlon,
                              'mean') %>% t() %>%
        tibble::as_tibble(., .name_repair = "unique") %>%
        dplyr::slice(1:base::nrow(dateElBands))
      # If the relative difference between the two extracted data sets is
      # smaller than 10 %, use the fast exactextractr package.
      if (abs((sum(sum(subbasin_data)) - sum(sum(test)))) / sum(sum(subbasin_data))
          <= 0.1) {
        # Results from exactextractr and raster are consistent; use the faster
        # exactextractr package.
        use_exactextract <- TRUE
        base::message("Using exactextractr::exact_extract()")
      } else {
        # Results are not consistent; use the more reliable raster package.
        use_exactextract <- FALSE
        base::message("Using raster::extract()")
        subbasin_data <- test
      }
    } else {
      if (use_exactextract == TRUE) {
        subbasin_data <- exactextractr::exact_extract(
          histobs_data,
          elBands_shp_latlon,
          'mean') %>% t() %>%
          tibble::as_tibble(., .name_repair = "unique") %>%
          dplyr::slice(1:base::nrow(dateElBands))
      } else if (use_exactextract == FALSE) {
        subbasin_data <- raster::extract(histobs_data,
                                         elBands_shp_latlon,
                                         'mean') %>% t() %>%
          tibble::as_tibble(., .name_repair = "unique") %>%
          dplyr::slice(1:base::nrow(dateElBands))
      }
    }
    # If endY does not correspond to the end date of the .nc-file, the data
    # have to be truncated accordingly (see the dplyr::slice() calls above).
    base::names(subbasin_data) <- base::names(dataElBands_df)
    dataElBands_df <- dataElBands_df %>% tibble::add_row(subbasin_data)
  }
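  # dataElBands_df now holds one row per time step (raster layer) and one
  # column per elevation band, accumulated over all processed .nc files.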
  dataElBands_df_data <- base::cbind(datesChar, dataElBands_df) %>%
    tibble::as_tibble(., .name_repair = "unique")
  # Construct the csv-file header. See the definition of the RSMinerve .csv
  # database file at: https://www.youtube.com/watch?v=p4Zh7zBoQho
  dataElbands_df_header_Station <- tibble::tibble(
    Station = c('X', 'Y', 'Z', 'Sensor', 'Category', 'Unit', 'Interpolation'))
  dataElBands_df_body <- namesElBands %>%
    purrr::map_dfc(stats::setNames, object = base::list(base::logical()))
  # Get XY (via centroids) and Z (mean alt. band elevation)
  elBands_XY <- sf::st_transform(elBands_shp, crs = crs_elBands)
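  # Declaring the attribute-geometry relationship as constant avoids the sf
  # warning that attributes are assumed to be spatially constant when taking
  # centroids below.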
  sf::st_agr(elBands_XY) <- "constant"
  elBands_XY <- elBands_XY %>%
    sf::st_centroid() %>% sf::st_coordinates() %>%
    tibble::as_tibble(., .name_repair = "unique")
  elBands_Z <- elBands_shp$Z %>% tibble::as_tibble(., .name_repair = "unique") %>%
    dplyr::rename(Z = value)
  elBands_XYZ <- base::cbind(elBands_XY, elBands_Z) %>% base::as.matrix() %>%
    base::t() %>% tibble::as_tibble(., .name_repair = "unique") %>%
    dplyr::mutate_all(as.character)
  base::names(elBands_XYZ) <- base::names(dataElBands_df_body)
  # Sensor (P or T), Category, Unit and Interpolation
  nBands <- elBands_XYZ %>% base::dim() %>% dplyr::last()
  if (temp_or_precip == 'Temperature') {
    sensorType <- 'T' %>% base::rep(., nBands)
    unit <- 'C' %>% base::rep(., nBands)
  } else {
    sensorType <- 'P' %>% base::rep(., nBands)
    unit <- 'mm/d' %>% base::rep(., nBands)
  }
  category <- temp_or_precip %>% base::rep(., nBands)
  interpolation <- 'Linear' %>% base::rep(., nBands)
  sensor <- base::rbind(sensorType, category, unit, interpolation) %>%
    tibble::as_tibble(., .name_repair = "unique")
  base::names(sensor) <- base::names(dataElBands_df_body)
  # Put everything together
  file2write <- elBands_XYZ %>% tibble::add_row(sensor)
  file2write <- dataElbands_df_header_Station %>% tibble::add_column(file2write)
  file2write <- file2write %>% tibble::add_row(dataElBands_df_data %>%
                                                 dplyr::mutate_all(as.character))
  file2write <- base::rbind(base::names(file2write), file2write)
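  # At this point, file2write has the following layout (values illustrative):
  #   Station        <band 1 name>   <band 2 name>  ...
  #   X              <easting>       ...
  #   Y              <northing>      ...
  #   Z              <mean band elevation> ...
  #   Sensor         P or T          ...
  #   Category       Precipitation or Temperature ...
  #   Unit           mm/d or C       ...
  #   Interpolation  Linear          ...
  #   <date>         <value>         ...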
  # Write file to disk
  if (output_file_dir != 0) {
    if (gcm_model == 0 & gcm_scenario == 0) {
      readr::write_csv(file2write,
                       base::paste0(output_file_dir, climate_data_type, "_",
                                    temp_or_precip, "_", startY, "_", endY, "_",
                                    catchmentName, ".csv"),
                       col_names = FALSE)
    } else if (gcm_model != 0 & gcm_scenario == 0) {
      readr::write_csv(file2write,
                       base::paste0(output_file_dir, climate_data_type, "_",
                                    gcm_model, "_", temp_or_precip, "_",
                                    startY, "_", endY, "_",
                                    catchmentName, ".csv"),
                       col_names = FALSE)
    } else if (gcm_model != 0 & gcm_scenario != 0) {
      readr::write_csv(file2write,
                       base::paste0(output_file_dir, climate_data_type, "_",
                                    gcm_model, "_", gcm_scenario, "_",
                                    temp_or_precip, "_", startY, "_",
                                    endY, "_", catchmentName, ".csv"),
                       col_names = FALSE)
    }
  }
  # Return file
  base::return(file2write)
}