knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)

Load the packages

library(tidyverse)
library(here)
library(gganimate)
library(iffitoR)
library(sf)
library(forcats)
library(glue)
library(purrr)
library(stringr)
library(crayon) # for some colours
library(raster)

Get the slope units

# determine the operating system to automatically set the data paths
os = Sys.info()["sysname"]

if(os == "Linux"){
  path_ncdf = "/mnt/CEPH_PROJECTS/Proslide/PREC_GRIDS_updated/"
  su_path = "/mnt/CEPH_PROJECTS/Proslide/Envir_data/SlopeUnits/su_opt_16_TAA/su_16_TAA.shp"
}else if(os == "Windows"){
  path_ncdf = "\\\\projectdata.eurac.edu/projects/Proslide/PREC_GRIDS_updated/"
  su_path = "\\\\projectdata.eurac.edu/projects/Proslide/Envir_data/SlopeUnits/su_opt_16_TAA/su_16_TAA.shp"
}else{
  stop(call. = FALSE, "unsupported operating system: ", os)
}
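
Before going further, a quick sanity check can help (a minimal sketch, not part of the original workflow): fail early if the mounted project paths are not reachable.

# optional sanity check (sketch): stop early if the project paths are not reachable
if (!dir.exists(path_ncdf)) stop(call. = FALSE, "NetCDF directory not found: ", path_ncdf)
if (!file.exists(su_path)) stop(call. = FALSE, "slope unit shapefile not found: ", su_path)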

What have we got

# get vector of South Tyrol
st = iffitoR::get_shape_southtyrol() 
st = st %>% st_union()
ggplot() +
  geom_sf(data=st) +
  theme_minimal()
su = st_read(su_path)
# and check directly whether they are in the same CRS
st_crs(su) == st_crs(st)
st_reproj = st_transform(st,st_crs(su))
st_crs(st_reproj) == st_crs(su)
su_agg = su %>% st_union()

ggplot() +
  geom_sf(data = su_agg) +
  geom_sf(data = st_reproj, fill="blue") +
  theme_light()

Clip the slope units to South Tyrol

su = st_crop(su, st_reproj)
# are the two bounding boxes equal?
st_bbox(su) == st_bbox(st_reproj) # for some reason the xmax is not the same
dim(su)
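
To put a number on the mismatch noted in the comment above, one can compare the two bounding boxes directly; this is just an illustrative check and likely reflects that the cropped geometries do not reach every edge of the crop rectangle exactly.

# illustrative check (sketch): numeric difference between the two bounding boxes
round(unclass(st_bbox(su)) - unclass(st_bbox(st_reproj)), 2)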

Get the rainfall for the slope units

glimpse(su)
# get the translational slides
slides = landsld %>% 
  filter(str_detect(second_level, "translational")) %>% 
  filter(date_info == "day") %>% 
  filter(year.int >= 1980) %>% 
  mutate(.id = 1:nrow(.)) %>% # give each slide a unique id
  st_transform(st_crs(su)) 


# assign each slope unit an id
su$su_id = 1:nrow(su) 

# apply spatial join
# the result can have more rows than the original su dataframe:
# each slope unit appears once per slide point that falls inside it (and at least once even without points)
joined = st_join(su, slides)
# how many slides per slope unit
slides_per_poly =  joined %>%
  count(su_id, sort = T)


# which slides happened in the slope unit with the most slides
max_slides = joined[joined$su_id == 3242, ]
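
As a quick plausibility check of the join logic above (just a sketch, not required for the workflow), the row counts should line up: the joined table has at least as many rows as su, and every non-missing .id corresponds to a slide that fell inside some slope unit (slides outside every slope unit drop out of this left join).

# plausibility check (sketch): row counts of the spatial join
c(
  n_su             = nrow(su),
  n_joined         = nrow(joined),
  n_matched_slides = sum(!is.na(joined$.id)),
  n_slides         = nrow(slides)
)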

# collect for each slope unit the dates of the landslides that fall inside it
su$slide_dates = vector("list", length = nrow(su)) # initialise the list column first
for (row in 1:nrow(su)) {

  # get the original id from the slope units
  id_su = su[row, ][["su_id"]]

  # select all the possible (minimum 1) rows from the joined table
  joined_selected = joined %>% 
    filter(su_id == id_su)

  # we only want the rainfall per slope unit and are not directly interested in the individual
  # landslide polygons, so if more than one landslide happened in a slope unit on the same day we keep only one
  joined_selected = joined_selected %>% dplyr::distinct(date, .keep_all=T)

  # check if the slope unit in the joined table has a value in the .id column from the points
  slide_dates_per_su = rep(NA, nrow(joined_selected))

  # check how many points fall in the slope unit for the selected id
  for (point in 1:nrow(joined_selected)) {
    # if there is a value in the .id column (i.e. there is a point), get the date of that slide
    if (!is.na(joined_selected[point, ]$.id)) {
      date = joined_selected[point, ]$date
      slide_dates_per_su[[point]] = date
    }
  }

  # convert back to a Date object
  slide_dates_per_su = as.Date.numeric(slide_dates_per_su, origin="1970-01-01")

  su$slide_dates[[row]] = slide_dates_per_su

}
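
For reference, the same slide-date list column could be built without the row-wise loop. The following is a minimal sketch of an equivalent dplyr approach (the names slide_dates_tbl, slide_dates_alt and su_alt are made up for illustration); the results below are based on the loop above, not on this sketch.

# vectorized alternative (sketch): one row per slope unit, slide dates as a list column
slide_dates_tbl = joined %>%
  st_drop_geometry() %>%
  filter(!is.na(.id)) %>%
  distinct(su_id, date) %>%
  group_by(su_id) %>%
  summarise(slide_dates_alt = list(date), .groups = "drop")

# slope units without any slide get NULL in slide_dates_alt after the join
su_alt = left_join(su, slide_dates_tbl, by = "su_id")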

# make the binary classification
su = su %>% 
  mutate(
    slide = if_else(!is.na(slide_dates), TRUE, FALSE)
  )

# this is just to test that each slope unit only stores unique slide days
# (note: incrementing a counter inside sapply() would not change the variable in the
#  enclosing scope, so we return a logical per slope unit and count afterwards)
has_duplicates = sapply(su$slide_dates, function(x) {
  length(x) > 1 && length(x) != length(unique(x))
})

if (sum(has_duplicates) == 0) {
  print("No duplicates")
} else {
  print(paste0(sum(has_duplicates), " slope units with duplicated slide days..."))
}

Extract the rainfall data for each slope unit with landslides, for each slide date

# make a list with one element for each slope unit that had at least one slide
n_su_with_slides  = length(which(su$slide))
su_rainfall = vector("list", length=n_su_with_slides)

# the names of the list are the IDs of the Slope Units
idx = which(su$slide)
ids = su[idx,]$su_id
names(su_rainfall) = ids

# a counter for the print messages
counter = 1

for (row in 1:nrow(su)) {

  # if at least one slide happened in this slope unit
  # this branch runs once for every element of su_rainfall

 if (su[row,]$slide) {

   n = n_su_with_slides
   str = paste0(counter, "/", n)
   dashes = paste0(replicate(20, "-"), collapse = "")
   cat(yellow$bold$underline("\n------------", str, dashes, "\n\n"))



   # get the current id of the slope unit
   current_id = su[row, ]$su_id

   dates = su[row,]$slide_dates[[1]]

   ## set up the rainfall extraction for that slope unit (days_back days before each slide)
   spatial.obj = su[row, ]
   days_back = 5

   # in case a slope unit experienced slides on more than one date we need an inner list
   inner_su_list = vector("list", length=length(dates))

   # get the rainfall for each date a landslide happened in that slope unit
   # (if there is only one date, this loop runs just once)
   for (day in seq_along(dates)) {

     d = dates[[day]]

     res = rainfallR::get_rainfall(data_path = path_ncdf
                                   ,fun = mean
                                   ,spatial.obj = spatial.obj
                                   ,dts = d
                                   ,days_back = days_back)

     # again: this always returns a list, but as we only supply one date (dts) we select the first element
     res = rainfallR::make_cumulative_rainfall(res, days_back = days_back) %>% .[[1]]

     # add the rainfall of that event to the list of all events for that slope unit
     inner_su_list[[day]] = res
   }

   # add this to the outer list
   su_rainfall[[counter]] = inner_su_list

   # increment the counter
   counter = counter + 1

 } 
}
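
One caveat of the loop above: if a single extraction errors (for example because a NetCDF file is missing for one date), the whole loop aborts. A more defensive variant would wrap the call in tryCatch() and simply skip that date; the sketch below uses safe_get_rainfall as a hypothetical helper, it is not part of rainfallR. Inside the loop, res would then need an is.null(res) check before being accumulated.

# defensive variant (sketch): return NULL instead of aborting when an extraction fails
safe_get_rainfall = function(...) {
  tryCatch(
    rainfallR::get_rainfall(...),
    error = function(e) {
      message("skipping one extraction: ", conditionMessage(e))
      NULL
    }
  )
}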
# bind each inner list of dataframes into one dataframe
one_df_each_su = lapply(su_rainfall, bind_rows)

# now create one big dataframe
one_df_all_su = bind_rows(one_df_each_su)
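
Before plotting it can be worth verifying that the combined data frame carries the columns the animation below relies on (a simple sketch).

# sanity check (sketch): columns used by the plotting code below
stopifnot(all(c("su_id", "date_consid", "days_before_event", "cumsum") %in% names(one_df_all_su)))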

Plotting

anim_su = one_df_all_su %>%
  group_by(su_id, date_consid) %>%
  mutate(time_anim = row_number()) %>%
  ggplot() +
  geom_path(mapping = aes(x = days_before_event, y = cumsum, group = su_id, col=cumsum),
            alpha = .2, size=1.6) +
 geom_point(aes(x=last(days_before_event), y=cumsum, group=su_id, col=cumsum)) +
  scale_x_reverse() +
  scale_color_viridis_c() +
  labs(
    x = "Days before the translational Slide",
    y = "Cumulative Rainfall [mm]",
    title = "Cumulative Rainfall before a Translational Slide",
    subtitle = "(on slope unit level)"
  ) +
  theme_minimal() +
  transition_reveal(time_anim)

print(anim_su)

# save the animation
anim_save(here("plots/anims/anim_polar_antecedent_rainfall_SlopeUnits.gif"), animation=anim_su)
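
The call above leaves the rendering parameters at gganimate's defaults. To control the number of frames, speed and output size explicitly, the animation can be rendered first with animate() and the result saved afterwards; the values below are arbitrary examples.

# optional (sketch): render with explicit settings before saving
rendered = animate(anim_su, nframes = 150, fps = 10, width = 800, height = 600)
anim_save(here("plots/anims/anim_polar_antecedent_rainfall_SlopeUnits.gif"), animation = rendered)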

