## PROJECT: WAVELENGTH
## AUTHOR: A.Chafetz, B.Kagniniwa, T.Essam | USAID
## PURPOSE: Update DATIM Tables and Upload to S3 Buckets
## LICENSE: MIT
## UPDATED: 2021-09-22
# DEPENDENCIES ------------------------------------------------------------
library(Wavelength)
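#note: the steps below also rely on glamr, grabr, purrr, and clipr through
#namespaced calls; those packages must be installed even though they are not
#attached here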
# GLOBAL VARIABLES --------------------------------------------------------
#load secrets
glamr::load_secrets()
#output folder path
savefolder <- "out/DATIM"
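#optional (not part of the original workflow): create the local output folders
#referenced in this script (savefolder plus the stash_outgoing() destinations)
#if they do not already exist
purrr::walk(c(savefolder, "out/joint", "out/DDC"),
            dir.create, recursive = TRUE, showWarnings = FALSE)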
#DDC S3 bucket
hfr_bucket <- "gov-usaid"
# LOCAL FUNCTIONS ---------------------------------------------------------
# UPDATE META DATA TABLES -------------------------------------------------
# Note: check MER API output using data-raw/validate_api_to_msd.R
#update MER PSNU targets (only run pre-Q1, before site-level results are available) - s3
# update_meta_targets(2021, upload = FALSE)
#update MER results and targets - s3
update_meta_mer(fy_pd = 2021:2023, upload = FALSE) #only needed when MER refreshes
#update org hierarchy - s3 + gdrive
update_meta_orgs(upload = FALSE) #every run
#update mechs - s3 + gdrive
update_meta_mechs(upload = FALSE) #every run
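#once the refreshed tables check out, the same calls can be re-run with
#upload = TRUE to push them to s3 (and GDrive where noted above)
# update_meta_orgs(upload = TRUE)
# update_meta_mechs(upload = TRUE)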
# DOWNLOAD SUBMISSIONS FROM GOOGLE DRIVE ----------------------------------
# Note: Make sure to update "Send to DDC for processing" column
#identify new files missing from s3
df_new <- identify_newfiles(print_files = TRUE, id_modified = TRUE)
#apply file timestamp and rename on GDrive
df_new <- apply_filetimestamp(df_new, gdrive_rename = TRUE)
#download all files missing from s3 locally to temp folder
new_files <- download_new(df_new)
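#note: folderpath_tmp (referenced below) is assumed to be the local temp folder
#that download_new() saves the submissions into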
# new_files <- list.files(folderpath_tmp, full.names = TRUE)
#basename(new_files) %>% clipr::write_clip()
# Note: make sure to exclude problematic files removed after ddcpv_check()
folderpath_tmp %>%
  list.files(full.names = TRUE) %>%
  basename() %>%
  clipr::write_clip()
# UPLOAD SUBMISSION FILES to S3 -------------------------------------------
#check and resolve any issues DDC cannot handle
purrr::walk(new_files, ddcpv_check)
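#optional sketch (assumption: ddcpv_check() errors on files it cannot handle):
#capture failures instead of stopping so the problem submissions can be listed
# chk <- purrr::map(new_files, purrr::safely(ddcpv_check))
# new_files[!purrr::map_lgl(chk, ~ is.null(.x$error))]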
# Remove problematic files and re-check
list.files(path = folderpath_tmp, full.names = TRUE) %>%
  purrr::walk(ddcpv_check)
# push local files to s3
purrr::walk(new_files,
            ~ grabr::s3_upload(
                file = .x,
                bucket = hfr_bucket,
                prefix = "ddc/uat/raw/hfr/incoming"))
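#alternative sketch: same upload as above, but with a progress message per file
# purrr::walk(new_files, function(f) {
#   message("uploading ", basename(f))
#   grabr::s3_upload(file = f,
#                    bucket = hfr_bucket,
#                    prefix = "ddc/uat/raw/hfr/incoming")
# })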
#remove local files
unlink(folderpath_tmp, recursive = TRUE)
# DOWNLOAD TABLEAU OUTPUTS ------------------------------------------------
#FY21 HFR Outputs
stash_outgoing("HFR_Tableau", "out/joint", gdrive = FALSE)
# PUSH ERROR REPORT TO GOOGLE DRIVE ---------------------------------------
#HFR Submissions status
stash_outgoing("HFR_Submission", "out/DDC", gdrive = FALSE)
#HFR Mechanisms Status
# stash_outgoing("Mechanism", "out/DDC", gdrive = FALSE)
#HFR Detailed Errors
stash_outgoing("Detailed", "out/DDC", gdrive = FALSE)
# RETIRED - PROCESS HFR SUBMISSIONS (WAVELENGTH) --------------------------
# #hfr submission period
# submission_pd <- 5
#
# #processed/output folder
# savefolder <- "out/processed"
#
# #store list of HFR submissions
# (files <- list.files("ou_submissions", full.names = TRUE))
#
# #validate submissions
# purrr::walk(files, hfr_process_template)
#
# #save processed files if submissions meet validation checks (filter for just the submission pd)
# purrr::walk(files, hfr_process_template, round_hfrdate = TRUE,
# hfr_pd_sel = submission_pd, folderpath_output = savefolder)