
# DO NOT EDIT THIS FILE BY HAND! Instead edit the R Markdown source file `Rmd/yay.Rmd` and run `pkgpurl::purl_rmd()`.
# See `README.md#r-markdown-format` for more information on the literate programming approach used, which relies on the R Markdown format.

# yay: Delightful Convenience Functions
# Copyright (C) 2025 Salim Brüggemann
# 
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or any later version.
# 
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
# 
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see <https://www.gnu.org/licenses/>.

utils::globalVariables(names = c(".",
                                 # tidyselect fns
                                 "any_of",
                                 "everything",
                                 # other
                                 "content",
                                 "end",
                                 "hostname",
                                 "i_pattern",
                                 "is_draft",
                                 "is_pre_release",
                                 "key",
                                 "minus",
                                 "pattern",
                                 "plus",
                                 "replacement",
                                 "is_standard",
                                 "start",
                                 "status",
                                 "type",
                                 "value",
                                 "version_nr"))

ptype_dns_records <- tibble::tibble(type = character(),
                                    hostname = character(),
                                    value = character(),
                                    ttl = integer(),
                                    priority = integer(),
                                    weight = integer(),
                                    port = integer(),
                                    flag = integer(),
                                    tag = character(),
                                    target = character())

cols_gh_releases <- c("id",
                      "version_nr",
                      "is_pre_release",
                      "is_draft")
cols_dns_records <-
  ptype_dns_records |>
  purrr::imap(\(val, key) tibble::tibble(key = key,
                                         is_standard = TRUE,
                                         type = ifelse(key %in% c("ttl", "priority", "weight", "port", "flag"),
                                                       "integer",
                                                       "character"))) |>
  purrr::list_rbind()

cols_dns_records_netlify <-
  tibble::tribble(
    ~key,          ~type,       ~is_standard,
    "id",          "character", FALSE,
    "dns_zone_id", "character", FALSE,
    "site_id",     "character", FALSE,
    "managed",     "logical",   FALSE
  ) %>%
  dplyr::bind_rows(cols_dns_records) |>
  # Netlify does not support `HTTPS` and `SVCB` record types
  dplyr::filter(key != "target")

cols_dns_records_porkbun <-
  cols_dns_records |>
  tibble::add_row(key = "id",
                  type = "character",
                  is_standard = FALSE,
                  .before = 1L)

md_gh_pat <- paste0("Works for both public and private repositories; for the latter, you just need to set up a sufficiently authorized [GitHub Personal Access ",
                    "Token (PAT)][gh::gh_token].")

dns_record_types <- list(netlify = c("A",
                                     "AAAA",
                                     "ALIAS",
                                     "CAA",
                                     "CNAME",
                                     "MX",
                                     "NS",
                                     "SPF",
                                     "SRV",
                                     "TXT"),
                         porkbun = c("A",
                                     "AAAA",
                                     "ALIAS",
                                     "CAA",
                                     "CNAME",
                                     "HTTPS",
                                     "MX",
                                     "NS",
                                     "SRV",
                                     "SVCB",
                                     "TLSA",
                                     "TXT"))

paths_to_keep <- c("netlify.toml",
                   "robots.txt",
                   "_headers",
                   "_redirects",
                   ".gitignore",
                   ".gitmodules",
                   ".gitsigners",
                   ".htaccess",
                   ".hvm",
                   ".well-known")

reason_pkg_required_gh <- "for yay's `gh_*()` functions, but is not installed."

unicode_ellipsis <- "\u2026"

as_dns_records <- function(records,
                           registrar = c("netlify", "porkbun"),
                           check_record_types = TRUE) {
  
  registrar <- rlang::arg_match(registrar)
  checkmate::assert_data_frame(records,
                               row.names = "unique")
  
  if (check_record_types) {
    checkmate::assert_subset(records$type,
                             choices = dns_record_types[[registrar]])
  }
  
  # ensure all required cols are present
  missing_col_names <- setdiff(c("type", "hostname", "value"),
                               colnames(records))
  
  if (length(missing_col_names) > 0L) {
    cli::cli_abort("{.arg records} is missing the following required columns: {.var {missing_col_names}}",
                   .frame = parent.frame(2L))
  }
  
  # ensure no required fields are missing
  purrr::walk(c("type", "hostname", "value"),
              \(x) {
                if (anyNA(records[[x]])) {
                  cli::cli_abort("Column {.var {x}} in {.arg records} can't have missings.",
                                 .frame = parent.frame(2L))
                }
              })
  
  cols_dns_records_registrar <- get(paste0("cols_dns_records_", registrar))
  
  # complement missing optional columns
  cols_dns_records_registrar |>
    dplyr::filter(is_standard) %$%
    key |>
    setdiff(colnames(records)) |>
    purrr::map(\(x) tibble::as_tibble_col(x = NA,
                                          column_name = x)) |>
    purrr::list_cbind() |>
    pal::when(ncol(.) > 0L ~ dplyr::cross_join(x = records,
                                               y = .),
              ~ records) |>
    # coerce to target types
    dplyr::mutate(dplyr::across(.cols = any_of(cols_dns_records_registrar$key),
                                .fns = \(x) {
                                  .Primitive(paste0("as.", cols_dns_records_registrar$type[cols_dns_records_registrar$key == dplyr::cur_column()]))(x)
                                }))
}

as_dns_record_ids <- function(records) {
  
  if (is.character(records)) {
    checkmate::assert_character(records,
                                any.missing = FALSE)
  } else {
    checkmate::assert_data_frame(records,
                                 row.names = "unique")
    # ensure all required cols are present
    if (!("id" %in% colnames(records))) {
      cli::cli_abort("{.arg records} is missing an {.var id} column.")
    }
    
    records %<>% dplyr::pull("id")
  }
  
  records
}

assert_domain <- function(domain) {
  
  checkmate::assert_string(domain,
                           pattern = "\\w+\\.\\w+(\\.\\w+)*")
}

clean_git_dir <- function(path,
                          exclude_paths = paths_to_keep,
                          repo = path) {
  fs::dir_ls(path = path,
             all = TRUE,
             type = "directory") |>
    setdiff(fs::path(path, c(exclude_paths, ".git"))) |>
    purrr::walk(fs::dir_delete)
  
  fs::dir_ls(path = path,
             all = TRUE,
             type = "file") |>
    setdiff(fs::path(path, exclude_paths)) |>
    purrr::walk(fs::file_delete)
  
  gert::git_status(repo = repo,
                   staged = FALSE) |>
    dplyr::filter(status == "deleted"
                  & fs::path_has_parent(path = file,
                                        parent = fs::path_rel(path = path,
                                                              start = repo))) %$%
    file |>
    gert::git_add(repo = repo)
}

extract_vrsn_nr <- function(x) {
  
  pattern_vrsn <- "\\d+(\\.\\d+)*"
  version_nr <- stringr::str_extract(string = x$tag_name,
                                     pattern = pattern_vrsn)
  
  # fall back to parsing name field
  if (length(version_nr) == 0L || is.na(version_nr)) {
    version_nr <- stringr::str_extract(string = x$name,
                                       pattern = pattern_vrsn)
  }
  
  as.numeric_version(version_nr)
}

gh_release_as_tibble <- function(x) {
  
  tibble::tibble(id = x$id,
                 version_nr = extract_vrsn_nr(x),
                 is_pre_release = x$prerelease,
                 is_draft = x$draft)
}

normalize_tree_path <- function(path) {
  
  checkmate::assert_string(path) |>
    fs::path_norm() |>
    stringr::str_remove(pattern = "^\\.{0,2}(/|$)")
}

perform_porkbun_req <- function(url,
                                data = NULL,
                                api_key = pal::pkg_config_val("porkbun_api_key"),
                                secret_api_key = pal::pkg_config_val("porkbun_secret_api_key"),
                                max_tries = 3L) {
  checkmate::assert_list(data,
                         any.missing = FALSE,
                         names = "strict",
                         null.ok = TRUE)
  checkmate::assert_string(api_key)
  checkmate::assert_string(secret_api_key)
  checkmate::assert_count(max_tries,
                          positive = TRUE)
  
  httr2::request(base_url = url) |>
    httr2::req_method(method = "POST") |>
    httr2::req_body_json(list(apikey = api_key,
                              secretapikey = secret_api_key)) |>
    httr2::req_body_json_modify(!!!data) |>
    httr2::req_user_agent(string = "yay R package (https://yay.rpkg.dev)") |>
    httr2::req_retry(max_tries = max_tries) |>
    httr2::req_error(body = \(resp) httr2::resp_body_json(resp)$message) |>
    httr2::req_perform() |>
    httr2::resp_body_json()
}

#' Regular expression patterns and replacements for text normalization
#'
#' @format `r pkgsnip::return_lbl("tibble")`
#' @family regex
#' @seealso String normalization functions: [str_normalize()] [str_normalize_file()]
#'
#' @examples
#' # unnest the pattern column
#' tidyr::unnest_longer(data = yay::regex_text_normalization,
#'                      col = pattern)
"regex_text_normalization"

#' Regular expression patterns and replacements for file normalization
#'
#' @format `r pkgsnip::return_lbl("tibble")`
#' @family regex
#' @seealso String normalization functions: [str_normalize()] [str_normalize_file()]
#'
#' @examples
#' # unnest the pattern column
#' tidyr::unnest_longer(data = yay::regex_file_normalization,
#'                      col = pattern)
"regex_file_normalization"

#' Determine the differences between two data frames/tibbles in tabular diff format
#'
#' Compares two [data frames][base::data.frame()]/[tibbles][tibble::tbl_df] (or two objects coercible to tibbles like
#' [matrices][base::matrix()]) and offers to inspect any differences in [tabular diff format](https://paulfitz.github.io/daff-doc/spec.html) as neatly rendered
#' HTML.
#'
#' This function is basically a convenience wrapper combining [pal::is_equal_df()], [daff::diff_data()] and [daff::render_diff()]. If run non-interactively or
#' `ask = FALSE`, the differences will be shown right away, otherwise the user will be asked on the console.
#'
#' Note that in tabular diff format, only changes in the _column content_ of `x` and `y` are visible, meaning that the following properties and changes
#' therein won't be displayed:
#'
#' - column types (e.g. integer vs. double)
#' - row names and other attributes
#'
#' @param x The data frame / tibble to check for changes.
#' @param y The data frame / tibble that `x` should be checked against, i.e. the reference.
#' @param ignore_order Whether or not to ignore the order of columns and rows.
#' @param ids A character vector of column names that make up a [primary key](https://en.wikipedia.org/wiki/Primary_key), if known. If `NULL`, heuristics are
#'   used to find a decent key (or a set of decent keys).
#' @param ask Whether or not to ask interactively if the resulting difference object should be opened in case `x` and `y` differ. If `FALSE`, it will be opened
#'   right away. Only relevant if run [interactively][base::interactive()].
#' @param bypass_rstudio_viewer If `TRUE`, `x` and `y` actually differ, and `ask` is set to `TRUE`, the resulting difference object will be
#'   opened in the system's default web browser instead of RStudio's built-in viewer. Only relevant if run within RStudio.
#' @param verbose Whether or not to also output the differences detected by [pal::is_equal_df()] to the console.
#' @param max_diffs The maximum number of differences shown on the console. Only relevant if `verbose = TRUE`.
#' @param caption The caption of the rendered difference object. It is passed to [glue::glue()] allowing its string interpolation syntax to be used. A character
#'   scalar.
#' @param diff_text The text to display on the console in case `x` and `y` differ. It is passed to [glue::glue()] allowing its string interpolation syntax to be
#'   used. A character scalar.
#' @param ask_text The text that is displayed when `ask = TRUE`. Ignored if `ask = FALSE`. A character scalar.
#' @param ... Further arguments passed on to [daff::diff_data()], excluding `data`, `data_ref`, `ids`, `ordered`, and `columns_to_ignore`.
#' @inheritParams pal::is_equal_df
#'
#' @return A [difference object][daff::daff], invisibly. It could be rendered later using [daff::render_diff()], for example.
#' @family tibble
#' @export
#'
#' @examples
#' \dontrun{
#' library(magrittr)
#' 
#' mtcars |>
#'   dplyr::mutate(dplyr::across(c(cyl, gear),
#'                               \(x) dplyr::if_else(x > 4, x * 2, x))) |>
#'   yay::show_diff(mtcars)}
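#'
#' # a minimal sketch of capturing the returned difference object and rendering it later
#' # (object names are illustrative)
#' \dontrun{
#' diff_obj <- yay::show_diff(x = head(mtcars),
#'                            y = mtcars,
#'                            ask = FALSE)
#'
#' daff::render_diff(diff = diff_obj)}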
show_diff <- function(x,
                      y,
                      ignore_order = FALSE,
                      ignore_col_types = FALSE,
                      ids = NULL,
                      ask = TRUE,
                      bypass_rstudio_viewer = FALSE,
                      verbose = TRUE,
                      max_diffs = 10L,
                      diff_text = "{x_lbl} is different from {y_lbl}",
                      ask_text = "Do you wish to display the changes in tabular diff format?",
                      caption = "{x_lbl} \u2192 {y_lbl}",
                      ...) {
  
  checkmate::assert_character(ids,
                              any.missing = FALSE,
                              null.ok = TRUE)
  checkmate::assert_string(diff_text)
  checkmate::assert_string(ask_text)
  checkmate::assert_string(caption)
  checkmate::assert_flag(ignore_order)
  checkmate::assert_flag(ask)
  checkmate::assert_flag(bypass_rstudio_viewer)
  reason_pkg_required_fn <- pal::reason_pkg_required()
  rlang::check_installed("daff",
                         reason = reason_pkg_required_fn)
  pal::check_dots_named(...,
                        .fn = daff::diff_data,
                        .forbidden = c("data",
                                       "data_ref",
                                       "ids",
                                       "ordered",
                                       "columns_to_ignore"))
  # generate `x`/`y` labels
  x_lbl <- deparse(substitute(x))
  y_lbl <- deparse(substitute(y))
  
  if (length(x_lbl) > 1L || x_lbl == "." || make.names(x_lbl) != x_lbl) {
    x_lbl <- "`x`"
  } else {
    x_lbl <- glue::glue("`x` (`{x_lbl}`)")
  }
  
  if (length(y_lbl) > 1L || y_lbl == "." || make.names(y_lbl) != y_lbl) {
    y_lbl <- "`y`"
  } else {
    y_lbl <- glue::glue("`y` (`{y_lbl}`)")
  }
  
  # generate HTML caption
  caption %<>% glue::glue()
  n_backtick <- stringr::str_count(caption,
                                   pattern = stringr::fixed("`"))
  
  if (n_backtick) {
    
    caption %<>% purrr::reduce(.x = 1:n_backtick,
                               .init = .,
                               .f = \(string, i) {
                                 
                                 stringr::str_replace(string = string,
                                                      pattern = stringr::fixed("`"),
                                                      replacement = ifelse(i %% 2L == 0L,
                                                                           "</code>",
                                                                           "<code>"))
                               })
  }
  
  daff_obj <- daff::diff_data(data = x,
                              data_ref = y,
                              ids = ids,
                              ordered = !ignore_order)
  
  diff <- pal::is_equal_df(x = x,
                           y = y,
                           ignore_col_order = ignore_order,
                           ignore_row_order = ignore_order,
                           ignore_col_types = ignore_col_types,
                           quiet = TRUE,
                           return_waldo_compare = TRUE)
  
  if (length(diff) > 0L) {
    
    if (verbose) {
      cli::cli_alert_info(text = paste0(diff_text, ":"))
      cat("\n")
      print(diff)
      
    } else {
      cli::cli_alert_info(text = paste0(diff_text, "."))
    }
    
    open_diff <- TRUE
    
    if (ask && interactive()) {
      rlang::check_installed("yesno",
                             reason = reason_pkg_required_fn)
      cat("\n")
      open_diff <- yesno::yesno2(ask_text)
    }
    
    if (open_diff) {
      
      if (interactive()) {
        
        if (bypass_rstudio_viewer) {
          rlang::check_installed("withr",
                         reason = reason_pkg_required_fn)
          withr::with_options(new = list(viewer = NULL),
                              code = daff::render_diff(diff = daff_obj,
                                                       view = TRUE,
                                                       title = caption))
        } else {
          daff::render_diff(diff = daff_obj,
                            view = TRUE,
                            title = caption)
        }
      } else {
        
        rlang::check_installed("xopen",
                         reason = reason_pkg_required_fn)
        tmp_file <- fs::file_temp(pattern = "yay-show_diff_",
                                  ext = "html")
        daff::render_diff(diff = daff_obj,
                          file = tmp_file,
                          view = FALSE,
                          title = caption)
        xopen::xopen(target = glue::glue("file://{tmp_file}"),
                     quiet = TRUE)
        cli::cli_alert_info("A new tab displaying the changes has been opened in your default web browser.")
      }
    }
  }
  
  invisible(daff_obj)
}

#' Open as temporary spreadsheet
#'
#' Writes an \R object – usually tabular data like a dataframe or [tibble][tibble::tibble()] – to a temporary spreadsheet and subsequently opens that
#' spreadsheet in the system's default application using [xopen::xopen()]. The latter is usually equivalent to double-clicking on the file in a file browser.
#'
#' @param x A dataframe or something coercible to one.
#' @param format The spreadsheet format to use. One of
#'   - `"csv"` for a [comma-separated values](https://en.wikipedia.org/wiki/Comma-separated_values) file written using [readr::write_csv()]. The default.
#'   - `"xlsx"` for an [Office Open XML](https://en.wikipedia.org/wiki/Office_Open_XML) file commonly used by Microsoft Excel 2007+, written using
#'     [writexl::write_xlsx()].
#' @param ... Further arguments passed on to [readr::write_csv()] or [writexl::write_xlsx()], depending on `format`.
#' @param quiet Whether or not to echo the command to open the temporary spreadsheet on the console before running it.
#'
#' @return `x` invisibly.
#' @family tibble
#' @export
#'
#' @examples
#' \dontrun{
#' yay::open_as_tmp_spreadsheet(mtcars,
#'                              format = "xlsx")}
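#'
#' # with the default CSV format, further arguments are passed on to `readr::write_csv()`;
#' # the `na` value below is just an illustrative choice
#' \dontrun{
#' yay::open_as_tmp_spreadsheet(mtcars,
#'                              na = "")}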
open_as_tmp_spreadsheet <- function(x,
                                    format = c("csv", "xlsx"),
                                    ...,
                                    quiet = TRUE) {
  
  reason_pkg_required_fn <- pal::reason_pkg_required()
  rlang::check_installed("xopen",
                         reason = reason_pkg_required_fn)
  format <- rlang::arg_match(format)
  tmp_file <- fs::file_temp(pattern = "tmp_spreadsheet",
                            ext = format)
  
  if (format == "csv") {
    
    rlang::check_installed("readr",
                           reason = reason_pkg_required_fn)
    pal::check_dots_named(...,
                          .fn = readr::write_csv,
                          .forbidden = "x")
    readr::write_csv(x = x,
                     file = tmp_file,
                     ...)
    
  } else if (format == "xlsx") {
    
    rlang::check_installed("writexl",
                           reason = reason_pkg_required_fn)
    pal::check_dots_named(...,
                          .fn = writexl::write_xlsx,
                          .forbidden = "x")
    writexl::write_xlsx(x = x,
                        path = tmp_file,
                        ...)
  }
  
  xopen::xopen(target = tmp_file,
               quiet = quiet)
  
  invisible(x)
}

#' Deploy static website to local Git folder
#'
#' @description
#' Copies the content of a specific filesystem folder to another local Git folder, then stages, commits and pushes the changes. Primarily useful to deploy a
#' [static website](https://en.wikipedia.org/wiki/Static_web_page) (typically the output of a [static site generator](https://jamstack.org/generators/)).
#'
#' Use this function with **caution** since by default it – except for the paths listed in `never_clean` – **completely wipes the `to_path`
#' directory**!
#'
#' @param from_path Path to the directory containing the static website files that are to be deployed. A character scalar.
#' @param to_path Path to the Git (sub)folder to which the static website files are to be deployed. A character scalar.
#' @param clean_to_path Whether or not to wipe `to_path` before deploying the new website files. Setting this to `TRUE` ensures there are no obsolete files left
#'   over from previous deployments.
#' @param never_clean A character vector of paths relative to `to_path` which are preserved when wiping `to_path` (i.e. `clean_to_path = TRUE`). By default,
#'   this includes the following files and directories:
#'   `r pal::as_md_val_list(paths_to_keep)`
#' @param branch The name of the Git branch to which the static website files are to be committed. A character scalar or `NULL`. If `NULL`, defaults to the
#'   currently checked out branch of the repository `to_path` belongs to.
#' @param commit_msg The Git commit message used for the deployment. A character scalar.
#' @param quiet `r pkgsnip::param_lbl("quiet")`
#'
#' @return A vector of paths to the deployed files/folders, invisibly.
#' @family gitrepo
#' @export
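#'
#' @examples
#' \dontrun{
#' # deploy a previously built static site; both paths below are purely illustrative
#' yay::deploy_static_site(from_path = "public",
#'                         to_path = "~/git_repos/my_site",
#'                         commit_msg = "deploy latest site build")}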
deploy_static_site <- function(from_path,
                               to_path,
                               clean_to_path = TRUE,
                               never_clean = paths_to_keep,
                               branch = NULL,
                               commit_msg = "auto-deploy static website",
                               quiet = FALSE) {
  
  rlang::check_installed("gert",
                         reason = pal::reason_pkg_required())
  checkmate::assert_directory_exists(from_path,
                                     access = "r")
  checkmate::assert_flag(clean_to_path)
  checkmate::assert_character(never_clean,
                              any.missing = FALSE)
  checkmate::assert_string(branch,
                           null.ok = TRUE)
  checkmate::assert_string(commit_msg)
  checkmate::assert_flag(quiet)
  
  if (!checkmate::test_path_for_output(to_path,
                                       overwrite = TRUE)) {
    cli::cli_abort("{.arg {to_path}} is not a valid filesystem path.")
  }
  
  # create website deploy subfolder if necessary (only leaf directory will be created because of `checkmate::test_path_for_output()` above)
  if (!fs::dir_exists(to_path)) fs::dir_create(path = to_path)
  
  # determine root path of Git repository
  repo <- gert::git_find(path = to_path)
  
  # change branch if requested
  if (!is.null(branch)) {
    
    initial_branch <- gert::git_branch(repo = repo)
    
    if (!(gert::git_branch_exists(branch = branch,
                                  repo = repo))) {
      cli::cli_abort("Branch {.val {branch}} doesn't exist in Git repository {.path {repo}}.")
    }
    
    gert::git_branch_checkout(branch = branch,
                              repo = repo)
  }
  
  # clean destination path if requested
  if (clean_to_path) {
    clean_git_dir(path = to_path,
                  exclude_paths = never_clean,
                  repo = repo)
  }
  
  # copy files/dirs
  dirs <- fs::dir_ls(path = from_path,
                     type = "directory",
                     all = TRUE)
  
  purrr::walk2(.x = dirs,
               .y = fs::path(to_path, fs::path_rel(path = dirs,
                                                   start = from_path)),
               .f = \(x, y) fs::dir_copy(path = x,
                                         new_path = y,
                                         overwrite = TRUE))
  
  files <- fs::dir_ls(path = from_path,
                      type = "file",
                      all = TRUE)
  
  purrr::walk(files,
              \(x) fs::file_copy(path = x,
                                 new_path = to_path,
                                 overwrite = TRUE))
  # commit and push files
  staged <-
    c(dirs, files) |>
    fs::path_rel(start = from_path) %>%
    fs::path(fs::path_rel(path = to_path,
                          start = repo),
             .) |>
    fs::path_norm() |>
    gert::git_add(repo = repo)
  
  if (nrow(staged)) {
    
    gert::git_commit(message = commit_msg,
                     repo = repo)
    
    gert::git_push(repo = repo,
                   verbose = !quiet)
    
  } else {
    cli::cli_alert_info("No files changed.")
  }
  
  # restore branch if necessary
  if (!is.null(branch)) {
    gert::git_branch_checkout(branch = initial_branch,
                              repo = repo)
  }
  
  invisible(fs::path_abs(path = staged$file,
                         start = repo))
}

#' Deploy pkgdown site to local Git folder
#'
#' @description
#' Copies the static [pkgdown][pkgdown::pkgdown] website files to another local Git folder, then stages, commits and pushes the changes. Use
#' [pkgdown::build_site()] before running this function in order to create the website files.
#'
#' Use this function with **caution** since by default it – except for the paths listed in `never_clean` – **completely wipes the `to_path`
#' directory**!
#' 
#' @details
#' `r pkgsnip::md_snip("rstudio_addin")`
#'
#' # Setting `to_path` via \R options
#'
#' Instead of directly providing a valid `to_path` argument, you can also set `to_path = NULL` and provide
#'
#' - the desired deploy paths for each package in the \R option `yay.local_pkgdown_deploy_paths`. The value of this option must be a named character vector
#'   whereas the names correspond to the package names and the values to the filesystem paths. As an example:
#' 
#'   ```r
#'   options(yay.local_pkgdown_deploy_paths = c(some_pkg    = "/path/to/common_pkgdown_repo/some_pkg",
#'                                              another_pkg = "/path/to/common_pkgdown_repo/another_pkg",
#'                                              foo_pkg     = "/path/to/dedicated_foo_pkgdown_repo"))
#'   ```
#'
#' - a default parent path in the \R option `yay.default_local_pkgdown_deploy_parent_path` that will be used as the deployment parent directory for all packages
#'   that are not explicitly listed in `yay.local_pkgdown_deploy_paths`. For example you could set
#' 
#'   ```r
#'   options(yay.default_local_pkgdown_deploy_parent_path = "/path/to/common_pkgdown_repo")
#'   ```
#'   
#'   which would result in deploying the pkgdown website files of a hypothetical package named _bar_pkg_ to `/path/to/common_pkgdown_repo/bar_pkg`.
#'
#' A suitable place to define the above options is the [`.Rprofile`
#' file](https://support.rstudio.com/hc/en-us/articles/360047157094-Managing-R-with-Rprofile-Renviron-Rprofile-site-Renviron-site-rsession-conf-and-repos-conf)
#' in the user's home directory.
#'
#' @inheritParams deploy_static_site
#' @param pkg_path Path to the \R package of which the pkgdown website files are to be deployed.
#' @param to_path Path to the Git (sub)folder to which the pkgdown website files are to be deployed. If `NULL`, the \R options `yay.local_pkgdown_deploy_paths`
#'   and `yay.default_local_pkgdown_deploy_parent_path` will be respected. See section _Setting `to_path` via R options_ for details.
#' @param use_dev_build Whether or not to deploy the development build of the pkgdown website files. If `NULL`, the
#'   [`development.mode`](https://pkgdown.r-lib.org/reference/build_site.html#development-mode) set in the pkgdown YAML configuration file from `pkg_path` will
#'   be respected.
#'
#' @inherit deploy_static_site return
#' @family gitrepo
#' @export
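#'
#' @examples
#' \dontrun{
#' # deploy the release build of the current package's pkgdown site;
#' # the deploy path below is purely illustrative
#' yay::deploy_pkgdown_site(to_path = "~/git_repos/pkgdown_sites/some_pkg",
#'                          use_dev_build = FALSE)
#'
#' # or rely on the R options described in the details section
#' yay::deploy_pkgdown_site()}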
deploy_pkgdown_site <- function(pkg_path = ".",
                                to_path = NULL,
                                use_dev_build = NULL,
                                clean_to_path = TRUE,
                                never_clean = paths_to_keep,
                                branch = NULL,
                                commit_msg = paste0("auto-deploy pkgdown site for ",
                                                    desc::desc_get_field(file = pkg_path,
                                                                         key = "Package")),
                                quiet = FALSE) {
  
  reason_pkg_required_fn <- pal::reason_pkg_required()
  rlang::check_installed("desc",
                         reason = reason_pkg_required_fn)
  rlang::check_installed("gert",
                         reason = reason_pkg_required_fn)
  rlang::check_installed("pkgdown",
                         reason = reason_pkg_required_fn)
  checkmate::assert_flag(use_dev_build,
                         null.ok = TRUE)
  checkmate::assert_flag(clean_to_path)
  checkmate::assert_character(never_clean,
                              any.missing = FALSE)
  checkmate::assert_string(branch,
                           null.ok = TRUE)
  checkmate::assert_string(commit_msg)
  checkmate::assert_flag(quiet)
  
  if (!(pal::is_pkgdown_dir(pkg_path))) {
    cli::cli_abort("No pkgdown configuration found under path: {.path {pkg_path}}")
  }
  
  # get pkg's pkgdown config
  override <-
    use_dev_build |>
    pal::when(isTRUE(.) ~ list(development = list(mode = "devel")),
              isFALSE(.) ~ list(development = list(mode = "release")),
              ~ list())
  
  config <- pkgdown::as_pkgdown(pkg = pkg_path,
                                override = override)
  
  # determine `to_path` if necessary
  arg_to_path <- to_path
  to_path <- to_path %||% getOption("yay.local_pkgdown_deploy_paths")[config$package]
  to_path_parent <- getOption("yay.default_local_pkgdown_deploy_parent_path")
  to_path <- to_path %||% fs::path_join(c(to_path_parent, config$package))
  to_path_parent <- fs::path_dir(to_path)
  
  if (!checkmate::test_path_for_output(to_path,
                                       overwrite = TRUE)) {
    if (is.null(arg_to_path)) {
      if (is.null(getOption("yay.local_pkgdown_deploy_paths")[config$package])) {
        if (is.null(getOption("yay.default_local_pkgdown_deploy_parent_path"))) {
          cli::cli_abort(paste0("Either set {.arg to_path} directly to a valid filesystem path or provide the path in the R option {.field ",
                                "yay.local_pkgdown_deploy_paths} as a named character vector of the form {.code c({config$package} = ",
                                "\"/path/to/local_git_repo\")}. Another alternative is to set a default parent path in the R option {.field ",
                                "yay.default_local_pkgdown_deploy_parent_path}. See {.code ?yay::deploy_pkgdown_site} for details."))
        } else {
          cli::cli_abort(paste0("The R option {.field yay.default_local_pkgdown_deploy_parent_path} is set, but not to a valid filesystem path",
                                " with write access. Current option value: {.path {to_path_parent}}"))
        }
      } else {
        cli::cli_abort(paste0("The R option {.field yay.local_pkgdown_deploy_paths[\"{config$package}\"]} is set, but not to a valid ",
                              "filesystem path with write access. Current option value: {.path {to_path}}"))
      }
    } else {
      cli::cli_abort("{.arg {to_path}} is not a valid filesystem path.")
    }
  }
  
  # deploy dev build to `dev/` subfolder
  if (config$development$mode == "devel") to_path %<>% fs::path("dev")
  
  # create pkg website subfolder(s) if necessary (only leaf directories will be created because of `checkmate::test_path_for_output()` above)
  if (!fs::dir_exists(to_path)) fs::dir_create(path = to_path)
  
  # determine root path of Git repository
  repo <- gert::git_find(path = to_path)
  
  # change branch if requested
  if (!is.null(branch)) {
    
    initial_branch <- gert::git_branch(repo = repo)
    
    if (!(gert::git_branch_exists(branch = branch,
                                  repo = repo))) {
      cli::cli_abort("Branch {.val {branch}} doesn't exist in Git repository {.path {repo}}.")
    }
    
    gert::git_branch_checkout(branch = branch,
                              repo = repo)
  }
  
  # clean destination path if requested
  if (clean_to_path) {
    clean_git_dir(path = to_path,
                  exclude_paths = never_clean,
                  repo = repo)
  }
  
  # copy files/dirs
  dirs <- fs::dir_ls(path = config$dst_path,
                     type = "directory",
                     regexp = paste0("^\\Q", fs::path(config$dst_path, config$development$destination), "\\E"),
                     all = TRUE,
                     invert = TRUE)
  
  purrr::walk2(.x = dirs,
               .y = fs::path(to_path, fs::path_rel(path = dirs,
                                                   start = config$dst_path)),
               .f = \(x, y) fs::dir_copy(path = x,
                                         new_path = y,
                                         overwrite = TRUE))
  
  files <- fs::dir_ls(path = config$dst_path,
                      type = "file",
                      all = TRUE)
  
  purrr::walk(files,
              \(x) fs::file_copy(path = x,
                                 new_path = to_path,
                                 overwrite = TRUE))
  # commit and push files
  staged <-
    c(dirs, files) |>
    fs::path_rel(start = config$dst_path) %>%
    fs::path(fs::path_rel(path = to_path,
                          start = repo),
             .) |>
    fs::path_norm() |>
    gert::git_add(repo = repo)
  
  if (nrow(staged)) {
    
    gert::git_commit(message = commit_msg,
                     repo = repo)
    
    gert::git_push(repo = repo,
                   verbose = !quiet)
    
  } else {
    cli::cli_alert_info("No files changed.")
  }
  
  # restore branch if necessary
  if (!is.null(branch)) {
    gert::git_branch_checkout(branch = initial_branch,
                              repo = repo)
  }
  
  invisible(fs::path_abs(path = staged$file,
                         start = repo))
}

#' Get Netlify DNS records
#'
#' Retrieves DNS records from Netlify for the specified `domain` using the
#' [`getDnsRecords`](https://open-api.netlify.com/#tag/dnsZone/operation/getDnsRecords) endpoint of [Netlify's RESTful
#' API](https://docs.netlify.com/api/get-started/). 
#'
#' @inheritParams netlify_dns_records_set
#' @param domain Domain name to retrieve DNS records for. This is translated into the corresponding Netlify DNS Zone. A character scalar.
#'
#' @return `r pkgsnip::param_lbl("tibble_cols", cols = cols_dns_records_netlify$key)`
#' @family netlify
#' @export
#'
#' @examples
#' \dontrun{
#' yay::netlify_dns_records_get(domain = "my.site")
#'
#' # to write "portable" record keys to a TOML file `dns_records.toml` with a `records` table
#' # NOTE that the CLI tool `jsontoml` is required for this: https://github.com/pelletier/go-toml/
#' yay::netlify_dns_records_get(domain = "my.site") |>
#'   # remove records which can't be handled via API
#'   dplyr::filter(!managed) |>
#'   # remove Netlify-specific cols
#'   dplyr::select(any_of(yay:::cols_dns_records$key)) |>
#'   # convert to target list structure
#'   list(records = _) |>
#'   # convert to JSON
#'   jsonlite::toJSON(auto_unbox = TRUE,
#'                    pretty = TRUE) |>
#'   # convert JSON to TOML via external CLI
#'   system2(input = _,
#'           stdout = TRUE,
#'           command = "jsontoml") |>
#'   # remove leading newline
#'   _[-1L] |>
#'   # write TOML to file
#'   brio::write_lines(path = "dns_records.toml")}
netlify_dns_records_get <- function(domain,
                                    token = pal::pkg_config_val("netlify_token"),
                                    max_tries = 3L) {
  assert_domain(domain)
  checkmate::assert_string(token)
  checkmate::assert_count(max_tries,
                          positive = TRUE)
  
  zone_id <- stringr::str_replace_all(string = domain,
                                      pattern = stringr::fixed("."),
                                      replacement = "_")
  
  httr2::request(base_url = glue::glue("https://api.netlify.com/api/v1/dns_zones/{zone_id}/dns_records/")) |>
    httr2::req_method(method = "GET") |>
    httr2::req_headers(Authorization = paste0("Bearer ", token)) |>
    httr2::req_retry(max_tries = max_tries) |>
    httr2::req_perform() |>
    httr2::resp_body_json() |>
    # remove empty keys
    purrr::map(purrr::compact) |>
    purrr::map_dfr(tibble::as_tibble_row) |>
    # canonicalize key order
    dplyr::relocate(any_of(cols_dns_records_netlify$key)) |>
    dplyr::arrange(type, hostname, value) |>
    # check and normalize cols (just in case since Netlify's API should already return the fields in the proper types)
    as_dns_records(registrar = "netlify",
                   check_record_types = FALSE)
}

#' Set Netlify DNS records
#'
#' Sets DNS records on Netlify for the specified `domain` using the
#' [`createDnsRecord`](https://open-api.netlify.com/#tag/dnsZone/operation/createDnsRecord) endpoint of [Netlify's RESTful
#' API](https://docs.netlify.com/api/get-started/). DNS `records` must be provided as a dataframe/tibble with the columns
#' `r cols_dns_records_netlify |> dplyr::filter(is_standard) %$% key |> pal::enum_str(wrap = "\x60")`. Further columns are silently ignored.
#'
#' The supported DNS record types are `r dns_record_types$netlify |> pal::wrap_chr("\x60") |> cli::ansi_collapse(last = " and ")`. Netlify's own custom record
#' types `NETLIFY` and `NETLIFY6` cannot be altered via the API and must be configured via [Netlify's web interface](https://app.netlify.com/).
#'
#' @inheritParams pal::req_cached
#' @param records DNS records. A dataframe/tibble with the columns
#'   `r cols_dns_records_netlify |> dplyr::filter(is_standard) %$% key |> pal::enum_str(wrap = "\x60")`. The first three columns are mandatory, columns not
#'   listed here are silently ignored.
#' @param domain Domain name to set DNS records for. This is translated into the corresponding Netlify DNS Zone. A character scalar.
#' @param token [Netlify personal access token](https://docs.netlify.com/api/get-started/#authentication) used for authentication. A character scalar.
#'
#' @return The newly set DNS records.
#' `r pkgsnip::param_lbl("tibble_cols", cols = cols_dns_records_netlify$key, as_sentence = FALSE) |> pal::capitalize_first()`, invisibly.
#' @family netlify
#' @export
#'
#' @examples
#' \dontrun{
#' tibble::tribble(
#'   ~type,   ~hostname,            ~value,               ~ttl,  ~priority, ~weight, ~port, ~flag, ~tag,
#'   "CAA",   "my.site",            "letsencrypt.org",    3600L, NA,        NA,      NA,    NA,    NA,
#'   "CNAME", "autoconfig",         "mailbox.org",        3600L, NA,        NA,      NA,    NA,    NA,
#'   "MX",    "my.site",            "mxext1.mailbox.org", 3600L, NA,        NA,      NA,    NA,    NA,
#'   "SRV",   "_hkps._tcp.my.site", "pgp.mailbox.org",    3600L, 1L,        1L,      443L,  NA,    NA,
#'   "TXT",   "_mta-sts",           "v=STSv1; id=001",    3600L, NA,        NA,      NA,    NA,    NA) |>
#'   yay::netlify_dns_records_set(domain = "my.site")
#'                                
#' # to use a TOML file that defines a `records` table as input:
#' pal::toml_read("dns_records.toml")$records |>
#'   purrr::map_dfr(tibble::as_tibble_row) |>
#'   yay::netlify_dns_records_set(domain = "my.site")}
netlify_dns_records_set <- function(records,
                                    domain,
                                    token = pal::pkg_config_val("netlify_token"),
                                    max_tries = 3L) {
  
  records %<>% as_dns_records(registrar = "netlify")
  assert_domain(domain)
  checkmate::assert_string(token)
  checkmate::assert_count(max_tries,
                          positive = TRUE)
  
  zone_id <- stringr::str_replace_all(string = domain,
                                      pattern = stringr::fixed("."),
                                      replacement = "_")
  
  pal::cli_progress_step_quick(msg = "Setting {.val {nrow(records)}} DNS record{?s} on Netlify for domain {.field {domain}}")
  
  records |>
    purrr::pmap(.progress = TRUE,
                .f = \(type,
                       hostname,
                       value,
                       ttl,
                       priority,
                       weight,
                       port,
                       flag,
                       tag,
                       ...) {
                  
                  httr2::request(base_url = glue::glue("https://api.netlify.com/api/v1/dns_zones/{zone_id}/dns_records/")) |>
                    httr2::req_method(method = "POST") |>
                    httr2::req_headers(Authorization = paste0("Bearer ", token),
                                       `Content-Type` = "application/json") |>
                    httr2::req_body_json(purrr::discard(list(type = type,
                                                             hostname = hostname,
                                                             value = value,
                                                             ttl = ttl,
                                                             priority = priority,
                                                             weight = weight,
                                                             port = port,
                                                             flag = flag,
                                                             tag = tag),
                                                        is.na)) |>
                    httr2::req_retry(max_tries = max_tries) |>
                    httr2::req_perform() |>
                    httr2::resp_body_json() |>
                    # remove empty keys
                    purrr::compact() |>
                    tibble::as_tibble_row() |>
                    # canonicalize key order
                    dplyr::relocate(any_of(cols_dns_records_netlify$key))
                }) |>
    purrr::list_rbind() |>
    # canonicalize row order
    dplyr::arrange(type, hostname, value) |>
    invisible()
}

#' Delete Netlify DNS records
#'
#' Deletes DNS records on Netlify for the specified `domain` using the [`deleteDnsRecord`](https://open-api.netlify.com/#tag/dnsZone/operation/deleteDnsRecord)
#' endpoint of [Netlify's RESTful API](https://docs.netlify.com/api/get-started/). DNS `records` must be provided as either a character vector of DNS record
#' identifiers or a dataframe/tibble with an `id` column. Further columns are silently ignored.
#'
#' @inherit netlify_dns_records_set details
#'
#' @inheritParams netlify_dns_records_set
#' @param records DNS records to delete. A character vector of record identifiers or a dataframe/tibble with an `id` column. Further columns are silently
#'   ignored.
#' @param domain Domain name to delete DNS records for. This is translated into the corresponding Netlify DNS Zone. A character scalar.
#'
#' @return A character vector of deleted record identifiers, invisibly.
#' @family netlify
#' @export
#'
#' @examples
#' \dontrun{
#' yay::netlify_dns_records_delete(domain = "my.site",
#'                                 records = "xyz123")
#' 
#' # The output of `netlify_dns_records_get()` can be fed in directly. To delete all (!) records:
#' yay::netlify_dns_records_get(domain = "my.site") |>
#'   dplyr::filter(!managed) |>
#'   yay::netlify_dns_records_delete(domain = "my.site")}
netlify_dns_records_delete <- function(records,
                                       domain,
                                       token = pal::pkg_config_val("netlify_token"),
                                       max_tries = 3L) {
  records %<>% as_dns_record_ids()
  assert_domain(domain)
  checkmate::assert_string(token)
  checkmate::assert_count(max_tries,
                          positive = TRUE)
  
  zone_id <- stringr::str_replace_all(string = domain,
                                      pattern = stringr::fixed("."),
                                      replacement = "_")
  
  pal::cli_progress_step_quick(msg = "Deleting {.val {length(records)}} DNS record{?s} on Netlify for domain {.field {domain}}")
  
  records |>
    purrr::walk(.progress = TRUE,
                .f = \(dns_record_id) {
                  
                  httr2::request(base_url = glue::glue("https://api.netlify.com/api/v1/dns_zones/{zone_id}/dns_records/{dns_record_id}")) |>
                    httr2::req_method(method = "DELETE") |>
                    httr2::req_headers(Authorization = paste0("Bearer ", token)) |>
                    httr2::req_retry(max_tries = max_tries) |>
                    httr2::req_perform()
                })
  
  invisible(records)
}

#' Get Porkbun DNS records
#'
#' Retrieves DNS records from Porkbun for the specified `domain` using the
#' [`/api/json/v3/dns/retrieve/{domain}`](https://porkbun.com/api/json/v3/documentation#DNS%20Retrieve%20Records%20by%20Domain%20or%20ID) endpoint of Porkbun's
#' API.
#'
#' @inheritParams pal::req_cached
#' @param domain Domain name to retrieve DNS records for. A character scalar.
#' @param api_key [Porkbun API key](https://porkbun.com/api/json/v3/documentation#Authentication) used for authentication. A character scalar.
#' @param secret_api_key [Porkbun secret API key](https://porkbun.com/api/json/v3/documentation#Authentication) used for authentication. A character scalar.
#'
#' @return  `r pkgsnip::param_lbl("tibble_cols", cols = cols_dns_records_porkbun$key)`
#' @family porkbun
#' @export
#'
#' @examples
#' \dontrun{
#' yay::porkbun_dns_records_get(domain = "my.site")
#'
#' # to write "portable" record keys to a TOML file `dns_records.toml` with a `records` table
#' # NOTE that the CLI tool `jsontoml` is required for this: https://github.com/pelletier/go-toml/
#' yay::porkbun_dns_records_get(domain = "my.site") |>
#'   # remove Porkbun-specific cols
#'   dplyr::select(any_of(yay:::cols_dns_records$key)) |>
#'   # convert to target list structure
#'   list(records = _) |>
#'   # convert to JSON
#'   jsonlite::toJSON(auto_unbox = TRUE,
#'                    pretty = TRUE) |>
#'   # convert JSON to TOML via external CLI
#'   system2(input = _,
#'           stdout = TRUE,
#'           command = "jsontoml") |>
#'   # remove leading newline
#'   _[-1L] |>
#'   # write TOML to file
#'   brio::write_lines(path = "dns_records.toml")}
porkbun_dns_records_get <- function(domain,
                                    api_key = pal::pkg_config_val("porkbun_api_key"),
                                    secret_api_key = pal::pkg_config_val("porkbun_secret_api_key"),
                                    max_tries = 3L) {
  assert_domain(domain)
  
  perform_porkbun_req(url = glue::glue("https://api.porkbun.com/api/json/v3/dns/retrieve/{domain}"),
                      api_key = api_key,
                      secret_api_key = secret_api_key,
                      max_tries = max_tries) |>
    purrr::chuck("records") |>
    purrr::map(\(x) {
      
      x$value <- switch(EXPR  = x$type,
                        CAA   = {
                          fields <- stringr::str_split_1(x$content, " ")
                          x$flag <- fields[1L]
                          x$tag <- fields[2L]
                          fields[3L]
                        },
                        HTTPS = {
                          fields <- stringr::str_split_1(x$content, " ")
                          x$prio <- fields[1L]
                          x$target <- fields[2L]
                          fields[3L]
                        },
                        SRV   = {
                          fields <- stringr::str_split_1(x$content, " ")
                          x$weight <- fields[1L]
                          x$port <- fields[2L]
                          fields[3L]
                        },
                        SVCB  = {
                          fields <- stringr::str_split_1(x$content, " ")
                          x$prio <- fields[1L]
                          x$target <- fields[2L]
                          fields[3L]
                        },
                        x$content)
      tibble::as_tibble_row(purrr::compact(x))
    }) |>
    purrr::list_rbind() |>
    dplyr::select(-any_of("content")) |>
    dplyr::rename_with(.cols = everything(),
                       .fn = \(x) dplyr::case_match(x,
                                                    "name" ~ "hostname",
                                                    "prio" ~ "priority",
                                                    .default = x)) |>
    # canonicalize key order
    dplyr::relocate(any_of(cols_dns_records_porkbun$key)) %>%
    dplyr::arrange(!!!rlang::syms(intersect(c("type", "hostname", "value"),
                                            colnames(.)))) |>
    # normalize cols
    as_dns_records(registrar = "porkbun")
}

#' Set Porkbun DNS records
#'
#' @description
#' Sets DNS records on Porkbun for the specified `domain` using the
#' [`/api/json/v3/dns/create/{domain}`](https://porkbun.com/api/json/v3/documentation#DNS%20Create%20Record) endpoint of Porkbun's API. DNS `records` must be
#' provided as a dataframe/tibble with the columns `r cols_dns_records_porkbun |> dplyr::filter(is_standard) %$% key |> pal::enum_str(wrap = "\x60")`. Further
#' columns are silently ignored.
#'
#' @details
#' The supported DNS record types are `r dns_record_types$porkbun |> pal::wrap_chr("\x60") |> cli::ansi_collapse(last = " and ")`.
#'
#' @inheritParams porkbun_dns_records_get
#' @inheritParams netlify_dns_records_set
#' @param records DNS records. A dataframe/tibble with the columns
#'   `r cols_dns_records_porkbun |> dplyr::filter(is_standard) %$% key |> pal::enum_str(wrap = "\x60")`. The first three columns are mandatory, columns not
#'   listed here are silently ignored.
#' @param domain Domain name to set DNS records for. A character scalar.
#'
#' @return A character vector of created record identifiers, invisibly.
#' @family porkbun
#' @export
#'
#' @examples
#' \dontrun{
#' tibble::tribble(
#'   ~type,   ~hostname,          ~value,           ~ttl, ~priority, ~weight, ~port, ~flag, ~tag,
#'   "CAA",   "my.site",          "letsencrypt.org", 3600L, NA,      NA,      NA,    0L,    "issue",
#'   "CNAME", "autoconfig",       "mailbox.org",     3600L, NA,      NA,      NA,    NA,    NA,
#'   "MX",    "my.site",          "mxext1.mailbox.org", 3600L, NA,   NA,      NA,    NA,    NA,
#'   "SRV",   "_hkps._tcp.my.site", "pgp.mailbox.org",    3600L, 1L,   1L,      443L,  NA,    NA,
#'   "TXT",   "_mta-sts",           "v=STSv1; id=001",    3600L, NA,   NA,      NA,    NA,    NA) |>
#'   yay::porkbun_dns_records_set(domain = "my.site")
#'                                
#' # to use a TOML file that defines a `records` table as input:
#' pal::toml_read("dns_records.toml")$records |>
#'   purrr::map_dfr(tibble::as_tibble_row) |>
#'   yay::porkbun_dns_records_set(domain = "my.site")}
porkbun_dns_records_set <- function(records,
                                    domain,
                                    api_key = pal::pkg_config_val("porkbun_api_key"),
                                    secret_api_key = pal::pkg_config_val("porkbun_secret_api_key"),
                                    max_tries = 3L) {
  
  records %<>% as_dns_records(registrar = "porkbun")
  assert_domain(domain)
  
  pal::cli_progress_step_quick(msg = "Setting {.val {nrow(records)}} DNS record{?s} on Porkbun for domain {.field {domain}}")
  
  records |>
    purrr::pmap_chr(.progress = TRUE,
                    .f = \(type,
                           hostname,
                           value,
                           ttl,
                           priority,
                           weight,
                           port,
                           flag,
                           tag,
                           target,
                           ...) {
                      
                      # handle root domain properly
                      hostname %<>% stringr::str_remove(pattern = paste0(stringr::str_escape(domain), "$"))
                      
                      # assemble type-specific content
                      content <- switch(EXPR  = type,
                                        CAA   = paste(flag, tag, value),
                                        HTTPS = paste(priority, target, value),
                                        SRV   = paste(weight, port, value),
                                        SVCB  = paste(priority, target, value),
                                        value)
                      
                      # priority must be either absent or set via `content` for most record types
                      if (!(type %in% c("MX", "SRV"))) {
                        priority <- NA_character_
                      }
                      
                      perform_porkbun_req(url = glue::glue("https://api.porkbun.com/api/json/v3/dns/create/{domain}"),
                                          data = purrr::discard(list(name = hostname,
                                                                     type = type,
                                                                     content = content,
                                                                     ttl = ttl,
                                                                     prio = priority),
                                                                is.na),
                                          api_key = api_key,
                                          secret_api_key = secret_api_key,
                                          max_tries = max_tries) |>
                        purrr::chuck("id")
                    }) |>
    invisible()
}

#' Delete Porkbun DNS records
#'
#' Deletes DNS records on Porkbun for the specified `domain` using the
#' [`/api/json/v3/dns/delete/{domain}/{dns_record_id}`](https://porkbun.com/api/json/v3/documentation#DNS%20Delete%20Record%20by%20Domain%20and%20ID) endpoint
#' of Porkbun's API. DNS `records` must be provided as either a character vector of DNS record identifiers or a dataframe/tibble with an `id` column. Further
#' columns are silently ignored.
#'
#' @inheritParams porkbun_dns_records_set
#' @param domain Domain name to delete DNS records for. A character scalar.
#'
#' @return A character vector of deleted record identifiers, invisibly.
#' @family porkbun
#' @export
#'
#' @examples
#' \dontrun{
#' yay::porkbun_dns_records_delete(domain = "my.site",
#'                                 records = "xyz123")
#' 
#' # The output of `porkbun_dns_records_get()` can be fed in directly. To delete all (!) non-NS records:
#' yay::porkbun_dns_records_get(domain = "my.site") |>
#'   dplyr::filter(type != "NS") |>
#'   yay::porkbun_dns_records_delete(domain = "my.site")}
porkbun_dns_records_delete <- function(records,
                                       domain,
                                       api_key = pal::pkg_config_val("porkbun_api_key"),
                                       secret_api_key = pal::pkg_config_val("porkbun_secret_api_key"),
                                       max_tries = 3L) {
  records %<>% as_dns_record_ids()
  
  pal::cli_progress_step_quick(msg = "Deleting {.val {length(records)}} DNS record{?s} on Netlify for domain {.field {domain}}")
  
  records |>
    purrr::walk(.progress = TRUE,
                .f = \(dns_record_id) {
                  
                  perform_porkbun_req(url = glue::glue("https://api.porkbun.com/api/json/v3/dns/delete/{domain}/{dns_record_id}"),
                                      api_key = api_key,
                                      secret_api_key = secret_api_key,
                                      max_tries = max_tries)
                })
  
  invisible(records)
}

#' List files and directories in GitHub repository
#'
#' Lists file and directory names found under
#' [`rev:path`](https://git-scm.com/docs/revisions#Documentation/revisions.txt-emltrevgtltpathgtemegemHEADREADMEememmasterREADMEem) in a GitHub repository via
#' [GitHub's GraphQL API](https://docs.github.com/en/graphql/overview/about-the-graphql-api).
#'
#' `r md_gh_pat`
#'
#' Note that an empty character vector is returned in case `path` is invalid or no file/directory exists underneath `path`.
#'
#' @param owner Repository owner's GitHub user or organization name. A character scalar.
#' @param name Repository name. A character scalar.
#' @param path Path to a directory, relative to the repository root. A character scalar.
#' @param rev [Git revision expression](https://git-scm.com/docs/revisions#_specifying_revisions) matching the desired Git tree object, e.g. a branch or tag
#'   name or another symbolic reference like `"HEAD@{yesterday}"` or `"HEAD~10"`. A character scalar.
#' @param recurse Whether or not to recurse into subdirectories of `path`.
#' @param incl_dirs Whether or not to list directories (and subdirectories if `recurse = TRUE`).
#' @param incl_files Whether or not to list files (also inside subdirectories if `recurse = TRUE`).
#'
#' @return A character vector of paths to the files and subdirectories found under `rev:path`, relative to the repository root.
#' @family gh
#' @export
#'
#' @examples
#' # you can opt-out from directory recursion
#' yay::gh_dir_ls(owner = "rpkg-dev",
#'                name = "pal",
#'                recurse = FALSE) |>
#'   pal::cat_lines()
#'
#' # you can list only files in a directory
#' yay::gh_dir_ls(owner = "rpkg-dev",
#'                name = "pal",
#'                path = "tests",
#'                incl_dirs = FALSE) |>
#'   pal::cat_lines()
#'
#' # or you can list only directories in a directory
#' yay::gh_dir_ls(owner = "rpkg-dev",
#'                name = "pal",
#'                path = "tests",
#'                incl_files = FALSE) |>
#'   pal::cat_lines()
gh_dir_ls <- function(owner,
                      name,
                      path = ".",
                      rev = "HEAD",
                      recurse = TRUE,
                      incl_dirs = TRUE,
                      incl_files = TRUE) {
  
  rlang::check_installed("gh",
                         reason = reason_pkg_required_gh)
  checkmate::assert_string(owner)
  checkmate::assert_string(name)
  checkmate::assert_string(path)
  checkmate::assert_string(rev)
  checkmate::assert_flag(recurse)
  checkmate::assert_flag(incl_dirs)
  checkmate::assert_flag(incl_files)
  
  path_norm <- normalize_tree_path(path)
  
  entries <-
    gh::gh_gql(query = 'query($name:String!, $owner:String!, $expression:String!) {
                          repository(name: $name, owner: $owner) {
                            object(expression: $expression) {
                              ... on Tree {
                                entries {
                                  path
                                  type
                                }
                              }
                            }
                          }
                        }',
               variables = list(name = name,
                                owner = owner,
                                expression = glue::glue("{rev}:{path_norm}"))) |>
    purrr::pluck("data", "repository", "object", "entries") %||%
    list()
  
  dirs <-
    entries |>
    purrr::keep(\(x) x$type == "tree") |>
    purrr::map_depth(.depth = 1L,
                     .f = \(x) purrr::pluck(x, "path")) |>
    purrr::list_c(ptype = character())
  
  result <-
    entries |>
    purrr::keep(\(x) x$type %in% c("blob"[incl_files], "tree"[incl_dirs])) |>
    purrr::map_depth(.depth = 1L,
                     .f = \(x) purrr::pluck(x, "path")) |>
    purrr::list_c(ptype = character())
  
  if (recurse && length(dirs) > 0L) {
    
    result <-
      dirs |>
      purrr::map(\(x) gh_dir_ls(owner = owner,
                                name = name,
                                path = x,
                                rev = rev,
                                recurse = TRUE,
                                incl_dirs = incl_dirs,
                                incl_files = incl_files)) |>
      purrr::list_c(ptype = character()) |>
      c(result)
  }
  
  sort(result)
}

#' Read in text file from GitHub repository
#'
#' Downloads the text file under the specified path from a GitHub repository via [GitHub's GraphQL
#' API](https://docs.github.com/en/graphql/overview/about-the-graphql-api) and returns its content as a string.
#'
#' @details
#' `r md_gh_pat`
#'
#' Note that nothing is returned in case of a [_binary_ file](https://en.wikipedia.org/wiki/Binary_file), as if no file at all existed under the given
#' `path`.
#'
#' @inheritParams gh_dir_ls
#' @param path Path to a file, relative to the repository root. A character scalar.
#'
#' @return A character scalar, or an empty character vector in case no text file is found under
#'   [`rev:path`](https://git-scm.com/docs/revisions#Documentation/revisions.txt-emltrevgtltpathgtemegemHEADREADMEememmasterREADMEem).
#' @family gh
#' @export
#'
#' @examples
#' yay::gh_text_file(owner = "rpkg-dev",
#'                   name = "pal",
#'                   path = "pal.Rproj",
#'                   rev = "HEAD~2") |>
#'   cat()
gh_text_file <- function(owner,
                         name,
                         path,
                         rev = "HEAD") {
  
  rlang::check_installed("gh",
                         reason = reason_pkg_required_gh)
  checkmate::assert_string(owner)
  checkmate::assert_string(name)
  checkmate::assert_string(path)
  checkmate::assert_string(rev)
  
  path %<>% normalize_tree_path()
  
  result <- gh::gh_gql(query = 'query ($name: String!, $owner: String!, $expression: String!) {
                        repository(name: $name, owner: $owner) {
                          object(expression: $expression) {
                            ... on Blob {
                              text
                            }
                          }
                        }
                      }',
                      variables = list(name = name,
                                       owner = owner,
                                       expression = glue::glue("{rev}:{path}"))) |>
    purrr::pluck("data", "repository", "object", "text")
  
  if (is.null(result)) {
    result <- character()
  }
  
  result
}

#' Read in text files from GitHub repository
#'
#' @description
#' Downloads all text files under the specified path from a GitHub repository via [GitHub's GraphQL
#' API](https://docs.github.com/en/graphql/overview/about-the-graphql-api) and returns a named character vector with the file paths as names and the file
#' contents as values.
#'
#' This is a simple convenience function combining [gh_dir_ls()] and [gh_text_file()]. 
#'
#' @inherit gh_text_file details
#'
#' @inheritParams gh_dir_ls
#' @param recurse Whether or not to also include text files in subfolders of `path`. Enabling this option may result in many API calls and thus produce a
#'   significant delay.
#'
#' @return A named character vector of length equal to the number of files found under `rev:path` with the file paths as names and the file contents as values.
#' @family gh
#' @export
#'
#' @examples
#' yay::gh_text_files(owner = "rpkg-dev",
#'                    name = "pal",
#'                    path = "tests") |>
#'   str()
#' 
#' # you have to opt in to directory recursion
#' yay::gh_text_files(owner = "rpkg-dev",
#'                    name = "pal",
#'                    path = "tests",
#'                    recurse = TRUE) |>
#'   str()
gh_text_files <- function(owner,
                          name,
                          path = ".",
                          rev = "HEAD",
                          recurse = FALSE) {
  
  gh_dir_ls(owner = owner,
            name = name,
            path = path,
            rev = rev,
            recurse = recurse,
            incl_dirs = FALSE) %>%
    magrittr::set_names(x = .,
                        value = .) |>
    purrr::map(\(x) gh_text_file(owner = owner,
                                 name = name,
                                 path = x,
                                 rev = rev)) |>
    purrr::compact() |>
    unlist()
}

#' List releases from GitHub repository
#'
#' Uses [gh::gh()] to fetch all available [releases](https://docs.github.com/repositories/releasing-projects-on-github) of the specified GitHub repository
#' via [GitHub's RESTful API](https://docs.github.com/en/rest/releases/releases#list-releases) and returns them as a [tibble][tibble::tbl_df] containing the
#' columns `r pal::enum_str(cols_gh_releases, wrap = "\x60")`.
#'
#' @param owner GitHub repository owner (GitHub user or organization). A character scalar.
#' @param name Repository name. A character scalar.
#' @param n Number of releases to return. A positive integer or `Inf`. If `Inf`, all releases are returned, otherwise only the most recent `n` releases. Note
#'   that requesting many releases may result in multiple GitHub API calls, which can take a considerable amount of time.
#' @param incl_pre_releases Whether or not to include pre-releases in the result. Pre-releases are usually not deemed ready for production and may be unstable.
#'   For more information, see the [GitHub Docs](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository).
#' @param incl_drafts Whether or not to include draft releases in the result. For more information, see the [GitHub
#'   Docs](https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository).
#'
#' @return `r pkgsnip::param_lbl(id = "tibble_cols", cols = cols_gh_releases)`
#' @family gh
#' @export
#'
#' @examples
#' yay::gh_releases(owner = "jgm",
#'                  name = "pandoc",
#'                  n = 3L)
gh_releases <- function(owner,
                        name,
                        n = Inf,
                        incl_pre_releases = FALSE,
                        incl_drafts = FALSE) {
  
  checkmate::assert_string(owner)
  checkmate::assert_string(name)
  pal::assert_inf_count(n)
  checkmate::assert_flag(incl_pre_releases)
  checkmate::assert_flag(incl_drafts)
  rlang::check_installed("gh",
                         reason = pal::reason_pkg_required())
  
  gh::gh(endpoint = "/repos/{owner}/{name}/releases", # nolint
         owner = owner,
         name = name,
         .method = "GET",
         .limit = n) |>
    purrr::keep(.p = \(x) if (incl_pre_releases) TRUE else isFALSE(x$prerelease)) |>
    purrr::keep(.p = \(x) if (incl_drafts) TRUE else isFALSE(x$draft)) |>
    purrr::map(gh_release_as_tibble) |>
    purrr::list_rbind() |>
    dplyr::arrange(dplyr::desc(version_nr),
                   is_pre_release,
                   is_draft)
}

#' Get latest release from GitHub repository
#'
#' Uses [gh::gh()] to fetch the latest [GitHub release](https://docs.github.com/repositories/releasing-projects-on-github) of the specified GitHub repository
#' via [GitHub's RESTful API](https://docs.github.com/en/rest/releases/releases#get-the-latest-release) and returns it as a [numeric
#' version][numeric_version()].
#'
#' @inheritParams gh_releases
#'
#' @return `r pkgsnip::param_lbl("num_vrsn")`
#' @family gh
#' @export
#'
#' @examples
#' yay::gh_release_latest(owner = "jgm",
#'                        name = "pandoc")
gh_release_latest <- function(owner,
                              name) {
  
  checkmate::assert_string(owner)
  checkmate::assert_string(name)
  rlang::check_installed("gh",
                         reason = pal::reason_pkg_required())
  
  gh::gh(endpoint = "/repos/{owner}/{name}/releases/latest", # nolint
         owner = owner,
         name = name,
         .method = "GET") |>
    gh_release_as_tibble()
}

#' Replace matched patterns in strings _verbosely_
#'
#' Applies a series of regular-expression-replacement pairs to one or more strings. All performed replacements are displayed on the console by default
#' (`verbose = TRUE`).
#'
#' This function provides a subset of [stringr::str_replace_all()]'s functionality. If you don't need the visual console output, it's recommended to use the
#' latter directly.
#'
#' @param pattern A named character vector with patterns as names and replacements as values (`c(pattern1 = replacement1)`). Patterns are interpreted as
#'   regular expressions as described in [stringi::stringi-search-regex()]. Replacements are interpreted as-is, except that references of the form `\1`, `\2`,
#'   etc. will be replaced with the contents of the respective matched group (created in patterns using `()`). Pattern-replacement pairs are processed in the
#'   order given, meaning that first listed pairs are applied before later listed ones.
#' @param verbose Whether or not to display replacements on the console.
#' @param n_context_chrs The (maximum) number of characters displayed around the actual `string` and its replacement. The number refers to a single side of
#'   `string`/replacement, so the total number of context characters is at most `2 * n_context_chrs`. Only relevant if `verbose = TRUE`.
#' @inheritParams stringr::str_replace_all
#'
#' @return A character vector.
#' @family string
#' @export
#'
#' @examples
#' "Make love, not war" |>
#'   yay::str_replace_verbose(pattern = c("love" = "hummus",
#'                                        "war" = "walls"))
#'
#' # pattern-replacement pairs are processed one-by-one, so the following gives the same result
#' "Make love, not war" |>
#'   yay::str_replace_verbose(pattern = c("love" = "hummus",
#'                                        "hummus, not war" = "hummus, not walls"))
#'
#' # varying `n_context_chrs` affects console output summarization
#' input <- c("Tulips are not durable, ",
#'            "not scarce, ",
#'            "not programmable, ",
#'            "not fungible, ",
#'            "not verifiable, ",
#'            "not divisible, ",
#'            "and hard to transfer. ",
#'            "But tell me more about your analogy...",
#'            "",
#'            "-[Naval Ravikant](https://twitter.com/naval/status/939316447318122496)")
#'
#' pattern <- c("not" = "extremely",
#'              "hard" = "ridiculously easy",
#'              "^But.*" = "So... flower power?",
#'              "(^-).*Naval.*" = "\\1\U0001F92A")
#'
#' yay::str_replace_verbose(string = input,
#'                          pattern = pattern,
#'                          n_context_chrs = 5L) |>
#'   pal::cat_lines()
#'
#' yay::str_replace_verbose(string = input,
#'                          pattern = pattern,
#'                          n_context_chrs = 0L) |>
#'   pal::cat_lines()
str_replace_verbose <- function(string,
                                pattern,
                                verbose = TRUE,
                                n_context_chrs = 20L) {
  
  checkmate::assert_flag(verbose)
  checkmate::assert_character(pattern, any.missing = FALSE)
  if (!checkmate::test_named(pattern)) rlang::abort("All elements of `pattern` must be named (names are patterns, values are replacements).")
  n_context_chrs <- checkmate::assert_count(n_context_chrs, coerce = TRUE)
  
  # print replacement info for humans
  if (verbose) {
    
    # we have to process each pattern-replacement pair one-by-one because, unlike `stringr::str_replace_all()`, `stringr::str_locate_all()` doesn't support
    # the pair-wise pattern-replacement spec
    string_changed <- string
    msgs <- tibble::tibble(minus = character(),
                           plus = character(),
                           i_pattern = integer())
    
    for (i in seq_along(pattern)) {
      
      msgs <-
        str_replace_verbose_single_info(string = string_changed,
                                        pattern = pattern[i],
                                        n_context_chrs = n_context_chrs) |>
        dplyr::mutate(i_pattern = i) %>%
        dplyr::bind_rows(msgs, .)
      
      string_changed %<>% stringr::str_replace_all(pattern = pattern[i])
    }
    
    msgs |>
      # we need to reframe instead of summarize since there can be 0 rows in the result
      dplyr::reframe(i_pattern = pal::safe_min(i_pattern),
                     n = dplyr::n(),
                     .by = c(minus, plus)) |>
      # since reframing can change row order, we need to restore the original order
      dplyr::arrange(i_pattern) |>
      purrr::pwalk(\(minus, plus, i_pattern, n) {
        
        # plain `cat()` is used instead of cli's output functions so that `{` and `}` in the strings aren't interpreted as inline markup, cf. ?cli::`inline-markup`
        cat(n, "\u00D7 ", minus, "\n",
            sep = "")
        cat(rep("\u00A0", times = nchar(n) + 2L), plus, "\n",
            sep = "")
      })
    
    result <- string_changed
    
  } else {
    result <- stringr::str_replace_all(string = string,
                                       pattern = pattern)
  }
  
  # return result
  result
}

# helper to generate console info for a single pattern-replacement pair
str_replace_verbose_single_info <- function(string,
                                            pattern,
                                            n_context_chrs) {
  
  # define dark background colors that are easy on the eyes based on ANSI escape sequences
  # (shouldn't be outsourced to package-constants since the number of supported terminal colors couldn't be auto-detected that way)
  bg_red_dark <- cli::make_ansi_style("#330000",
                                      bg = TRUE)
  
  bg_green_dark <- cli::make_ansi_style("#003300",
                                        bg = TRUE)
  
  # escape newlines (in case replacement contains newlines)
  replacement <- pal::escape_lf(as.character(pattern))
  
  stringr::str_locate_all(string = string,
                          pattern = names(pattern)) |>
    purrr::map2_dfr(.y = string,
                    .f = \(positions, string) {
                      
                      positions %<>% dplyr::as_tibble() %>% dplyr::filter(start <= end)
                      
                      purrr::map2_dfr(.x = positions$start,
                                      .y = positions$end,
                                      .f = \(start, end) {
                                        
                                        # reduce to `string` excerpt of +/- `n_context_chrs`
                                        ## determine if we prune
                                        prune_start <- (start - n_context_chrs) > 1L 
                                        prune_end <- (end + n_context_chrs) < nchar(string)
                                        
                                        ## extract excerpt
                                        ### begin (part before `pattern`)
                                        excerpt_begin <- stringr::str_sub(string = string,
                                                                          start = ifelse(prune_start,
                                                                                         start - n_context_chrs,
                                                                                         1L),
                                                                          end = start - 1L)
                                        ### the `pattern` as-is, i.e. without regex syntax
                                        pattern_asis <- stringr::str_sub(string = string,
                                                                         start = start,
                                                                         end = end)
                                        ### end (part after `pattern`)
                                        excerpt_end <- stringr::str_sub(string = string,
                                                                        start = end + 1L,
                                                                        end = ifelse(prune_end,
                                                                                     end + n_context_chrs,
                                                                                     -1L))
                                        
                                        # mark pruned excerpt start/end with an ellipsis
                                        if (prune_start) excerpt_begin %<>% paste0(unicode_ellipsis, .)
                                        
                                        if (prune_end) excerpt_end %<>% paste0(unicode_ellipsis)
                                        
                                        # escape newlines (in case pattern contains newlines)
                                        excerpt_begin %<>% pal::escape_lf()
                                        pattern_asis %<>% pal::escape_lf()
                                        excerpt_end %<>% pal::escape_lf()
                                        
                                        # assemble info msgs
                                        tibble::tibble(minus = pal::as_str(cli::col_red("-"), " ", cli::bg_black(excerpt_begin),
                                                                           cli::style_strikethrough(bg_red_dark(pattern_asis)), cli::bg_black(excerpt_end)),
                                                       plus = pal::as_str(cli::col_green("+"), " ", cli::bg_black(excerpt_begin), bg_green_dark(replacement),
                                                                          cli::bg_black(excerpt_end)))
                                      })
                    })
}

#' Replace matched patterns in text files
#'
#' Applies pattern-based string replacements to one or more files. Expects a series of regular-expression-replacement pairs that are applied one-by-one in the
#' given order. By default, all performed replacements are displayed on the console (`verbose = TRUE`) without actually changing any file content
#' (`run_dry = TRUE`).
#'
#' Note that `process_line_by_line` requires the [line ending standard (EOL)](https://en.wikipedia.org/wiki/Newline) of the input files to be set correctly via
#' `eol`. It _always_ defaults to `"LF"` (Unix standard) since the EOL cannot be reliably detected without complex heuristics (and even then not unambiguously
#' in all edge cases). Simply deriving the default from the host OS (e.g. `"LF"` on Unix systems like Linux and macOS, `"CRLF"` on Windows) would be a bad idea
#' with regard to cross-system collaboration (files shared via Git etc.), so this approach was deliberately avoided.
#'
#' The text files are assumed to be in [UTF-8 character encoding](https://en.wikipedia.org/wiki/UTF-8), other encodings are not supported.
#'
#' @param path Paths to the text files. A character vector.
#' @param process_line_by_line Whether each line in a file should be treated as a separate string or the whole file as one single string. While the latter is 
#'   more performant, you probably want the former if you're using `"^"` or `"$"` in your `pattern`s.
#' @param eol `r pkgsnip::param_lbl("eol") |> stringr::str_replace(stringr::fixed("."), ". Only relevant if \x60process_line_by_line = TRUE\x60.")`
#' @param show_rel_path Whether or not to display file `path`s as relative from the current working directory. If `FALSE`, absolute paths are displayed. Only
#'   relevant if `verbose = TRUE`.
#' @param run_dry Whether or not to show replacements on the console only, without actually modifying any files. Implies `verbose = TRUE`.
#' @inheritParams str_replace_verbose
#'
#' @return `path` invisibly.
#' @family string
#' @export
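#'
#' @examples
#' # minimal illustrative sketch (not part of the original docs): dry-run replacements on a
#' # throwaway temporary file; the file content below is purely hypothetical
#' tmp <- tempfile(fileext = ".txt")
#' writeLines(c("foo bar", "foo baz"),
#'            con = tmp)
#'
#' yay::str_replace_file(path = tmp,
#'                       pattern = c("foo" = "qux"),
#'                       process_line_by_line = TRUE)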
str_replace_file <- function(path,
                             pattern,
                             process_line_by_line = FALSE,
                             eol = c("LF", "CRLF", "CR", "LFCR"),
                             verbose = TRUE,
                             n_context_chrs = 20L,
                             show_rel_path = TRUE,
                             run_dry = TRUE) {
  
  rlang::check_installed("brio",
                         reason = pal::reason_pkg_required())
  checkmate::assert_file_exists(path,
                                access = "r")
  checkmate::assert_flag(process_line_by_line)
  eol %<>% pal::as_line_feed_chr()
  checkmate::assert_flag(verbose)
  checkmate::assert_flag(show_rel_path)
  checkmate::assert_flag(run_dry)
  
  if (run_dry && !verbose) {
    rlang::abort("Setting `run_dry = TRUE` and `verbose = FALSE` at the same time is pointless.")
    
  } else if (run_dry) {
    cli::cli_alert_info("Running in dry mode. Set {.code run_dry = FALSE} to actually modify any files.")
  }
  
  purrr::walk(.x = path,
              .f = \(path) {
                
                # print file progress info
                if (verbose) {
                  
                  path_show <- ifelse(show_rel_path,
                                      fs::path_rel(path),
                                      fs::path_abs(path))
                  
                  pal::cli_progress_step_quick("Processing file {.file {path_show}}{unicode_ellipsis}")
                }
                
                # perform replacement
                input <- brio::read_file(path = path)
                
                if (process_line_by_line) {
                  input %<>% stringr::str_split_1(pattern = eol)
                }
                
                output <- str_replace_verbose(string = input,
                                              pattern = pattern,
                                              verbose = verbose,
                                              n_context_chrs = n_context_chrs)
                
                if (!run_dry && !identical(input, output)) {
                  
                  brio::write_file(text = paste0(output,
                                                 collapse = eol),
                                   path = path)
                }
                
                if (verbose) cli::cli_progress_done()
              })
  
  invisible(path)
}

#' Apply regular-expression-based text normalization to strings
#'
#' Applies a set of regular-expression-based text normalization rules to one or more strings. All performed replacements are displayed on the console by default
#' (`verbose = TRUE`).
#'
#' @param rules A data frame of regular expression `pattern`s and `replacement`s. `pattern` can optionally be a list column condensing multiple patterns to the
#'   same replacement rule. Patterns are interpreted as regular expressions as described in [stringi::stringi-search-regex()]. Replacements are interpreted
#'   as-is, except that references of the form `\1`, `\2`, etc. will be replaced with the contents of the respective matched group (created in patterns using
#'   `()`). Pattern-replacement pairs are processed in the order given, meaning that first listed pairs are applied before later listed ones.
#' @inheritParams str_replace_verbose
#'
#' @inherit str_replace_file return
#' @family string
#' @seealso Regular expression rules: [`regex_text_normalization`] [`regex_file_normalization`]
#' @export
#'
#' @examples
#' "This kind of “text normalization” is e.g. useful to apply before feeding stuff to ‘Pandoc’" |>
#'   yay::str_normalize()
str_normalize <- function(string,
                          rules = yay::regex_text_normalization,
                          n_context_chrs = 20L,
                          verbose = TRUE) {
  rules |>
    tidyr::unnest_longer(col = pattern) %$%
    magrittr::set_names(x = replacement,
                        value = pattern) |>
    str_replace_verbose(string = string,
                        n_context_chrs = n_context_chrs,
                        verbose = verbose)
}

#' Apply regular-expression-based text normalization to files
#'
#' Applies a set of regular-expression-based text normalization rules to one or more files. By default, changes are shown on the console only, without actually
#' modifying any files. Set `run_dry = FALSE` to apply the changes.
#'
#' @inheritParams str_normalize
#' @inheritParams str_replace_file
#'
#' @inherit str_replace_file return
#' @family string
#' @seealso Regular expression rules: [`regex_text_normalization`] [`regex_file_normalization`]
#' @export
#'
#' @examples
#' # Use POSIX-related file normalization rule(s) included in this package
#' temp_file <- tempfile()
#' download.file(url = paste0("https://raw.githubusercontent.com/RcppCore/Rcpp/72f0652b93f196d",
#'                            "64faab6b108cd02a197510a7b/inst/include/Rcpp/utils/tinyformat.h"),
#'               destfile = temp_file,
#'               quiet = TRUE,
#'               mode = "wb")
#'
#' yay::regex_file_normalization |>
#'   dplyr::filter(category == "posix") |>
#'   yay::str_normalize_file(path = temp_file)
str_normalize_file <- function(path,
                               rules = yay::regex_text_normalization,
                               run_dry = TRUE,
                               process_line_by_line = FALSE,
                               n_context_chrs = 20L,
                               verbose = TRUE) {
  rules |>
    tidyr::unnest_longer(col = pattern) %$%
    magrittr::set_names(x = replacement,
                        value = pattern) |>
    str_replace_file(path = path,
                     n_context_chrs = n_context_chrs,
                     process_line_by_line = process_line_by_line,
                     verbose = verbose,
                     run_dry = run_dry)
}

#' Write Zotero library to bibliography file
#'
#' Writes a file of the chosen bibliography `format` to `path` with items belonging to a Zotero user or group library, optionally limited to a specific
#' collection. Also writes a text file with suffix `.version` alongside `path` which stores the [Zotero library
#' version number](https://www.zotero.org/support/dev/web_api/v3/syncing) and allows for efficient caching.
#' 
#' If the Zotero library's content hasn't changed since the last export, nothing will be written to `path` and the performed API request will be significantly
#' faster. See section *Caching* in [zoterro::zotero_api()] for further details.
#'
#' @inheritParams zoterro::write_bib
#' @param lib_id Zotero user or group library identifier. An object as returned by [zoterro::zotero_user_id()] or [zoterro::zotero_group_id()].
#' @param path Path to write the bibliography file to. A character scalar.
#' @param force Whether or not to enforce overwriting the bibliography file regardless of actual changes to the Zotero library since the last export.
#' @param show_progress `r pkgsnip::param_lbl("show_progress")`
#'
#' @return `path`, invisibly.
#' @family zotero
#' @export
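#'
#' @examples
#' \dontrun{
#' # illustrative sketch only: the group ID and output path below are hypothetical placeholders
#' yay::zotero_write_bib(lib_id = zoterro::zotero_group_id(1234567),
#'                       path = "bibliography.json",
#'                       format = "csljson")}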
zotero_write_bib <- function(lib_id,
                             path,
                             collection_key = NULL,
                             incl_children = FALSE,
                             format = "csljson",
                             force = FALSE,
                             show_progress = TRUE) {

  checkmate::assert_flag(incl_children)
  checkmate::assert_flag(force)
  checkmate::assert_flag(show_progress)
  reason_pkg_required_fn <- pal::reason_pkg_required()
  rlang::check_installed("brio",
                         reason = reason_pkg_required_fn)
  rlang::check_installed("zoterro",
                         reason = reason_pkg_required_fn)
  
  if (show_progress) {
    pal::cli_progress_step_quick(msg = paste0("Updating bibliography file with ",
                                              ifelse(incl_children,
                                                     "all",
                                                     "top-level"),
                                              " items from the Zotero ",
                                              ifelse(is.null(collection_key),
                                                     "",
                                                     "collection {.val {collection_key}} in the "),
                                              "library with ID {.val {lib_id}}"))
  }

  bib_version_last <- NULL
  path_bib_version <- fs::path_ext_set(path,
                                       ext = "version")

  if (!force && fs::file_exists(path_bib_version)) {
    bib_version_last <-
      path_bib_version |>
      brio::read_file() |>
      stringr::str_trim() |>
      as.integer()
  }

  bib_version_current <-
    zoterro::write_bib(collection_key = collection_key,
                       incl_children = incl_children,
                       path = path,
                       format = format,
                       modified_since = bib_version_last,
                       user = lib_id) |>
    attr(which = "version")

  if (force || length(bib_version_current)) {
    brio::write_lines(text = bib_version_current,
                      path = path_bib_version)
  }

  invisible(path)
}