# == title
# Package dependency analysis
#
# == param
# -package Package name. The value can be 1. a CRAN/Bioconductor package, 2. an installed package, 3. the path of a local package, 4. the URL of a GitHub repository.
# -verbose Whether to show messages.
# -online If the value is ``TRUE``, the function directly uses the newest package database files from CRAN/Bioconductor. Note the default Bioconductor
# version is the one corresponding to the current R version. If you want to use a newer Bioconductor version, you need to set the global
# option ``pkgndep_opt$bioc_version``. If the value of ``online`` is ``FALSE``, a snapshot of the CRAN/Bioconductor package database
# is used. The version of the package database snapshot can be set via the global option ``pkgndep_opt$heaviness_db_version``.
# -load If the value is ``TRUE``, the ``package`` is loaded into a fresh R session and the function checks which upstream packages are also loaded
# into the R session. Note it is possible that an "Imports" package is not loaded or a "Suggests" package is loaded in the R session when loading ``package``.
# -parse_namespace Whether to also parse the NAMESPACE file. It is only used internally.
#
# == value
# A ``pkgndep`` object.
#
# == example
# \dontrun{
# x = pkgndep("ComplexHeatmap")
# }
# # The `x` variable generated by `pkgndep()` is already saved in this package.
# x = readRDS(system.file("extdata", "ComplexHeatmap_dep.rds", package = "pkgndep"))
# x
# dependency_heatmap(x)
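# # A sketch of offline analysis against a package database snapshot; the
# # version string below is illustrative, see ``pkgndep_opt$heaviness_db_version``.
# \dontrun{
# pkgndep_opt$heaviness_db_version = "2022-05-01"
# y = pkgndep("ComplexHeatmap", online = FALSE)
# }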
#
pkgndep = function(package, verbose = TRUE, online = TRUE, load = FALSE, parse_namespace = TRUE) {
# by default, it tests with the newest CRAN/Bioconductor
# the default Bioconductor is the one corresponding to current R version, but higher bioc version or devel version
# can be set via pkgndep_opt$bioc_version.
## check whether `package` is a CRAN/Bioc package, an installed package, a local path or a GitHub URL
load_pkg_db(verbose = verbose, online = online)
if(online) {
pkg_db = ENV$pkg_db
} else {
pkg_db = ENV$pkg_db_snapshot
}
if(is.null(pkg_db$dep_ind_hash[[package]])) {
if(pkg_installed(package)) {
package_name = package
} else if(file.exists(package)) {
package_name = basename(package)
if(load) message_wrap("`package` is specified as a path, reset `load` to `FALSE`.")
load = FALSE
} else if(grepl("^https://github.com/", package)) {
package_name = basename(package)
if(load) message_wrap("`package` is a GitHub URL, reset `load` to `FALSE`.")
load = FALSE
} else {
stop_wrap("`package` should be 1. a CRAN/Bioconductor package, 2. an installed package,\n3. a path of a local package, 4. URL of a GitHub repository.")
}
res = get_package_info_by_path(package)
version = res$version
repository = res$repository
depends = res$depends
imports = res$imports
linkingto = res$linkingto
suggests = res$suggests
enhances = res$enhances
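# make the fields mutually exclusive: a package is only kept in the first
# field (Depends > Imports > LinkingTo > Suggests > Enhances) that lists it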
imports = setdiff(imports, depends)
linkingto = setdiff(linkingto, c(imports, depends))
suggests = setdiff(suggests, c(depends, imports, linkingto))
enhances = setdiff(enhances, c(depends, imports, linkingto, suggests))
} else {
version = pkg_db$get_meta(package)[1, "Version"]
repository = pkg_db$get_meta(package)[1, "Repository"]
depends = pkg_db$package_dependencies(package, which = "Depends", simplify = TRUE)
imports = pkg_db$package_dependencies(package, which = "Imports", simplify = TRUE); imports = setdiff(imports, depends)
linkingto = pkg_db$package_dependencies(package, which = "LinkingTo", simplify = TRUE); linkingto = setdiff(linkingto, c(imports, depends))
suggests = pkg_db$package_dependencies(package, which = "Suggests", simplify = TRUE); suggests = setdiff(suggests, c(depends, imports, linkingto))
enhances = pkg_db$package_dependencies(package, which = "Enhances", simplify = TRUE); enhances = setdiff(enhances, c(depends, imports, linkingto, suggests))
package_name = package
}
dep_fields = c(rep("Depends", length(depends)),
rep("Imports", length(imports)),
rep("LinkingTo", length(linkingto)),
rep("Suggests", length(suggests)),
rep("Enhances", length(enhances)))
all_pkgs = c(depends, imports, linkingto, suggests, enhances)
if(pkg_exists(package_name) && load) {
qqcat("loading @{package_name} into a new R session to test number of namespaces loaded...")
if(!requireNamespace("callr", quietly = TRUE)) {
stop_wrap("You need to install 'callr' package.")
}
tb = callr::r(load_pkg_freshly, args = list(pkg = package_name), user_profile = FALSE)
cat(" done.\n")
if(is.null(tb)) {
tb = data.frame(pkg = character(0))
}
} else {
tb = data.frame(pkg = character(0))
}
if(length(all_pkgs) == 0) {
obj = list(
package = package_name,
version = version,
repository = repository,
dep_mat = matrix(nrow = 0, ncol = 0),
dep_fields = character(0),
which_required = logical(0),
which_required_but_not_loaded = logical(0),
which_suggested_but_also_loaded = logical(0),
n_by_strong = 0,
n_by_all = 0,
heaviness = numeric(0),
df_imports = matrix(nrow = 0, ncol = 3, dimnames = list(character(0), c("imports", "importMethods", "importClasses"))),
pkg_from_session_info = tb$pkg,
gini_index = 0
)
class(obj) = "pkgndep"
return(obj)
}
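# build a binary dependency matrix: rows are the packages directly listed in
# DESCRIPTION, columns are the union of their recursive strong dependencies
# (Depends + Imports + LinkingTo); dep_mat[i, j] = 1 means row package i
# recursively requires column package j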
all_pkgs_dep = lapply(all_pkgs, function(p) {
pkg_db$package_dependencies(p, which = "strong", recursive = TRUE, simplify = TRUE)
})
all_pkgs2 = unique(unlist(all_pkgs_dep))
rn = all_pkgs
cn = all_pkgs2
dep_mat = matrix(0, nrow = length(all_pkgs), ncol = length(all_pkgs2), dimnames = list(rn, cn))
for(i in seq_along(all_pkgs_dep)) {
if(length(all_pkgs_dep[[i]])) {
dep_mat[i, all_pkgs_dep[[i]]] = 1
}
}
df_imports = matrix(0, nrow = nrow(dep_mat), ncol = 3)
colnames(df_imports) = c("imports", "importMethods", "importClasses")
rownames(df_imports) = rn
if(parse_namespace) {
lt_imports2 = lt_imports = parse_imports_from_namespace(package, package_name, online = online)
if(!is.null(lt_imports)) {
if(length(lt_imports$n_imports)) {
lt_imports$n_imports = lt_imports$n_imports[intersect(names(lt_imports$n_imports), rn)]
df_imports[names(lt_imports$n_imports), "imports"] = lt_imports$n_imports
}
if(length(lt_imports$n_import_methods)) {
lt_imports$n_import_methods = lt_imports$n_import_methods[intersect(names(lt_imports$n_import_methods), rn)]
df_imports[names(lt_imports$n_import_methods), "importMethods"] = lt_imports$n_import_methods
}
if(length(lt_imports$n_import_classes)) {
lt_imports$n_import_classes = lt_imports$n_import_classes[intersect(names(lt_imports$n_import_classes), rn)]
df_imports[names(lt_imports$n_import_classes), "importClasses"] = lt_imports$n_import_classes
}
}
not_used = setdiff(rownames(df_imports), c(unlist(lapply(lt_imports, names)), rn[!is_field_required(dep_fields)]))
if(length(not_used)) {
df_imports[not_used, 1] = -Inf
}
} else {
df_imports[, 1] = NA
lt_imports2 = lt_imports = NULL
}
obj = list(
package = package_name,
version = version,
repository = repository,
dep_mat = dep_mat,
dep_fields = dep_fields,
which_required = is_field_required(dep_fields),
which_required_but_not_loaded = is.infinite(df_imports[, 1]) & !(rownames(df_imports) %in% tb$pkg),
which_suggested_but_also_loaded = rn %in% tb$pkg & !is_field_required(dep_fields),
n_by_strong = 0,
n_by_all = 0,
heaviness = 0,
df_imports = df_imports,
lt_imports = lt_imports2,
pkg_from_session_info = tb$pkg
)
class(obj) = "pkgndep"
obj$n_by_strong = length(required_dependency_packages(obj, FALSE))
obj$n_by_all = length(required_dependency_packages(obj, TRUE))
obj$heaviness = heaviness(obj)
row_order = order(factor(dep_fields, levels = c("Depends", "Imports", "LinkingTo", "Suggests", "Enhances")), rowSums(dep_mat))
# a crude trick to order all packages attached by required packages (Depends/Imports/LinkingTo) before those only attached by suggested packages
column_order_by = colSums(dep_mat)
l2 = colSums(dep_mat[is_field_required(dep_fields), ,drop = FALSE]) > 0
column_order_by[l2] = column_order_by[l2] + 10000
column_order = order(column_order_by, -apply(dep_mat[row_order, , drop = FALSE], 2, function(x) which(x > 0)[1]), decreasing = TRUE)
obj$dep_mat = obj$dep_mat[row_order, column_order, drop = FALSE]
obj$which_required = obj$which_required[row_order]
obj$which_required_but_not_loaded = obj$which_required_but_not_loaded[row_order]
obj$which_suggested_but_also_loaded = obj$which_suggested_but_also_loaded[row_order]
obj$heaviness = obj$heaviness[row_order]
obj$df_imports = obj$df_imports[row_order, , drop = FALSE]
obj$gini_index = gini_index(obj$heaviness[obj$which_required])
if(!online) {
obj$pkgndep_db_version = ENV$pkg_db_snapshot_version
}
return(obj)
}
# internally used; it is approximately 1.5 times faster than the full version of the function
pkgndep_simplified = function(package, pkg_db) {
depends = pkg_db$package_dependencies(package, which = "Depends", simplify = TRUE)
imports = pkg_db$package_dependencies(package, which = "Imports", simplify = TRUE); imports = setdiff(imports, depends)
linkingto = pkg_db$package_dependencies(package, which = "LinkingTo", simplify = TRUE); linkingto = setdiff(linkingto, c(imports, depends))
suggests = pkg_db$package_dependencies(package, which = "Suggests", simplify = TRUE); suggests = setdiff(suggests, c(depends, imports, linkingto))
enhances = pkg_db$package_dependencies(package, which = "Enhances", simplify = TRUE); enhances = setdiff(enhances, c(depends, imports, linkingto, suggests))
dep_fields = c(rep("Depends", length(depends)),
rep("Imports", length(imports)),
rep("LinkingTo", length(linkingto)),
rep("Suggests", length(suggests)),
rep("Enhances", length(enhances)))
all_pkgs = c(depends, imports, linkingto, suggests, enhances)
if(length(all_pkgs) == 0) {
obj = list(
package = package,
version = "0.0.0",
dep_mat = matrix(nrow = 0, ncol = 0),
dep_fields = character(0),
which_required = logical(0),
which_required_but_not_loaded = logical(0),
which_suggested_but_also_loaded = logical(0),
n_by_strong = 0,
n_by_all = 0,
heaviness = numeric(0),
df_imports = matrix(nrow = 0, ncol = 3, dimnames = list(character(0), c("imports", "importMethods", "importClasses")))
)
class(obj) = "pkgndep"
return(obj)
}
all_pkgs_dep = lapply(all_pkgs, function(p) {
pkg_db$package_dependencies(p, which = "strong", recursive = TRUE, simplify = TRUE)
})
all_pkgs2 = unique(unlist(all_pkgs_dep))
rn = all_pkgs
cn = all_pkgs2
dep_mat = matrix(0, nrow = length(all_pkgs), ncol = length(all_pkgs2), dimnames = list(rn, cn))
for(i in seq_along(all_pkgs_dep)) {
if(length(all_pkgs_dep[[i]])) {
dep_mat[i, all_pkgs_dep[[i]]] = 1
}
}
obj = list(
package = package,
version = "0.0.0",
dep_mat = dep_mat,
dep_fields = dep_fields,
which_required = is_field_required(dep_fields),
n_by_strong = 0,
n_by_all = 0,
heaviness = 0
)
obj$n_by_strong = length(required_dependency_packages(obj, FALSE))
obj$n_by_all = length(required_dependency_packages(obj, TRUE))
obj$heaviness = heaviness(obj)
# `which_required` is already set above; the following are placeholders
# that are not computed in the simplified version
obj$which_required_but_not_loaded = logical(nrow(dep_mat))
obj$which_suggested_but_also_loaded = logical(nrow(dep_mat))
df_imports = matrix(0, nrow = nrow(dep_mat), ncol = 3)
colnames(df_imports) = c("imports", "importMethods", "importClasses")
rownames(df_imports) = rn
obj$df_imports = df_imports
class(obj) = "pkgndep"
obj
}
# == title
# Print method
#
# == param
# -x An object from `pkgndep`.
# -... Other arguments.
#
# == value
# No value is returned.
#
# == example
# # See examples in `pkgndep()`.
#
print.pkgndep = function(x, ...) {
qqcat("'@{x$package}', version @{x$version}\n")
qqcat("- @{x$n_by_strong} package@{ifelse(x$n_by_strong > 1, 's', '')} @{ifelse(x$n_by_strong > 1, 'are', 'is')} required for installing '@{x$package}'.\n")
qqcat("- @{x$n_by_all} package@{ifelse(x$n_by_all > 1, 's', '')} @{ifelse(x$n_by_all > 1, 'are', 'is')} required if installing packages listed in all fields in DESCRIPTION.\n")
if(nrow(x$dep_mat) == 0) {
return(invisible(NULL))
}
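# heuristic: suggest moving a parent to 'Suggests' if it is heavy
# (heaviness >= 20) and only plain functions are imported from it,
# i.e. no S4 methods or classes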
l = x$heaviness >= 20 & x$df_imports[, "imports"] > 0 & x$df_imports[, "importMethods"] == 0 & x$df_imports[, "importClasses"] == 0
if(any(l)) {
cat("\n")
cat("Following adjustment could be performed:\n")
for(i in which(l)) {
ni = x$df_imports[i, 'imports']
nm = rownames(x$df_imports)[i]
qqcat("- Found @{ni} function@{ifelse(ni == 1, ' is', 's are')} imported from a heavy parent '@{nm}'. Moving '@{nm}''\n to 'Suggests' will reduce @{x$heaviness[i]} dependencies.\n")
}
l1 = x$which_required
l1[l] = FALSE
m = x$dep_mat
l2 = colSums(m[l1, , drop = FALSE]) > 0
n_by_strong2 = length(unique(c(unlist(dimnames(m[l1, l2, drop = FALSE])))))
qqcat("Moving all mentioned packages to 'Suggests' will reduce the dependency packages from @{x$n_by_strong} to @{n_by_strong2}.\n")
}
if(!is.null(x$pkgndep_db_version)) {
qqcat("Using pkgndep_db snapshot, version: @{x$pkgndep_db_version}\n")
}
}
# == title
# Required dependency packages
#
# == param
# -x An object from `pkgndep`.
# -all Whether to also count packages required by the packages listed in the "Suggests" and "Enhances" fields.
#
# == details
# The function returns all upstream packages. By default only packages brought in via the "Depends", "Imports" and "LinkingTo" fields are counted; set ``all = TRUE`` to also include those from "Suggests"/"Enhances".
#
# == value
# A vector of package names.
#
# == example
# \dontrun{
# x = readRDS(system.file("extdata", "ComplexHeatmap_dep.rds", package = "pkgndep"))
# required_dependency_packages(x)
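# # also count packages brought in by the "Suggests"/"Enhances" fields:
# required_dependency_packages(x, all = TRUE)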
# }
required_dependency_packages = function(x, all = FALSE) {
m = x$dep_mat
if(nrow(m) == 0) {
return(character(0))
}
if(all) {
unique(c(rownames(m), colnames(m)))
} else {
l1 = x$which_required
l2 = colSums(m[l1, , drop = FALSE]) > 0
unique(c(unlist(dimnames(m[l1, l2, drop = FALSE]))))
}
}
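# "strong" dependency fields are Depends, Imports and LinkingTo; packages
# listed there are required for installation, while Suggests/Enhances are optional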
is_field_required = function(x) {
if(inherits(x, "pkgndep")) {
is_field_required(x$dep_fields)
} else {
x %in% c("Depends", "Imports", "LinkingTo")
}
}
pkg_exists = function(x) {
system.file(package = x) != ""
}
pkg_installed = pkg_exists
# x can be 1. an installed package, 2. a path (local or github), 3. a cran/bioc package
parse_imports_from_namespace = function(x, pkg = basename(x), online = TRUE) {
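# parsed NAMESPACE data is cached in ENV$ns_data_list so the NAMESPACE of a
# package is only parsed (or downloaded) once per session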
if(pkg %in% names(ENV$ns_data_list)) {
ns_data = ENV$ns_data_list[[pkg]]
} else if(!online) {
bioc_version = ALL_BIOC_RELEASES$Release[ALL_BIOC_RELEASES$Date == pkgndep_opt$heaviness_db_version]
lt_desc = load_from_heaviness_db(qq("pkg_description_@{bioc_version}.rds"))
lt_ns = load_from_heaviness_db(qq("pkg_namespace_@{bioc_version}.rds"))
if(ENV$pkg_db_snapshot$meta[x, "Repository"] == "CRAN") {
nm = paste0(x, "_", ENV$pkg_db_snapshot$meta[x, "Version"])
x1 = lt_ns[[nm]]
x2 = lt_desc[[nm]]
} else {
nm = paste0(bioc_version, "/", x, "_", ENV$pkg_db_snapshot$meta[x, "Version"])
x1 = lt_ns[[nm]]
x2 = lt_desc[[nm]]
}
if(!is.null(x1) && !is.null(x2)) {
tmpfile1 = tempfile()
tmpfile2 = tempfile()
writeLines(x1, tmpfile1)
writeLines(x2, tmpfile2)
on.exit(file.remove(c(tmpfile1, tmpfile2)))
oe = try(ns_data <- parseNamespaceFile_cp(x, tmpfile1, tmpfile2), silent = TRUE)
if(inherits(oe, "try-error")) {
ns_data = NULL
}
} else {
ns_data = NULL
}
ENV$ns_data_list[[pkg]] = ns_data
} else if(pkg_installed(x)) {
lib_dir = dirname(system.file(package = x))
if(packageHasNamespace(x, lib_dir)) {
ns_data = parseNamespaceFile(x, lib_dir)
} else {
ns_data = NULL
}
ENV$ns_data_list[[pkg]] = ns_data
} else if(grepl("^https://github.com/", x)) {
x = gsub("https://github.com/", "https://raw.githubusercontent.com/", x)
namespace_link = paste0(x, "/master/NAMESPACE")
description_link = paste0(x, "/master/DESCRIPTION")
tmpfile1 = tempfile()
tmpfile2 = tempfile()
oe1 = try(download.file(namespace_link, tmpfile1, quiet = TRUE), silent = TRUE)
oe2 = try(download.file(description_link, tmpfile2, quiet = TRUE), silent = TRUE)
on.exit({
if(file.exists(tmpfile1)) file.remove(tmpfile1)
if(file.exists(tmpfile2)) file.remove(tmpfile2)
})
if(inherits(oe1, "try-error") || inherits(oe2, "try-error")) {
ns_data = NULL
} else {
ns_data = parseNamespaceFile_cp(x, tmpfile1, tmpfile2)
}
ENV$ns_data_list[[pkg]] = ns_data
} else if(file.exists(x)) {
namespace_link = paste0(x, "/NAMESPACE")
description_link = paste0(x, "/DESCRIPTION")
if(file.exists(namespace_link) && file.exists(description_link)) {
ns_data = parseNamespaceFile_cp(x, namespace_link, description_link)
} else {
ns_data = NULL
}
ENV$ns_data_list[[pkg]] = ns_data
} else {
load_pkg_db()
if("Repository" %in% colnames(ENV$pkg_db$meta)) {
if(any(ENV$pkg_db$meta[x, "Repository"] %in% c("CRAN", "Bioconductor"))) {
ns_data = NULL
} else {
if(grepl("(bioc|books|annotation|experiment|workflow)", ENV$pkg_db$meta[x, "Repository"])) {
namespace_link = paste0("https://code.bioconductor.org/browse/", x, "/raw/master/NAMESPACE")
description_link = paste0("https://code.bioconductor.org/browse/", x, "/raw/master/DESCRIPTION")
} else {
namespace_link = paste0("https://raw.githubusercontent.com/cran/", x, "/master/NAMESPACE")
description_link = paste0("https://raw.githubusercontent.com/cran/", x, "/master/DESCRIPTION")
}
tmpfile1 = tempfile()
tmpfile2 = tempfile()
oe1 = try(download.file(namespace_link, tmpfile1, quiet = TRUE), silent = TRUE)
oe2 = try(download.file(description_link, tmpfile2, quiet = TRUE), silent = TRUE)
on.exit({
if(file.exists(tmpfile1)) file.remove(tmpfile1)
if(file.exists(tmpfile2)) file.remove(tmpfile2)
})
if(inherits(oe1, "try-error") || inherits(oe2, "try-error")) {
ns_data = NULL
} else {
ns_data = parseNamespaceFile_cp(x, tmpfile1, tmpfile2)
}
}
} else {
ns_data = NULL
}
ENV$ns_data_list[[pkg]] = ns_data
}
if(is.null(ns_data)) {
return(NULL)
}
lt = ns_data$imports
if(length(lt)) {
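# lt1: whole-package imports from import(pkg); lt2: selective imports
# from importFrom(pkg, ...), stored as list(pkg, functions)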
lt1 = lt[!sapply(lt, is.list)]
lt2 = lt[sapply(lt, is.list)]
n_imports = tapply(seq_along(lt2), unlist(sapply(lt2, function(x) x[1])),
function(x) unique(unlist(lapply(lt2[x], function(y) y[2]))))
import_fun_list = n_imports
n_imports = sapply(n_imports, length)
if(length(n_imports)) {
sign = tapply(seq_along(lt2), unlist(sapply(lt2, function(x) x[1])),
function(x) unique(unlist(lapply(lt2[x], function(y) names(y)[2]))))
sign = sapply(sign, function(x) ifelse(identical(x, "except"), -1, 1))
n_imports = n_imports * sign
} else {
n_imports = NULL
}
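# whole-package imports via import() are recorded with a count of 0;
# a negative count (set above) marks import(pkg, except = ...)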
n_imports = c(n_imports, structure(rep(0, length(lt1)), names = unlist(lt1)))
attr(n_imports, "fun_list") = import_fun_list
} else {
n_imports = NULL
}
lt = ns_data$importMethods
if(length(lt)) {
lt1 = lt[!sapply(lt, is.list)]
lt2 = lt[sapply(lt, is.list)]
n_import_methods = tapply(seq_along(lt2), unlist(sapply(lt2, function(x) x[1])),
function(x) unique(unlist(lapply(lt2[x], function(y) y[2]))))
import_method_list = n_import_methods
n_import_methods = sapply(n_import_methods, length)
if(length(n_import_methods) == 0) n_import_methods = NULL
n_import_methods = c(n_import_methods, structure(rep(0, length(lt1)), names = unlist(lt1)))
attr(n_import_methods, "method_list") = import_method_list
} else {
n_import_methods = NULL
}
lt = ns_data$importClasses
if(length(lt)) {
lt1 = lt[!sapply(lt, is.list)]
lt2 = lt[sapply(lt, is.list)]
n_import_classes = tapply(seq_along(lt2), unlist(sapply(lt2, function(x) x[1])),
function(x) unique(unlist(lapply(lt2[x], function(y) y[2]))))
import_class_list = n_import_classes
n_import_classes = sapply(n_import_classes, length)
if(length(n_import_classes) == 0) n_import_classes = NULL
n_import_classes = c(n_import_classes, structure(rep(0, length(lt1)), names = unlist(lt1)))
attr(n_import_classes, "class_list") = import_class_list
} else {
n_import_classes = NULL
}
list(n_imports = n_imports, n_import_methods = n_import_methods, n_import_classes = n_import_classes)
}
# modified from base::parseNamespaceFile
parseNamespaceFile_cp <- function(package, nsFile, descfile, mustExist = TRUE) {
namespaceFilePath <- function(package, package.lib)
file.path(package.lib, package, "NAMESPACE")
## These two functions are essentially local to the parsing of
## the namespace file and don't need to be made available to
## users. These manipulate the data from useDynLib() directives
## for the same DLL to determine how to map the symbols to R
## variables.
nativeRoutineMap <-
## Creates a new NativeRoutineMap.
function(useRegistration, symbolNames, fixes) {
proto <- list(useRegistration = FALSE,
symbolNames = character())
class(proto) <- "NativeRoutineMap"
mergeNativeRoutineMaps(proto, useRegistration, symbolNames, fixes)
}
mergeNativeRoutineMaps <-
## Merges new settings into a NativeRoutineMap
function(map, useRegistration, symbolNames, fixes) {
if(!useRegistration)
names(symbolNames) <-
paste0(fixes[1L], names(symbolNames), fixes[2L])
else
map$registrationFixes <- fixes
map$useRegistration <- map$useRegistration || useRegistration
map$symbolNames <- c(map$symbolNames, symbolNames)
map
}
enc <- if (file.exists(descfile)) {
read.dcf(file = descfile, "Encoding")[1L]
} else NA_character_
if (file.exists(nsFile))
directives <- if (!is.na(enc) &&
! Sys.getlocale("LC_CTYPE") %in% c("C", "POSIX")) {
lines <- readLines(nsFile, warn = FALSE)
tmp <- iconv(lines, from = enc, to = "")
bad <- which(is.na(tmp))
## do not report purely comment lines,
comm <- grep("^[[:space:]]*#", lines[bad],
invert = TRUE, useBytes = TRUE)
if(length(bad[comm]))
stop("unable to re-encode some lines in NAMESPACE file")
tmp <- iconv(lines, from = enc, to = "", sub = "byte")
con <- textConnection(tmp)
on.exit(close(con))
parse(con, keep.source = FALSE, srcfile = NULL)
} else parse(nsFile, keep.source = FALSE, srcfile = NULL)
else if (mustExist)
stop(gettextf("package %s has no 'NAMESPACE' file", sQuote(package)),
domain = NA)
else directives <- NULL
exports <- character()
exportPatterns <- character()
exportClasses <- character()
exportClassPatterns <- character()
exportMethods <- character()
imports <- list()
importMethods <- list()
importClasses <- list()
dynlibs <- character()
nS3methods <- 1000L
S3methods <- matrix(NA_character_, nS3methods, 4L)
nativeRoutines <- list()
nS3 <- 0L
parseDirective <- function(e) {
## trying to get more helpful error message:
asChar <- function(cc) {
r <- as.character(cc)
if(any(r == ""))
stop(gettextf("empty name in directive '%s' in 'NAMESPACE' file",
as.character(e[[1L]])),
domain = NA)
r
}
evalToChar <- function(cc) {
vars <- all.vars(cc)
names(vars) <- vars
as.character(eval(eval(call("substitute", cc, as.list(vars))),
.GlobalEnv))
}
switch(as.character(e[[1L]]),
"if" = {
oe = try(lll <- eval(e[[2L]], .GlobalEnv), silent = TRUE)
if(inherits(oe, "try-error")) {
lll = FALSE
}
if (lll) {
parseDirective(e[[3L]])
} else if (length(e) == 4L) {
parseDirective(e[[4L]])
}
},
"{" = for (ee in as.list(e[-1L])) parseDirective(ee),
"=" =,
"<-" = {
parseDirective(e[[3L]])
if(as.character(e[[3L]][[1L]]) == "useDynLib")
names(dynlibs)[length(dynlibs)] <<- asChar(e[[2L]])
},
export = {
exp <- e[-1L]
exp <- structure(asChar(exp), names = names(exp))
exports <<- c(exports, exp)
},
exportPattern = {
pat <- asChar(e[-1L])
exportPatterns <<- c(pat, exportPatterns)
},
exportClassPattern = {
pat <- asChar(e[-1L])
exportClassPatterns <<- c(pat, exportClassPatterns)
},
exportClass = , exportClasses = {
exportClasses <<- c(asChar(e[-1L]), exportClasses)
},
exportMethods = {
exportMethods <<- c(asChar(e[-1L]), exportMethods)
},
import = {
except <- e$except
e$except <- NULL
pkgs <- as.list(asChar(e[-1L]))
if (!is.null(except)) {
pkgs <- lapply(pkgs, list, except=evalToChar(except))
}
imports <<- c(imports, pkgs)
},
importFrom = {
imp <- e[-1L]
ivars <- imp[-1L]
inames <- names(ivars)
imp <- list(asChar(imp[1L]),
structure(asChar(ivars), names = inames))
imports <<- c(imports, list(imp))
},
importClassFrom = , importClassesFrom = {
imp <- asChar(e[-1L])
pkg <- imp[[1L]]
impClasses <- imp[-1L]
imp <- list(asChar(pkg), asChar(impClasses))
importClasses <<- c(importClasses, list(imp))
},
importMethodsFrom = {
imp <- asChar(e[-1L])
pkg <- imp[[1L]]
impMethods <- imp[-1L]
imp <- list(asChar(pkg), asChar(impMethods))
importMethods <<- c(importMethods, list(imp))
},
useDynLib = {
## This attempts to process as much of the
## information as possible when NAMESPACE is parsed
## rather than when it is loaded and creates
## NativeRoutineMap objects to handle the mapping
## of symbols to R variable names.
## The name is the second element after useDynLib
dyl <- as.character(e[2L])
## We ensure uniqueness at the end.
dynlibs <<-
structure(c(dynlibs, dyl),
names = c(names(dynlibs),
ifelse(!is.null(names(e)) &&
nzchar(names(e)[2L]), names(e)[2L], "" )))
if (length(e) > 2L) {
## Author has specified some mappings for the symbols
symNames <- as.character(e[-c(1L, 2L)])
names(symNames) <- names(e[-c(1, 2)])
## If there are no names, then use the names of
## the symbols themselves.
if (length(names(symNames)) == 0L)
names(symNames) <- symNames
else if (any(w <- names(symNames) == "")) {
names(symNames)[w] <- symNames[w]
}
## For each DLL, we build up a list the (R
## variable name, symbol name) mappings. We do
## this in a NativeRoutineMap object and we
## merge potentially multiple useDynLib()
## directives for the same DLL into a single
## map. Then we have separate NativeRoutineMap
## for each different DLL. E.g. if we have
## useDynLib(foo, a, b, c) and useDynLib(bar,
## a, x, y) we would maintain and resolve them
## separately.
dup <- duplicated(names(symNames))
if (any(dup))
warning(gettextf("duplicate symbol names %s in useDynLib(\"%s\")",
paste(sQuote(names(symNames)[dup]),
collapse = ", "), dyl),
domain = NA, call. = FALSE)
symNames <- symNames[!dup]
## Deal with any prefix/suffix pair.
fixes <- c("", "")
idx <- match(".fixes", names(symNames))
if(!is.na(idx)) {
## Take .fixes and treat it as a call,
## e.g. c("pre", "post") or a regular name
## as the prefix.
if(nzchar(symNames[idx])) {
e <- parse(text = symNames[idx],
keep.source = FALSE,
srcfile = NULL)[[1L]]
if(is.call(e))
val <- eval(e, .GlobalEnv)
else
val <- as.character(e)
if(length(val))
fixes[seq_along(val)] <- val
}
symNames <- symNames[-idx]
}
## Deal with a .registration entry. It must be
## .registration = value and value will be coerced
## to a logical.
useRegistration <- FALSE
idx <- match(".registration", names(symNames))
if(!is.na(idx)) {
useRegistration <- as.logical(symNames[idx])
symNames <- symNames[-idx]
}
## Now merge into the NativeRoutineMap.
nativeRoutines[[ dyl ]] <<-
if(dyl %in% names(nativeRoutines))
mergeNativeRoutineMaps(nativeRoutines[[ dyl ]],
useRegistration,
symNames, fixes)
else
nativeRoutineMap(useRegistration, symNames,
fixes)
}
},
S3method = {
spec <- e[-1L]
if (length(spec) != 2L && length(spec) != 3L)
stop(gettextf("bad 'S3method' directive: %s",
deparse(e)),
call. = FALSE, domain = NA)
nS3 <<- nS3 + 1L
if(nS3 > nS3methods) {
old <- S3methods
nold <- nS3methods
nS3methods <<- nS3methods * 2L
new <- matrix(NA_character_, nS3methods, 4L)
ind <- seq_len(nold)
for (i in 1:4) new[ind, i] <- old[ind, i]
S3methods <<- new
rm(old, new)
}
if(is.call(gen <- spec[[1L]]) &&
identical(as.character(gen[[1L]]), "::")) {
pkg <- as.character(gen[[2L]])[1L]
gen <- as.character(gen[[3L]])[1L]
S3methods[nS3, c(seq_along(spec), 4L)] <<-
c(gen, asChar(spec[-1L]), pkg)
} else
S3methods[nS3, seq_along(spec)] <<- asChar(spec)
},
stop(gettextf("unknown namespace directive: %s", deparse(e, nlines=1L)),
call. = FALSE, domain = NA)
)
}
for (e in directives)
parseDirective(e)
## need to preserve the names on dynlibs, so unique() is not appropriate.
dynlibs <- dynlibs[!duplicated(dynlibs)]
list(imports = imports, exports = exports,
exportPatterns = unique(exportPatterns),
importClasses = importClasses, importMethods = importMethods,
exportClasses = unique(exportClasses),
exportMethods = unique(exportMethods),
exportClassPatterns = unique(exportClassPatterns),
dynlibs = dynlibs, nativeRoutines = nativeRoutines,
S3methods = unique(S3methods[seq_len(nS3), , drop = FALSE]) )
} ## end{parseNamespaceFile_cp}
get_package_info_by_path = function(path) {
if(grepl("^https://github.com/", path)) {
dcf = tempfile()
path = gsub("https://github.com/", "https://raw.githubusercontent.com/", path)
download.file(paste0(path, "/master/DESCRIPTION"), dcf, quiet = TRUE)
on.exit(file.remove(dcf))
} else if(pkg_installed(path)) {
dcf = system.file("DESCRIPTION", package = path)
} else {
dcf = paste0(path, "/", "DESCRIPTION")
}
de = read.dcf(dcf)
if("Depends" %in% colnames(de)) {
depends = de[1, "Depends"]
depends = gsub("\\(.*?\\)", "", depends)
depends = gsub("\\s", "", depends)
depends = strsplit(depends, ",")[[1]]
depends = setdiff(depends, "R")
} else {
depends = character(0)
}
if("Imports" %in% colnames(de)) {
imports = de[1, "Imports"]
imports = gsub("\\(.*?\\)", "", imports)
imports = gsub("\\s", "", imports)
imports = strsplit(imports, ",")[[1]]
} else {
imports = character(0)
}
if("Suggests" %in% colnames(de)) {
suggests = de[1, "Suggests"]
suggests = gsub("\\(.*?\\)", "", suggests)
suggests = gsub("\\s", "", suggests)
suggests = strsplit(suggests, ",")[[1]]
} else {
suggests = character(0)
}
if("LinkingTo" %in% colnames(de)) {
linkingto = de[1, "LinkingTo"]
linkingto = gsub("\\(.*?\\)", "", linkingto)
linkingto = gsub("\\s", "", linkingto)
linkingto = strsplit(linkingto, ",")[[1]]
} else {
linkingto = character(0)
}
if("Enhances" %in% colnames(de)) {
enhances = de[1, "Enhances"]
enhances = gsub("\\(.*?\\)", "", enhances)
enhances = gsub("\\s", "", enhances)
enhances = strsplit(enhances, ",")[[1]]
} else {
enhances = character(0)
}
version = de[1, "Version"]
if(grepl("^https://", path)) {
repository = "GitHub"
} else {
repository = "local"
}
list(depends = depends, imports = imports, linkingto = linkingto, suggests = suggests, enhances = enhances,
version = version, repository = repository)
}
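# A sketch of the returned structure (field values are illustrative):
#   info = get_package_info_by_path("https://github.com/jokergoo/circlize")
#   info$version     # e.g. "0.4.15"
#   info$repository  # "GitHub" for a URL, "local" for a path
#   info$imports     # character vector of packages listed in Imports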
# == title
# Get functions of a package that are imported into its child packages
#
# == param
# -package Package name.
#
# == details
# The information is based on pre-computed results for a specific CRAN/Bioconductor snapshot. See the global option ``pkgndep_opt$heaviness_db_version`` for how to set the version of the snapshot.
#
# == value
# It returns a list of character vectors, one for each child package, containing the names of the functions that are imported from ``package``.
#
# == example
# \dontrun{
# get_all_functions_imported_to_children("circlize")
# }
get_all_functions_imported_to_children = function(package) {
load_all_pkg_dep()
lt = ENV$all_pkg_dep
children = unique(child_dependency(package, fields = c("Depends", "LinkingTo", "Imports"), online = FALSE)[, 2])
fl = list()
for(p in children) {
x = lt[[p]]
if(length(x$lt_imports$n_imports) > 0) {
imported_functions = attr(x$lt_imports$n_imports, "fun_list")
if(package %in% names(imported_functions)) {
fl[[p]] = imported_functions[[package]]
}
}
}
fl
}