# Autogenerated from contents in the package's R directory, do not edit!
# Run make to update.

function(...) {

## This is the code of the package, put in here by brew

# Contents of R/bioc-standalone.R
#' Tools for Bioconductor versions and repositories
#'
#' \section{API:}
#'
#' ```
#' get_yaml_config(forget = FALSE)
#' set_yaml_config(text)
#'
#' get_release_version(forget = FALSE)
#' get_devel_version(forget = FALSE)
#'
#' get_version_map(forget = FALSE)
#' get_matching_bioc_version(r_version = getRversion(), forget = FALSE)
#' get_bioc_version(r_version = getRversion(), forget = FALSE)
#'
#' get_repos(bioc_version = "auto", forget = FALSE)
#' ```
#'
#' * `forget`: Whether to forget the cached version of the Bioconductor
#'   config YAML file and download it again.
#' * `text`: character vector (linewise) or scalar, the contents of the
#'   `config.yaml` file, if obtained externally, to be used as a cached
#'   version in the future.
#' * `r_version`: R version string, or `package_version` object.
#' * `bioc_version`: Bioc version string or `package_version` object,
#'   or the string `"auto"` to use the one matching the current R version.
#'
#' `get_yaml_config()` returns the raw contents of the `config.yaml` file,
#' linewise. It is typically not needed, except if one needs information
#' that cannot be surfaced via the other API functions.
#'
#' `set_yaml_config()` can be used to _set_ the contents of the
#' `config.yaml` file. This is useful if one has already obtained it
#' externally, but wants to use the obtained file with the rest of the
#' bioc standalone code.
#'
#' `get_release_version()` returns the version of the current Bioconductor
#' release.
#'
#' `get_devel_version()` returns the version of the current development
#' version of Bioconductor.
#'
#' `get_version_map()` returns the mapping between R versions and
#' Bioconductor versions. Note that this is not a one-to-one mapping.
#' E.g. currently R `3.6.x` maps to both Bioc `3.9` (Bioc release) and
#' `3.10` (Bioc devel); and also Bioc `3.10` maps to both R `3.6.x` and
#' R `3.7.x` (current R-devel). It returns a data frame with three columns:
#' `bioc_version`, `r_version` and `bioc_status`. The first two columns
#' contain `package_version` objects, the third is a factor with levels:
#' `out-of-date`, `release`, `devel`, `future`.
#'
#' `get_matching_bioc_version()` returns the matching Bioc version for an
#' R version. If the R version matches both a released and a devel
#' version, then the released version is chosen.
#'
#' `get_bioc_version()` returns the matching Bioc version for the
#' specified R version. It does observe the `R_BIOC_VERSION` environment
#' variable, which can be used to force a Bioconductor version. If this is
#' not set, it just calls `get_matching_bioc_version()`.
#'
#' `get_repos()` returns the Bioc repositories of the specified Bioc
#' version. It defaults to the Bioc version that matches the calling R
#' version. It returns a named character vector.
#'
#' \section{NEWS:}
#' * 2019-05-30 First version in remotes.
#' * 2020-03-22 get_matching_bioc_version() is now correct if the current
#'   R version is not in the builtin mapping.
#' * 2020-11-21 Update internal map for 3.12.
#' * 2023-05-08 Add 'books' repo.
#'
#' @name bioconductor
#' @keywords internal
#' @noRd
NULL
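
## Usage sketch (added for illustration only, wrapped in `if (FALSE)` so it is
## never run when this file is sourced). It shows how the `bioconductor` API
## object defined below might be used; the versions in the comments are
## assumptions, not guaranteed values.
if (FALSE) {
  bioconductor$get_release_version()   # e.g. "3.18", the current Bioc release
  bioconductor$get_devel_version()     # the current Bioc devel version
  bioconductor$get_bioc_version()      # Bioc version matching this R version
  bioconductor$get_repos()             # named character vector of repo URLs
}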


bioconductor <- local({

  # -------------------------------------------------------------------
  # Configuration that does not change often

  config_url <- function() {
    Sys.getenv(
      "R_BIOC_CONFIG_URL",
      "https://bioconductor.org/config.yaml"
    )
  }

  builtin_map <- list(
    "2.1" = package_version("1.6"),
    "2.2" = package_version("1.7"),
    "2.3" = package_version("1.8"),
    "2.4" = package_version("1.9"),
    "2.5" = package_version("2.0"),
    "2.6" = package_version("2.1"),
    "2.7" = package_version("2.2"),
    "2.8" = package_version("2.3"),
    "2.9" = package_version("2.4"),
    "2.10" = package_version("2.5"),
    "2.11" = package_version("2.6"),
    "2.12" = package_version("2.7"),
    "2.13" = package_version("2.8"),
    "2.14" = package_version("2.9"),
    "2.15" = package_version("2.11"),
    "3.0" = package_version("2.13"),
    "3.1" = package_version("3.0"),
    "3.2" = package_version("3.2"),
    "3.3" = package_version("3.4"),
    "3.4" = package_version("3.6"),
    "3.5" = package_version("3.8"),
    "3.6" = package_version("3.10"),
    "4.0" = package_version("3.12"),
    "4.1" = package_version("3.14"),
    "4.2" = package_version("3.16"),
    "4.3" = package_version("3.17"),
    "4.4" = package_version("3.18")
  )

  # -------------------------------------------------------------------
  # Cache

  devel_version <- NULL
  release_version <- NULL
  version_map <- NULL
  yaml_config <- NULL

  clear_cache <- function() {
    devel_version <<- NULL
    release_version <<- NULL
    version_map <<- NULL
    yaml_config <<- NULL
  }

  # -------------------------------------------------------------------
  # API

  get_yaml_config <- function(forget = FALSE) {
    if (forget || is.null(yaml_config)) {
      new <- tryCatch(read_url(config_url()), error = function(x) x)
      if (inherits(new, "error")) {
        http_url <- sub("^https", "http", config_url())
        new <- tryCatch(read_url(http_url), error = function(x) x)
      }
      if (inherits(new, "error")) stop(new)
      yaml_config <<- new
    }

    yaml_config
  }

  set_yaml_config <- function(text) {
    if (length(text) == 1) text <- strsplit(text, "\n", fixed = TRUE)[[1]]
    yaml_config <<- text
  }

  get_release_version <- function(forget = FALSE) {
    if (forget || is.null(release_version)) {
      yaml <- get_yaml_config(forget)
      pattern <- "^release_version: \"(.*)\""
      release_version <<- package_version(
        sub(pattern, "\\1", grep(pattern, yaml, value=TRUE))
      )
    }
    release_version
  }

  get_devel_version <- function(forget = FALSE) {
    if (forget || is.null(devel_version)) {
      yaml <- get_yaml_config(forget)
      pattern <- "^devel_version: \"(.*)\""
      devel_version <<- package_version(
        sub(pattern, "\\1", grep(pattern, yaml, value=TRUE))
      )
    }
    devel_version
  }

  get_version_map <- function(forget = FALSE) {
    if (forget || is.null(version_map)) {
      txt <- get_yaml_config(forget)
      grps <- grep("^[^[:blank:]]", txt)
      start <- match(grep("r_ver_for_bioc_ver", txt), grps)
      map <- txt[seq(grps[start] + 1, grps[start + 1] - 1)]
      map <- trimws(gsub("\"", "", sub(" #.*", "", map)))
      pattern <- "(.*): (.*)"
      bioc <- package_version(sub(pattern, "\\1", map))
      r <- package_version(sub(pattern, "\\2", map))
      status <- rep("out-of-date", length(bioc))
      release <- get_release_version()
      devel <- get_devel_version()
      status[bioc == release] <- "release"
      status[bioc == devel] <- "devel"

      # append final version for 'devel' R
      bioc <- c(
        bioc, max(bioc)
      )
      r <- c(r, package_version(paste(unlist(max(r)) + 0:1, collapse = ".")))
      status <- c(status, "future")

      version_map <<- rbind(
        .VERSION_MAP_SENTINEL,
        data.frame(
          bioc_version = bioc, r_version = r,
          bioc_status = factor(
            status,
            levels = c("out-of-date", "release", "devel", "future")
          )
        )
      )
    }
    version_map
  }

  get_matching_bioc_version <- function(r_version = getRversion(),
                                        forget = FALSE) {

    minor <- as.character(get_minor_r_version(r_version))
    if (minor %in% names(builtin_map)) return(builtin_map[[minor]])

    # If we are not in the map, then we need to look this up in
    # YAML data. It is possible that the current R version matches multiple
    # Bioc versions. Then we choose the latest released version. If none
    # of them were released (e.g. they are 'devel' and 'future'), then
    # we'll use the 'devel' version.

    map <- get_version_map(forget = forget)
    mine <- which(package_version(minor) == map$r_version)
    if (length(mine) == 0) {
      mine <- NA
    } else if (length(mine) > 1) {
      if ("release" %in% map$bioc_status[mine]) {
        mine <- mine["release" == map$bioc_status[mine]]
      } else if ("devel" %in% map$bioc_status[mine]) {
        mine <- mine["devel" == map$bioc_status[mine]]
      } else {
        mine <- rev(mine)[1]
      }
    }
    if (!is.na(mine)) return(map$bioc_version[mine])

    # If it is not even in the YAML, then it must be some very old
    # or very new version. If old, we fail. If new, we assume bioc-devel.
    if (package_version(minor) < "2.1") {
      stop("R version too old, cannot run Bioconductor")
    }

    get_devel_version()
  }

  get_bioc_version <- function(r_version = getRversion(),
                               forget = FALSE) {
    if (nzchar(v <- Sys.getenv("R_BIOC_VERSION", ""))) {
      return(package_version(v))
    }
    get_matching_bioc_version(r_version, forget = forget)
  }

  get_repos <- function(bioc_version = "auto", forget = FALSE) {
    if (identical(bioc_version, "auto")) {
      bioc_version <- get_bioc_version(getRversion(), forget)
    } else {
      bioc_version <- package_version(bioc_version)
    }
    mirror <- Sys.getenv("R_BIOC_MIRROR", "https://bioconductor.org")
    mirror <- getOption("BioC_mirror", mirror)
    repos <- c(
      BioCsoft = "{mirror}/packages/{bv}/bioc",
      BioCann = "{mirror}/packages/{bv}/data/annotation",
      BioCexp = "{mirror}/packages/{bv}/data/experiment",
      BioCworkflows =
        if (bioc_version >= "3.7") "{mirror}/packages/{bv}/workflows",
      BioCextra =
        if (bioc_version <= "3.5") "{mirror}/packages/{bv}/extra",
      BioCbooks =
        if (bioc_version >= "3.12") "{mirror}/packages/{bv}/books"
    )

    ## It seems that if a repo is not available yet for bioc-devel,
    ## they redirect to the bioc-release version, so we do not need to
    ## parse devel_repos from the config.yaml file

    sub("{mirror}", mirror, fixed = TRUE,
        sub("{bv}", bioc_version, repos, fixed = TRUE))
  }

  # -------------------------------------------------------------------
  # Internals

  read_url <- function(url) {
    tmp <- tempfile()
    on.exit(unlink(tmp), add = TRUE)
    suppressWarnings(download.file(url, tmp, quiet = TRUE))
    if (!file.exists(tmp) || file.info(tmp)$size == 0) {
      stop("Failed to download `", url, "`")
    }
    readLines(tmp, warn = FALSE)
  }

  .VERSION_SENTINEL <- local({
    version <- package_version(character())
    class(version) <- c("unknown_version", class(version))
    version
  })

  .VERSION_MAP_SENTINEL <- data.frame(
    bioc_version = .VERSION_SENTINEL,
    r_version = .VERSION_SENTINEL,
    bioc_status = factor(
      factor(),
      levels = c("out-of-date", "release", "devel", "future")
    )
  )

  get_minor_r_version <- function (x) {
    package_version(x)[,1:2]
  }

  # -------------------------------------------------------------------

  structure(
    list(
      .internal = environment(),
      get_yaml_config = get_yaml_config,
      set_yaml_config = set_yaml_config,
      get_release_version = get_release_version,
      get_devel_version = get_devel_version,
      get_version_map = get_version_map,
      get_matching_bioc_version = get_matching_bioc_version,
      get_bioc_version = get_bioc_version,
      get_repos = get_repos
    ),
    class = c("standalone_bioc", "standalone"))
})
# Contents of R/bioc.R


#' @export
#' @rdname bioc_install_repos
#' @keywords internal
#' @examples
#' bioc_version()
#' bioc_version("3.4")

bioc_version <- function(r_ver = getRversion()) {
  bioconductor$get_bioc_version(r_ver)
}

#' Tools for Bioconductor repositories
#'
#' `bioc_version()` returns the Bioconductor version for the current or the
#' specified R version.
#'
#' `bioc_install_repos()` deduces the URLs of the Bioconductor repositories.
#'
#' @details
#' Both functions observe the `R_BIOC_VERSION` environment variable, which
#' can be set to force a Bioconductor version. If this is set, then the
#' `r_ver` and `bioc_ver` arguments are ignored.
#'
#' `bioc_install_repos()` observes the `R_BIOC_MIRROR` environment variable
#' and also the `BioC_mirror` option, which can be set to the desired
#' Bioconductor mirror. The option takes precedence if both are set. Its
#' default value is `https://bioconductor.org`.
#'
#' @return
#' `bioc_version()` returns a Bioconductor version, a `package_version`
#' object.
#'
#' `bioc_install_repos()` returns a named character vector of the URLs of
#' the Bioconductor repositories, appropriate for the current or the
#' specified R version.
#'
#' @param r_ver R version to use. For `bioc_install_repos()` it is
#'   ignored if `bioc_ver` is specified.
#' @param bioc_ver Bioconductor version to use. Defaults to the default one
#'   corresponding to `r_ver`.
#'
#' @export
#' @keywords internal
#' @examples
#' bioc_install_repos()

bioc_install_repos <- function(r_ver = getRversion(),
                               bioc_ver = bioc_version(r_ver)) {
  bioconductor$get_repos(bioc_ver)
}
# Contents of R/circular.R

## An environment to hold which packages are being installed so packages
## with circular dependencies can be skipped the second time.

installing <- new.env(parent = emptyenv())

is_root_install <- function() is.null(installing$packages)

exit_from_root_install <- function() installing$packages <- NULL

check_for_circular_dependencies <- function(pkgdir, quiet) {
  pkgdir <- normalizePath(pkgdir)
  pkg <- get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package")

  if (pkg %in% installing$packages) {
    if (!quiet) message("Skipping ", pkg, ", it is already being installed")
    TRUE

  } else {
    installing$packages <- c(installing$packages, pkg)
    FALSE
  }
}
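
## Illustration only (not part of the original file, never executed): how the
## guard above behaves across repeated calls for the same package directory.
## The "." directory is assumed to contain a DESCRIPTION file.
if (FALSE) {
  is_root_install()                                    # TRUE: nothing registered yet
  check_for_circular_dependencies(".", quiet = FALSE)  # registers the package, returns FALSE
  check_for_circular_dependencies(".", quiet = FALSE)  # TRUE the second time: skip it
  exit_from_root_install()                             # reset the registry when the root install is done
}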
# Contents of R/cran.R
cache <- new.env(parent = emptyenv())

#' @rdname available_packages
#' @export
available_packages_set <- function(repos, type, db) {
  signature <- rawToChar(serialize(list(repos, type), NULL, ascii = TRUE))
  if (is.null(cache[[signature]])) {
    cache[[signature]] <- db
  }
  cache[[signature]]
}

#' @rdname available_packages
#' @export
available_packages_reset <- function() {
  rm(list = ls(envir = cache), envir = cache)
}

#' Simpler available.packages
#'
#' This is mostly equivalent to [utils::available.packages()] however it also
#' caches the full result. Additionally the cache can be assigned explicitly with
#' [available_packages_set()] and reset (cleared) with [available_packages_reset()].
#'
#' @inheritParams utils::available.packages
#' @keywords internal
#' @seealso [utils::available.packages()] for full documentation on the output format.
#' @export
available_packages <- function(repos = getOption("repos"), type = getOption("pkgType")) {
  available_packages_set(
    repos, type,
    suppressWarnings(utils::available.packages(utils::contrib.url(repos, type), type = type))
  )
}
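
## Illustration only (never executed): the caching behavior described above.
## The first call hits the repositories; later calls with the same repos/type
## are served from the in-memory cache until it is reset.
if (FALSE) {
  db1 <- available_packages()   # downloads and caches the package database
  db2 <- available_packages()   # returned from the cache, no new download
  available_packages_reset()    # clear the cache to force a fresh download
}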
# Contents of R/dcf.R
read_dcf <- function(path) {
  fields <- colnames(read.dcf(path))
  as.list(read.dcf(path, keep.white = fields)[1, ])
}

write_dcf <- function(path, desc) {
  write.dcf(
    rbind(unlist(desc)),
    file = path,
    keep.white = names(desc),
    indent = 0
  )
}

get_desc_field <- function(path, field) {
  dcf <- read_dcf(path)
  dcf[[field]]
}
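
## Illustration only (never executed): a round trip through the DCF helpers
## above, assuming the working directory contains a package DESCRIPTION file.
if (FALSE) {
  desc <- read_dcf("DESCRIPTION")            # named list of DESCRIPTION fields
  desc$Package                               # e.g. the package name
  write_dcf("DESCRIPTION", desc)             # write the (possibly modified) fields back
  get_desc_field("DESCRIPTION", "Version")   # shortcut for reading a single field
}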
# Contents of R/decompress.R
# Decompress pkg, if needed
source_pkg <- function(path, subdir = NULL) {
  if (!dir.exists(path)) {
    bundle <- path
    outdir <- tempfile(pattern = "remotes")
    dir.create(outdir)

    path <- decompress(path, outdir)
  } else {
    bundle <- NULL
  }

  pkg_path <- if (is.null(subdir)) path else file.path(path, subdir)

  # Check it's an R package
  if (!file.exists(file.path(pkg_path, "DESCRIPTION"))) {
    stop("Does not appear to be an R package (no DESCRIPTION)", call. = FALSE)
  }

  # Check configure is executable if present
  config_path <- file.path(pkg_path, "configure")
  if (file.exists(config_path)) {
    Sys.chmod(config_path, "777")
  }

  pkg_path
}


decompress <- function(src, target) {
  stopifnot(file.exists(src))

  if (grepl("\\.zip$", src)) {
    my_unzip(src, target)
    outdir <- getrootdir(as.vector(utils::unzip(src, list = TRUE)$Name))
  } else if (grepl("\\.(tar|tar\\.gz|tar\\.bz2|tgz|tbz)$", src)) {
    untar(src, exdir = target)
    outdir <- getrootdir(untar(src, list = TRUE))
  } else {
    ext <- gsub("^[^.]*\\.", "", src)
    stop("Don't know how to decompress files with extension ", ext,
         call. = FALSE)
  }

  file.path(target, outdir)
}


# Returns everything before the last slash in a filename
# getdir("path/to/file") returns "path/to"
# getdir("path/to/dir/") returns "path/to/dir"
getdir <- function(path) sub("/[^/]*$", "", path)

# Given a list of files, returns the root (the topmost folder)
# getrootdir(c("path/to/file", "path/to/other/thing")) returns "path/to"
# It does not check that all paths have a common prefix. It fails for
# an empty input vector. It assumes that directories end with '/'.
getrootdir <- function(file_list) {
  stopifnot(length(file_list) > 0)
  slashes <- nchar(gsub("[^/]", "", file_list))
  if (min(slashes) == 0) return(".")

  getdir(file_list[which.min(slashes)])
}

my_unzip <- function(src, target, unzip = getOption("unzip", "internal")) {
  if (unzip %in% c("internal", "")) {
    return(utils::unzip(src, exdir = target))
  }

  args <- paste(
    "-oq", shQuote(src),
    "-d", shQuote(target)
  )

  system_check(unzip, args)
}
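
## Illustration only (never executed): extracting a hypothetical source bundle
## with the helpers above. The file name and listing are made up.
if (FALSE) {
  tarball <- "mypkg_1.0.0.tar.gz"                       # hypothetical bundle
  exdir <- tempfile("remotes")
  dir.create(exdir)
  pkgdir <- decompress(tarball, exdir)                  # path of the extracted top-level folder
  getrootdir(c("mypkg/DESCRIPTION", "mypkg/R/a.R"))     # returns "mypkg"
}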
# Contents of R/deps.R

#' Find all dependencies of a CRAN or dev package.
#'
#' Find all the dependencies of a package and determine whether they are ahead
#' or behind CRAN. A `print()` method identifies mismatches (if any)
#' between local and CRAN versions of each dependent package; an
#' `update()` method installs outdated or missing packages from CRAN.
#'
#' @param packages A character vector of package names.
#' @param pkgdir Path to a package directory, or to a package tarball.
#' @param dependencies Which dependencies do you want to check?
#'   Can be a character vector (selecting from "Depends", "Imports",
#'   "LinkingTo", "Suggests", or "Enhances"), or a logical vector.
#'
#'   `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and
#'   "Suggests". `NA` is shorthand for "Depends", "Imports" and "LinkingTo"
#'   and is the default. `FALSE` is shorthand for no dependencies (i.e.
#'   just check this package, not its dependencies).
#'
#'   The value "soft" means the same as `TRUE`, "hard" means the same as `NA`.
#'
#'   You can also specify dependencies from one or more additional fields,
#'   common ones include:
#'   - Config/Needs/website - for dependencies used in building the pkgdown site.
#'   - Config/Needs/coverage - for dependencies used in calculating test coverage.
#' @param quiet If `TRUE`, suppress output.
#' @param upgrade Should package dependencies be upgraded? One of "default", "ask", "always", or "never". "default"
#'   respects the value of the `R_REMOTES_UPGRADE` environment variable if set,
#'   and falls back to "ask" if unset. "ask" prompts the user for which out of
#'   date packages to upgrade. For non-interactive sessions "ask" is equivalent
#'   to "always". `TRUE` and `FALSE` are also accepted and correspond to
#'   "always" and "never" respectively.
#' @param repos A character vector giving repositories to use.
#' @param type Type of package to `update`.
#'
#' @param object A `package_deps` object.
#' @param ... Additional arguments passed to `install_packages`.
#' @inheritParams install_github
#'
#' @return
#'
#' A `data.frame` with columns:
#'
#' \tabular{ll}{
#'   `package` \tab The dependent package's name,\cr
#'   `installed` \tab The currently installed version,\cr
#'   `available` \tab The version available on CRAN,\cr
#'   `diff` \tab An integer denoting whether the locally installed version
#'   of the package is newer (1), the same (0) or older (-1) than the version
#'   currently available on CRAN.\cr
#' }
#'
#' @export
#' @examples
#' \dontrun{
#' package_deps("devtools")
#' # Use update to update any out-of-date dependencies
#' update(package_deps("devtools"))
#' }

package_deps <- function(packages, dependencies = NA,
                         repos = getOption("repos"),
                         type = getOption("pkgType")) {

  repos <- fix_repositories(repos)
  cran <- available_packages(repos, type)

  deps <- find_deps(packages, available = cran, top_dep = dependencies)

  # Remove base packages
  inst <- utils::installed.packages()
  base <- unname(inst[inst[, "Priority"] %in% c("base", "recommended"), "Package"])
  deps <- setdiff(deps, base)

  # get remote types
  remote <- structure(lapply(deps, package2remote, repos = repos, type = type), class = "remotes")

  inst_ver <- vapply(deps, local_sha, character(1))
  cran_ver <- vapply(remote, function(x) remote_sha(x), character(1))
  is_cran_remote <- vapply(remote, inherits, logical(1), "cran_remote")

  diff <- compare_versions(inst_ver, cran_ver, is_cran_remote)

  res <- structure(
    data.frame(
      package = deps,
      installed = inst_ver,
      available = cran_ver,
      diff = diff,
      is_cran = is_cran_remote,
      stringsAsFactors = FALSE
    ),
    class = c("package_deps", "data.frame")
  )

  res$remote <- remote

  res
}
#' `local_package_deps` extracts dependencies from a
#' local DESCRIPTION file.
#'
#' @export
#' @rdname package_deps

local_package_deps <- function(pkgdir = ".", dependencies = NA) {
  pkg <- load_pkg_description(pkgdir)

  dependencies <- tolower(standardise_dep(dependencies))
  dependencies <- intersect(dependencies, names(pkg))

  parsed <- lapply(pkg[tolower(dependencies)], parse_deps)
  unlist(lapply(parsed, `[[`, "name"), use.names = FALSE)
}

#' `dev_package_deps` lists the status of the dependencies
#' of a local package.
#'
#' @export
#' @rdname package_deps

dev_package_deps <- function(pkgdir = ".", dependencies = NA,
                             repos = getOption("repos"),
                             type = getOption("pkgType")) {

  pkg <- load_pkg_description(pkgdir)
  repos <- c(repos, parse_additional_repositories(pkg))

  deps <- local_package_deps(pkgdir = pkgdir, dependencies = dependencies)

  if (is_bioconductor(pkg)) {
    bioc_repos <- bioc_install_repos()

    missing_repos <- setdiff(names(bioc_repos), names(repos))

    if (length(missing_repos) > 0)
      repos[missing_repos] <- bioc_repos[missing_repos]
  }

  cran_deps <- package_deps(deps, repos = repos, type = type)

  res <- combine_remote_deps(cran_deps, extra_deps(pkg, "remotes"))

  res <- do.call(rbind, c(list(res), lapply(get_extra_deps(pkg, dependencies), extra_deps, pkg = pkg), stringsAsFactors = FALSE))

  res[is.na(res$package) | !duplicated(res$package, fromLast = TRUE), ]
}

combine_remote_deps <- function(cran_deps, remote_deps) {
  # If there are no dependencies there will be no remote dependencies either,
  # so just return them (and don't force the remote_deps promise)
  if (nrow(cran_deps) == 0) {
    return(cran_deps)
  }

  # Only keep the remotes that are specified in the cran_deps or are NA
  remote_deps <- remote_deps[is.na(remote_deps$package) | remote_deps$package %in% cran_deps$package, ]

  # If there are remote deps remove the equivalent CRAN deps
  cran_deps <- cran_deps[!(cran_deps$package %in% remote_deps$package), ]

  rbind(remote_deps, cran_deps)
}

## -2 = not installed, but available on CRAN
## -1 = installed, but out of date
##  0 = installed, most recent version
##  1 = installed, version ahead of CRAN
##  2 = package not on CRAN

compare_versions <- function(inst, remote, is_cran) {
  stopifnot(length(inst) == length(remote) && length(inst) == length(is_cran))

  compare_var <- function(i, c, cran) {
    if (!cran) {
      if (identical(i, c)) {
        return(CURRENT)
      } else {
        return(BEHIND)
      }
    }
    if (is.na(c)) return(UNAVAILABLE) # not on CRAN
    if (is.na(i)) return(UNINSTALLED) # not installed, but on CRAN

    i <- package_version(i)
    c <- package_version(c)

    if (i < c) {
      BEHIND # out of date
    } else if (i > c) {
      AHEAD # ahead of CRAN
    } else {
      CURRENT # most recent CRAN version
    }
  }

  vapply(seq_along(inst),
         function(i) compare_var(inst[[i]], remote[[i]], is_cran[[i]]),
         integer(1))
}

has_extra_deps <- function(pkg, dependencies) {
  any(dependencies %in% names(pkg))
}

get_extra_deps <- function(pkg, dependencies) {
  dependencies <- tolower(dependencies)

  dependencies <- intersect(dependencies, names(pkg))

  # remove standard dependencies
  setdiff(dependencies, tolower(standardise_dep(c("Depends", "Imports", "LinkingTo", "Suggests", "Enhances"))))
}

#' @export
print.package_deps <- function(x, show_ok = FALSE, ...) {
  class(x) <- "data.frame"
  x$remote <- lapply(x$remote, format)

  ahead <- x$diff > 0L
  behind <- x$diff < 0L
  same_ver <- x$diff == 0L

  x$diff <- NULL
  x[] <- lapply(x, format_str, width = 12)

  if (any(behind)) {
    cat("Needs update -----------------------------\n")
    print(x[behind, , drop = FALSE], row.names = FALSE, right = FALSE)
  }

  if (any(ahead)) {
    cat("Not on CRAN ----------------------------\n")
    print(x[ahead, , drop = FALSE], row.names = FALSE, right = FALSE)
  }

  if (show_ok && any(same_ver)) {
    cat("OK ---------------------------------------\n")
    print(x[same_ver, , drop = FALSE], row.names = FALSE, right = FALSE)
  }
}

## -2 = not installed, but available on CRAN
## -1 = installed, but out of date
##  0 = installed, most recent version
##  1 = installed, version ahead of CRAN
##  2 = package not on CRAN

UNINSTALLED <- -2L
BEHIND <- -1L
CURRENT <- 0L
AHEAD <- 1L
UNAVAILABLE <- 2L
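
## Illustration only (never executed): the integer codes produced by
## `compare_versions()` for three hypothetical packages, using the constants
## defined above.
if (FALSE) {
  compare_versions(
    inst    = c("1.0.0", NA,      "2.0.0"),   # locally installed versions
    remote  = c("1.1.0", "0.5.0", NA),        # versions available remotely
    is_cran = c(TRUE,    TRUE,    TRUE)
  )
  # -> c(BEHIND, UNINSTALLED, UNAVAILABLE), i.e. c(-1L, -2L, 2L)
}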

#' @export
#' @rdname package_deps
#' @importFrom stats update

update.package_deps <- function(object,
                                dependencies = NA,
                                upgrade = c("default", "ask", "always", "never"),
                                force = FALSE,
                                quiet = FALSE,
                                build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                                build_manual = FALSE, build_vignettes = FALSE,
                                repos = getOption("repos"),
                                type = getOption("pkgType"),
                                ...) {

  dependencies <- standardise_dep(dependencies)

  object <- upgradable_packages(object, upgrade, quiet)

  unavailable_on_cran <- object$diff == UNAVAILABLE & object$is_cran

  unknown_remotes <- (object$diff == UNAVAILABLE | object$diff == UNINSTALLED) & !object$is_cran

  if (any(unavailable_on_cran) && !quiet) {
    message("Skipping ", sum(unavailable_on_cran), " packages not available: ",
            paste(object$package[unavailable_on_cran], collapse = ", "))
  }

  if (any(unknown_remotes)) {
    install_remotes(object$remote[unknown_remotes],
                    dependencies = dependencies,
                    upgrade = upgrade,
                    force = force,
                    quiet = quiet,
                    build = build,
                    build_opts = build_opts,
                    build_manual = build_manual,
                    build_vignettes = build_vignettes,
                    repos = repos,
                    type = type,
                    ...)
  }

  ahead_of_cran <- object$diff == AHEAD & object$is_cran
  if (any(ahead_of_cran) && !quiet) {
    message("Skipping ", sum(ahead_of_cran), " packages ahead of CRAN: ",
            paste(object$package[ahead_of_cran], collapse = ", "))
  }

  ahead_remotes <- object$diff == AHEAD & !object$is_cran
  if (any(ahead_remotes)) {
    install_remotes(object$remote[ahead_remotes],
                    dependencies = dependencies,
                    upgrade = upgrade,
                    force = force,
                    quiet = quiet,
                    build = build,
                    build_opts = build_opts,
                    build_manual = build_manual,
                    build_vignettes = build_vignettes,
                    repos = repos,
                    type = type,
                    ...)
  }

  behind <- is.na(object$installed) | object$diff < CURRENT

  if (any(object$is_cran & !unavailable_on_cran & behind)) {
    # get the first cran-like remote and use its repos and pkg_type
    r <- object$remote[object$is_cran & behind][[1]]
    install_packages(object$package[object$is_cran & behind], repos = r$repos,
                     type = r$pkg_type, dependencies = dependencies, quiet = quiet, ...)
  }

  install_remotes(object$remote[!object$is_cran & behind],
                  dependencies = dependencies,
                  upgrade = upgrade,
                  force = force,
                  quiet = quiet,
                  build = build,
                  build_opts = build_opts,
                  build_manual = build_manual,
                  build_vignettes = build_vignettes,
                  repos = repos,
                  type = type,
                  ...)

  invisible()
}

install_packages <- function(packages, repos = getOption("repos"),
                             type = getOption("pkgType"), ...,
                             dependencies = FALSE, quiet = NULL) {

  # We want to pass only args that exist in the downstream functions
  args_to_keep <-
    unique(
      names(
        c(
          formals(utils::install.packages),
          formals(utils::download.file)
        )
      )
    )

  args <- list(...)
  args <- args[names(args) %in% args_to_keep]

  if (is.null(quiet))
    quiet <- !identical(type, "source")

  message("Installing ", length(packages), " packages: ",
          paste(packages, collapse = ", "))

  do.call(
    safe_install_packages,
    c(list(
        packages,
        repos = repos,
        type = type,
        dependencies = dependencies,
        quiet = quiet
      ),
      args
    )
  )
}

find_deps <- function(packages, available = available_packages(),
                      top_dep = TRUE, rec_dep = NA, include_pkgs = TRUE) {
  if (length(packages) == 0 || identical(top_dep, FALSE))
    return(character())

  top_dep <- standardise_dep(top_dep)
  rec_dep <- standardise_dep(rec_dep)

  top <- tools::package_dependencies(packages, db = available, which = top_dep)
  top_flat <- unlist(top, use.names = FALSE)

  if (length(rec_dep) != 0 && length(top_flat) > 0) {
    rec <- tools::package_dependencies(top_flat, db = available, which = rec_dep,
                                       recursive = TRUE)
    rec_flat <- unlist(rec, use.names = FALSE)
  } else {
    rec_flat <- character()
  }

  # We need to return these in reverse order, so that the packages furthest
  # down in the tree are installed first.
  unique(rev(c(if (include_pkgs) packages, top_flat, rec_flat)))
}

#' Standardise dependencies using the same logic as [install.packages]
#'
#' @param x The dependencies to standardise.
#'   A character vector (selecting from "Depends", "Imports",
#'   "LinkingTo", "Suggests", or "Enhances"), or a logical vector.
#'
#'   `TRUE` is shorthand for "Depends", "Imports", "LinkingTo" and
#'   "Suggests". `NA` is shorthand for "Depends", "Imports" and "LinkingTo"
#'   and is the default. `FALSE` is shorthand for no dependencies.
#'
#'   The value "soft" means the same as `TRUE`, "hard" means the same as `NA`.
#'
#'   Any additional values that don't match one of the standard dependency
#'   types are filtered out.
#'
#' @seealso <https://r-pkgs.org/description.html> for
#'   additional information on what each dependency type means.
#' @keywords internal
#' @export
standardise_dep <- function(x) {
  if (identical(x, NA)) {
    c("Depends", "Imports", "LinkingTo")
  } else if (isTRUE(x)) {
    c("Depends", "Imports", "LinkingTo", "Suggests")
  } else if (identical(x, FALSE)) {
    character(0)
  } else if (is.character(x)) {
    if (any(x == "hard")) {
      c("Depends", "Imports", "LinkingTo")
    } else if (any(x == "soft")) {
      c("Depends", "Imports", "LinkingTo", "Suggests")
    } else {
      intersect(x, c("Depends", "Imports", "LinkingTo", "Suggests", "Enhances"))
    }
  } else {
    stop("Dependencies must be a boolean or a character vector", call. = FALSE)
  }
}
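
## Illustration only (never executed): the shorthand values accepted by
## `standardise_dep()` and the dependency types they expand to.
if (FALSE) {
  standardise_dep(NA)      # "Depends", "Imports", "LinkingTo"   (hard dependencies)
  standardise_dep(TRUE)    # adds "Suggests"                     (soft dependencies)
  standardise_dep("soft")  # same as TRUE
  standardise_dep(FALSE)   # character(0), i.e. no dependencies
}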

#' Update packages that are missing or out-of-date.
#'
#' Works similarly to [utils::install.packages()] but doesn't install packages
#' that are already installed, and also upgrades out-of-date dependencies.
#'
#' @param packages Character vector of packages to update.
#' @param force Deprecated, this argument has no effect.
#' @inheritParams install_github
#' @seealso [package_deps()] to see which packages are out of date/
#'   missing.
#' @export
#' @examples
#' \dontrun{
#' update_packages("ggplot2")
#' update_packages(c("plyr", "ggplot2"))
#' }

update_packages <- function(packages = TRUE,
                            dependencies = NA,
                            upgrade = c("default", "ask", "always", "never"),
                            force = FALSE,
                            quiet = FALSE,
                            build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
                            build_manual = FALSE, build_vignettes = FALSE,
                            repos = getOption("repos"),
                            type = getOption("pkgType"),
                            ...) {

  if (isTRUE(force)) {
    .Deprecated(msg = "`update_packages(force = TRUE)` is deprecated and has no effect.")
  }

  if (isTRUE(packages)) {
    packages <- utils::installed.packages()[, "Package"]
  }

  pkgs <- package_deps(packages, repos = repos, type = type)
  update(pkgs,
         dependencies = dependencies,
         upgrade = upgrade,
         force = FALSE,
         quiet = quiet,
         build = build,
         build_opts = build_opts,
         build_manual = build_manual,
         build_vignettes = build_vignettes,
         repos = repos,
         type = type,
         ...)
}

has_additional_repositories <- function(pkg) {
  "additional_repositories" %in% names(pkg)
}

parse_additional_repositories <- function(pkg) {
  if (has_additional_repositories(pkg)) {

    strsplit(trim_ws(pkg[["additional_repositories"]]), "[,[:space:]]+")[[1]]
  }
}

fix_repositories <- function(repos) {
  if (length(repos) == 0)
    repos <- character()

  # Override any existing default values with the cloud mirror
  # Reason: A "@CRAN@" value would open a GUI for choosing a mirror
  repos[repos == "@CRAN@"] <- download_url("cloud.r-project.org")
  repos
}

parse_one_extra <- function(x, ...) {
  pieces <- strsplit(x, "::", fixed = TRUE)[[1]]

  if (length(pieces) == 1) {
    if (!grepl("/", pieces)) {
      type <- "cran"
    } else {
      type <- "github"
    }
    repo <- pieces
  } else if (length(pieces) == 2) {
    type <- pieces[1]
    repo <- pieces[2]
  } else {
    stop("Malformed remote specification '", x, "'", call. = FALSE)
  }

  type <- sub("^[.a-zA-Z0-9]+=", "", type)

  if (grepl("@", type)) {
    # Custom host
    tah <- strsplit(type, "@", fixed = TRUE)[[1]]
    type <- tah[1]
    host <- tah[2]
  } else {
    host <- NULL
  }

  tryCatch({
    # We need to use `environment(sys.function())` instead of
    # `asNamespace("remotes")` because when used as a script in
    # install-github.R there is no remotes namespace.

    fun <- get(paste0(tolower(type), "_remote"), mode = "function", inherits = TRUE)

    if (!is.null(host)) {
      res <- fun(repo, host = host, ...)
    } else {
      res <- fun(repo, ...)
    }
  }, error = function(e) stop("Unknown remote type: ", type, "\n  ", conditionMessage(e), call. = FALSE)
  )
  res
}

split_extra_deps <- function(x, name = "Remotes") {
  pkgs <- trim_ws(unlist(strsplit(x, ",[[:space:]]*")))
  if (any((res <- grep("[[:space:]]+", pkgs)) != -1)) {
    stop("Missing commas separating ", name, ": '", pkgs[res], "'", call. = FALSE)
  }
  pkgs
}
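
## Illustration only (never executed): parsing a hypothetical `Remotes:`
## DESCRIPTION field with the helpers above. The repository names are made up.
if (FALSE) {
  field <- "r-lib/pkgA, user/pkgB@main"
  specs <- split_extra_deps(field)            # c("r-lib/pkgA", "user/pkgB@main")
  remotes <- lapply(specs, parse_one_extra)   # both dispatched to github_remote()
}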


package_deps_new <- function(package = character(), installed = character(),
                             available = character(), diff = logical(), is_cran = logical(),
                             remote = list()) {

  res <- structure(
    data.frame(package = package, installed = installed, available = available, diff = diff, is_cran = is_cran, stringsAsFactors = FALSE),
    class = c("package_deps", "data.frame")
  )

  res$remote <- structure(remote, class = "remotes")
  res
}

extra_deps <- function(pkg, field) {
  if (!has_extra_deps(pkg, field)) {
    return(package_deps_new())
  }
  dev_packages <- split_extra_deps(pkg[[field]])
  extra <- lapply(dev_packages, parse_one_extra)

  package <- vapply(extra, function(x) remote_package_name(x), character(1), USE.NAMES = FALSE)
  installed <- vapply(package, function(x) local_sha(x), character(1), USE.NAMES = FALSE)
  available <- vapply(extra, function(x) remote_sha(x), character(1), USE.NAMES = FALSE)
  diff <- installed == available
  diff <- ifelse(!is.na(diff) & diff, CURRENT, BEHIND)
  diff[is.na(installed)] <- UNINSTALLED
  is_cran_remote <- vapply(extra, inherits, logical(1), "cran_remote")

  package_deps_new(package, installed, available, diff, is_cran = is_cran_remote, extra)
}


# interactive is an argument to make testing easier.
resolve_upgrade <- function(upgrade, is_interactive = interactive()) {
  if (isTRUE(upgrade)) {
    upgrade <- "always"
  } else if (identical(upgrade, FALSE)) {
    upgrade <- "never"
  }

  upgrade <- match.arg(upgrade[[1]], c("default", "ask", "always", "never"))

  if (identical(upgrade, "default"))
    upgrade <- Sys.getenv("R_REMOTES_UPGRADE", unset = "ask")

  if (!is_interactive && identical(upgrade, "ask")) {
    upgrade <- "always"
  }

  upgrade
}

upgradable_packages <- function(x, upgrade, quiet, is_interactive = interactive()) {

  uninstalled <- x$diff == UNINSTALLED

  behind <- x$diff == BEHIND

  switch(resolve_upgrade(upgrade, is_interactive = is_interactive),

    always = {
      return(msg_upgrades(x, quiet))
    },

    never = return(x[uninstalled, ]),

    ask = {

      if (!any(behind)) {
        return(x)
      }

      pkgs <- format_upgrades(x[behind, ])

      choices <- pkgs
      if (length(choices) > 0) {
        choices <- c("All", "CRAN packages only", "None", choices)
      }

      res <- select_menu(choices, title = "These packages have more recent versions available.\nIt is recommended to update all of them.\nWhich would you like to update?")

      if ("None" %in% res || length(res) == 0) {
        return(x[uninstalled, ])
      }

      if ("All" %in% res) {
        wch <- seq_len(NROW(x))
      } else {

        if ("CRAN packages only" %in% res) {
          wch <- uninstalled | (behind & x$is_cran)
        } else {
          wch <- sort(c(which(uninstalled), which(behind)[pkgs %in% res]))
        }
      }

      msg_upgrades(x[wch, ], quiet)
    }
  )
}

select_menu <- function(choices, title = NULL, msg = "Enter one or more numbers, or an empty line to skip updates: ", width = getOption("width")) {
  if (!is.null(title)) {
    cat(title, "\n", sep = "")
  }

  nc <- length(choices)
  op <- paste0(format(seq_len(nc)), ": ", choices)
  fop <- format(op)
  cat("", fop, "", sep = "\n")
  repeat {
    answer <- readline(msg)
    answer <- strsplit(answer, "[ ,]+")[[1]]
    if (all(answer %in% seq_along(choices))) {
      return(choices[as.integer(answer)])
    }
  }
}


msg_upgrades <- function(x, quiet) {

  if (isTRUE(quiet) || nrow(x) == 0) {
    return(invisible(x))
  }

  cat(format_upgrades(x[x$diff <= BEHIND, ]), sep = "\n")

  invisible(x)
}

format_upgrades <- function(x) {

  if (nrow(x) == 0) {
    return(character(0))
  }

  remote_type <- lapply(x$remote, format)

  # This call trims widths to 12 characters
  x[] <- lapply(x, format_str, width = 12)

  # This call aligns the columns
  x[] <- lapply(x, format, trim = FALSE, justify = "left")

  pkgs <- paste0(x$package, " (", x$installed, " -> ", x$available, ") ", "[", remote_type, "]")
  pkgs
}
# Contents of R/devel.R

## The checking code looks for the objects in the package namespace, so defining
## dll here removes the following NOTE
## Registration problem:
##   Evaluating ‘dll$foo’ during check gives error
## ‘object 'dll' not found’:
## .C(dll$foo, 0L)
## See https://github.com/wch/r-source/blob/d4e8fc9832f35f3c63f2201e7a35fbded5b5e14c/src/library/tools/R/QC.R##L1950-L1980
## Setting the class is needed to avoid a note about returning the wrong class.
## The local object is found first in the actual call, so current behavior is
## unchanged.

dll <- list(foo = structure(list(), class = "NativeSymbolInfo"))

has_devel <- function() {
  tryCatch(
    has_devel2(),
    error = function(e) FALSE
  )
}

## This is similar to devtools:::has_devel(), with some
## very minor differences.

has_devel2 <- function() {
  foo_path <- file.path(tempfile(fileext = ".c"))

  cat("void foo(int *bar) { *bar=1; }\n", file = foo_path)
  on.exit(unlink(foo_path))

  R(c("CMD", "SHLIB", basename(foo_path)), dirname(foo_path))
  dylib <- sub("\\.c$", .Platform$dynlib.ext, foo_path)
  on.exit(unlink(dylib), add = TRUE)

  dll <- dyn.load(dylib)
  on.exit(dyn.unload(dylib), add = TRUE)

  stopifnot(.C(dll$foo, 0L)[[1]] == 1L)
  TRUE
}

missing_devel_warning <- function(pkgdir) {
  pkgname <- tryCatch(
    get_desc_field(file.path(pkgdir, "DESCRIPTION"), "Package"),
    error = function(e) NULL
  ) %||% "<unknown>"

  sys <- sys_type()

  warning(
    "Package ",
    pkgname,
    " has compiled code, but no suitable ",
    "compiler(s) were found. Installation will likely fail.\n  ",
    if (sys == "windows") {
      c("Install Rtools (https://cran.r-project.org/bin/windows/Rtools/).",
        "Then use the pkgbuild package, or make sure that Rtools is in the PATH.")
    },
    if (sys == "macos") "Install XCode and make sure it works.",
    if (sys == "linux") "Install compilers via your Linux package manager."
  )
}

R <- function(args, path = tempdir()) {

  r <- file.path(R.home("bin"), "R")

  args <- c(
    "--no-site-file", "--no-environ", "--no-save",
    "--no-restore", "--quiet",
    args
  )

  system_check(r, args, path = path)
}
# Contents of R/download.R

#' Download a file
#'
#' Uses the curl package for R versions older than 3.2.0,
#' otherwise a wrapper around [download.file()].
#'
#' We respect the `download.file.method` setting of the user. If it is
#' not set, then see `download_method()` for choosing a method.
#'
#' Authentication can be supplied three ways:
#' * By setting `auth_token`. This will append an HTTP `Authorization`
#'   header: `Authorization: token {auth_token}`.
#' * By setting `basic_auth` to a list with elements `user` and `password`.
#'   This will append a proper `Authorization: Basic {encoded_password}`
#'   HTTP header.
#' * By specifying the proper `headers` directly.
#'
#' If both `auth_token` and `basic_auth` are specified, that's an error.
#' `auth_token` and `basic_auth` are _appended_ to `headers`, so they
#' take precedence over an `Authorization` header that is specified
#' directly in `headers`.
#'
#' @param path Path to download to. `dirname(path)` must exist.
#' @param url URL.
#' @param auth_token Token for token-based authentication or `NULL`.
#' @param basic_auth List with `user` and `password` for basic HTTP
#'   authentication, or `NULL`.
#' @param quiet Passed to [download.file()] or [curl::curl_download()].
#' @param headers Named character vector of HTTP headers to use.
#' @return `path`, if the download was successful.
#'
#' @keywords internal
#' @importFrom utils compareVersion

download <- function(path, url, auth_token = NULL, basic_auth = NULL,
                     quiet = TRUE, headers = NULL) {

  if (!is.null(basic_auth) && !is.null(auth_token)) {
    stop("Cannot use both Basic and Token authentication at the same time")
  }

  if (!is.null(basic_auth)) {
    userpass <- paste0(basic_auth$user, ":", basic_auth$password)
    auth <- paste("Basic", base64_encode(charToRaw(userpass)))
    headers <- c(headers, Authorization = auth)
  }

  if (!is.null(auth_token)) {
    headers <- c(headers, Authorization = paste("token", auth_token))
  }

  if (getRversion() < "3.2.0") {
    curl_download(url, path, quiet, headers)

  } else {

    base_download(url, path, quiet, headers)
  }

  path
}
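
## Illustration only (never executed): the two authentication styles described
## above. The URLs and credentials are placeholders.
if (FALSE) {
  # token-based auth: appends an `Authorization: token <token>` header
  download(tempfile(), "https://example.org/private/file.tar.gz",
           auth_token = Sys.getenv("GITHUB_PAT"))

  # basic auth: appends an `Authorization: Basic <base64(user:password)>` header
  download(tempfile(), "https://example.org/protected/file",
           basic_auth = list(user = "me", password = "secret"))
}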
|
|||
|
base_download <- function(url, path, quiet, headers) {
|
|||
|
|
|||
|
method <- download_method()
|
|||
|
|
|||
|
status <- if (method == "wget") {
|
|||
|
base_download_wget(url, path, quiet, headers)
|
|||
|
} else if (method =="curl") {
|
|||
|
base_download_curl(url, path, quiet, headers)
|
|||
|
} else if (getRversion() < "3.6.0") {
|
|||
|
base_download_noheaders(url, path, quiet, headers, method)
|
|||
|
} else {
|
|||
|
base_download_headers(url, path, quiet, headers, method)
|
|||
|
}
|
|||
|
|
|||
|
if (status != 0) stop("Cannot download file from ", url, call. = FALSE)
|
|||
|
|
|||
|
path
|
|||
|
}
|
|||
|
|
|||
|
base_download_wget <- function(url, path, quiet, headers) {
|
|||
|
|
|||
|
extra <- getOption("download.file.extra")
|
|||
|
|
|||
|
if (length(headers)) {
|
|||
|
qh <- shQuote(paste0(names(headers), ": ", headers))
|
|||
|
extra <- c(extra, paste0("--header=", qh))
|
|||
|
}
|
|||
|
|
|||
|
with_options(
|
|||
|
list(download.file.extra = extra),
|
|||
|
suppressWarnings(
|
|||
|
utils::download.file(
|
|||
|
url,
|
|||
|
path,
|
|||
|
method = "wget",
|
|||
|
quiet = quiet,
|
|||
|
mode = "wb",
|
|||
|
extra = extra
|
|||
|
)
|
|||
|
)
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
base_download_curl <- function(url, path, quiet, headers) {
|
|||
|
|
|||
|
extra <- getOption("download.file.extra")
|
|||
|
|
|||
|
# always add `-L`, so that curl follows redirects. GitHub in particular uses
|
|||
|
# 302 redirects extensively, so without -L these requests fail.
|
|||
|
extra <- c(extra, "--fail", "-L")
|
|||
|
|
|||
|
if (length(headers)) {
|
|||
|
qh <- shQuote(paste0(names(headers), ": ", headers))
|
|||
|
extra <- c(extra, paste("-H", qh))
|
|||
|
}
|
|||
|
|
|||
|
with_options(
|
|||
|
list(download.file.extra = extra),
|
|||
|
suppressWarnings(
|
|||
|
utils::download.file(
|
|||
|
url,
|
|||
|
path,
|
|||
|
method = "curl",
|
|||
|
quiet = quiet,
|
|||
|
mode = "wb",
|
|||
|
extra = extra
|
|||
|
)
|
|||
|
)
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
base_download_noheaders <- function(url, path, quiet, headers, method) {
|
|||
|
|
|||
|
if (length(headers)) {
|
|||
|
|
|||
|
if (method == "wininet" && getRversion() < "3.6.0") {
|
|||
|
warning(paste(
|
|||
|
"R (< 3.6.0) cannot send HTTP headers with the `wininet` download method.",
|
|||
|
"This download will likely fail. Please choose a different download",
|
|||
|
"method, via the `download.file.method` option. The `libcurl` method is",
|
|||
|
"best, if available, and the `wget` and `curl` methods work as well,",
|
|||
|
"if the corresponding external tool is available. See `?download.file`"))
|
|||
|
}
|
|||
|
|
|||
|
get("unlockBinding", baseenv())("makeUserAgent", asNamespace("utils"))
|
|||
|
orig <- get("makeUserAgent", envir = asNamespace("utils"))
|
|||
|
on.exit({
|
|||
|
assign("makeUserAgent", orig, envir = asNamespace("utils"))
|
|||
|
lockBinding("makeUserAgent", asNamespace("utils"))
|
|||
|
}, add = TRUE)
|
|||
|
ua <- orig(FALSE)
|
|||
|
|
|||
|
flathead <- paste0(names(headers), ": ", headers, collapse = "\r\n")
|
|||
|
agent <- paste0(ua, "\r\n", flathead)
|
|||
|
assign(
|
|||
|
"makeUserAgent",
|
|||
|
envir = asNamespace("utils"),
|
|||
|
function(format = TRUE) {
|
|||
|
if (format) {
|
|||
|
paste0("User-Agent: ", agent, "\r\n")
|
|||
|
} else {
|
|||
|
agent
|
|||
|
}
|
|||
|
})
|
|||
|
}
|
|||
|
|
|||
|
suppressWarnings(
|
|||
|
utils::download.file(
|
|||
|
url,
|
|||
|
path,
|
|||
|
method = method,
|
|||
|
quiet = quiet,
|
|||
|
mode = "wb"
|
|||
|
)
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
base_download_headers <- function(url, path, quiet, headers, method) {
|
|||
|
suppressWarnings(
|
|||
|
utils::download.file(
|
|||
|
url,
|
|||
|
path,
|
|||
|
method = method,
|
|||
|
quiet = quiet,
|
|||
|
mode = "wb",
|
|||
|
headers = headers
|
|||
|
)
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
has_curl <- function() isTRUE(unname(capabilities("libcurl")))
|
|||
|
|
|||
|
download_method <- function() {
|
|||
|
|
|||
|
user_option <- getOption("download.file.method")
|
|||
|
|
|||
|
if (!is.null(user_option)) {
|
|||
|
## The user wants what the user wants
|
|||
|
user_option
|
|||
|
|
|||
|
} else if (has_curl()) {
|
|||
|
## If we have libcurl, it is usually the best option
|
|||
|
"libcurl"
|
|||
|
|
|||
|
} else if (compareVersion(get_r_version(), "3.3") == -1 &&
|
|||
|
os_type() == "windows") {
|
|||
|
## Before 3.3 we select wininet on Windows
|
|||
|
"wininet"
|
|||
|
|
|||
|
} else {
|
|||
|
## Otherwise this is probably hopeless, but let R select, and
|
|||
|
## try something
|
|||
|
"auto"
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
curl_download <- function(url, path, quiet, headers) {
|
|||
|
|
|||
|
if (!pkg_installed("curl")) {
|
|||
|
stop("The 'curl' package is required if R is older than 3.2.0")
|
|||
|
}
|
|||
|
|
|||
|
handle <- curl::new_handle()
|
|||
|
if (!is.null(headers)) curl::handle_setheaders(handle, .list = headers)
|
|||
|
curl::curl_download(url, path, quiet = quiet, mode = "wb", handle = handle)
|
|||
|
}
|
|||
|
|
|||
|
true_download_method <- function(x) {
|
|||
|
if (identical(x, "auto")) {
|
|||
|
auto_download_method()
|
|||
|
} else {
|
|||
|
x
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
auto_download_method <- function() {
|
|||
|
if (isTRUE(capabilities("libcurl"))) {
|
|||
|
"libcurl"
|
|||
|
} else if (isTRUE(capabilities("http/ftp"))) {
|
|||
|
"internal"
|
|||
|
} else if (nzchar(Sys.which("wget"))) {
|
|||
|
"wget"
|
|||
|
} else if (nzchar(Sys.which("curl"))) {
|
|||
|
"curl"
|
|||
|
} else {
|
|||
|
""
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
download_method_secure <- function() {
|
|||
|
method <- true_download_method(download_method())
|
|||
|
|
|||
|
if (method %in% c("wininet", "libcurl", "wget", "curl")) {
|
|||
|
# known good methods
|
|||
|
TRUE
|
|||
|
} else if (identical(method, "internal")) {
|
|||
|
# only done before R 3.3
|
|||
|
if (utils::compareVersion(get_r_version(), "3.3") == -1) {
|
|||
|
# if internal then see if we're using Windows internal with inet2
|
|||
|
identical(Sys.info()[["sysname"]], "Windows") && utils::setInternet2(NA)
|
|||
|
} else {
|
|||
|
FALSE
|
|||
|
}
|
|||
|
} else {
|
|||
|
# method with unknown properties (e.g. "lynx") or unresolved auto
|
|||
|
FALSE
|
|||
|
}
|
|||
|
}
|
|||
|
# Contents of R/git-auth.R
|
|||
|
|
|||
|
# nocov start
|
|||
|
|
|||
|
gitcreds_get <- NULL
|
|||
|
gitcreds_set <- NULL
|
|||
|
gitcreds_delete <- NULL
|
|||
|
gitcreds_list_helpers <- NULL
|
|||
|
gitcreds_cache_envvar <- NULL
|
|||
|
gitcreds_fill <- NULL
|
|||
|
gitcreds_approve <- NULL
|
|||
|
gitcreds_reject <- NULL
|
|||
|
gitcreds_parse_output <- NULL
|
|||
|
|
|||
|
gitcreds <- local({
|
|||
|
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
# Public API
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
|
|||
|
gitcreds_get <<- function(url = "https://github.com", use_cache = TRUE,
|
|||
|
set_cache = TRUE) {
|
|||
|
|
|||
|
stopifnot(
|
|||
|
is_string(url), has_no_newline(url),
|
|||
|
is_flag(use_cache),
|
|||
|
is_flag(set_cache)
|
|||
|
)
|
|||
|
|
|||
|
cache_ev <- gitcreds_cache_envvar(url)
|
|||
|
if (use_cache && !is.null(ans <- gitcreds_get_cache(cache_ev))) {
|
|||
|
return(ans)
|
|||
|
}
|
|||
|
|
|||
|
check_for_git()
|
|||
|
|
|||
|
out <- gitcreds_fill(list(url = url), dummy = TRUE)
|
|||
|
creds <- gitcreds_parse_output(out, url)
|
|||
|
|
|||
|
if (set_cache) {
|
|||
|
gitcreds_set_cache(cache_ev, creds)
|
|||
|
}
|
|||
|
|
|||
|
creds
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_set <<- function(url = "https://github.com") {
|
|||
|
if (!is_interactive()) {
|
|||
|
throw(new_error(
|
|||
|
"gitcreds_not_interactive_error",
|
|||
|
message = "`gitcreds_set()` only works in interactive sessions"
|
|||
|
))
|
|||
|
}
|
|||
|
stopifnot(is_string(url), has_no_newline(url))
|
|||
|
check_for_git()
|
|||
|
|
|||
|
current <- tryCatch(
|
|||
|
gitcreds_get(url, use_cache = FALSE, set_cache = FALSE),
|
|||
|
gitcreds_no_credentials = function(e) NULL
|
|||
|
)
|
|||
|
|
|||
|
if (!is.null(current)) {
|
|||
|
gitcreds_set_replace(url, current)
|
|||
|
} else {
|
|||
|
gitcreds_set_new(url)
|
|||
|
}
|
|||
|
|
|||
|
msg("-> Removing credentials from cache...")
|
|||
|
gitcreds_delete_cache(gitcreds_cache_envvar(url))
|
|||
|
|
|||
|
msg("-> Done.")
|
|||
|
invisible()
|
|||
|
}
|
|||
|
|
|||
|
#' Replace credentials with new ones
|
|||
|
#'
|
|||
|
#' It only works interactively, because of `menu()` in `ack()` and
|
|||
|
#' `readline()`.
|
|||
|
#'
|
|||
|
#' We need to set a username; it is compulsory for `git credential`.
|
|||
|
#' 1. If there was one in the url, then we use that.
|
|||
|
#' 2. Otherwise if git has a username configured for this URL, we use that.
|
|||
|
#' 3. Otherwise we use the username in the credentials we are replacing.
|
|||
|
#'
|
|||
|
#' @param url URL.
|
|||
|
#' @param current Must not be `NULL`, and it must contain a
|
|||
|
#' `gitcreds` object. (Well, a named list, really.)
|
|||
|
#' @noRd
|
|||
|
#' @return Nothing.
|
|||
|
|
|||
|
gitcreds_set_replace <- function(url, current) {
|
|||
|
|
|||
|
# Potentially take username from the credential we are replacing
|
|||
|
current_username <- current$username
|
|||
|
|
|||
|
# Keep warning until there is a credential to replace.
|
|||
|
# In case there are multiple credentials for the same URL.
|
|||
|
while (!is.null(current)) {
|
|||
|
if (!ack(url, current, "Replace")) {
|
|||
|
throw(new_error("gitcreds_abort_replace_error"))
|
|||
|
}
|
|||
|
|
|||
|
msg("\n-> Removing current credentials...")
|
|||
|
gitcreds_reject(current)
|
|||
|
|
|||
|
current <- tryCatch(
|
|||
|
gitcreds_get(url, use_cache = FALSE, set_cache = FALSE),
|
|||
|
gitcreds_no_credentials = function(e) NULL
|
|||
|
)
|
|||
|
if (!is.null(current)) msg("\n!! Found more matching credentials!")
|
|||
|
}
|
|||
|
|
|||
|
msg("")
|
|||
|
pat <- readline("? Enter new password or token: ")
|
|||
|
|
|||
|
username <- get_url_username(url) %||%
|
|||
|
gitcreds_username(url) %||%
|
|||
|
current_username
|
|||
|
|
|||
|
msg("-> Adding new credentials...")
|
|||
|
gitcreds_approve(list(url = url, username = username, password = pat))
|
|||
|
|
|||
|
invisible()
|
|||
|
}
|
|||
|
|
|||
|
#' Set new credentials
|
|||
|
#'
|
|||
|
#' This should not replace or remove any old credentials, but of course
|
|||
|
#' we cannot be sure, because credential helpers pretty much do what they
|
|||
|
#' want.
|
|||
|
#'
|
|||
|
#' We need to set a username; it is compulsory for `git credential`.
|
|||
|
#' 1. If there was one in the url, then we use that.
|
|||
|
#' 2. Otherwise if git has a username configured for this URL, we use that.
|
|||
|
#' 3. Otherwise we use a default username.
|
|||
|
#'
|
|||
|
#' @param url URL.
|
|||
|
#' @noRd
|
|||
|
#' @return Nothing.
|
|||
|
|
|||
|
gitcreds_set_new <- function(url) {
|
|||
|
msg("\n")
|
|||
|
pat <- readline("? Enter password or token: ")
|
|||
|
|
|||
|
username <- get_url_username(url) %||%
|
|||
|
gitcreds_username(url) %||%
|
|||
|
default_username()
|
|||
|
|
|||
|
msg("-> Adding new credentials...")
|
|||
|
gitcreds_approve(list(url = url, username = username, password = pat))
|
|||
|
|
|||
|
invisible()
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_delete <<- function(url = "https://github.com") {
|
|||
|
if (!is_interactive()) {
|
|||
|
throw(new_error(
|
|||
|
"gitcreds_not_interactive_error",
|
|||
|
message = "`gitcreds_delete()` only works in interactive sessions"
|
|||
|
))
|
|||
|
}
|
|||
|
stopifnot(is_string(url))
|
|||
|
check_for_git()
|
|||
|
|
|||
|
current <- tryCatch(
|
|||
|
gitcreds_get(url, use_cache = FALSE, set_cache = FALSE),
|
|||
|
gitcreds_no_credentials = function(e) NULL
|
|||
|
)
|
|||
|
|
|||
|
if (is.null(current)) {
|
|||
|
return(invisible(FALSE))
|
|||
|
}
|
|||
|
|
|||
|
if (!ack(url, current, "Delete")) {
|
|||
|
throw(new_error("gitcreds_abort_delete_error"))
|
|||
|
}
|
|||
|
|
|||
|
msg("-> Removing current credentials...")
|
|||
|
gitcreds_reject(current)
|
|||
|
|
|||
|
msg("-> Removing credentials from cache...")
|
|||
|
gitcreds_delete_cache(gitcreds_cache_envvar(url))
|
|||
|
|
|||
|
msg("-> Done.")
|
|||
|
|
|||
|
invisible(TRUE)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_list_helpers <<- function() {
|
|||
|
check_for_git()
|
|||
|
out <- git_run(c("config", "--get-all", "credential.helper"))
|
|||
|
clear <- rev(which(out == ""))
|
|||
|
if (length(clear)) out <- out[-(1:clear[1])]
|
|||
|
out
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_cache_envvar <<- function(url) {
|
|||
|
pcs <- parse_url(url)
|
|||
|
bad <- is.na(pcs$protocol) | is.na(pcs$host)
|
|||
|
if (any(bad)) {
|
|||
|
stop("Invalid URL(s): ", paste(url[bad], collapse = ", "))
|
|||
|
}
|
|||
|
|
|||
|
proto <- sub("^https?_$", "", paste0(pcs$protocol, "_"))
|
|||
|
user <- ifelse(pcs$username != "", paste0(pcs$username, "_AT_"), "")
|
|||
|
host0 <- sub("^api[.]github[.]com$", "github.com", pcs$host)
|
|||
|
host1 <- gsub("[.:]+", "_", host0)
|
|||
|
host <- gsub("[^a-zA-Z0-9_-]", "x", host1)
|
|||
|
|
|||
|
slug1 <- paste0(proto, user, host)
|
|||
|
|
|||
|
# fix the user name ambiguity, not that it happens often...
|
|||
|
slug2 <- ifelse(grepl("^AT_", slug1), paste0("AT_", slug1), slug1)
|
|||
|
|
|||
|
# env vars cannot start with a number
|
|||
|
slug3 <- ifelse(grepl("^[0-9]", slug2), paste0("AT_", slug2), slug2)
|
|||
|
|
|||
|
paste0("GITHUB_PAT_", toupper(slug3))
|
|||
|
}
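# Example (illustrative): URLs map to cache env var names like
#   gitcreds_cache_envvar("https://github.com")
#   #> [1] "GITHUB_PAT_GITHUB_COM"
#   gitcreds_cache_envvar("https://jane@example.com:8080")
#   #> [1] "GITHUB_PAT_JANE_AT_EXAMPLE_COM_8080"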
|
|||
|
|
|||
|
gitcreds_get_cache <- function(ev) {
|
|||
|
val <- Sys.getenv(ev, NA_character_)
|
|||
|
if (is.na(val) && ev == "GITHUB_PAT_GITHUB_COM") {
|
|||
|
val <- Sys.getenv("GITHUB_PAT", NA_character_)
|
|||
|
}
|
|||
|
if (is.na(val) && ev == "GITHUB_PAT_GITHUB_COM") {
|
|||
|
val <- Sys.getenv("GITHUB_TOKEN", NA_character_)
|
|||
|
}
|
|||
|
if (is.na(val) || val == "") {
|
|||
|
return(NULL)
|
|||
|
}
|
|||
|
if (val == "FAIL" || grepl("^FAIL:", val)) {
|
|||
|
class <- strsplit(val, ":", fixed = TRUE)[[1]][2]
|
|||
|
if (is.na(class)) class <- "gitcreds_no_credentials"
|
|||
|
throw(new_error(class))
|
|||
|
}
|
|||
|
|
|||
|
unesc <- function(x) {
|
|||
|
gsub("\\\\(.)", "\\1", x)
|
|||
|
}
|
|||
|
|
|||
|
# split on `:` that is not preceded by a `\`
|
|||
|
spval <- strsplit(val, "(?<!\\\\):", perl = TRUE)[[1]]
|
|||
|
spval0 <- unesc(spval)
|
|||
|
|
|||
|
# Single field, then the token
|
|||
|
if (length(spval) == 1) {
|
|||
|
return(new_gitcreds(
|
|||
|
protocol = NA_character_,
|
|||
|
host = NA_character_,
|
|||
|
username = NA_character_,
|
|||
|
password = unesc(val)
|
|||
|
))
|
|||
|
}
|
|||
|
|
|||
|
# Two fields? Then it is username:password
|
|||
|
if (length(spval) == 2) {
|
|||
|
return(new_gitcreds(
|
|||
|
protocol = NA_character_,
|
|||
|
host = NA_character_,
|
|||
|
username = spval0[1],
|
|||
|
password = spval0[2]
|
|||
|
))
|
|||
|
}
|
|||
|
|
|||
|
# Otherwise a full record
|
|||
|
if (length(spval) %% 2 == 1) {
|
|||
|
warning("Invalid gitcreds credentials in env var `", ev, "`. ",
|
|||
|
"Maybe an unescaped ':' character?")
|
|||
|
return(NULL)
|
|||
|
}
|
|||
|
|
|||
|
creds <- structure(
|
|||
|
spval0[seq(2, length(spval0), by = 2)],
|
|||
|
names = spval[seq(1, length(spval0), by = 2)]
|
|||
|
)
|
|||
|
do.call("new_gitcreds", as.list(creds))
|
|||
|
}
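# Example (illustrative sketch of the cache format): the env var holds either
# a bare token, "username:password", or ":"-separated key/value pairs, with
# ":" inside a value escaped as "\:", e.g.
#   Sys.setenv(GITHUB_PAT_GITHUB_COM =
#     "protocol:https:host:github.com:username:me:password:<token>")
#   gitcreds_get_cache("GITHUB_PAT_GITHUB_COM")$username
#   #> [1] "me"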
|
|||
|
|
|||
|
gitcreds_set_cache <- function(ev, creds) {
|
|||
|
esc <- function(x) gsub(":", "\\:", x, fixed = TRUE)
|
|||
|
keys <- esc(names(creds))
|
|||
|
vals <- esc(unlist(creds, use.names = FALSE))
|
|||
|
value <- paste0(keys, ":", vals, collapse = ":")
|
|||
|
do.call("set_env", list(structure(value, names = ev)))
|
|||
|
invisible(NULL)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_delete_cache <- function(ev) {
|
|||
|
Sys.unsetenv(ev)
|
|||
|
}
|
|||
|
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
# Raw git credential API
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
|
|||
|
gitcreds_fill <<- function(input, args = character(), dummy = TRUE) {
|
|||
|
if (dummy) {
|
|||
|
helper <- paste0(
|
|||
|
"credential.helper=\"! echo protocol=dummy;",
|
|||
|
"echo host=dummy;",
|
|||
|
"echo username=dummy;",
|
|||
|
"echo password=dummy\""
|
|||
|
)
|
|||
|
args <- c(args, "-c", helper)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_run("fill", input, args)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_approve <<- function(creds, args = character()) {
|
|||
|
gitcreds_run("approve", creds, args)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_reject <<- function(creds, args = character()) {
|
|||
|
gitcreds_run("reject", creds, args)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_parse_output <<- function(txt, url) {
|
|||
|
if (is.null(txt) || txt[1] == "protocol=dummy") {
|
|||
|
throw(new_error("gitcreds_no_credentials", url = url))
|
|||
|
}
|
|||
|
nms <- sub("=.*$", "", txt)
|
|||
|
vls <- sub("^[^=]+=", "", txt)
|
|||
|
structure(as.list(vls), names = nms, class = "gitcreds")
|
|||
|
}
|
|||
|
|
|||
|
#' Run a `git credential` command
|
|||
|
#'
|
|||
|
#' @details
|
|||
|
#' We set the [gitcreds_env()] environment variables, to avoid dialog boxes
|
|||
|
#' from some credential helpers and also validation that potentially needs
|
|||
|
#' an internet connection.
|
|||
|
#'
|
|||
|
#' @param command Command name, e.g. `"fill"`.
|
|||
|
#' @param input Named list of input, see
|
|||
|
#' https://git-scm.com/docs/git-credential#IOFMT
|
|||
|
#' @param args Extra command line arguments, added after `git` and
|
|||
|
#' _before_ `command`, to allow `git -c ... fill`.
|
|||
|
#' @return Standard output, line by line.
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @seealso [git_run()].
|
|||
|
|
|||
|
gitcreds_run <- function(command, input, args = character()) {
|
|||
|
env <- gitcreds_env()
|
|||
|
oenv <- set_env(env)
|
|||
|
on.exit(set_env(oenv), add = TRUE)
|
|||
|
|
|||
|
stdin <- create_gitcreds_input(input)
|
|||
|
|
|||
|
git_run(c(args, "credential", command), input = stdin)
|
|||
|
}
|
|||
|
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
# Helpers specific to git
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
|
|||
|
#' Run a git command
|
|||
|
#'
|
|||
|
#' @details
|
|||
|
#' Currently we don't set the credential specific environment variables
|
|||
|
#' here, and credential helpers invoked by `git` behave the same way as
|
|||
|
#' they would from the command line.
|
|||
|
#'
|
|||
|
#' ## Errors
|
|||
|
#'
|
|||
|
#' On error `git_run()` returns an error with class `git_error` and
|
|||
|
#' also `gitcreds_error`. The error object includes
|
|||
|
#' * `args` the command line arguments,
|
|||
|
#' * `status`: the exit status of the command,
|
|||
|
#' * `stdout`: the standard output of the command, line by line.
|
|||
|
#' * `stderr`: the standard error of the command, line by line.
|
|||
|
#'
|
|||
|
#' @param args Command line arguments.
|
|||
|
#' @param input The standard input (the `input` argument of [system2()]).
|
|||
|
#' @noRd
|
|||
|
#' @return Standard output, line by line.
|
|||
|
|
|||
|
git_run <- function(args, input = NULL) {
|
|||
|
stderr_file <- tempfile("gitcreds-stderr-")
|
|||
|
on.exit(unlink(stderr_file, recursive = TRUE), add = TRUE)
|
|||
|
out <- tryCatch(
|
|||
|
suppressWarnings(system2(
|
|||
|
"git", args, input = input, stdout = TRUE, stderr = stderr_file
|
|||
|
)),
|
|||
|
error = function(e) NULL
|
|||
|
)
|
|||
|
|
|||
|
if (!is.null(attr(out, "status")) && attr(out, "status") != 0) {
|
|||
|
throw(new_error(
|
|||
|
"git_error",
|
|||
|
args = args,
|
|||
|
stdout = out,
|
|||
|
status = attr(out, "status"),
|
|||
|
stderr = read_file(stderr_file)
|
|||
|
))
|
|||
|
}
|
|||
|
|
|||
|
out
|
|||
|
}
|
|||
|
|
|||
|
#' Request confirmation from the user, to replace or delete credentials
|
|||
|
#'
|
|||
|
#' This function only works in interactive sessions.
|
|||
|
#'
|
|||
|
#' @param url URL to delete or set new credentials for.
|
|||
|
#' @param current The current credentials.
|
|||
|
#' @return `FALSE` if the user changed their mind, to keep the current
|
|||
|
#' credentials. `TRUE` for replacing/deleting them.
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @seealso [gitcreds_set()].
|
|||
|
|
|||
|
ack <- function(url, current, what = "Replace") {
|
|||
|
msg("\n-> Your current credentials for ", squote(url), ":\n")
|
|||
|
msg(paste0(format(current, header = FALSE), collapse = "\n"), "\n")
|
|||
|
|
|||
|
choices <- c(
|
|||
|
"Keep these credentials",
|
|||
|
paste(what, "these credentials"),
|
|||
|
if (has_password(current)) "See the password / token"
|
|||
|
)
|
|||
|
|
|||
|
repeat {
|
|||
|
ch <- utils::menu(title = "-> What would you like to do?", choices)
|
|||
|
|
|||
|
if (ch == 1) return(FALSE)
|
|||
|
if (ch == 2) return(TRUE)
|
|||
|
|
|||
|
msg("\nCurrent password: ", current$password, "\n\n")
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#' Whether a `gitcreds` credential has a non-empty `password`
|
|||
|
#'
|
|||
|
#' This is usually `TRUE`.
|
|||
|
#'
|
|||
|
#' @param creds `gitcreds`
|
|||
|
#' @noRd
|
|||
|
#' @return `TRUE` if there is a non-empty `password`.
|
|||
|
|
|||
|
has_password <- function(creds) {
|
|||
|
is_string(creds$password) && creds$password != ""
|
|||
|
}
|
|||
|
|
|||
|
#' Create a string that can be passed as standard input to `git credential`
|
|||
|
#' commands
|
|||
|
#'
|
|||
|
#' @param args Usually a `gitcreds` object, but can be a named list in
|
|||
|
#' general. The format is documented at https://git-scm.com/docs/git-credential#IOFMT
|
|||
|
#' @noRd
|
|||
|
#' @return String.
|
|||
|
|
|||
|
create_gitcreds_input <- function(args) {
|
|||
|
paste0(
|
|||
|
paste0(names(args), "=", args, collapse = "\n"),
|
|||
|
"\n\n"
|
|||
|
)
|
|||
|
}
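# Example (illustrative): the git-credential input format produced here, e.g.
#   create_gitcreds_input(list(protocol = "https", host = "github.com"))
#   #> [1] "protocol=https\nhost=github.com\n\n"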
|
|||
|
|
|||
|
#' Environment to set for all `git credential` commands.
|
|||
|
#' @noRd
|
|||
|
#' @return Named character vector.
|
|||
|
|
|||
|
gitcreds_env <- function() {
|
|||
|
# Avoid interactivity and validation with some common credential helpers
|
|||
|
c(
|
|||
|
GCM_INTERACTIVE = "Never",
|
|||
|
GCM_MODAL_PROMPT = "false",
|
|||
|
GCM_VALIDATE = "false",
|
|||
|
GCM_GUI_PROMPT = "false"
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' Check if `git` is installed and can run
|
|||
|
#'
|
|||
|
#' If not installed, a `gitcreds_nogit_error` is thrown.
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @return Nothing
|
|||
|
|
|||
|
check_for_git <- function() {
|
|||
|
# This is simpler than Sys.which(), and also less fragile
|
|||
|
has_git <- tryCatch({
|
|||
|
suppressWarnings(system2(
|
|||
|
"git", "--version",
|
|||
|
stdout = TRUE, stderr = null_file()
|
|||
|
))
|
|||
|
TRUE
|
|||
|
}, error = function(e) FALSE)
|
|||
|
|
|||
|
if (!has_git) throw(new_error("gitcreds_nogit_error"))
|
|||
|
}
|
|||
|
|
|||
|
#' Query the `username` to use for `git config credential`
|
|||
|
#'
|
|||
|
#' @details
|
|||
|
#' The current working directory matters for this command, as you can
|
|||
|
#' configure `username` in a local `.git/config` file (via
|
|||
|
#' `git config --local`).
|
|||
|
#'
|
|||
|
#' @param url URL to query the username for, or `NULL`. If not `NULL`,
|
|||
|
#' then we first try to query a URL-specific username. See
|
|||
|
#' https://git-scm.com/docs/gitcredentials for more about URL-specific
|
|||
|
#' credential config
|
|||
|
#' @noRd
|
|||
|
#' @return A string with the username, or `NULL` if no default was found.
|
|||
|
|
|||
|
gitcreds_username <- function(url = NULL) {
|
|||
|
gitcreds_username_for_url(url) %||% gitcreds_username_generic()
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_username_for_url <- function(url) {
|
|||
|
if (is.null(url)) return(NULL)
|
|||
|
tryCatch(
|
|||
|
git_run(c(
|
|||
|
"config", "--get-urlmatch", "credential.username", shQuote(url)
|
|||
|
)),
|
|||
|
git_error = function(err) {
|
|||
|
if (err$status == 1) NULL else throw(err)
|
|||
|
}
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
gitcreds_username_generic <- function() {
|
|||
|
tryCatch(
|
|||
|
git_run(c("config", "credential.username")),
|
|||
|
git_error = function(err) {
|
|||
|
if (err$status == 1) NULL else throw(err)
|
|||
|
}
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' User name to use when creating a credential, if there is nothing better
|
|||
|
#'
|
|||
|
#' These user names are typical for some git tools, e.g.
|
|||
|
#' [Git Credential Manager for Windows](http://microsoft.github.io/Git-Credential-Manager-for-Windows/)
|
|||
|
#' (`manager`) and
|
|||
|
#' [Git Credential Manager Core](https://github.com/Microsoft/Git-Credential-Manager-Core)
|
|||
|
#' (`manager-core`).
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @return Character string
|
|||
|
|
|||
|
default_username <- function() {
|
|||
|
"PersonalAccessToken"
|
|||
|
}
|
|||
|
|
|||
|
new_gitcreds <- function(...) {
|
|||
|
structure(list(...), class = "gitcreds")
|
|||
|
}
|
|||
|
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
# Errors
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
|
|||
|
gitcred_errors <- function() {
|
|||
|
c(
|
|||
|
git_error = "System git failed",
|
|||
|
gitcreds_nogit_error = "Could not find system git",
|
|||
|
gitcreds_not_interactive_error = "gitcreds needs an interactive session",
|
|||
|
gitcreds_abort_replace_error = "User aborted updating credentials",
|
|||
|
gitcreds_abort_delete_error = "User aborted deleting credentials",
|
|||
|
gitcreds_no_credentials = "Could not find any credentials",
|
|||
|
gitcreds_no_helper = "No credential helper is set",
|
|||
|
gitcreds_multiple_helpers =
|
|||
|
"Multiple credential helpers, only using the first",
|
|||
|
gitcreds_unknown_helper = "Unknown credential helper, cannot list credentials"
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
new_error <- function(class, ..., message = "", call. = TRUE, domain = NULL) {
|
|||
|
if (message == "") message <- gitcred_errors()[[class]]
|
|||
|
message <- .makeMessage(message, domain = domain)
|
|||
|
cond <- list(message = message, ...)
|
|||
|
if (call.) cond$call <- sys.call(-1)
|
|||
|
class(cond) <- c(class, "gitcreds_error", "error", "condition")
|
|||
|
cond
|
|||
|
}
|
|||
|
|
|||
|
new_warning <- function(class, ..., message = "", call. = TRUE, domain = NULL) {
|
|||
|
if (message == "") message <- gitcred_errors()[[class]]
|
|||
|
message <- .makeMessage(message, domain = domain)
|
|||
|
cond <- list(message = message, ...)
|
|||
|
if (call.) cond$call <- sys.call(-1)
|
|||
|
class(cond) <- c(class, "gitcreds_warning", "warning", "condition")
|
|||
|
cond
|
|||
|
}
|
|||
|
|
|||
|
throw <- function(cond) {
|
|||
|
cond
|
|||
|
if ("error" %in% class(cond)) {
|
|||
|
stop(cond)
|
|||
|
} else if ("warning" %in% class(cond)) {
|
|||
|
warning(cond)
|
|||
|
} else if ("message" %in% class(cond)) {
|
|||
|
message(cond)
|
|||
|
} else {
|
|||
|
signalCondition(cond)
|
|||
|
}
|
|||
|
}
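# Example (illustrative): every condition also carries the "gitcreds_error"
# class, so callers can catch either the specific or the generic class, e.g.
#   tryCatch(
#     throw(new_error("gitcreds_no_credentials")),
#     gitcreds_error = function(e) conditionMessage(e)
#   )
#   #> [1] "Could not find any credentials"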
|
|||
|
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
# Generic helpers
|
|||
|
# ------------------------------------------------------------------------
|
|||
|
|
|||
|
#' Set/remove env var and return the old values
|
|||
|
#'
|
|||
|
#' @param envs Named character vector or list of env vars to set. `NA`
|
|||
|
#' values will un-set an env var.
|
|||
|
#' @noRd
|
|||
|
#' @return Character vector, the old values of the supplied environment
|
|||
|
#' variables, `NA` for the ones that were not set.
|
|||
|
|
|||
|
set_env <- function(envs) {
|
|||
|
current <- Sys.getenv(names(envs), NA_character_, names = TRUE)
|
|||
|
na <- is.na(envs)
|
|||
|
if (any(na)) {
|
|||
|
Sys.unsetenv(names(envs)[na])
|
|||
|
}
|
|||
|
if (any(!na)) {
|
|||
|
do.call("Sys.setenv", as.list(envs[!na]))
|
|||
|
}
|
|||
|
invisible(current)
|
|||
|
}
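# Example (illustrative): set_env() returns the previous values, so callers
# can restore them afterwards, e.g.
#   old <- set_env(c(GCM_INTERACTIVE = "Never"))
#   # ... run git ...
#   set_env(old)  # NA entries un-set variables that were not set before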
|
|||
|
|
|||
|
#' Get the user name from a `protocol://username@host/path` URL.
|
|||
|
#'
|
|||
|
#' @param url URL
|
|||
|
#' @noRd
|
|||
|
#' @return String or `NULL` if `url` does not have a username.
|
|||
|
|
|||
|
get_url_username <- function(url) {
|
|||
|
nm <- parse_url(url)$username
|
|||
|
if (nm == "") NULL else nm
|
|||
|
}
|
|||
|
|
|||
|
#' Parse URL
|
|||
|
#'
|
|||
|
#' It does not parse query parameters, as we don't deal with them here.
|
|||
|
#' The port number is included in the host name, if present.
|
|||
|
#'
|
|||
|
#' @param url Character vector of one or more URLs.
|
|||
|
#' @noRd
|
|||
|
#' @return Data frame with string columns: `protocol`, `username`,
|
|||
|
#' `password`, `host`, `path`.
|
|||
|
|
|||
|
parse_url <- function(url) {
|
|||
|
re_url <- paste0(
|
|||
|
"^(?<protocol>[a-zA-Z0-9]+)://",
|
|||
|
"(?:(?<username>[^@/:]+)(?::(?<password>[^@/]+))?@)?",
|
|||
|
"(?<host>[^/]+)",
|
|||
|
"(?<path>.*)$" # don't worry about query params here...
|
|||
|
)
|
|||
|
|
|||
|
mch <- re_match(url, re_url)
|
|||
|
mch[, setdiff(colnames(mch), c(".match", ".text")), drop = FALSE]
|
|||
|
}
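# Example (illustrative):
#   parse_url("https://jane@github.com/r-lib/remotes")
# returns a one-row data frame with protocol "https", username "jane",
# host "github.com" and path "/r-lib/remotes".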
|
|||
|
|
|||
|
is_string <- function(x) {
|
|||
|
is.character(x) && length(x) == 1 && !is.na(x)
|
|||
|
}
|
|||
|
|
|||
|
is_flag <- function(x) {
|
|||
|
is.logical(x) && length(x) == 1 && !is.na(x)
|
|||
|
}
|
|||
|
|
|||
|
has_no_newline <- function(url) {
|
|||
|
! grepl("\n", url, fixed = TRUE)
|
|||
|
}
|
|||
|
|
|||
|
# From the rematch2 package
|
|||
|
|
|||
|
re_match <- function(text, pattern, perl = TRUE, ...) {
|
|||
|
|
|||
|
stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern))
|
|||
|
text <- as.character(text)
|
|||
|
|
|||
|
match <- regexpr(pattern, text, perl = perl, ...)
|
|||
|
|
|||
|
start <- as.vector(match)
|
|||
|
length <- attr(match, "match.length")
|
|||
|
end <- start + length - 1L
|
|||
|
|
|||
|
matchstr <- substring(text, start, end)
|
|||
|
matchstr[ start == -1 ] <- NA_character_
|
|||
|
|
|||
|
res <- data.frame(
|
|||
|
stringsAsFactors = FALSE,
|
|||
|
.text = text,
|
|||
|
.match = matchstr
|
|||
|
)
|
|||
|
|
|||
|
if (!is.null(attr(match, "capture.start"))) {
|
|||
|
|
|||
|
gstart <- attr(match, "capture.start")
|
|||
|
glength <- attr(match, "capture.length")
|
|||
|
gend <- gstart + glength - 1L
|
|||
|
|
|||
|
groupstr <- substring(text, gstart, gend)
|
|||
|
groupstr[ gstart == -1 ] <- NA_character_
|
|||
|
dim(groupstr) <- dim(gstart)
|
|||
|
|
|||
|
res <- cbind(groupstr, res, stringsAsFactors = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
names(res) <- c(attr(match, "capture.names"), ".text", ".match")
|
|||
|
res
|
|||
|
}
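# Example (illustrative): named capture groups become columns, e.g.
#   re_match("jane@example.org", "^(?<user>[^@]+)@(?<domain>.+)$")
# gives a one-row data frame with columns user ("jane"), domain
# ("example.org"), plus .text and .match.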
|
|||
|
|
|||
|
null_file <- function() {
|
|||
|
if (get_os() == "windows") "nul:" else "/dev/null"
|
|||
|
}
|
|||
|
|
|||
|
get_os <- function() {
|
|||
|
if (.Platform$OS.type == "windows") {
|
|||
|
"windows"
|
|||
|
} else if (Sys.info()[["sysname"]] == "Darwin") {
|
|||
|
"macos"
|
|||
|
} else if (Sys.info()[["sysname"]] == "Linux") {
|
|||
|
"linux"
|
|||
|
} else {
|
|||
|
"unknown"
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
`%||%` <- function(l, r) if (is.null(l)) r else l
|
|||
|
|
|||
|
#' Like [message()], but print to standard output in interactive
|
|||
|
#' sessions
|
|||
|
#'
|
|||
|
#' To avoid red output in RStudio, RGui, and R.app.
|
|||
|
#'
|
|||
|
#' @inheritParams message
|
|||
|
#' @noRd
|
|||
|
#' @return Nothing
|
|||
|
|
|||
|
msg <- function(..., domain = NULL, appendLF = TRUE) {
|
|||
|
cnd <- .makeMessage(..., domain = domain, appendLF = appendLF)
|
|||
|
withRestarts(muffleMessage = function() NULL, {
|
|||
|
signalCondition(simpleMessage(cnd))
|
|||
|
output <- default_output()
|
|||
|
cat(cnd, file = output, sep = "")
|
|||
|
})
|
|||
|
invisible()
|
|||
|
}
|
|||
|
|
|||
|
#' Where to print messages to
|
|||
|
#'
|
|||
|
#' If the session is not interactive, then it may matter whether we print
|
|||
|
#' to stdout or stderr, so to be safe we print to stderr.
|
|||
|
#'
|
|||
|
#' The same applies when there is a sink for stdout or stderr.
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @return The connection to print to.
|
|||
|
|
|||
|
default_output <- function() {
|
|||
|
if (is_interactive() && no_active_sink()) stdout() else stderr()
|
|||
|
}
|
|||
|
|
|||
|
no_active_sink <- function() {
|
|||
|
# See ?sink.number for the explanation
|
|||
|
sink.number("output") == 0 && sink.number("message") == 2
|
|||
|
}
|
|||
|
|
|||
|
#' Smarter `interactive()`
|
|||
|
#'
|
|||
|
#' @noRd
|
|||
|
#' @return Logical scalar.
|
|||
|
|
|||
|
is_interactive <- function() {
|
|||
|
opt <- getOption("rlib_interactive")
|
|||
|
opt2 <- getOption("rlang_interactive")
|
|||
|
if (isTRUE(opt)) {
|
|||
|
TRUE
|
|||
|
} else if (identical(opt, FALSE)) {
|
|||
|
FALSE
|
|||
|
} else if (isTRUE(opt2)) {
|
|||
|
TRUE
|
|||
|
} else if (identical(opt2, FALSE)) {
|
|||
|
FALSE
|
|||
|
} else if (tolower(getOption("knitr.in.progress", "false")) == "true") {
|
|||
|
FALSE
|
|||
|
} else if (identical(Sys.getenv("TESTTHAT"), "true")) {
|
|||
|
FALSE
|
|||
|
} else {
|
|||
|
base::interactive()
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#' `sQuote()` wrapper to avoid smart quotes
|
|||
|
#'
|
|||
|
#' @inheritParams sQuote
|
|||
|
#' @inherit sQuote return
|
|||
|
#' @noRd
|
|||
|
#' @return Character vector.
|
|||
|
|
|||
|
squote <- function(x) {
|
|||
|
old <- options(useFancyQuotes = FALSE)
|
|||
|
on.exit(options(old), add = TRUE)
|
|||
|
sQuote(x)
|
|||
|
}
|
|||
|
|
|||
|
#' Read all of a file
|
|||
|
#'
|
|||
|
#' @param path File to read.
|
|||
|
#' @param ... Passed to [readChar()].
|
|||
|
#' @noRd
|
|||
|
#' @return String.
|
|||
|
|
|||
|
read_file <- function(path, ...) {
|
|||
|
readChar(path, nchars = file.info(path)$size, ...)
|
|||
|
}
|
|||
|
|
|||
|
environment()
|
|||
|
})
|
|||
|
|
|||
|
# nocov end
|
|||
|
# Contents of R/git.R
|
|||
|
|
|||
|
# Extract the commit hash from a git archive. Git archives include the SHA1
|
|||
|
# hash as the comment field of the tarball pax extended header
|
|||
|
# (see https://www.kernel.org/pub/software/scm/git/docs/git-archive.html)
|
|||
|
# For GitHub archives this should be the first header after the default one
|
|||
|
# (512 byte) header.
|
|||
|
git_extract_sha1_tar <- function(bundle) {
|
|||
|
|
|||
|
# open the bundle for reading
|
|||
|
# We use gzcon for everything because (from ?gzcon)
|
|||
|
# > Reading from a connection which does not supply a ‘gzip’ magic
|
|||
|
# > header is equivalent to reading from the original connection
|
|||
|
conn <- gzcon(file(bundle, open = "rb", raw = TRUE))
|
|||
|
on.exit(close(conn))
|
|||
|
|
|||
|
# The default pax header is 512 bytes long and the first pax extended header
|
|||
|
# with the comment should be 51 bytes long
|
|||
|
# `52 comment=` (11 chars) + 40 byte SHA1 hash
|
|||
|
len <- 0x200 + 0x33
|
|||
|
res <- rawToChar(readBin(conn, "raw", n = len)[0x201:len])
|
|||
|
|
|||
|
if (grepl("^52 comment=", res)) {
|
|||
|
sub("52 comment=", "", res)
|
|||
|
} else {
|
|||
|
NULL
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
git <- function(args, quiet = TRUE, path = ".") {
|
|||
|
full <- paste0(shQuote(check_git_path()), " ", paste(args, collapse = ""))
|
|||
|
if (!quiet) {
|
|||
|
message(full)
|
|||
|
}
|
|||
|
|
|||
|
result <- in_dir(path, system(full, intern = TRUE, ignore.stderr = quiet))
|
|||
|
|
|||
|
status <- attr(result, "status") %||% 0
|
|||
|
if (!identical(as.character(status), "0")) {
|
|||
|
stop("Command failed (", status, ")", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
result
|
|||
|
}
|
|||
|
|
|||
|
# Retrieve the current running path of the git binary.
|
|||
|
# @param git_binary_name The name of the binary depending on the OS.
|
|||
|
git_path <- function(git_binary_name = NULL) {
|
|||
|
# Use user supplied path
|
|||
|
if (!is.null(git_binary_name)) {
|
|||
|
if (!file.exists(git_binary_name)) {
|
|||
|
stop("Path ", git_binary_name, " does not exist", .call = FALSE)
|
|||
|
}
|
|||
|
return(git_binary_name)
|
|||
|
}
|
|||
|
|
|||
|
# Look on path
|
|||
|
git_path <- Sys.which("git")[[1]]
|
|||
|
if (git_path != "") return(git_path)
|
|||
|
|
|||
|
# On Windows, look in common locations
|
|||
|
if (os_type() == "windows") {
|
|||
|
look_in <- c(
|
|||
|
"C:/Program Files/Git/bin/git.exe",
|
|||
|
"C:/Program Files (x86)/Git/bin/git.exe"
|
|||
|
)
|
|||
|
found <- file.exists(look_in)
|
|||
|
if (any(found)) return(look_in[found][1])
|
|||
|
}
|
|||
|
|
|||
|
NULL
|
|||
|
}
|
|||
|
|
|||
|
check_git_path <- function(git_binary_name = NULL) {
|
|||
|
|
|||
|
path <- git_path(git_binary_name)
|
|||
|
|
|||
|
if (is.null(path)) {
|
|||
|
stop("Git does not seem to be installed on your system.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
path
|
|||
|
}
|
|||
|
# Contents of R/github.R
|
|||
|
|
|||
|
github_GET <- function(path, ..., host = "api.github.com", pat = github_pat(), use_curl = !is_standalone() && pkg_installed("curl")) {
|
|||
|
|
|||
|
url <- build_url(host, path)
|
|||
|
|
|||
|
if (isTRUE(use_curl)) {
|
|||
|
h <- curl::new_handle()
|
|||
|
headers <- c(
|
|||
|
if (!is.null(pat)) {
|
|||
|
c("Authorization" = paste0("token ", pat))
|
|||
|
}
|
|||
|
)
|
|||
|
curl::handle_setheaders(h, .list = headers)
|
|||
|
res <- curl::curl_fetch_memory(url, handle = h)
|
|||
|
|
|||
|
if (res$status_code >= 300) {
|
|||
|
stop(github_error(res))
|
|||
|
}
|
|||
|
json$parse(raw_to_char_utf8(res$content))
|
|||
|
} else {
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, auth_token = pat)
|
|||
|
|
|||
|
json$parse_file(tmp)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
github_commit <- function(username, repo, ref = "HEAD",
|
|||
|
host = "api.github.com", pat = github_pat(), use_curl = !is_standalone() && pkg_installed("curl"), current_sha = NULL) {
|
|||
|
|
|||
|
url <- build_url(host, "repos", username, repo, "commits", utils::URLencode(ref, reserved = TRUE))
|
|||
|
|
|||
|
if (isTRUE(use_curl)) {
|
|||
|
h <- curl::new_handle()
|
|||
|
headers <- c(
|
|||
|
"Accept" = "application/vnd.github.v3.sha",
|
|||
|
if (!is.null(pat)) {
|
|||
|
c("Authorization" = paste0("token ", pat))
|
|||
|
}
|
|||
|
)
|
|||
|
|
|||
|
if (!is.null(current_sha)) {
|
|||
|
headers <- c(headers, "If-None-Match" = paste0('"', current_sha, '"'))
|
|||
|
}
|
|||
|
curl::handle_setheaders(h, .list = headers)
|
|||
|
res <- curl::curl_fetch_memory(url, handle = h)
|
|||
|
if (res$status_code == 304) {
|
|||
|
return(current_sha)
|
|||
|
}
|
|||
|
if (res$status_code >= 300) {
|
|||
|
stop(github_error(res))
|
|||
|
}
|
|||
|
|
|||
|
raw_to_char_utf8(res$content)
|
|||
|
} else {
|
|||
|
tmp <- tempfile()
|
|||
|
on.exit(unlink(tmp), add = TRUE)
|
|||
|
|
|||
|
download(tmp, url, auth_token = pat)
|
|||
|
get_json_sha(paste0(readLines(tmp, warn = FALSE), collapse = "\n"))
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#' Retrieve a GitHub personal access token.
|
|||
|
#'
|
|||
|
#' Looks in the `GITHUB_PAT` and `GITHUB_TOKEN` environment variables,
|
|||
|
#' then in the git credential store, and finally (on CI) in a bundled PAT.
|
|||
|
#'
|
|||
|
#' @keywords internal
|
|||
|
#' @noRd
|
|||
|
github_pat <- function(quiet = TRUE) {
|
|||
|
|
|||
|
env_var_aliases <- c(
|
|||
|
"GITHUB_PAT",
|
|||
|
"GITHUB_TOKEN"
|
|||
|
)
|
|||
|
|
|||
|
for (env_var in env_var_aliases) {
|
|||
|
pat <- Sys.getenv(env_var)
|
|||
|
if (nzchar(pat)) {
|
|||
|
if (!quiet) {
|
|||
|
message("Using github PAT from envvar ", env_var, ". ",
|
|||
|
"Use `gitcreds::gitcreds_set()` and unset ", env_var,
|
|||
|
" in .Renviron (or elsewhere) if you want to use the more ",
|
|||
|
"secure git credential store instead.")
|
|||
|
}
|
|||
|
return(pat)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
pat <- tryCatch(
|
|||
|
gitcreds_get()$password,
|
|||
|
error = function(e) ""
|
|||
|
)
|
|||
|
if (nzchar(pat)) {
|
|||
|
if (!quiet) {
|
|||
|
message("Using GitHub PAT from the git credential store.")
|
|||
|
}
|
|||
|
return(pat)
|
|||
|
}
|
|||
|
|
|||
|
if (in_ci()) {
|
|||
|
pat <- rawToChar(as.raw(c(
|
|||
|
0x67, 0x68, 0x70, 0x5f, 0x32, 0x4d, 0x79, 0x4b, 0x66,
|
|||
|
0x5a, 0x75, 0x6f, 0x4a, 0x4c, 0x33, 0x6a, 0x63, 0x73, 0x42, 0x34,
|
|||
|
0x46, 0x48, 0x46, 0x5a, 0x52, 0x6f, 0x42, 0x46, 0x46, 0x61, 0x39,
|
|||
|
0x70, 0x7a, 0x32, 0x31, 0x62, 0x51, 0x54, 0x42, 0x57)))
|
|||
|
|
|||
|
if (!quiet) {
|
|||
|
message("Using bundled GitHub PAT. Please add your own PAT using `gitcreds::gitcreds_set()`")
|
|||
|
}
|
|||
|
|
|||
|
return(pat)
|
|||
|
}
|
|||
|
|
|||
|
NULL
|
|||
|
}
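# Summary (illustrative, not from the original source): the lookup order is
#   1. the GITHUB_PAT environment variable,
#   2. the GITHUB_TOKEN environment variable,
#   3. the git credential store, via gitcreds_get()$password,
#   4. on CI (the CI env var is set), a bundled fallback PAT.
# e.g. Sys.setenv(GITHUB_PAT = "<your token>") makes github_pat() return it.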
|
|||
|
|
|||
|
in_ci <- function() {
|
|||
|
nzchar(Sys.getenv("CI"))
|
|||
|
}
|
|||
|
|
|||
|
in_travis <- function() {
|
|||
|
identical(Sys.getenv("TRAVIS", "false"), "true")
|
|||
|
}
|
|||
|
|
|||
|
github_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "HEAD", host = "api.github.com", ...,
|
|||
|
use_curl = !is_standalone() && pkg_installed("curl"), pat = github_pat()) {
|
|||
|
|
|||
|
if (!is.null(subdir)) {
|
|||
|
subdir <- utils::URLencode(subdir)
|
|||
|
}
|
|||
|
|
|||
|
url <- build_url(host, "repos", username, repo, "contents", subdir, "DESCRIPTION")
|
|||
|
url <- paste0(url, "?ref=", utils::URLencode(ref))
|
|||
|
|
|||
|
if (isTRUE(use_curl)) {
|
|||
|
h <- curl::new_handle()
|
|||
|
headers <- c(
|
|||
|
"Accept" = "application/vnd.github.v3.raw",
|
|||
|
if (!is.null(pat)) {
|
|||
|
c("Authorization" = paste0("token ", pat))
|
|||
|
}
|
|||
|
)
|
|||
|
|
|||
|
curl::handle_setheaders(h, .list = headers)
|
|||
|
res <- curl::curl_fetch_memory(url, handle = h)
|
|||
|
if (res$status_code >= 300) {
|
|||
|
stop(github_error(res))
|
|||
|
}
|
|||
|
raw_to_char_utf8(res$content)
|
|||
|
} else {
|
|||
|
tmp <- tempfile()
|
|||
|
on.exit(unlink(tmp), add = TRUE)
|
|||
|
|
|||
|
|
|||
|
download(tmp, url, auth_token = pat)
|
|||
|
|
|||
|
base64_decode(gsub("\\\\n", "", json$parse_file(tmp)$content))
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
github_error <- function(res) {
|
|||
|
res_headers <- curl::parse_headers_list(res$headers)
|
|||
|
|
|||
|
ratelimit_limit <- res_headers$`x-ratelimit-limit` %||% NA_character_
|
|||
|
|
|||
|
ratelimit_remaining <- res_headers$`x-ratelimit-remaining` %||% NA_character_
|
|||
|
|
|||
|
ratelimit_reset <- .POSIXct(res_headers$`x-ratelimit-reset` %||% NA_character_, tz = "UTC")
|
|||
|
|
|||
|
error_details <- json$parse(raw_to_char_utf8(res$content))$message
|
|||
|
|
|||
|
guidance <- ""
|
|||
|
if (identical(as.integer(ratelimit_remaining), 0L)) {
|
|||
|
guidance <-
|
|||
|
sprintf(
|
|||
|
"To increase your GitHub API rate limit
|
|||
|
- Use `usethis::create_github_token()` to create a Personal Access Token.
|
|||
|
- %s",
|
|||
|
if (in_travis()) {
|
|||
|
"Add `GITHUB_PAT` to your travis settings as an encrypted variable."
|
|||
|
} else {
|
|||
|
"Use `gitcreds::gitcreds_set()` to add the token."
|
|||
|
}
|
|||
|
)
|
|||
|
} else if (identical(as.integer(res$status_code), 404L)) {
|
|||
|
repo_information <- re_match(res$url, "(repos)/(?P<owner>[^/]+)/(?P<repo>[^/]++)/")
|
|||
|
if(!is.na(repo_information$owner) && !is.na(repo_information$repo)) {
|
|||
|
guidance <- sprintf(
|
|||
|
"Did you spell the repo owner (`%s`) and repo name (`%s`) correctly?
|
|||
|
- If spelling is correct, check that you have the required permissions to access the repo.",
|
|||
|
repo_information$owner,
|
|||
|
repo_information$repo
|
|||
|
)
|
|||
|
} else {
|
|||
|
guidance <- "Did you spell the repo owner and repo name correctly?
|
|||
|
- If spelling is correct, check that you have the required permissions to access the repo."
|
|||
|
}
|
|||
|
}
|
|||
|
if(identical(as.integer(res$status_code), 404L)) {
|
|||
|
msg <- sprintf(
|
|||
|
"HTTP error %s.
|
|||
|
%s
|
|||
|
|
|||
|
%s",
|
|||
|
|
|||
|
res$status_code,
|
|||
|
error_details,
|
|||
|
guidance
|
|||
|
)
|
|||
|
} else if (!is.na(ratelimit_limit)) {
|
|||
|
msg <- sprintf(
|
|||
|
"HTTP error %s.
|
|||
|
%s
|
|||
|
|
|||
|
Rate limit remaining: %s/%s
|
|||
|
Rate limit reset at: %s
|
|||
|
|
|||
|
%s",
|
|||
|
|
|||
|
res$status_code,
|
|||
|
error_details,
|
|||
|
ratelimit_remaining,
|
|||
|
ratelimit_limit,
|
|||
|
format(ratelimit_reset, usetz = TRUE),
|
|||
|
guidance
|
|||
|
)
|
|||
|
} else {
|
|||
|
msg <- sprintf(
|
|||
|
"HTTP error %s.
|
|||
|
%s",
|
|||
|
|
|||
|
res$status_code,
|
|||
|
error_details
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
status_type <- (as.integer(res$status_code) %/% 100) * 100
|
|||
|
|
|||
|
structure(list(message = msg, call = NULL), class = c(paste0("http_", unique(c(res$status_code, status_type, "error"))), "error", "condition"))
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
#> Error: HTTP error 404.
|
|||
|
#> Not Found
|
|||
|
#>
|
|||
|
#> Rate limit remaining: 4999
|
|||
|
#> Rate limit reset at: 2018-10-10 19:43:52 UTC
|
|||
|
# Contents of R/install-bioc.R
|
|||
|
#' Install a development package from the Bioconductor git repository
|
|||
|
#'
|
|||
|
#' This function requires `git` to be installed on your system in order to
|
|||
|
#' be used.
|
|||
|
#'
|
|||
|
#' It is vectorised so you can install multiple packages with
|
|||
|
#' a single command.
|
|||
|
#'
|
|||
|
#' This is intended as an aid for Bioconductor developers. If you want to
|
|||
|
#' install the release version of a Bioconductor package, you can use the
|
|||
|
#' `BiocManager` package.
|
|||
|
#' @inheritParams install_git
|
|||
|
#' @param repo Repository address in the format
|
|||
|
#' `[username:password@@][release/]repo[#commit]`. Valid values for
|
|||
|
#' the release are \sQuote{devel},
|
|||
|
#' \sQuote{release} (the default if none specified), or numeric release
|
|||
|
#' numbers (e.g. \sQuote{3.3}).
|
|||
|
#' @param mirror The Bioconductor git mirror to use
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#' @family package installation
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_bioc("SummarizedExperiment")
|
|||
|
#' install_bioc("devel/SummarizedExperiment")
|
|||
|
#' install_bioc("3.3/SummarizedExperiment")
|
|||
|
#' install_bioc("SummarizedExperiment#abc123")
|
|||
|
#' install_bioc("user:password@release/SummarizedExperiment")
|
|||
|
#' install_bioc("user:password@devel/SummarizedExperiment")
|
|||
|
#' install_bioc("user:password@SummarizedExperiment#abc123")
|
|||
|
#'}
|
|||
|
install_bioc <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")),
|
|||
|
git = c("auto", "git2r", "external"),
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(repo, bioc_remote, mirror = mirror, git = match.arg(git))
|
|||
|
|
|||
|
install_remotes(remotes,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
bioc_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages")),
|
|||
|
git = c("auto", "git2r", "external"), ...) {
|
|||
|
|
|||
|
git <- match.arg(git)
|
|||
|
if (git == "auto") {
|
|||
|
git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external"
|
|||
|
}
|
|||
|
|
|||
|
list(git2r = bioc_git2r_remote, external = bioc_xgit_remote)[[git]](repo, mirror)
|
|||
|
}
|
|||
|
|
|||
|
# Parse concise git repo specification: [username:password@][branch/]repo[#commit]
|
|||
|
parse_bioc_repo <- function(path) {
|
|||
|
user_pass_rx <- "(?:([^:]+):([^:@]+)@)?"
|
|||
|
release_rx <- "(?:(devel|release|[0-9.]+)/)?"
|
|||
|
repo_rx <- "([^/@#]+)"
|
|||
|
commit_rx <- "(?:[#]([a-zA-Z0-9]+))?"
|
|||
|
bioc_rx <- sprintf("^(?:%s%s%s%s|(.*))$", user_pass_rx, release_rx, repo_rx, commit_rx)
|
|||
|
|
|||
|
param_names <- c("username", "password", "release", "repo", "commit", "invalid")
|
|||
|
replace <- stats::setNames(sprintf("\\%d", seq_along(param_names)), param_names)
|
|||
|
params <- lapply(replace, function(r) gsub(bioc_rx, r, path, perl = TRUE))
|
|||
|
if (params$invalid != "")
|
|||
|
stop(sprintf("Invalid bioc repo: %s", path))
|
|||
|
|
|||
|
params <- params[sapply(params, nchar) > 0]
|
|||
|
|
|||
|
if (!is.null(params$release) && !is.null(params$commit)) {
|
|||
|
stop("release and commit should not both be specified")
|
|||
|
}
|
|||
|
|
|||
|
params
|
|||
|
}
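# Example (illustrative): the concise spec is split into named parts, e.g.
#   parse_bioc_repo("user:password@devel/SummarizedExperiment")
#   #> list(username = "user", password = "password",
#   #>      release = "devel", repo = "SummarizedExperiment")
#   parse_bioc_repo("SummarizedExperiment#abc123")
#   #> list(repo = "SummarizedExperiment", commit = "abc123")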
|
|||
|
|
|||
|
bioc_git2r_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) {
|
|||
|
meta <- parse_bioc_repo(repo)
|
|||
|
|
|||
|
branch <- bioconductor_branch(meta$release, meta$sha)
|
|||
|
|
|||
|
if (!is.null(meta$username) && !is.null(meta$password)) {
|
|||
|
meta$credentials <- git2r::cred_user_pass(meta$username, meta$password)
|
|||
|
}
|
|||
|
|
|||
|
remote("bioc_git2r",
|
|||
|
mirror = mirror,
|
|||
|
repo = meta$repo,
|
|||
|
release = meta$release %||% "release",
|
|||
|
sha = meta$commit,
|
|||
|
branch = branch,
|
|||
|
credentials = meta$credentials
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
bioc_xgit_remote <- function(repo, mirror = getOption("BioC_git", download_url("git.bioconductor.org/packages"))) {
|
|||
|
meta <- parse_bioc_repo(repo)
|
|||
|
|
|||
|
branch <- bioconductor_branch(meta$release, meta$sha)
|
|||
|
|
|||
|
if (!is.null(meta$username) && !is.null(meta$password)) {
|
|||
|
meta$credentials <- git2r::cred_user_pass(meta$username, meta$password)
|
|||
|
}
|
|||
|
|
|||
|
remote("bioc_xgit",
|
|||
|
mirror = mirror,
|
|||
|
repo = meta$repo,
|
|||
|
release = meta$release %||% "release",
|
|||
|
sha = meta$commit,
|
|||
|
branch = branch,
|
|||
|
credentials = meta$credentials
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.bioc_git2r_remote <- function(x, quiet = FALSE) {
|
|||
|
url <- paste0(x$mirror, "/", x$repo)
|
|||
|
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading Bioconductor repo ", url)
|
|||
|
}
|
|||
|
|
|||
|
bundle <- tempfile()
|
|||
|
git2r::clone(url, bundle, credentials=x$credentials, progress = FALSE)
|
|||
|
|
|||
|
if (!is.null(x$branch)) {
|
|||
|
r <- git2r::repository(bundle)
|
|||
|
git2r::checkout(r, x$branch)
|
|||
|
}
|
|||
|
|
|||
|
bundle
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.bioc_xgit_remote <- function(x, quiet = FALSE) {
|
|||
|
url <- paste0(x$mirror, "/", x$repo)
|
|||
|
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading Bioconductor repo ", url)
|
|||
|
}
|
|||
|
|
|||
|
bundle <- tempfile()
|
|||
|
|
|||
|
args <- c('clone', '--depth', '1', '--no-hardlinks')
|
|||
|
|
|||
|
if (!is.null(x$branch) && x$branch != 'HEAD') {
|
|||
|
args <- c(args, "--branch", x$branch)
|
|||
|
}
|
|||
|
|
|||
|
args <- c(args, x$args, url, bundle)
|
|||
|
git(paste0(args, collapse = " "), quiet = quiet)
|
|||
|
|
|||
|
bundle
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.bioc_git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
url <- paste0(x$mirror, "/", x$repo)
|
|||
|
|
|||
|
if (!is.null(bundle)) {
|
|||
|
r <- git2r::repository(bundle)
|
|||
|
sha <- git_repo_sha1(r)
|
|||
|
} else if (is_na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "bioc_git2r",
|
|||
|
RemoteMirror = x$mirror,
|
|||
|
RemoteRepo = x$repo,
|
|||
|
RemoteRelease = x$release,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteBranch = x$branch
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.bioc_xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
if (is_na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "bioc_xgit",
|
|||
|
RemoteMirror = x$mirror,
|
|||
|
RemoteRepo = x$repo,
|
|||
|
RemoteRelease = x$release,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteBranch = x$branch,
|
|||
|
RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ")
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.bioc_git2r_remote <- function(remote, ...) {
|
|||
|
remote$repo
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.bioc_xgit_remote <- function(remote, ...) {
|
|||
|
remote$repo
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.bioc_git2r_remote <- function(remote, ...) {
|
|||
|
tryCatch({
|
|||
|
url <- paste0(remote$mirror, "/", remote$repo)
|
|||
|
|
|||
|
res <- git2r::remote_ls(url, credentials=remote$credentials)
|
|||
|
|
|||
|
found <- grep(pattern = paste0("/", remote$branch), x = names(res))
|
|||
|
|
|||
|
if (length(found) == 0) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
unname(res[found[1]])
|
|||
|
}, error = function(e) NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.bioc_xgit_remote <- function(remote, ...) {
|
|||
|
url <- paste0(remote$mirror, "/", remote$repo)
|
|||
|
ref <- remote$branch
|
|||
|
|
|||
|
refs <- git(paste("ls-remote", url, ref))
|
|||
|
|
|||
|
refs_df <- read.delim(text = refs, stringsAsFactors = FALSE, sep = "\t",
|
|||
|
header = FALSE)
|
|||
|
names(refs_df) <- c("sha", "ref")
|
|||
|
|
|||
|
refs_df$sha[[1]] %||% NA_character_
|
|||
|
}
|
|||
|
|
|||
|
bioconductor_branch <- function(release, sha) {
|
|||
|
if (!is.null(sha)) {
|
|||
|
sha
|
|||
|
} else {
|
|||
|
if (is.null(release)) {
|
|||
|
release <- Sys.getenv("R_BIOC_VERSION", "release")
|
|||
|
}
|
|||
|
if (release == "release") {
|
|||
|
release <- bioconductor_release()
|
|||
|
} else if (release == bioconductor$get_devel_version()) {
|
|||
|
release <- "devel"
|
|||
|
}
|
|||
|
switch(
|
|||
|
tolower(release),
|
|||
|
devel = "HEAD",
|
|||
|
paste0("RELEASE_", gsub("\\.", "_", release))
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
}
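# Example (illustrative; version numbers are just examples): release strings
# map to Bioconductor git branches, e.g.
#   bioconductor_branch("3.13", NULL)
#   #> [1] "RELEASE_3_13"
#   bioconductor_branch("devel", NULL)
#   #> [1] "HEAD"
# An explicit sha is returned unchanged. Resolving "release" or "devel" may
# need network access to look up the current Bioconductor versions.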
|
|||
|
|
|||
|
bioconductor_release <- function() {
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, download_url("bioconductor.org/config.yaml"), quiet = TRUE)
|
|||
|
|
|||
|
gsub("release_version:[[:space:]]+\"([[:digit:].]+)\"", "\\1",
|
|||
|
grep("release_version:", readLines(tmp), value = TRUE))
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.bioc_git2r_remote <- function(x, ...) {
|
|||
|
"Bioc"
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.bioc_xgit_remote <- function(x, ...) {
|
|||
|
"Bioc"
|
|||
|
}
|
|||
|
|
|||
|
# sha of most recent commit
|
|||
|
git_repo_sha1 <- function(r) {
|
|||
|
rev <- git2r::repository_head(r)
|
|||
|
if (is.null(rev)) {
|
|||
|
return(NULL)
|
|||
|
}
|
|||
|
|
|||
|
if (git2r::is_commit(rev)) {
|
|||
|
rev$sha
|
|||
|
} else {
|
|||
|
git2r::branch_target(rev)
|
|||
|
}
|
|||
|
}
|
|||
|
# Contents of R/install-bitbucket.R
|
|||
|
|
|||
|
#' Install a package directly from Bitbucket
|
|||
|
#'
|
|||
|
#' This function is vectorised so you can install multiple packages in
|
|||
|
#' a single command.
|
|||
|
#'
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @param auth_user your account username if you're attempting to install
|
|||
|
#' a package hosted in a private repository (and your username is different
|
|||
|
#' to `username`). Defaults to the `BITBUCKET_USER` environment
|
|||
|
#' variable.
|
|||
|
#' @param password your password. Defaults to the `BITBUCKET_PASSWORD`
|
|||
|
#' environment variable. See details for further information on setting
|
|||
|
#' up a password.
|
|||
|
#' @param repo Repository address in the format
|
|||
|
#' `username/repo[/subdir][@@ref]`. Alternatively, you can
|
|||
|
#' specify `subdir` and/or `ref` using the respective parameters
|
|||
|
#' (see below); if both are specified, the values in `repo` take
|
|||
|
#' precedence.
|
|||
|
#' @param ref Desired git reference; could be a commit, tag, or branch name.
|
|||
|
#' Defaults to HEAD.
|
|||
|
#' @seealso Bitbucket API docs:
|
|||
|
#' <https://confluence.atlassian.com/bitbucket/use-the-bitbucket-cloud-rest-apis-222724129.html>
|
|||
|
#'
|
|||
|
#' @details To install from a private repo, or more generally, access the
|
|||
|
#' Bitbucket API with your own credentials, you will need to get an access
|
|||
|
#' token. You can create an access token following the instructions found in
|
|||
|
#' the
|
|||
|
#' \href{https://support.atlassian.com/bitbucket-cloud/docs/app-passwords/}{Bitbucket
|
|||
|
#' App Passwords documentation}. The App Password requires read-only access to
|
|||
|
#' your repositories and pull requests. Then store your password in the
|
|||
|
#' environment variable `BITBUCKET_PASSWORD` (e.g. `evelynwaugh:swordofhonour`)
|
|||
|
#'
|
|||
|
#' Note that on Windows, authentication requires the "libcurl" download
|
|||
|
#' method. You can set the default download method via the
|
|||
|
#' `download.file.method` option:
|
|||
|
#' ```
|
|||
|
#' options(download.file.method = "libcurl")
|
|||
|
#' ```
|
|||
|
#' In particular, if unset, RStudio sets the download method to "wininet".
|
|||
|
#' To override this, you might want to set it to "libcurl" in your
|
|||
|
#' R profile, see [base::Startup]. The caveat of the "libcurl" method is
|
|||
|
#' that it does _not_ set the system proxies automatically, see
|
|||
|
#' "Setting Proxies" in [utils::download.file()].
|
|||
|
#'
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @family package installation
|
|||
|
#' @export
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_bitbucket("sulab/mygene.r@@default")
|
|||
|
#' install_bitbucket("djnavarro/lsr")
|
|||
|
#' }
|
|||
|
install_bitbucket <- function(repo, ref = "HEAD", subdir = NULL,
|
|||
|
auth_user = bitbucket_user(), password = bitbucket_password(),
|
|||
|
host = "api.bitbucket.org/2.0",
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(repo, bitbucket_remote, ref = ref,
|
|||
|
subdir = subdir, auth_user = auth_user, password = password, host = host)
|
|||
|
|
|||
|
install_remotes(remotes, auth_user = auth_user, password = password, host = host,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
bitbucket_remote <- function(repo, ref = "HEAD", subdir = NULL,
|
|||
|
auth_user = bitbucket_user(), password = bitbucket_password(),
|
|||
|
sha = NULL, host = "api.bitbucket.org/2.0", ...) {
|
|||
|
|
|||
|
meta <- parse_git_repo(repo)
|
|||
|
|
|||
|
remote("bitbucket",
|
|||
|
repo = meta$repo,
|
|||
|
subdir = meta$subdir %||% subdir,
|
|||
|
username = meta$username,
|
|||
|
ref = meta$ref %||% ref,
|
|||
|
sha = sha,
|
|||
|
auth_user = auth_user,
|
|||
|
password = password,
|
|||
|
host = host
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.bitbucket_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading bitbucket repo ", x$username, "/", x$repo, "@", x$ref)
|
|||
|
}
|
|||
|
|
|||
|
dest <- tempfile(fileext = paste0(".tar.gz"))
|
|||
|
|
|||
|
url <- bitbucket_download_url(x$username, x$repo, x$ref, host = x$host, auth = basic_auth(x))
|
|||
|
|
|||
|
download(dest, url, basic_auth = basic_auth(x))
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.bitbucket_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
if (!is.null(bundle)) {
|
|||
|
# Might be able to get from archive
|
|||
|
sha <- git_extract_sha1_tar(bundle)
|
|||
|
} else if (is.na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "bitbucket",
|
|||
|
RemoteHost = x$host,
|
|||
|
RemoteRepo = x$repo,
|
|||
|
RemoteUsername = x$username,
|
|||
|
RemoteRef = x$ref,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteSubdir = x$subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.bitbucket_remote <- function(remote, ...) {
|
|||
|
|
|||
|
bitbucket_DESCRIPTION(
|
|||
|
username = remote$username, repo = remote$repo,
|
|||
|
subdir = remote$subdir, ref = remote$ref,
|
|||
|
host = remote$host, auth = basic_auth(remote)
|
|||
|
)$Package
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.bitbucket_remote <- function(remote, ...) {
|
|||
|
bitbucket_commit(username = remote$username, repo = remote$repo,
|
|||
|
host = remote$host, ref = remote$ref, auth = basic_auth(remote))$hash %||% NA_character_
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.bitbucket_remote <- function(x, ...) {
|
|||
|
"Bitbucket"
|
|||
|
}
|
|||
|
|
|||
|
bitbucket_commit <- function(username, repo, ref = "HEAD",
|
|||
|
host = "api.bitbucket.org/2.0", auth = NULL) {
|
|||
|
|
|||
|
url <- build_url(host, "repositories", username, repo, "commit", ref)
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, basic_auth = auth)
|
|||
|
|
|||
|
json$parse_file(tmp)
|
|||
|
}
|
|||
|
|
|||
|
bitbucket_DESCRIPTION <- function(username, repo, subdir = NULL, ref = "HEAD", host = "https://api.bitbucket.org/2.0", auth = NULL,...) {
|
|||
|
|
|||
|
url <- build_url(host, "repositories", username, repo, "src", ref, subdir, "DESCRIPTION")
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, basic_auth = auth)
|
|||
|
|
|||
|
read_dcf(tmp)
|
|||
|
}
|
|||
|
|
|||
|
basic_auth <- function(x) {
|
|||
|
if (!is.null(x$password)) {
|
|||
|
list(
|
|||
|
user = x$auth_user %||% x$username,
|
|||
|
password = x$password
|
|||
|
)
|
|||
|
} else {
|
|||
|
NULL
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
bitbucket_download_url <- function(username, repo, ref = "HEAD",
|
|||
|
host = "api.bitbucket.org/2.0", auth = NULL) {
|
|||
|
|
|||
|
url <- build_url(host, "repositories", username, repo)
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, basic_auth = auth)
|
|||
|
|
|||
|
paste0(build_url(json$parse_file(tmp)$links$html$href, "get", ref), ".tar.gz")
|
|||
|
}
|
|||
|
|
|||
|
bitbucket_password <- function(quiet = TRUE) {
|
|||
|
pass <- Sys.getenv("BITBUCKET_PASSWORD")
|
|||
|
if (identical(pass, "")) return(NULL)
|
|||
|
if (!quiet) message("Using bitbucket password from envvar BITBUCKET_PASSWORD")
|
|||
|
pass
|
|||
|
}
|
|||
|
|
|||
|
bitbucket_user <- function(quiet = TRUE) {
|
|||
|
user <- Sys.getenv("BITBUCKET_USER")
|
|||
|
if (identical(user, "")) return(NULL)
|
|||
|
if (!quiet) message("Using bitbucket user from envvar BITBUCKET_USER")
|
|||
|
user
|
|||
|
}
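# Usage sketch (not run; repository name hypothetical): for a private
# Bitbucket repository the two helpers above pick the credentials up from the
# BITBUCKET_USER and BITBUCKET_PASSWORD environment variables, assuming
# install_bitbucket() defaults its auth arguments to bitbucket_user() and
# bitbucket_password(); basic_auth() then turns them into the
# list(user = , password = ) form passed to download().
#
#   Sys.setenv(
#     BITBUCKET_USER = "my-user",
#     BITBUCKET_PASSWORD = "my-app-password"
#   )
#   install_bitbucket("my-user/privatepkg")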
# Contents of R/install-cran.R
|
|||
|
|
|||
|
#' Attempts to install a package from CRAN.
|
|||
|
#'
|
|||
|
#' This function is vectorised on `pkgs` so you can install multiple
|
|||
|
#' packages in a single command.
|
|||
|
#'
|
|||
|
#' @param pkgs A character vector of packages to install.
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#' @family package installation
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_cran("ggplot2")
|
|||
|
#' install_cran(c("httpuv", "shiny"))
|
|||
|
#' }
|
|||
|
install_cran <- function(pkgs, repos = getOption("repos"), type = getOption("pkgType"),
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(pkgs, cran_remote, repos = repos, type = type)
|
|||
|
|
|||
|
install_remotes(remotes,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
cran_remote <- function(pkg, repos = getOption("repos"), type = getOption("pkgType"), ...) {
|
|||
|
|
|||
|
repos <- fix_repositories(repos)
|
|||
|
|
|||
|
remote("cran",
|
|||
|
name = pkg,
|
|||
|
repos = repos,
|
|||
|
pkg_type = type)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.cran_remote <- function(remote, ...) {
|
|||
|
remote$name
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.cran_remote <- function(remote, ...) {
|
|||
|
cran <- available_packages(remote$repos, remote$pkg_type)
|
|||
|
|
|||
|
trim_ws(unname(cran[, "Version"][match(remote$name, rownames(cran))]))
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.cran_remote <- function(x, ...) {
|
|||
|
"CRAN"
|
|||
|
}
|
|||
|
# Contents of R/install-dev.R
|
|||
|
#' Install the development version of a package
|
|||
|
#'
|
|||
|
#' `install_dev()` retrieves the package DESCRIPTION from the CRAN mirror and
|
|||
|
#' looks in the 'URL' and 'BugReports' fields for GitHub, GitLab or Bitbucket
|
|||
|
#' URLs. It then calls the appropriate `install_()` function to install the
|
|||
|
#' development package.
|
|||
|
#'
|
|||
|
#' @param package The package name to install.
|
|||
|
#' @param cran_url The URL of the CRAN mirror to use, by default based on the
|
|||
|
#' 'repos' option. If unset uses 'https://cloud.r-project.org'.
|
|||
|
#' @param ... Additional arguments passed to [install_github()],
|
|||
|
#' [install_gitlab()], or [install_bitbucket()] functions.
|
|||
|
#' @family package installation
|
|||
|
#' @export
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' # From GitHub
|
|||
|
#' install_dev("dplyr")
|
|||
|
#'
|
|||
|
#' # From GitLab
|
|||
|
#' install_dev("iemiscdata")
|
|||
|
#'
|
|||
|
#' # From Bitbucket
|
|||
|
#' install_dev("argparser")
|
|||
|
#' }
|
|||
|
install_dev <- function(package, cran_url = getOption("repos")[["CRAN"]], ...) {
|
|||
|
if (is.null(cran_url) || identical(cran_url, "@CRAN@")) {
|
|||
|
cran_url <- "https://cloud.r-project.org"
|
|||
|
}
|
|||
|
|
|||
|
refs <- dev_split_ref(package)
|
|||
|
url <- build_url(cran_url, "web", "packages", refs[["pkg"]], "DESCRIPTION")
|
|||
|
|
|||
|
f <- tempfile()
|
|||
|
on.exit(unlink(f))
|
|||
|
|
|||
|
download(f, url)
|
|||
|
desc <- read_dcf(f)
|
|||
|
|
|||
|
url_fields <- c(desc$URL, desc$BugReports)
|
|||
|
|
|||
|
if (length(url_fields) == 0) {
|
|||
|
stop("Could not determine development repository", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
pkg_urls <- unlist(strsplit(url_fields, "[[:space:]]*,[[:space:]]*"))
|
|||
|
|
|||
|
# Remove trailing "/issues" from the BugReports URL
|
|||
|
pkg_urls <- sub("/issues/?$", "", pkg_urls)
|
|||
|
|
|||
|
valid_domains <- c("github[.]com", "gitlab[.]com", "bitbucket[.]org")
|
|||
|
|
|||
|
parts <-
|
|||
|
re_match(pkg_urls,
|
|||
|
sprintf("^https?://(?<domain>%s)/(?<username>%s)/(?<repo>%s)(?:/(?<subdir>%s))?",
|
|||
|
domain = paste0(valid_domains, collapse = "|"),
|
|||
|
username = "[^/]+",
|
|||
|
repo = "[^/@#]+",
|
|||
|
subdir = "[^/@$ ]+"
|
|||
|
)
|
|||
|
)[c("domain", "username", "repo", "subdir")]
|
|||
|
|
|||
|
# Remove cases which don't match and duplicates
|
|||
|
|
|||
|
parts <- unique(stats::na.omit(parts))
|
|||
|
|
|||
|
if (nrow(parts) != 1) {
|
|||
|
stop("Could not determine development repository", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
full_ref <- paste0(
|
|||
|
paste0(c(parts$username, parts$repo, if (nzchar(parts$subdir)) parts$subdir), collapse = "/"),
|
|||
|
refs[["ref"]]
|
|||
|
)
|
|||
|
|
|||
|
switch(parts$domain,
|
|||
|
github.com = install_github(full_ref, ...),
|
|||
|
gitlab.com = install_gitlab(full_ref, ...),
|
|||
|
bitbucket.org = install_bitbucket(full_ref, ...)
|
|||
|
)
|
|||
|
}
|
|||
|
# Contents of R/install-git.R
|
|||
|
|
|||
|
#' Install a package from a git repository
|
|||
|
#'
|
|||
|
#' It is vectorised so you can install multiple packages with
|
|||
|
#' a single command. You do not need to have the `git2r` package,
|
|||
|
#' or an external git client installed.
|
|||
|
#'
|
|||
|
#' If you need to set git credentials for use in the `Remotes` field you can do
|
|||
|
#' so by placing the credentials in the `remotes.git_credentials` global
|
|||
|
#' option.
|
|||
|
#'
|
|||
|
#' @param url Location of package. The url should point to a public or
|
|||
|
#' private repository.
|
|||
|
#' @param ref Name of branch, tag or SHA reference to use, if not HEAD.
|
|||
|
#' @param branch Deprecated, synonym for ref.
|
|||
|
#' @param subdir A sub-directory within a git repository that may
|
|||
|
#' contain the package we are interested in installing.
|
|||
|
#' @param credentials A git2r credentials object passed through to clone.
|
|||
|
#' Supplying this argument implies using `git2r` with `git`.
|
|||
|
#' @param git Whether to use the `git2r` package, or an external
|
|||
|
#' git client via system. Default is `git2r` if it is installed,
|
|||
|
#' otherwise an external git installation.
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @family package installation
|
|||
|
#' @export
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_git("https://github.com/hadley/stringr.git")
|
|||
|
#' install_git("https://github.com/hadley/stringr.git", ref = "stringr-0.2")
|
|||
|
#' }
|
|||
|
install_git <- function(url, subdir = NULL, ref = NULL, branch = NULL,
|
|||
|
credentials = git_credentials(),
|
|||
|
git = c("auto", "git2r", "external"),
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
if (!missing(branch)) {
|
|||
|
warning("`branch` is deprecated, please use `ref`")
|
|||
|
ref <- branch
|
|||
|
}
|
|||
|
|
|||
|
remotes <- lapply(url, git_remote,
|
|||
|
subdir = subdir, ref = ref,
|
|||
|
credentials = credentials, git = match.arg(git)
|
|||
|
)
|
|||
|
|
|||
|
install_remotes(remotes,
|
|||
|
credentials = credentials,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
git_remote <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials(),
|
|||
|
git = c("auto", "git2r", "external"), ...) {
|
|||
|
git <- match.arg(git)
|
|||
|
if (git == "auto") {
|
|||
|
git <- if (!is_standalone() && pkg_installed("git2r")) "git2r" else "external"
|
|||
|
}
|
|||
|
|
|||
|
if (!is.null(credentials) && git != "git2r") {
|
|||
|
stop("`credentials` can only be used with `git = \"git2r\"`", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
  url_parts <- re_match(url,
|
|||
|
"(?<protocol>[^/]*://)?(?<authhost>[^/]+)(?<path>[^@]*)(@(?<ref>.*))?")
|
|||
|
|
|||
|
ref <- ref %||% (if (url_parts$ref == "") NULL else url_parts$ref)
|
|||
|
|
|||
|
  url <- paste0(url_parts$protocol, url_parts$authhost, url_parts$path)
|
|||
|
|
|||
|
list(git2r = git_remote_git2r, external = git_remote_xgit)[[git]](url, subdir, ref, credentials)
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
git_remote_git2r <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) {
|
|||
|
remote("git2r",
|
|||
|
url = url,
|
|||
|
subdir = subdir,
|
|||
|
ref = ref,
|
|||
|
credentials = credentials
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
git_remote_xgit <- function(url, subdir = NULL, ref = NULL, credentials = git_credentials()) {
|
|||
|
remote("xgit",
|
|||
|
url = url,
|
|||
|
subdir = subdir,
|
|||
|
ref = ref
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.git2r_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading git repo ", x$url)
|
|||
|
}
|
|||
|
|
|||
|
bundle <- tempfile()
|
|||
|
git2r::clone(x$url, bundle, credentials = x$credentials, progress = FALSE)
|
|||
|
|
|||
|
if (!is.null(x$ref)) {
|
|||
|
r <- git2r::repository(bundle)
|
|||
|
git2r::checkout(r, x$ref)
|
|||
|
}
|
|||
|
|
|||
|
bundle
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.git2r_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
if (!is.null(bundle)) {
|
|||
|
r <- git2r::repository(bundle)
|
|||
|
sha <- git2r::commits(r)[[1]]$sha
|
|||
|
} else {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "git2r",
|
|||
|
RemoteUrl = x$url,
|
|||
|
RemoteSubdir = x$subdir,
|
|||
|
RemoteRef = x$ref,
|
|||
|
RemoteSha = sha
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.git2r_remote <- function(remote, ...) {
|
|||
|
tmp <- tempfile()
|
|||
|
on.exit(unlink(tmp))
|
|||
|
description_path <- paste0(collapse = "/", c(remote$subdir, "DESCRIPTION"))
|
|||
|
|
|||
|
if (grepl("^https?://", remote$url)) {
|
|||
|
# assumes GitHub-style "<repo>/raw/<ref>/<path>" url
|
|||
|
url <- build_url(sub("\\.git$", "", remote$url), "raw", remote_sha(remote, ...), description_path)
|
|||
|
download_args <- list(path = tmp, url = url)
|
|||
|
    if (!is.null(remote$credentials)) {
      if (inherits(remote$credentials, "cred_user_pass")) {
        download_args$basic_auth <- list(
          user = remote$credentials$username,
          password = remote$credentials$password
        )
      } else if (inherits(remote$credentials, "cred_env")) {
        if (Sys.getenv(remote$credentials$username) == "") {
          stop(paste0("Environment variable `", remote$credentials$username, "` is unset."), call. = FALSE)
        }
        if (Sys.getenv(remote$credentials$password) == "") {
          stop(paste0("Environment variable `", remote$credentials$password, "` is unset."), call. = FALSE)
        }
        download_args$basic_auth <- list(
          user = Sys.getenv(remote$credentials$username),
          password = Sys.getenv(remote$credentials$password)
        )
      } else if (inherits(remote$credentials, "cred_token")) {
        if (Sys.getenv(remote$credentials$token) == "") {
          stop(paste0("Environment variable `", remote$credentials$token, "` is unset."), call. = FALSE)
        }
        download_args$auth_token <- Sys.getenv(remote$credentials$token)
      } else if (inherits(remote$credentials, "cred_ssh_key")) {
        stop(paste(
          "Unable to fetch the package DESCRIPTION file using SSH key authentication.",
          "Try using `git2r::cred_user_pass`, `git2r::cred_env`, or `git2r::cred_token` instead of `git2r::cred_ssh_key` for authentication."
        ), call. = FALSE)
      } else {
        stop(paste(
          "`remote$credentials` is not NULL and it does not inherit from a recognized class.",
          "Recognized classes for `remote$credentials` are `cred_user_pass`, `cred_env`, `cred_token`, and `cred_ssh_key`."
        ), call. = FALSE)
      }
    }
|
|||
|
tryCatch({
|
|||
|
do.call(download, args = download_args)
|
|||
|
read_dcf(tmp)$Package
|
|||
|
}, error = function(e) {
|
|||
|
NA_character_
|
|||
|
})
|
|||
|
} else {
|
|||
|
# Try using git archive --remote to retrieve the DESCRIPTION, if the protocol
|
|||
|
# or server doesn't support that return NA
|
|||
|
res <- try(
|
|||
|
silent = TRUE,
|
|||
|
system_check(git_path(),
|
|||
|
args = c(
|
|||
|
"archive", "-o", tmp, "--remote", remote$url,
|
|||
|
if (is.null(remote$ref)) "HEAD" else remote$ref,
|
|||
|
description_path
|
|||
|
),
|
|||
|
quiet = TRUE
|
|||
|
)
|
|||
|
)
|
|||
|
|
|||
|
if (inherits(res, "try-error")) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
# git archive returns a tar file, so extract it to tempdir and read the DCF
|
|||
|
utils::untar(tmp, files = description_path, exdir = tempdir())
|
|||
|
|
|||
|
read_dcf(file.path(tempdir(), description_path))$Package
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.git2r_remote <- function(remote, ...) {
|
|||
|
tryCatch(
|
|||
|
{
|
|||
|
# set suppressWarnings in git2r 0.23.0+
|
|||
|
res <- suppressWarnings(git2r::remote_ls(remote$url, credentials = remote$credentials))
|
|||
|
|
|||
|
ref <- remote$ref %||% "HEAD"
|
|||
|
|
|||
|
if (ref != "HEAD") ref <- paste0("/", ref)
|
|||
|
|
|||
|
found <- grep(pattern = paste0(ref, "$"), x = names(res))
|
|||
|
|
|||
|
      # If no matching ref is found, `ref` is likely already a SHA, so return the pinned SHA or NA
|
|||
|
if (length(found) == 0) {
|
|||
|
return(remote$ref %||% NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
unname(res[found[1]])
|
|||
|
},
|
|||
|
error = function(e) {
|
|||
|
warning(e)
|
|||
|
NA_character_
|
|||
|
}
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.xgit_remote <- function(x, ...) {
|
|||
|
"Git"
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.git2r_remote <- function(x, ...) {
|
|||
|
"Git"
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.xgit_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading git repo ", x$url)
|
|||
|
}
|
|||
|
|
|||
|
bundle <- tempfile()
|
|||
|
|
|||
|
args <- c("clone", "--depth", "1", "--no-hardlinks")
|
|||
|
args <- c(args, x$args, x$url, bundle)
|
|||
|
git(paste0(args, collapse = " "), quiet = quiet)
|
|||
|
|
|||
|
if (!is.null(x$ref)) {
|
|||
|
git(paste0(c("fetch", "origin", x$ref), collapse = " "), quiet = quiet, path = bundle)
|
|||
|
git(paste0(c("checkout", "FETCH_HEAD"), collapse = " "), quiet = quiet, path = bundle)
|
|||
|
}
|
|||
|
|
|||
|
bundle
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.xgit_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
if (is_na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "xgit",
|
|||
|
RemoteUrl = x$url,
|
|||
|
RemoteSubdir = x$subdir,
|
|||
|
RemoteRef = x$ref,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " ")
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @importFrom utils read.delim
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.xgit_remote <- remote_package_name.git2r_remote
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.xgit_remote <- function(remote, ...) {
|
|||
|
url <- remote$url
|
|||
|
ref <- remote$ref
|
|||
|
|
|||
|
refs <- git(paste("ls-remote", url, ref))
|
|||
|
|
|||
|
  # If no matching ref is found, `ref` is likely already a SHA, so return the pinned SHA or NA
|
|||
|
if (length(refs) == 0) {
|
|||
|
return(remote$ref %||% NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
refs_df <- read.delim(
|
|||
|
text = refs, stringsAsFactors = FALSE, sep = "\t",
|
|||
|
header = FALSE
|
|||
|
)
|
|||
|
names(refs_df) <- c("sha", "ref")
|
|||
|
|
|||
|
refs_df$sha[[1]]
|
|||
|
}
|
|||
|
|
|||
|
#' Specify git credentials to use
|
|||
|
#'
|
|||
|
#' The global option `remotes.git_credentials` is used to set the git
|
|||
|
#' credentials.
|
|||
|
#' @export
|
|||
|
#' @keywords internal
|
|||
|
git_credentials <- function() {
|
|||
|
getOption("remotes.git_credentials", NULL)
|
|||
|
}
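# Usage sketch (not run; URL hypothetical): credentials set through this
# option are picked up by install_git() and by `Remotes:` fields, and imply
# the git2r backend (see the install_git() documentation above).
#
#   options(
#     remotes.git_credentials = git2r::cred_user_pass("user", "password")
#   )
#   install_git("https://example.com/user/privatepkg.git")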
# Contents of R/install-github.R
|
|||
|
#' Attempts to install a package directly from GitHub.
|
|||
|
#'
|
|||
|
#' This function is vectorised on `repo` so you can install multiple
|
|||
|
#' packages in a single command.
|
|||
|
#'
|
|||
|
#' @param repo Repository address in the format
|
|||
|
#' `username/repo[/subdir][@@ref|#pull|@@*release]`. Alternatively, you can
|
|||
|
#' specify `subdir` and/or `ref` using the respective parameters
|
|||
|
#' (see below); if both are specified, the values in `repo` take
|
|||
|
#' precedence.
|
|||
|
#' @param ref Desired git reference. Could be a commit, tag, or branch
|
|||
|
#' name, or a call to [github_pull()] or [github_release()]. Defaults to
|
|||
|
#' `"HEAD"`, which means the default branch on GitHub and for git remotes.
|
|||
|
#' See [setting-the-default-branch](https://help.github.com/en/github/administering-a-repository/setting-the-default-branch)
|
|||
|
#' for more details.
|
|||
|
#' @param subdir Subdirectory within repo that contains the R package.
|
|||
|
#' @param auth_token To install from a private repo, generate a personal
|
|||
|
#' access token (PAT) with at least repo scope in
|
|||
|
#' \url{https://github.com/settings/tokens} and
|
|||
|
#' supply to this argument. This is safer than using a password because
|
|||
|
#' you can easily delete a PAT without affecting any others. Defaults to
|
|||
|
#' the `GITHUB_PAT` environment variable.
|
|||
|
#' @param host GitHub API host to use. Override with your GitHub enterprise
|
|||
|
#' hostname, for example, `"github.hostname.com/api/v3"`.
|
|||
|
#' @param force Force installation, even if the remote state has not changed
|
|||
|
#' since the previous install.
|
|||
|
#' @inheritParams install_deps
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @details
|
|||
|
#' If the repository uses submodules a command-line git client is required to
|
|||
|
#' clone the submodules.
|
|||
|
#' @family package installation
|
|||
|
#' @export
|
|||
|
#' @seealso [github_pull()]
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_github("klutometis/roxygen")
|
|||
|
#' install_github("wch/ggplot2", ref = github_pull("142"))
|
|||
|
#' install_github(c("rstudio/httpuv", "rstudio/shiny"))
|
|||
|
#' install_github(c("hadley/httr@@v0.4", "klutometis/roxygen#142",
|
|||
|
#' "r-lib/roxygen2@@*release", "mfrasca/r-logging/pkg"))
|
|||
|
#'
|
|||
|
#' # To install from a private repo, use auth_token with a token
|
|||
|
#' # from https://github.com/settings/tokens. You only need the
|
|||
|
#' # repo scope. Best practice is to save your PAT in env var called
|
|||
|
#' # GITHUB_PAT.
|
|||
|
#' install_github("hadley/private", auth_token = "abc")
|
|||
|
#'
|
|||
|
#' # To pass option arguments to `R CMD INSTALL` use `INSTALL_opts`. e.g. to
|
|||
|
#' install a package with source references and tests
|
|||
|
#' install_github("rstudio/shiny", INSTALL_opts = c("--with-keep.source", "--install-tests"))
|
|||
|
#' }
|
|||
|
install_github <- function(repo,
|
|||
|
ref = "HEAD",
|
|||
|
subdir = NULL,
|
|||
|
auth_token = github_pat(quiet),
|
|||
|
host = "api.github.com",
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(repo, github_remote, ref = ref,
|
|||
|
subdir = subdir, auth_token = auth_token, host = host)
|
|||
|
|
|||
|
install_remotes(remotes, auth_token = auth_token, host = host,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
#' Create a new github_remote
|
|||
|
#'
|
|||
|
#' This is an internal function to create a new github_remote, users should
|
|||
|
#' generally have no need for it.
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#' @keywords internal
|
|||
|
github_remote <- function(repo, ref = "HEAD", subdir = NULL,
|
|||
|
auth_token = github_pat(), sha = NULL,
|
|||
|
host = "api.github.com", ...) {
|
|||
|
|
|||
|
meta <- parse_git_repo(repo)
|
|||
|
meta <- github_resolve_ref(meta$ref %||% ref, meta, host = host, auth_token = auth_token)
|
|||
|
|
|||
|
remote("github",
|
|||
|
host = host,
|
|||
|
package = meta$package,
|
|||
|
repo = meta$repo,
|
|||
|
subdir = meta$subdir %||% subdir,
|
|||
|
username = meta$username,
|
|||
|
ref = meta$ref,
|
|||
|
sha = sha,
|
|||
|
auth_token = auth_token
|
|||
|
)
|
|||
|
}
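# Usage sketch (not run; repository hypothetical): the constructor parses a
# `username/repo[/subdir][@ref]` specification (the format documented for
# install_github()) and returns a github_remote object whose fields are used
# by the remote_download() and remote_metadata() methods for GitHub remotes.
#
#   r <- github_remote("username/repo@main")
#   r$username  # "username"
#   r$repo      # "repo"
#   r$ref       # "main"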
#' @export
|
|||
|
remote_download.github_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading GitHub repo ", x$username, "/", x$repo, "@", x$ref)
|
|||
|
}
|
|||
|
|
|||
|
dest <- tempfile(fileext = paste0(".tar.gz"))
|
|||
|
src_root <- build_url(x$host, "repos", x$username, x$repo)
|
|||
|
src <- paste0(src_root, "/tarball/", utils::URLencode(x$ref, reserved = TRUE))
|
|||
|
|
|||
|
download(dest, src, auth_token = x$auth_token)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.github_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
|
|||
|
if (!is.null(bundle)) {
|
|||
|
# Might be able to get from archive
|
|||
|
sha <- git_extract_sha1_tar(bundle)
|
|||
|
} else if (is_na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "github",
|
|||
|
RemoteHost = x$host,
|
|||
|
RemotePackage = x$package,
|
|||
|
RemoteRepo = x$repo,
|
|||
|
RemoteUsername = x$username,
|
|||
|
RemoteRef = x$ref,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteSubdir = x$subdir,
|
|||
|
# Backward compatibility for packrat etc.
|
|||
|
GithubRepo = x$repo,
|
|||
|
GithubUsername = x$username,
|
|||
|
GithubRef = x$ref,
|
|||
|
GithubSHA1 = sha,
|
|||
|
GithubSubdir = x$subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' GitHub references
|
|||
|
#'
|
|||
|
#' Use as `ref` parameter to [install_github()].
|
|||
|
#' Allows installing a specific pull request or the latest release.
|
|||
|
#'
|
|||
|
#' @param pull Character string specifying the pull request to install
|
|||
|
#' @seealso [install_github()]
|
|||
|
#' @examples
|
|||
|
#' github_pull("42")
|
|||
|
#' @rdname github_refs
|
|||
|
#' @export
|
|||
|
github_pull <- function(pull) structure(pull, class = "github_pull")
|
|||
|
|
|||
|
#' @rdname github_refs
|
|||
|
#' @export
|
|||
|
github_release <- function() structure(NA_integer_, class = "github_release")
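# Usage sketch (not run; repository hypothetical): both helpers are meant to
# be passed as the `ref` argument of install_github().
#
#   install_github("username/repo", ref = github_release())
#   install_github("username/repo", ref = github_pull("42"))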
github_resolve_ref <- function(x, params, ...) UseMethod("github_resolve_ref")
|
|||
|
|
|||
|
#' @export
|
|||
|
github_resolve_ref.default <- function(x, params, ...) {
|
|||
|
params$ref <- x
|
|||
|
params
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
github_resolve_ref.NULL <- function(x, params, ...) {
|
|||
|
params$ref <- "HEAD"
|
|||
|
params
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
github_resolve_ref.github_pull <- function(x, params, ..., host, auth_token = github_pat()) {
|
|||
|
# GET /repos/:user/:repo/pulls/:number
|
|||
|
path <- file.path("repos", params$username, params$repo, "pulls", x)
|
|||
|
response <- tryCatch(
|
|||
|
github_GET(path, host = host, pat = auth_token),
|
|||
|
error = function(e) e
|
|||
|
)
|
|||
|
|
|||
|
## Just because libcurl might download the error page...
|
|||
|
if (methods::is(response, "error") || is.null(response$head)) {
|
|||
|
stop("Cannot find GitHub pull request ", params$username, "/",
|
|||
|
params$repo, "#", x, "\n",
|
|||
|
response$message)
|
|||
|
}
|
|||
|
|
|||
|
params$username <- response$head$user$login
|
|||
|
params$ref <- response$head$ref
|
|||
|
params
|
|||
|
}
|
|||
|
|
|||
|
# Retrieve the ref for the latest release
|
|||
|
#' @export
|
|||
|
github_resolve_ref.github_release <- function(x, params, ..., host, auth_token = github_pat()) {
|
|||
|
# GET /repos/:user/:repo/releases
|
|||
|
path <- paste("repos", params$username, params$repo, "releases", sep = "/")
|
|||
|
response <- tryCatch(
|
|||
|
github_GET(path, host = host, pat = auth_token),
|
|||
|
error = function(e) e
|
|||
|
)
|
|||
|
|
|||
|
if (methods::is(response, "error") || !is.null(response$message)) {
|
|||
|
stop("Cannot find repo ", params$username, "/", params$repo, ".", "\n",
|
|||
|
response$message)
|
|||
|
}
|
|||
|
|
|||
|
if (length(response) == 0L)
|
|||
|
stop("No releases found for repo ", params$username, "/", params$repo, ".")
|
|||
|
|
|||
|
params$ref <- response[[1L]]$tag_name
|
|||
|
params
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.github_remote <- function(remote, ..., use_local = TRUE,
|
|||
|
use_curl = !is_standalone() && pkg_installed("curl")) {
|
|||
|
|
|||
|
# If the package name was explicitly specified, use that
|
|||
|
if (!is.null(remote$package)) {
|
|||
|
return(remote$package)
|
|||
|
}
|
|||
|
|
|||
|
# Otherwise if the repo is an already installed package assume that.
|
|||
|
if (isTRUE(use_local)) {
|
|||
|
local_name <- suppressWarnings(utils::packageDescription(remote$repo, fields = "Package"))
|
|||
|
if (!is.na(local_name)) {
|
|||
|
return(local_name)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
# Otherwise lookup the package name from the remote DESCRIPTION file
|
|||
|
desc <- github_DESCRIPTION(username = remote$username, repo = remote$repo,
|
|||
|
subdir = remote$subdir, host = remote$host, ref = remote$ref,
|
|||
|
pat = remote$auth_token %||% github_pat(), use_curl = use_curl)
|
|||
|
|
|||
|
if (is.null(desc)) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
writeChar(desc, tmp)
|
|||
|
on.exit(unlink(tmp))
|
|||
|
|
|||
|
read_dcf(tmp)$Package
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.github_remote <- function(remote, ..., use_curl = !is_standalone() && pkg_installed("curl")) {
|
|||
|
tryCatch(
|
|||
|
github_commit(username = remote$username, repo = remote$repo,
|
|||
|
host = remote$host, ref = remote$ref, pat = remote$auth_token %||% github_pat(), use_curl = use_curl),
|
|||
|
|
|||
|
# 422 errors most often occur when a branch or PR has been deleted, so we
|
|||
|
# ignore the error in this case
|
|||
|
http_422 = function(e) NA_character_
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.github_remote <- function(x, ...) {
|
|||
|
"GitHub"
|
|||
|
}
|
|||
|
# Contents of R/install-gitlab.R
|
|||
|
#' Install a package from GitLab
|
|||
|
#'
|
|||
|
#' This function is vectorised on `repo` so you can install multiple
|
|||
|
#' packages in a single command. Like other remotes the repository will skip
|
|||
|
#' installation if `force == FALSE` (the default) and the remote state has
|
|||
|
#' not changed since the previous installation.
|
|||
|
#'
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @param repo Repository address in the format
|
|||
|
#' `username/repo[@@ref]`.
|
|||
|
#' @param host GitLab API host to use. Override with your GitLab enterprise
|
|||
|
#' hostname, for example, `"<PROTOCOL://>gitlab.hostname.com"`.
|
|||
|
#' The PROTOCOL is required by packrat during Posit Connect deployment. While
|
|||
|
#' \link{install_gitlab} may work without it, omitting the protocol generally
|
|||
|
#' leads to package restoration errors.
|
|||
|
#' @param auth_token To install from a private repo, generate a personal access
|
|||
|
#' token (PAT) with at least read_api scope in
|
|||
|
#' \url{https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html} and
|
|||
|
#' supply to this argument. This is safer than using a password because you
|
|||
|
#' can easily delete a PAT without affecting any others. Defaults to the
|
|||
|
#' GITLAB_PAT environment variable.
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#' @family package installation
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_gitlab("jimhester/covr")
|
|||
|
#' }
|
|||
|
install_gitlab <- function(repo,
|
|||
|
subdir = NULL,
|
|||
|
auth_token = gitlab_pat(quiet),
|
|||
|
host = "gitlab.com",
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(repo, gitlab_remote, subdir = subdir, auth_token = auth_token, host = host)
|
|||
|
|
|||
|
install_remotes(remotes, auth_token = auth_token, host = host,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
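# Usage sketch (not run; hostname and repository hypothetical): as noted in
# the documentation above, include the protocol in `host` when the install
# needs to be reproduced by packrat/Posit Connect.
#
#   install_gitlab("username/repo", host = "https://gitlab.example.com")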
gitlab_remote <- function(repo, subdir = NULL,
|
|||
|
auth_token = gitlab_pat(), sha = NULL,
|
|||
|
host = "gitlab.com", ...) {
|
|||
|
|
|||
|
meta <- parse_git_repo(repo)
|
|||
|
meta$ref <- meta$ref %||% "HEAD"
|
|||
|
|
|||
|
remote("gitlab",
|
|||
|
host = host,
|
|||
|
repo = paste(c(meta$repo, meta$subdir), collapse = "/"),
|
|||
|
subdir = subdir,
|
|||
|
username = meta$username,
|
|||
|
ref = meta$ref,
|
|||
|
sha = sha,
|
|||
|
auth_token = auth_token
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.gitlab_remote <- function(x, quiet = FALSE) {
|
|||
|
dest <- tempfile(fileext = paste0(".tar.gz"))
|
|||
|
|
|||
|
project_id <- gitlab_project_id(x$username, x$repo, x$ref, x$host, x$auth_token)
|
|||
|
|
|||
|
src_root <- build_url(x$host, "api", "v4", "projects", project_id)
|
|||
|
src <- paste0(src_root, "/repository/archive.tar.gz?sha=", utils::URLencode(x$ref, reserved = TRUE))
|
|||
|
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading GitLab repo ", x$username, "/", x$repo, "@", x$ref,
|
|||
|
"\nfrom URL ", src)
|
|||
|
}
|
|||
|
|
|||
|
download(dest, src, headers = c("Private-Token" = x$auth_token))
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.gitlab_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
|
|||
|
if (!is.null(bundle)) {
|
|||
|
# Might be able to get from archive
|
|||
|
sha <- git_extract_sha1_tar(bundle)
|
|||
|
} else if (is_na(sha)) {
|
|||
|
sha <- NULL
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "gitlab",
|
|||
|
RemoteHost = x$host,
|
|||
|
RemoteRepo = x$repo,
|
|||
|
RemoteUsername = x$username,
|
|||
|
RemoteRef = x$ref,
|
|||
|
RemoteSha = sha,
|
|||
|
RemoteSubdir = x$subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.gitlab_remote <- function(remote, ...) {
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
|
|||
|
src_root <- build_url(
|
|||
|
remote$host, "api", "v4", "projects",
|
|||
|
utils::URLencode(paste0(remote$username, "/", remote$repo),
|
|||
|
reserved = TRUE),
|
|||
|
"repository")
|
|||
|
|
|||
|
src <- paste0(
|
|||
|
src_root, "/files/",
|
|||
|
ifelse(
|
|||
|
is.null(remote$subdir),
|
|||
|
"DESCRIPTION",
|
|||
|
utils::URLencode(paste0(remote$subdir, "/DESCRIPTION"), reserved = TRUE)),
|
|||
|
"/raw?ref=", utils::URLencode(remote$ref, reserved = TRUE))
|
|||
|
|
|||
|
dest <- tempfile()
|
|||
|
res <- download(dest, src, headers = c("Private-Token" = remote$auth_token))
|
|||
|
|
|||
|
tryCatch(
|
|||
|
read_dcf(dest)$Package,
|
|||
|
error = function(e) remote$repo)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.gitlab_remote <- function(remote, ...) {
|
|||
|
gitlab_commit(username = remote$username, repo = remote$repo,
|
|||
|
host = remote$host, ref = remote$ref, pat = remote$auth_token)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.gitlab_remote <- function(x, ...) {
|
|||
|
"GitLab"
|
|||
|
}
|
|||
|
|
|||
|
gitlab_commit <- function(username, repo, ref = "HEAD",
|
|||
|
host = "gitlab.com", pat = gitlab_pat()) {
|
|||
|
|
|||
|
url <- build_url(host, "api", "v4", "projects", utils::URLencode(paste0(username, "/", repo), reserved = TRUE), "repository", "commits", utils::URLencode(ref, reserved = TRUE))
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, headers = c("Private-Token" = pat))
|
|||
|
|
|||
|
json$parse_file(tmp)$id
|
|||
|
}
|
|||
|
|
|||
|
#' Retrieve GitLab personal access token.
|
|||
|
#'
|
|||
|
#' Looks up a GitLab personal access token in the `GITLAB_PAT` environment
#' variable and returns it, or `NULL` if the variable is not set.
|
|||
|
#'
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
gitlab_pat <- function(quiet = TRUE) {
|
|||
|
pat <- Sys.getenv("GITLAB_PAT")
|
|||
|
if (nzchar(pat)) {
|
|||
|
if (!quiet) {
|
|||
|
message("Using GitLab PAT from envvar GITLAB_PAT")
|
|||
|
}
|
|||
|
return(pat)
|
|||
|
}
|
|||
|
return(NULL)
|
|||
|
}
|
|||
|
|
|||
|
gitlab_project_id <- function(username, repo, ref = "HEAD",
|
|||
|
host = "gitlab.com", pat = gitlab_pat()) {
|
|||
|
|
|||
|
url <- build_url(host, "api", "v4", "projects", utils::URLencode(paste0(username, "/", repo), reserved = TRUE), "repository", "commits", utils::URLencode(ref, reserved = TRUE))
|
|||
|
|
|||
|
tmp <- tempfile()
|
|||
|
download(tmp, url, headers = c("Private-Token" = pat))
|
|||
|
|
|||
|
json$parse_file(tmp)$project_id
|
|||
|
}
|
|||
|
# Contents of R/install-local.R
|
|||
|
|
|||
|
#' Install a package from a local file
|
|||
|
#'
|
|||
|
#' This function is vectorised so you can install multiple packages in
|
|||
|
#' a single command.
|
|||
|
#'
|
|||
|
#' @param path path to local directory, or compressed file (tar, zip, tar.gz,
#'   tar.bz2, tgz or tbz)
|
|||
|
#' @inheritParams install_url
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#' @family package installation
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' dir <- tempfile()
|
|||
|
#' dir.create(dir)
|
|||
|
#' pkg <- download.packages("testthat", dir, type = "source")
|
|||
|
#' install_local(pkg[, 2])
|
|||
|
#' }
|
|||
|
|
|||
|
install_local <- function(path = ".", subdir = NULL,
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = !is_binary_pkg(path),
|
|||
|
build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(path, local_remote, subdir = subdir)
|
|||
|
install_remotes(remotes,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
local_remote <- function(path, subdir = NULL, branch = NULL, args = character(0), ...) {
|
|||
|
remote("local",
|
|||
|
path = normalizePath(path),
|
|||
|
subdir = subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.local_remote <- function(x, quiet = FALSE) {
|
|||
|
# Already downloaded - just need to copy to tempdir()
|
|||
|
bundle <- tempfile()
|
|||
|
dir.create(bundle)
|
|||
|
suppressWarnings(
|
|||
|
res <- file.copy(x$path, bundle, recursive = TRUE)
|
|||
|
)
|
|||
|
if (!all(res)) {
|
|||
|
stop("Could not copy `", x$path, "` to `", bundle, "`", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
# file.copy() creates directory inside of bundle
|
|||
|
dir(bundle, full.names = TRUE)[1]
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.local_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
list(
|
|||
|
RemoteType = "local",
|
|||
|
RemoteUrl = x$path,
|
|||
|
RemoteSubdir = x$subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.local_remote <- function(remote, ...) {
|
|||
|
is_tarball <- !dir.exists(remote$path)
|
|||
|
if (is_tarball) {
|
|||
|
# Assume the name is the name of the tarball
|
|||
|
return(sub("_.*$", "", basename(remote$path)))
|
|||
|
}
|
|||
|
description_path <- file.path(remote$path, "DESCRIPTION")
|
|||
|
|
|||
|
read_dcf(description_path)$Package
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.local_remote <- function(remote, ...) {
|
|||
|
is_tarball <- !dir.exists(remote$path)
|
|||
|
if (is_tarball) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
read_dcf(file.path(remote$path, "DESCRIPTION"))$Version
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.local_remote <- function(x, ...) {
|
|||
|
"local"
|
|||
|
}
|
|||
|
# Contents of R/install-remote.R
|
|||
|
#' Install a remote package.
|
|||
|
#'
|
|||
|
#' This:
|
|||
|
#' \enumerate{
|
|||
|
#' \item downloads source bundle
|
|||
|
#' \item decompresses & checks that it's a package
|
|||
|
#' \item adds metadata to DESCRIPTION
|
|||
|
#' \item calls install
|
|||
|
#' }
|
|||
|
#'
|
|||
|
#' It uses the additional S3 generic functions to work. Writing methods for
|
|||
|
#' these functions would allow 3rd party packages to define custom remotes.
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
install_remote <- function(remote,
|
|||
|
dependencies,
|
|||
|
upgrade,
|
|||
|
force,
|
|||
|
quiet,
|
|||
|
build,
|
|||
|
build_opts,
|
|||
|
build_manual,
|
|||
|
build_vignettes,
|
|||
|
repos,
|
|||
|
type,
|
|||
|
...) {
|
|||
|
|
|||
|
stopifnot(is.remote(remote))
|
|||
|
|
|||
|
package_name <- remote_package_name(remote)
|
|||
|
local_sha <- local_sha(package_name)
|
|||
|
remote_sha <- remote_sha(remote, local_sha)
|
|||
|
|
|||
|
if (!isTRUE(force) &&
|
|||
|
!different_sha(remote_sha = remote_sha, local_sha = local_sha)) {
|
|||
|
|
|||
|
if (!quiet) {
|
|||
|
message(
|
|||
|
"Skipping install of '", package_name, "' from a ", sub("_remote", "", class(remote)[1L]), " remote,",
|
|||
|
" the SHA1 (", substr(remote_sha, 1L, 8L), ") has not changed since last install.\n",
|
|||
|
" Use `force = TRUE` to force installation")
|
|||
|
}
|
|||
|
return(invisible(package_name))
|
|||
|
}
|
|||
|
|
|||
|
if (inherits(remote, "cran_remote")) {
|
|||
|
install_packages(
|
|||
|
package_name, repos = remote$repos, type = remote$pkg_type,
|
|||
|
dependencies = dependencies,
|
|||
|
quiet = quiet,
|
|||
|
...)
|
|||
|
return(invisible(package_name))
|
|||
|
}
|
|||
|
|
|||
|
res <- try(bundle <- remote_download(remote, quiet = quiet), silent = quiet)
|
|||
|
if (inherits(res, "try-error")) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
on.exit(unlink(bundle), add = TRUE)
|
|||
|
|
|||
|
source <- source_pkg(bundle, subdir = remote$subdir)
|
|||
|
on.exit(unlink(source, recursive = TRUE), add = TRUE)
|
|||
|
|
|||
|
update_submodules(source, remote$subdir, quiet)
|
|||
|
|
|||
|
add_metadata(source, remote_metadata(remote, bundle, source, remote_sha))
|
|||
|
|
|||
|
# Because we've modified DESCRIPTION, its original MD5 value is wrong
|
|||
|
clear_description_md5(source)
|
|||
|
|
|||
|
install(source,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
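# Extension sketch (not run): as described in the documentation above, a
# third-party package can define a custom remote type by providing methods
# for the remote_download(), remote_metadata(), remote_package_name() and
# remote_sha() generics. The "myforge" type, URL scheme and package spec
# below are hypothetical.
#
#   myforge_remote <- function(spec) remote("myforge", spec = spec)
#
#   remote_download.myforge_remote <- function(x, quiet = FALSE) {
#     dest <- tempfile(fileext = ".tar.gz")
#     download(dest, paste0("https://myforge.example.com/", x$spec, ".tar.gz"))
#   }
#
#   remote_metadata.myforge_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
#     list(RemoteType = "myforge", RemoteUrl = x$spec, RemoteSha = sha)
#   }
#
#   remote_package_name.myforge_remote <- function(remote, ...) NA_character_
#   remote_sha.myforge_remote <- function(remote, ...) NA_character_
#
# install_remote() and install_remotes() then dispatch on these methods.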
install_remotes <- function(remotes, ...) {
|
|||
|
res <- character(length(remotes))
|
|||
|
for (i in seq_along(remotes)) {
|
|||
|
tryCatch(
|
|||
|
res[[i]] <- install_remote(remotes[[i]], ...),
|
|||
|
error = function(e) {
|
|||
|
stop(remote_install_error(remotes[[i]], e))
|
|||
|
})
|
|||
|
}
|
|||
|
invisible(res)
|
|||
|
}
|
|||
|
|
|||
|
remote_install_error <- function(remote, error) {
|
|||
|
msg <- sprintf(
|
|||
|
"Failed to install '%s' from %s:\n %s", remote_name_or_unknown(remote), format(remote), conditionMessage(error)
|
|||
|
)
|
|||
|
|
|||
|
structure(list(message = msg, call = NULL, error = error, remote = remote), class = c("install_error", "error", "condition"))
|
|||
|
}
|
|||
|
|
|||
|
remote_name_or_unknown <- function(remote) {
|
|||
|
res <- tryCatch(
|
|||
|
    remote_package_name(remote),
|
|||
|
error = function(e) NA_character_)
|
|||
|
|
|||
|
if (is.na(res)) {
|
|||
|
return("unknown package")
|
|||
|
}
|
|||
|
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
#' @rdname install_remote
|
|||
|
#' @export
|
|||
|
#' @keywords internal
|
|||
|
add_metadata <- function(pkg_path, meta) {
|
|||
|
|
|||
|
# During installation, the DESCRIPTION file is read and an package.rds file
|
|||
|
# created with most of the information from the DESCRIPTION file. Functions
|
|||
|
# that read package metadata may use either the DESCRIPTION file or the
|
|||
|
# package.rds file, therefore we attempt to modify both of them
|
|||
|
source_desc <- file.path(pkg_path, "DESCRIPTION")
|
|||
|
binary_desc <- file.path(pkg_path, "Meta", "package.rds")
|
|||
|
if (file.exists(source_desc)) {
|
|||
|
desc <- read_dcf(source_desc)
|
|||
|
|
|||
|
desc <- utils::modifyList(desc, meta)
|
|||
|
|
|||
|
write_dcf(source_desc, desc)
|
|||
|
}
|
|||
|
|
|||
|
if (file.exists(binary_desc)) {
|
|||
|
pkg_desc <- base::readRDS(binary_desc)
|
|||
|
desc <- as.list(pkg_desc$DESCRIPTION)
|
|||
|
desc <- utils::modifyList(desc, meta)
|
|||
|
pkg_desc$DESCRIPTION <- stats::setNames(as.character(desc), names(desc))
|
|||
|
base::saveRDS(pkg_desc, binary_desc)
|
|||
|
}
|
|||
|
}
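# Usage sketch (not run; values hypothetical): `meta` is a named list in the
# shape returned by the remote_metadata() methods in this file.
#
#   add_metadata(pkg_path, list(
#     RemoteType = "github",
#     RemoteUsername = "username",
#     RemoteRepo = "repo",
#     RemoteRef = "main",
#     RemoteSha = "0123abc"
#   ))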
# Modify the MD5 file - remove the line for DESCRIPTION
|
|||
|
clear_description_md5 <- function(pkg_path) {
|
|||
|
path <- file.path(pkg_path, "MD5")
|
|||
|
|
|||
|
if (file.exists(path)) {
|
|||
|
text <- readLines(path)
|
|||
|
text <- text[!grepl(".*\\*DESCRIPTION$", text)]
|
|||
|
|
|||
|
writeLines(text, path)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
remote <- function(type, ...) {
|
|||
|
structure(list(...), class = c(paste0(type, "_remote"), "remote"))
|
|||
|
}
|
|||
|
|
|||
|
is.remote <- function(x) inherits(x, "remote")
|
|||
|
|
|||
|
#' @rdname install_remote
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
remote_download <- function(x, quiet = FALSE) UseMethod("remote_download")
|
|||
|
|
|||
|
#' @rdname install_remote
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
remote_metadata <- function(x, bundle = NULL, source = NULL, sha = NULL) UseMethod("remote_metadata")
|
|||
|
|
|||
|
#' @rdname install_remote
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
remote_package_name <- function(remote, ...) UseMethod("remote_package_name")
|
|||
|
|
|||
|
#' @rdname install_remote
|
|||
|
#' @keywords internal
|
|||
|
#' @export
|
|||
|
remote_sha <- function(remote, ...) UseMethod("remote_sha")
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.default <- function(remote, ...) remote$repo
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.default <- function(remote, ...) NA_character_
|
|||
|
|
|||
|
different_sha <- function(remote_sha, local_sha) {
|
|||
|
|
|||
|
same <- remote_sha == local_sha
|
|||
|
same <- isTRUE(same) && !is.na(same)
|
|||
|
!same
|
|||
|
}
|
|||
|
|
|||
|
local_sha <- function(name) {
|
|||
|
package2remote(name)$sha %||% NA_character_
|
|||
|
}
|
|||
|
|
|||
|
# Convert an installed package to its equivalent remote. This constructs the
|
|||
|
# remote from metadata stored in the package's DESCRIPTION file; the metadata
|
|||
|
# is added to the package when it is installed by remotes. If the package is
|
|||
|
# installed some other way, such as by `install.packages()` there will be no
|
|||
|
# meta-data, so there we construct a generic CRAN remote.
|
|||
|
package2remote <- function(name, lib = .libPaths(), repos = getOption("repos"), type = getOption("pkgType")) {
|
|||
|
|
|||
|
x <- tryCatch(utils::packageDescription(name, lib.loc = lib), error = function(e) NA, warning = function(e) NA)
|
|||
|
|
|||
|
# will be NA if not installed
|
|||
|
if (identical(x, NA)) {
|
|||
|
return(remote("cran",
|
|||
|
name = name,
|
|||
|
repos = repos,
|
|||
|
pkg_type = type,
|
|||
|
sha = NA_character_))
|
|||
|
}
|
|||
|
|
|||
|
if (is.null(x$RemoteType) || x$RemoteType %in% c("cran", "standard", "any")) {
|
|||
|
|
|||
|
# Packages installed with install.packages() or locally without remotes
|
|||
|
return(remote("cran",
|
|||
|
name = x$Package,
|
|||
|
repos = repos,
|
|||
|
pkg_type = type,
|
|||
|
sha = x$Version))
|
|||
|
}
|
|||
|
|
|||
|
switch(x$RemoteType,
|
|||
|
standard = remote("cran",
|
|||
|
name = x$Package,
|
|||
|
repos = x$RemoteRepos %||% repos,
|
|||
|
pkg_type = x$RemotePkgType %||% type,
|
|||
|
sha = x$RemoteSha),
|
|||
|
github = remote("github",
|
|||
|
host = x$RemoteHost,
|
|||
|
package = x$RemotePackage,
|
|||
|
repo = x$RemoteRepo,
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
username = x$RemoteUsername,
|
|||
|
ref = x$RemoteRef,
|
|||
|
sha = x$RemoteSha,
|
|||
|
auth_token = github_pat()),
|
|||
|
gitlab = remote("gitlab",
|
|||
|
host = x$RemoteHost,
|
|||
|
repo = x$RemoteRepo,
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
username = x$RemoteUsername,
|
|||
|
ref = x$RemoteRef,
|
|||
|
sha = x$RemoteSha,
|
|||
|
auth_token = gitlab_pat()),
|
|||
|
xgit = remote("xgit",
|
|||
|
url = trim_ws(x$RemoteUrl),
|
|||
|
ref = x$RemoteRef %||% x$RemoteBranch,
|
|||
|
sha = x$RemoteSha,
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
args = x$RemoteArgs),
|
|||
|
git2r = remote("git2r",
|
|||
|
url = trim_ws(x$RemoteUrl),
|
|||
|
ref = x$RemoteRef %||% x$RemoteBranch,
|
|||
|
sha = x$RemoteSha,
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
credentials = git_credentials()),
|
|||
|
bitbucket = remote("bitbucket",
|
|||
|
host = x$RemoteHost,
|
|||
|
repo = x$RemoteRepo,
|
|||
|
username = x$RemoteUsername,
|
|||
|
ref = x$RemoteRef,
|
|||
|
sha = x$RemoteSha,
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
auth_user = bitbucket_user(),
|
|||
|
password = bitbucket_password()),
|
|||
|
svn = remote("svn",
|
|||
|
url = trim_ws(x$RemoteUrl),
|
|||
|
svn_subdir = x$RemoteSubdir,
|
|||
|
revision = x$RemoteSha,
|
|||
|
args = x$RemoteArgs),
|
|||
|
local = remote("local",
|
|||
|
path = {
|
|||
|
path <- trim_ws(x$RemoteUrl)
|
|||
|
if (length(path) == 0) {
|
|||
|
path <- parse_pkg_ref(x$RemotePkgRef)$ref
|
|||
|
}
|
|||
|
path
|
|||
|
},
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
sha = {
|
|||
|
# Packages installed locally might have RemoteSha == NA_character_
|
|||
|
x$RemoteSha %||% x$Version
|
|||
|
}),
|
|||
|
url = remote("url",
|
|||
|
url = trim_ws(x$RemoteUrl),
|
|||
|
subdir = x$RemoteSubdir,
|
|||
|
config = x$RemoteConfig,
|
|||
|
pkg_type = x$RemotePkgType %||% type),
|
|||
|
bioc_git2r = remote("bioc_git2r",
|
|||
|
mirror = x$RemoteMirror,
|
|||
|
repo = x$RemoteRepo,
|
|||
|
release = x$RemoteRelease,
|
|||
|
sha = x$RemoteSha,
|
|||
|
branch = x$RemoteBranch),
|
|||
|
bioc_xgit = remote("bioc_xgit",
|
|||
|
mirror = x$RemoteMirror,
|
|||
|
repo = x$RemoteRepo,
|
|||
|
release = x$RemoteRelease,
|
|||
|
sha = x$RemoteSha,
|
|||
|
branch = x$RemoteBranch),
|
|||
|
stop(sprintf("can't convert package %s with RemoteType '%s' to remote", name, x$RemoteType))
|
|||
|
)
|
|||
|
}
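# Usage sketch (not run; package name hypothetical): for a package previously
# installed with install_github() the stored Remote* fields are turned back
# into a github_remote; for a package installed with install.packages() the
# fallback is a cran_remote whose `sha` is the installed version.
#
#   package2remote("somepkg")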
parse_pkg_ref <- function(x) {
|
|||
|
res <- re_match(x, "(?<type>[^:]+)::(?<ref>.*)")
|
|||
|
if (is.na(res$ref)) {
|
|||
|
stop("Invalid package reference:\n ", x, call. = FALSE)
|
|||
|
}
|
|||
|
res
|
|||
|
}
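# Usage sketch (not run; path hypothetical):
#   parse_pkg_ref("local::~/pkgs/mypkg")
# matches with type "local" and ref "~/pkgs/mypkg".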
#' @export
|
|||
|
format.remotes <- function(x, ...) {
|
|||
|
vapply(x, format, character(1))
|
|||
|
}
|
|||
|
# Contents of R/install-svn.R
|
|||
|
|
|||
|
#' Install a package from a SVN repository
|
|||
|
#'
|
|||
|
#' This function requires \command{svn} to be installed on your system in order to
|
|||
|
#' be used.
|
|||
|
#'
|
|||
|
#' It is vectorised so you can install multiple packages with
|
|||
|
#' a single command.
|
|||
|
#'
|
|||
|
#' @inheritParams install_git
|
|||
|
#' @param subdir A sub-directory within a svn repository that contains the
|
|||
|
#' package we are interested in installing.
|
|||
|
#' @param args A character vector providing extra options to pass on to
|
|||
|
#' \command{svn}.
|
|||
|
#' @param revision svn revision, if omitted updates to latest
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @family package installation
|
|||
|
#' @export
|
|||
|
#'
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_svn("https://github.com/hadley/stringr/trunk")
|
|||
|
#' install_svn("https://github.com/hadley/httr/branches/oauth")
|
|||
|
#'}
|
|||
|
install_svn <- function(url, subdir = NULL, args = character(0),
|
|||
|
revision = NULL,
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
|
|||
|
remotes <- lapply(url, svn_remote, svn_subdir = subdir,
|
|||
|
revision = revision, args = args)
|
|||
|
|
|||
|
install_remotes(remotes, args = args,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
svn_remote <- function(url, svn_subdir = NULL, revision = NULL,
|
|||
|
args = character(0), ...) {
|
|||
|
remote("svn",
|
|||
|
url = url,
|
|||
|
svn_subdir = svn_subdir,
|
|||
|
revision = revision,
|
|||
|
args = args
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_download.svn_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading svn repo ", x$url)
|
|||
|
}
|
|||
|
|
|||
|
bundle <- tempfile()
|
|||
|
svn_binary_path <- svn_path()
|
|||
|
url <- x$url
|
|||
|
|
|||
|
args <- "co"
|
|||
|
if (!is.null(x$revision)) {
|
|||
|
args <- c(args, "-r", x$revision)
|
|||
|
}
|
|||
|
args <- c(args, x$args, full_svn_url(x), bundle)
|
|||
|
|
|||
|
if (!quiet) { message(shQuote(svn_binary_path), " ", paste0(args, collapse = " ")) }
|
|||
|
request <- system2(svn_binary_path, args, stdout = FALSE, stderr = FALSE)
|
|||
|
|
|||
|
  # A non-zero exit status means the svn checkout failed
|
|||
|
if (request > 0) {
|
|||
|
stop("There seems to be a problem retrieving this SVN-URL.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
in_dir(bundle, {
|
|||
|
if (!is.null(x$revision)) {
|
|||
|
request <- system2(svn_binary_path, paste("update -r", x$revision), stdout = FALSE, stderr = FALSE)
|
|||
|
if (request > 0) {
|
|||
|
stop("There was a problem switching to the requested SVN revision", call. = FALSE)
|
|||
|
}
|
|||
|
}
|
|||
|
})
|
|||
|
bundle
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.svn_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
|
|||
|
if (!is.null(bundle)) {
|
|||
|
in_dir(bundle, {
|
|||
|
revision <- svn_revision()
|
|||
|
})
|
|||
|
} else {
|
|||
|
revision <- sha
|
|||
|
}
|
|||
|
|
|||
|
list(
|
|||
|
RemoteType = "svn",
|
|||
|
RemoteUrl = x$url,
|
|||
|
RemoteSubdir = x$svn_subdir,
|
|||
|
RemoteArgs = if (length(x$args) > 0) paste0(deparse(x$args), collapse = " "),
|
|||
|
RemoteSha = revision # for compatibility with other remotes
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
svn_path <- function(svn_binary_name = NULL) {
|
|||
|
# Use user supplied path
|
|||
|
if (!is.null(svn_binary_name)) {
|
|||
|
if (!file.exists(svn_binary_name)) {
|
|||
|
stop("Path ", svn_binary_name, " does not exist", .call = FALSE)
|
|||
|
}
|
|||
|
return(svn_binary_name)
|
|||
|
}
|
|||
|
|
|||
|
# Look on path
|
|||
|
svn_path <- Sys.which("svn")[[1]]
|
|||
|
if (svn_path != "") return(svn_path)
|
|||
|
|
|||
|
# On Windows, look in common locations
|
|||
|
if (os_type() == "windows") {
|
|||
|
look_in <- c(
|
|||
|
"C:/Program Files/Svn/bin/svn.exe",
|
|||
|
"C:/Program Files (x86)/Svn/bin/svn.exe"
|
|||
|
)
|
|||
|
found <- file.exists(look_in)
|
|||
|
if (any(found)) return(look_in[found][1])
|
|||
|
}
|
|||
|
|
|||
|
stop("SVN does not seem to be installed on your system.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.svn_remote <- function(remote, ...) {
|
|||
|
description_url <- file.path(full_svn_url(remote), "DESCRIPTION")
|
|||
|
tmp_file <- tempfile()
|
|||
|
  on.exit(unlink(tmp_file))
|
|||
|
response <- system2(svn_path(), paste("cat", description_url), stdout = tmp_file)
|
|||
|
if (!identical(response, 0L)) {
|
|||
|
return(NA_character_)
|
|||
|
}
|
|||
|
read_dcf(tmp_file)$Package
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.svn_remote <- function(remote, ...) {
|
|||
|
svn_revision(full_svn_url(remote))
|
|||
|
}
|
|||
|
|
|||
|
svn_revision <- function(url = NULL, svn_binary_path = svn_path()) {
|
|||
|
request <- system2(svn_binary_path, paste("info --xml", url), stdout = TRUE)
|
|||
|
if (!is.null(attr(request, "status")) && !identical(attr(request, "status"), 0L)) {
|
|||
|
stop("There was a problem retrieving the current SVN revision", call. = FALSE)
|
|||
|
}
|
|||
|
gsub(".*<commit[[:space:]]+revision=\"([[:digit:]]+)\">.*", "\\1", paste(collapse = "\n", request))
|
|||
|
}
|
|||
|
|
|||
|
full_svn_url <- function(x) {
|
|||
|
url <- x$url
|
|||
|
if (!is.null(x$svn_subdir)) {
|
|||
|
url <- file.path(url, x$svn_subdir)
|
|||
|
}
|
|||
|
|
|||
|
url
|
|||
|
}
|
|||
|
|
|||
|
format.svn_remote <- function(x, ...) {
|
|||
|
"SVN"
|
|||
|
}
|
|||
|
# Contents of R/install-url.R
|
|||
|
|
|||
|
#' Install a package from a url
|
|||
|
#'
|
|||
|
#' This function is vectorised so you can install multiple packages in
|
|||
|
#' a single command.
|
|||
|
#'
|
|||
|
#' @param url location of package on internet. The url should point to a
|
|||
|
#' zip file, a tar file or a bzipped/gzipped tar file.
|
|||
|
#' @param subdir subdirectory within url bundle that contains the R package.
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @export
|
|||
|
#'
|
|||
|
#' @family package installation
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_url("https://github.com/hadley/stringr/archive/HEAD.zip")
|
|||
|
#' }
|
|||
|
|
|||
|
install_url <- function(url, subdir = NULL,
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
...) {
|
|||
|
remotes <- lapply(url, url_remote, subdir = subdir)
|
|||
|
install_remotes(remotes,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...)
|
|||
|
}
|
|||
|
|
|||
|
url_remote <- function(url, subdir = NULL, ...) {
|
|||
|
remote("url",
|
|||
|
url = url,
|
|||
|
subdir = subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @importFrom tools file_ext
|
|||
|
#' @export
|
|||
|
remote_download.url_remote <- function(x, quiet = FALSE) {
|
|||
|
if (!quiet) {
|
|||
|
message("Downloading package from url: ", x$url) # nocov
|
|||
|
}
|
|||
|
|
|||
|
ext <- if (grepl("\\.tar\\.gz$", x$url)) "tar.gz" else file_ext(x$url)
|
|||
|
|
|||
|
bundle <- tempfile(fileext = paste0(".", ext))
|
|||
|
download(bundle, x$url)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_metadata.url_remote <- function(x, bundle = NULL, source = NULL, sha = NULL) {
|
|||
|
list(
|
|||
|
RemoteType = "url",
|
|||
|
RemoteUrl = x$url,
|
|||
|
RemoteSubdir = x$subdir
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_package_name.url_remote <- function(remote, ...) {
|
|||
|
NA_character_
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
remote_sha.url_remote <- function(remote, ...) {
|
|||
|
NA_character_
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
format.url_remote <- function(x, ...) {
|
|||
|
"URL"
|
|||
|
}
|
|||
|
# Contents of R/install-version.R
|
|||
|
|
|||
|
#' Install specific version of a package.
|
|||
|
#'
|
|||
|
#' This function knows how to look in multiple CRAN-like package repositories, and in their
|
|||
|
#' \code{archive} directories, in order to find specific versions of the requested package.
|
|||
|
#'
|
|||
|
#' The repositories are searched in the order specified by the \code{repos} argument. This enables
|
|||
|
#' teams to maintain multiple in-house repositories with different policies - for instance, one repo
|
|||
|
#' for development snapshots and one for official releases. A common setup would be to first search
|
|||
|
#' the official release repo, then the dev snapshot repo, then a public CRAN mirror.
|
|||
|
#'
|
|||
|
#' Older versions of packages on CRAN are usually only available in source form. If your requested
|
|||
|
#' package contains compiled code, you will need to have an R development environment installed. You
|
|||
|
#' can check if you do by running `devtools::has_devel` (you need the `devtools` package for this).
|
|||
|
#'
|
|||
|
#' @export
|
|||
|
#' @family package installation
|
|||
|
#' @param package Name of the package to install.
|
|||
|
#' @param version Version of the package to install. Can either be a string giving the exact
|
|||
|
#' version required, or a specification in the same format as the parenthesized expressions used
|
|||
|
#' in package dependencies. One of the following formats:
|
|||
|
#' - An exact version required, as a string, e.g. `"0.1.13"`
|
|||
|
#' - A comparison operator and a version, e.g. `">= 0.1.12"`
|
|||
|
#' - Several criteria to satisfy, as a comma-separated string, e.g. `">= 1.12.0, < 1.14"`
|
|||
|
#' - Several criteria to satisfy, as elements of a character vector, e.g. `c(">= 1.12.0", "< 1.14")`
|
|||
|
#' @param ... Other arguments passed on to [utils::install.packages()].
|
|||
|
#' @inheritParams utils::install.packages
|
|||
|
#' @inheritParams install_github
|
|||
|
#' @examples
|
|||
|
#' \dontrun{
|
|||
|
#' install_version("devtools", "1.11.0")
|
|||
|
#' install_version("devtools", ">= 1.12.0, < 1.14")
|
|||
|
#'
|
|||
|
#' ## Specify search order (e.g. in ~/.Rprofile)
|
|||
|
#' options(repos = c(
|
|||
|
#' prod = "http://mycompany.example.com/r-repo",
|
|||
|
#' dev = "http://mycompany.example.com/r-repo-dev",
|
|||
|
#' CRAN = "https://cran.revolutionanalytics.com"
|
|||
|
#' ))
|
|||
|
#' install_version("mypackage", "1.15") # finds in 'prod'
|
|||
|
#' install_version("mypackage", "1.16-39487") # finds in 'dev'
|
|||
|
#' }
|
|||
|
#' @importFrom utils available.packages contrib.url install.packages
|
|||
|
|
|||
|
install_version <- function(package, version = NULL,
|
|||
|
dependencies = NA,
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
force = FALSE,
|
|||
|
quiet = FALSE,
|
|||
|
build = FALSE, build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = "source",
|
|||
|
...) {
|
|||
|
|
|||
|
# TODO would it make sense to vectorize this, e.g. `install_version(c("foo", "bar"), c("1.1", "2.2"))`?
|
|||
|
if (length(package) < 1) {
|
|||
|
return()
|
|||
|
}
|
|||
|
if (length(package) > 1) {
|
|||
|
stop("install_version() must be called with a single 'package' argument - multiple packages given")
|
|||
|
}
|
|||
|
|
|||
|
if (!identical(type, "source")) {
|
|||
|
stop("`type` must be 'source' for `install_version()`", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
url <- download_version_url(package, version, repos, type)
|
|||
|
res <- install_url(url,
|
|||
|
dependencies = dependencies,
|
|||
|
upgrade = upgrade,
|
|||
|
force = force,
|
|||
|
quiet = quiet,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
repos = repos,
|
|||
|
type = type,
|
|||
|
...
|
|||
|
)
|
|||
|
|
|||
|
lib <- list(...)$lib %||% .libPaths()
|
|||
|
|
|||
|
# Remove Metadata from installed package
|
|||
|
add_metadata(
|
|||
|
system.file(package = package, lib.loc = lib),
|
|||
|
list(RemoteType = NULL, RemoteUrl = NULL, RemoteSubdir = NULL)
|
|||
|
)
|
|||
|
|
|||
|
invisible(res)
|
|||
|
}
|
|||
|
|
|||
|
version_from_tarball <- function(tarball_name) {
|
|||
|
package_ver_regex <- paste0(".+_(", .standard_regexps()$valid_package_version, ")\\.tar\\.gz$")
|
|||
|
ifelse(grepl(package_ver_regex, tarball_name), sub(package_ver_regex, "\\1", tarball_name), NULL)
|
|||
|
}
|
|||
|
|
|||
|
version_satisfies_criteria <- function(to_check, criteria) {
|
|||
|
to_check <- package_version(to_check)
|
|||
|
result <- apply(version_criteria(criteria), 1, function(r) {
|
|||
|
if (is.na(r["compare"])) {
|
|||
|
TRUE
|
|||
|
} else {
|
|||
|
get(r["compare"], mode = "function")(to_check, r["version"])
|
|||
|
}
|
|||
|
})
|
|||
|
all(result)
|
|||
|
}
|
|||
|
|
|||
|
package_installed <- function(pkg, criteria) {
|
|||
|
v <- suppressWarnings(utils::packageDescription(pkg, fields = "Version"))
|
|||
|
!is.na(v) && version_satisfies_criteria(v, criteria)
|
|||
|
}
|
|||
|
|
|||
|
version_criteria <- function(criteria) {
|
|||
|
if (is.character(criteria) && length(criteria) == 1) {
|
|||
|
criteria <- strsplit(criteria, ",")[[1]]
|
|||
|
}
|
|||
|
|
|||
|
numeric_ver <- .standard_regexps()$valid_numeric_version
|
|||
|
|
|||
|
package <- "p" # dummy package name, required by parse_deps()
|
|||
|
|
|||
|
spec <- if (is.null(criteria) || (length(criteria) == 1 && is.na(criteria[[1L]]))) {
|
|||
|
package
|
|||
|
} else {
|
|||
|
ifelse(grepl(paste0("^", numeric_ver, "$"), criteria),
|
|||
|
paste0(package, "(== ", criteria, ")"),
|
|||
|
paste0(package, "(", criteria, ")")
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
parse_deps(paste(spec, collapse = ", "))[c("compare", "version")]
|
|||
|
}
|
|||
|
|
|||
|
# Find a given package record in the `archive.rds` file of a repository
|
|||
|
package_find_archives <- function(package, repo, verbose = FALSE) {
|
|||
|
if (verbose) {
|
|||
|
message("Trying ", repo)
|
|||
|
}
|
|||
|
|
|||
|
# TODO it would be nice to cache these downloaded files like `available.packages` does
|
|||
|
archive <-
|
|||
|
tryCatch(
|
|||
|
{
|
|||
|
tf <- tempfile(fileext = ".gz")
|
|||
|
on.exit(unlink(tf), add = TRUE)
|
|||
|
download(tf, sprintf("%s/src/contrib/Meta/archive.rds", repo))
|
|||
|
con <- gzfile(tf, "rb")
|
|||
|
on.exit(close(con), add = TRUE)
|
|||
|
readRDS(con)
|
|||
|
},
|
|||
|
warning = function(e) list(),
|
|||
|
error = function(e) list()
|
|||
|
)
|
|||
|
|
|||
|
info <- archive[[package]]
|
|||
|
if (!is.null(info)) {
|
|||
|
info$repo <- repo
|
|||
|
return(info)
|
|||
|
}
|
|||
|
|
|||
|
NULL
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
#' Download a specified version of a CRAN package
|
|||
|
#'
|
|||
|
#' It downloads the package to a temporary file, and
|
|||
|
#' returns the name of the file.
|
|||
|
#'
|
|||
|
#' @inheritParams install_version
|
|||
|
#' @return Name of the downloaded file.
|
|||
|
#'
|
|||
|
#' @export
|
|||
|
|
|||
|
download_version <- function(package, version = NULL,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"), ...) {
|
|||
|
url <- download_version_url(package, version, repos, type)
|
|||
|
download(path = tempfile(), url = url)
|
|||
|
}
|
|||
|
|
|||
|
download_version_url <- function(package, version, repos, type, available, verbose = length(repos) > 1) {
|
|||
|
|
|||
|
## TODO should we do for(r in repos) { for (t in c('published','archive')) {...}}, or
|
|||
|
## for (t in c('published','archive')) { for(r in repos) {...}} ? Right now it's the latter. It
|
|||
|
## only matters if required version is satisfied by both an early repo in archive/ and a late repo
|
|||
|
|
|||
|
if (missing(available)) {
|
|||
|
contriburl <- contrib.url(repos, type)
|
|||
|
available <- available.packages(contriburl, filters = c("R_version", "OS_type", "subarch"))
|
|||
|
}
|
|||
|
|
|||
|
package_exists <- FALSE
|
|||
|
|
|||
|
# available.packages() returns a matrix with entries in the same order as the repositories in
|
|||
|
# `repos`, so the first packages we encounter should be preferred.
|
|||
|
for (ix in which(available[, "Package"] == package)) {
|
|||
|
package_exists <- TRUE
|
|||
|
row <- available[ix, ]
|
|||
|
if (version_satisfies_criteria(row["Version"], version)) {
|
|||
|
return(paste0(
|
|||
|
row["Repository"],
|
|||
|
"/",
|
|||
|
row["Package"],
|
|||
|
"_",
|
|||
|
row["Version"],
|
|||
|
".tar.gz"
|
|||
|
))
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
for (repo in repos) {
|
|||
|
info <- package_find_archives(package, repo, verbose = verbose)
|
|||
|
if (is.null(info)) {
|
|||
|
next
|
|||
|
}
|
|||
|
|
|||
|
package_exists <- TRUE
|
|||
|
|
|||
|
for (i in rev(seq_len(nrow(info)))) {
|
|||
|
package_path <- row.names(info)[i]
|
|||
|
if (version_satisfies_criteria(version_from_tarball(package_path), version)) {
|
|||
|
return(file.path(repo, "src", "contrib", "Archive", package_path))
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (!package_exists) {
|
|||
|
stop(sprintf("couldn't find package '%s'", package))
|
|||
|
}
|
|||
|
|
|||
|
stop(sprintf("version '%s' is invalid for package '%s'", version, package))
|
|||
|
}
|
|||
|
# Contents of R/install.R
|
|||
|
install <- function(pkgdir, dependencies, quiet, build, build_opts, build_manual, build_vignettes,
|
|||
|
upgrade, repos, type, ...) {
|
|||
|
warn_for_potential_errors()
|
|||
|
|
|||
|
if (file.exists(file.path(pkgdir, "src"))) {
|
|||
|
if (!is_standalone() && has_package("pkgbuild")) {
|
|||
|
pkgbuild::local_build_tools(required = TRUE)
|
|||
|
} else if (!has_devel()) {
|
|||
|
missing_devel_warning(pkgdir)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
pkg_name <- load_pkg_description(pkgdir)$package
|
|||
|
|
|||
|
## Check for circular dependencies. We need to know about the root
|
|||
|
## of the install process.
|
|||
|
if (is_root_install()) on.exit(exit_from_root_install(), add = TRUE)
|
|||
|
if (check_for_circular_dependencies(pkgdir, quiet)) {
|
|||
|
return(invisible(pkg_name))
|
|||
|
}
|
|||
|
|
|||
|
install_deps(pkgdir, dependencies = dependencies, quiet = quiet,
|
|||
|
build = build, build_opts = build_opts, build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes, upgrade = upgrade, repos = repos,
|
|||
|
type = type, ...)
|
|||
|
|
|||
|
if (isTRUE(build)) {
|
|||
|
dir <- tempfile()
|
|||
|
dir.create(dir)
|
|||
|
on.exit(unlink(dir), add = TRUE)
|
|||
|
|
|||
|
pkgdir <- safe_build_package(pkgdir, build_opts, build_manual, build_vignettes, dir, quiet)
|
|||
|
}
|
|||
|
|
|||
|
safe_install_packages(
|
|||
|
pkgdir,
|
|||
|
repos = NULL,
|
|||
|
quiet = quiet,
|
|||
|
type = "source",
|
|||
|
...
|
|||
|
)
|
|||
|
|
|||
|
invisible(pkg_name)
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
safe_install_packages <- function(...) {
|
|||
|
|
|||
|
lib <- paste(.libPaths(), collapse = .Platform$path.sep)
|
|||
|
|
|||
|
if (!is_standalone() &&
|
|||
|
has_package("crancache") && has_package("callr")) {
|
|||
|
i.p <- "crancache" %::% "install_packages"
|
|||
|
} else {
|
|||
|
i.p <- utils::install.packages
|
|||
|
}
|
|||
|
|
|||
|
with_options(list(install.lock = getOption("install.lock", TRUE)), {
|
|||
|
with_envvar(
|
|||
|
c(R_LIBS = lib,
|
|||
|
R_LIBS_USER = lib,
|
|||
|
R_LIBS_SITE = lib,
|
|||
|
RGL_USE_NULL = "TRUE"),
|
|||
|
|
|||
|
# Set options(warn = 2) for this process and child processes, so that
|
|||
|
# warnings from `install.packages()` are converted to errors.
|
|||
|
if (should_error_for_warnings()) {
|
|||
|
with_options(list(warn = 2),
|
|||
|
with_rprofile_user("options(warn = 2)",
|
|||
|
i.p(...)
|
|||
|
)
|
|||
|
)
|
|||
|
} else {
|
|||
|
i.p(...)
|
|||
|
}
|
|||
|
)
|
|||
|
})
|
|||
|
}
|
|||
|
|
|||
|
normalize_build_opts <- function(build_opts, build_manual, build_vignettes) {
|
|||
|
if (!isTRUE(build_manual)) {
|
|||
|
build_opts <- union(build_opts, "--no-manual")
|
|||
|
} else {
|
|||
|
build_opts <- setdiff(build_opts, "--no-manual")
|
|||
|
}
|
|||
|
|
|||
|
if (!isTRUE(build_vignettes)) {
|
|||
|
build_opts <- union(build_opts, "--no-build-vignettes")
|
|||
|
} else {
|
|||
|
build_opts <- setdiff(build_opts, "--no-build-vignettes")
|
|||
|
}
|
|||
|
|
|||
|
build_opts
|
|||
|
}
|
|||
|
|
|||
|
safe_build_package <- function(pkgdir, build_opts, build_manual, build_vignettes, dest_path, quiet, use_pkgbuild = !is_standalone() && pkg_installed("pkgbuild")) {
|
|||
|
build_opts <- normalize_build_opts(build_opts, build_manual, build_vignettes)
|
|||
|
|
|||
|
if (use_pkgbuild) {
|
|||
|
vignettes <- TRUE
|
|||
|
manual <- FALSE
|
|||
|
has_no_vignettes <- grepl("--no-build-vignettes", build_opts)
|
|||
|
if (any(has_no_vignettes)) {
|
|||
|
vignettes <- FALSE
|
|||
|
}
|
|||
|
has_no_manual <- grepl("--no-manual", build_opts)
|
|||
|
if (!any(has_no_manual)) {
|
|||
|
manual <- TRUE
|
|||
|
}
|
|||
|
build_opts <- build_opts[!(has_no_vignettes | has_no_manual)]
|
|||
|
pkgbuild::build(pkgdir, dest_path = dest_path, binary = FALSE,
|
|||
|
vignettes = vignettes, manual = manual, args = build_opts, quiet = quiet)
|
|||
|
} else {
|
|||
|
# No pkgbuild, so we need to call R CMD build ourselves
|
|||
|
|
|||
|
lib <- paste(.libPaths(), collapse = .Platform$path.sep)
|
|||
|
env <- c(R_LIBS = lib,
|
|||
|
R_LIBS_USER = lib,
|
|||
|
R_LIBS_SITE = lib,
|
|||
|
R_PROFILE_USER = tempfile())
|
|||
|
|
|||
|
pkgdir <- normalizePath(pkgdir)
|
|||
|
|
|||
|
message("Running `R CMD build`...")
|
|||
|
in_dir(dest_path, {
|
|||
|
with_envvar(env, {
|
|||
|
output <- rcmd("build", c(build_opts, shQuote(pkgdir)), quiet = quiet,
|
|||
|
fail_on_status = FALSE)
|
|||
|
})
|
|||
|
})
|
|||
|
|
|||
|
if (output$status != 0) {
|
|||
|
cat("STDOUT:\n")
|
|||
|
cat(output$stdout, sep = "\n")
|
|||
|
cat("STDERR:\n")
|
|||
|
cat(output$stderr, sep = "\n")
|
|||
|
msg_for_long_paths(output)
|
|||
|
stop(sprintf("Failed to `R CMD build` package, try `build = FALSE`."),
|
|||
|
call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
building_regex <- paste0(
|
|||
|
"^[*] building[^[:alnum:]]+", # prefix, "* building '"
|
|||
|
"([-[:alnum:]_.]+)", # package file name, e.g. xy_1.0-2.tar.gz
|
|||
|
"[^[:alnum:]]+$" # trailing quote
|
|||
|
)
|
|||
|
|
|||
|
pkgfile <- sub(building_regex, "\\1", output$stdout[length(output$stdout)])
|
|||
|
file.path(dest_path, pkgfile)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
msg_for_long_paths <- function(output) {
|
|||
|
if (sys_type() == "windows" &&
|
|||
|
(r_error_matches("over-long path", output$stderr) ||
|
|||
|
r_error_matches("over-long path length", output$stderr))) {
|
|||
|
message(
|
|||
|
"\nIt seems that this package contains files with very long paths.\n",
|
|||
|
"This is not supported on most Windows versions. Please contact the\n",
|
|||
|
"package authors and tell them about this. See this GitHub issue\n",
|
|||
|
"for more details: https://github.com/r-lib/remotes/issues/84\n")
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
r_error_matches <- function(msg, str) {
|
|||
|
any(grepl(msg, str)) ||
|
|||
|
any(grepl(gettext(msg, domain = "R"), str))
|
|||
|
}
|
|||
|
|
|||
|
#' Install package dependencies if needed.
|
|||
|
#'
|
|||
|
#' @inheritParams package_deps
|
|||
|
#' @param ... additional arguments passed to [utils::install.packages()].
|
|||
|
#' @param build If `TRUE` build the package before installing.
|
|||
|
#' @param build_opts Options to pass to `R CMD build`, only used when `build` is `TRUE`.
|
|||
|
#' @param build_manual If `FALSE`, don't build PDF manual ('--no-manual').
|
|||
|
#' @param build_vignettes If `FALSE`, don't build package vignettes ('--no-build-vignettes').
|
|||
|
#' @export
|
|||
|
#' @examples
|
|||
|
#' \dontrun{install_deps(".")}
|
|||
|
|
|||
|
install_deps <- function(pkgdir = ".", dependencies = NA,
|
|||
|
repos = getOption("repos"),
|
|||
|
type = getOption("pkgType"),
|
|||
|
upgrade = c("default", "ask", "always", "never"),
|
|||
|
quiet = FALSE,
|
|||
|
build = TRUE,
|
|||
|
build_opts = c("--no-resave-data", "--no-manual", "--no-build-vignettes"),
|
|||
|
build_manual = FALSE, build_vignettes = FALSE,
|
|||
|
...) {
|
|||
|
packages <- dev_package_deps(
|
|||
|
pkgdir,
|
|||
|
repos = repos,
|
|||
|
dependencies = dependencies,
|
|||
|
type = type
|
|||
|
)
|
|||
|
|
|||
|
dep_deps <- if (isTRUE(dependencies)) NA else dependencies
|
|||
|
|
|||
|
update(
|
|||
|
packages,
|
|||
|
dependencies = dep_deps,
|
|||
|
quiet = quiet,
|
|||
|
upgrade = upgrade,
|
|||
|
build = build,
|
|||
|
build_opts = build_opts,
|
|||
|
build_manual = build_manual,
|
|||
|
build_vignettes = build_vignettes,
|
|||
|
type = type,
|
|||
|
repos = repos,
|
|||
|
...
|
|||
|
)
|
|||
|
}
|
|||
|
|
|||
|
should_error_for_warnings <- function() {
|
|||
|
|
|||
|
no_errors <- Sys.getenv("R_REMOTES_NO_ERRORS_FROM_WARNINGS", "true")
|
|||
|
|
|||
|
!config_val_to_logical(no_errors)
|
|||
|
}
|
|||
|
# Contents of R/json.R
|
|||
|
|
|||
|
# Standalone JSON parser
|
|||
|
#
|
|||
|
# The purpose of this file is to provide a standalone JSON parser.
|
|||
|
# It is quite slow and bare. If you need a proper parser please use the
|
|||
|
# jsonlite package.
|
|||
|
#
|
|||
|
# The canonical location of this file is in the remotes package:
|
|||
|
# https://github.com/r-lib/remotes/blob/HEAD/R/json.R
|
|||
|
#
|
|||
|
# API:
|
|||
|
# parse(text)
|
|||
|
# parse_file(filename)
|
|||
|
#
|
|||
|
# NEWS:
|
|||
|
# - 2019/05/15 First standalone version
|
|||
|
|
|||
|
json <- local({
|
|||
|
|
|||
|
tokenize_json <- function(text) {
|
|||
|
text <- paste(text, collapse = "\n")
|
|||
|
|
|||
|
ESCAPE <- '(\\\\[^u[:cntrl:]]|\\\\u[0-9a-fA-F]{4})'
|
|||
|
CHAR <- '[^[:cntrl:]"\\\\]'
|
|||
|
|
|||
|
STRING <- paste0('"', CHAR, '*(', ESCAPE, CHAR, '*)*"')
|
|||
|
NUMBER <- "-?(0|[1-9][0-9]*)([.][0-9]*)?([eE][+-]?[0-9]*)?"
|
|||
|
KEYWORD <- 'null|false|true'
|
|||
|
SPACE <- '[[:space:]]+'
|
|||
|
|
|||
|
match <- gregexpr(
|
|||
|
pattern = paste0(
|
|||
|
STRING, "|", NUMBER, "|", KEYWORD, "|", SPACE, "|", "."
|
|||
|
),
|
|||
|
text = text,
|
|||
|
perl = TRUE
|
|||
|
)
|
|||
|
|
|||
|
grep("^\\s+$", regmatches(text, match)[[1]], value = TRUE, invert = TRUE)
|
|||
|
}
|
|||
|
|
|||
|
throw <- function(...) {
|
|||
|
stop("JSON: ", ..., call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
# Parse a JSON file
|
|||
|
#
|
|||
|
# @param filename Path to the JSON file.
|
|||
|
# @return R objects corresponding to the JSON file.
|
|||
|
|
|||
|
parse_file <- function(filename) {
|
|||
|
parse(readLines(filename, warn = FALSE))
|
|||
|
}
|
|||
|
|
|||
|
# Parse a JSON string
|
|||
|
#
|
|||
|
# @param text JSON string.
|
|||
|
# @return R object corresponding to the JSON string.
|
|||
|
|
|||
|
parse <- function(text) {
|
|||
|
|
|||
|
tokens <- tokenize_json(text)
|
|||
|
token <- NULL
|
|||
|
ptr <- 1
|
|||
|
|
|||
|
read_token <- function() {
|
|||
|
if (ptr <= length(tokens)) {
|
|||
|
token <<- tokens[ptr]
|
|||
|
ptr <<- ptr + 1
|
|||
|
} else {
|
|||
|
token <<- 'EOF'
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
parse_value <- function(name = "") {
|
|||
|
if (token == "{") {
|
|||
|
parse_object()
|
|||
|
} else if (token == "[") {
|
|||
|
parse_array()
|
|||
|
} else if (token == "EOF" || (nchar(token) == 1 && ! token %in% 0:9)) {
|
|||
|
throw("EXPECTED value GOT ", token)
|
|||
|
} else {
|
|||
|
j2r(token)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
parse_object <- function() {
|
|||
|
res <- structure(list(), names = character())
|
|||
|
|
|||
|
read_token()
|
|||
|
|
|||
|
## Invariant: we are at the beginning of an element
|
|||
|
while (token != "}") {
|
|||
|
|
|||
|
## "key"
|
|||
|
if (grepl('^".*"$', token)) {
|
|||
|
key <- j2r(token)
|
|||
|
} else {
|
|||
|
throw("EXPECTED string GOT ", token)
|
|||
|
}
|
|||
|
|
|||
|
## :
|
|||
|
read_token()
|
|||
|
if (token != ":") { throw("EXPECTED : GOT ", token) }
|
|||
|
|
|||
|
## value
|
|||
|
read_token()
|
|||
|
res[key] <- list(parse_value())
|
|||
|
|
|||
|
## } or ,
|
|||
|
read_token()
|
|||
|
if (token == "}") {
|
|||
|
break
|
|||
|
} else if (token != ",") {
|
|||
|
throw("EXPECTED , or } GOT ", token)
|
|||
|
}
|
|||
|
read_token()
|
|||
|
}
|
|||
|
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
parse_array <- function() {
|
|||
|
res <- list()
|
|||
|
|
|||
|
read_token()
|
|||
|
|
|||
|
## Invariant: we are at the beginning of an element
|
|||
|
while (token != "]") {
|
|||
|
## value
|
|||
|
res <- c(res, list(parse_value()))
|
|||
|
|
|||
|
## ] or ,
|
|||
|
read_token()
|
|||
|
if (token == "]") {
|
|||
|
break
|
|||
|
} else if (token != ",") {
|
|||
|
throw("EXPECTED , GOT ", token)
|
|||
|
}
|
|||
|
read_token()
|
|||
|
}
|
|||
|
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
read_token()
|
|||
|
parse_value(tokens)
|
|||
|
}
|
|||
|
|
|||
|
j2r <- function(token) {
|
|||
|
if (token == "null") {
|
|||
|
NULL
|
|||
|
} else if (token == "true") {
|
|||
|
TRUE
|
|||
|
} else if (token == "false") {
|
|||
|
FALSE
|
|||
|
} else if (grepl('^".*"$', token)) {
|
|||
|
trimq(token)
|
|||
|
} else {
|
|||
|
as.numeric(token)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
trimq <- function(x) {
|
|||
|
sub('^"(.*)"$', "\\1", x)
|
|||
|
}
|
|||
|
|
|||
|
structure(
|
|||
|
list(
|
|||
|
.internal = environment(),
|
|||
|
parse = parse,
|
|||
|
parse_file = parse_file
|
|||
|
),
|
|||
|
class = c("standalone_json", "standalone"))
|
|||
|
})
|
|||
|
# Contents of R/package-deps.R
|
|||
|
|
|||
|
parse_deps <- function(string) {
|
|||
|
if (is.null(string)) return()
|
|||
|
stopifnot(is.character(string), length(string) == 1)
|
|||
|
if (grepl("^\\s*$", string)) return()
|
|||
|
|
|||
|
# Split by commas with surrounding whitespace removed
|
|||
|
pieces <- strsplit(string, "[[:space:]]*,[[:space:]]*")[[1]]
|
|||
|
|
|||
|
# Get the names
|
|||
|
names <- gsub("\\s*\\(.*?\\)", "", pieces)
|
|||
|
names <- gsub("^\\s+|\\s+$", "", names)
|
|||
|
|
|||
|
# Get the versions and comparison operators
|
|||
|
versions_str <- pieces
|
|||
|
have_version <- grepl("\\(.*\\)", versions_str)
|
|||
|
versions_str[!have_version] <- NA
|
|||
|
|
|||
|
compare <- sub(".*\\(\\s*(\\S+)\\s+.*\\s*\\).*", "\\1", versions_str)
|
|||
|
versions <- sub(".*\\(\\s*\\S+\\s+(\\S*)\\s*\\).*", "\\1", versions_str)
|
|||
|
|
|||
|
# Check that non-NA comparison operators are valid
|
|||
|
compare_nna <- compare[!is.na(compare)]
|
|||
|
compare_valid <- compare_nna %in% c(">", ">=", "==", "<=", "<")
|
|||
|
if(!all(compare_valid)) {
|
|||
|
stop("Invalid comparison operator in dependency: ",
|
|||
|
paste(compare_nna[!compare_valid], collapse = ", "))
|
|||
|
}
|
|||
|
|
|||
|
deps <- data.frame(name = names, compare = compare,
|
|||
|
version = versions, stringsAsFactors = FALSE)
|
|||
|
|
|||
|
# Remove R dependency
|
|||
|
deps[names != "R", ]
|
|||
|
}
|
|||
|
# Contents of R/package.R
|
|||
|
|
|||
|
load_pkg_description <- function(path) {
|
|||
|
|
|||
|
path <- normalizePath(path, mustWork = TRUE)
|
|||
|
|
|||
|
if (!is_dir(path)) {
|
|||
|
dir <- tempfile()
|
|||
|
path_desc <- untar_description(path, dir = dir)
|
|||
|
on.exit(unlink(dir, recursive = TRUE))
|
|||
|
|
|||
|
} else {
|
|||
|
path_desc <- file.path(path, "DESCRIPTION")
|
|||
|
}
|
|||
|
|
|||
|
desc <- read_dcf(path_desc)
|
|||
|
names(desc) <- tolower(names(desc))
|
|||
|
desc$path <- path
|
|||
|
|
|||
|
desc
|
|||
|
}
|
|||
|
# Contents of R/parse-git.R
|
|||
|
#' Parse a remote git repo specification
|
|||
|
#'
|
|||
|
#' A remote repo can be specified in two ways:
|
|||
|
#' \describe{
|
|||
|
#' \item{as a URL}{`parse_github_url()` handles HTTPS and SSH remote URLs
|
|||
|
#' and various GitHub browser URLs}
|
|||
|
#' \item{via a shorthand}{`parse_repo_spec()` handles this concise form:
|
|||
|
#' `[username/]repo[/subdir][#pull|@ref|@*release]`}
|
|||
|
#' }
|
|||
|
#'
|
|||
|
#' @param repo Character scalar, the repo specification.
|
|||
|
#' @return List with members: `username`, `repo`, `subdir`
|
|||
|
#' `ref`, `pull`, `release`, some which will be empty.
|
|||
|
#'
|
|||
|
#' @name parse-git-repo
|
|||
|
#' @examples
|
|||
|
#' parse_repo_spec("metacran/crandb")
|
|||
|
#' parse_repo_spec("jimhester/covr#47") ## pull request
|
|||
|
#' parse_repo_spec("jeroen/curl@v0.9.3") ## specific tag
|
|||
|
#' parse_repo_spec("tidyverse/dplyr@*release") ## shorthand for latest release
|
|||
|
#' parse_repo_spec("r-lib/remotes@550a3c7d3f9e1493a2ba") ## commit SHA
|
|||
|
#' parse_repo_spec("igraph=igraph/rigraph") ## Different package name from repo name
|
|||
|
#'
|
|||
|
#' parse_github_url("https://github.com/jeroen/curl.git")
|
|||
|
#' parse_github_url("git@github.com:metacran/crandb.git")
|
|||
|
#' parse_github_url("https://github.com/jimhester/covr")
|
|||
|
#' parse_github_url("https://github.example.com/user/repo.git")
|
|||
|
#' parse_github_url("git@github.example.com:user/repo.git")
|
|||
|
#'
|
|||
|
#' parse_github_url("https://github.com/r-lib/remotes/pull/108")
|
|||
|
#' parse_github_url("https://github.com/r-lib/remotes/tree/name-of-branch")
|
|||
|
#' parse_github_url("https://github.com/r-lib/remotes/commit/1234567")
|
|||
|
#' parse_github_url("https://github.com/r-lib/remotes/releases/latest")
|
|||
|
#' parse_github_url("https://github.com/r-lib/remotes/releases/tag/1.0.0")
|
|||
|
NULL
|
|||
|
|
|||
|
#' @export
|
|||
|
#' @rdname parse-git-repo
|
|||
|
parse_repo_spec <- function(repo) {
|
|||
|
package_name_rx <- "(?:(?<package>[[:alpha:]][[:alnum:].]*[[:alnum:]])=)?"
|
|||
|
username_rx <- "(?:(?<username>[^/]+)/)"
|
|||
|
repo_rx <- "(?<repo>[^/@#]+)"
|
|||
|
subdir_rx <- "(?:/(?<subdir>[^@#]*[^@#/])/?)?"
|
|||
|
ref_rx <- "(?:@(?<ref>[^*].*))"
|
|||
|
pull_rx <- "(?:#(?<pull>[0-9]+))"
|
|||
|
release_rx <- "(?:@(?<release>[*]release))"
|
|||
|
ref_or_pull_or_release_rx <- sprintf(
|
|||
|
"(?:%s|%s|%s)?", ref_rx, pull_rx, release_rx
|
|||
|
)
|
|||
|
spec_rx <- sprintf(
|
|||
|
"^%s%s%s%s%s$", package_name_rx, username_rx, repo_rx, subdir_rx, ref_or_pull_or_release_rx
|
|||
|
)
|
|||
|
params <- as.list(re_match(text = repo, pattern = spec_rx))
|
|||
|
|
|||
|
if (is.na(params$.match)) {
|
|||
|
stop(sprintf("Invalid git repo specification: '%s'", repo))
|
|||
|
}
|
|||
|
|
|||
|
params[grepl("^[^\\.]", names(params))]
|
|||
|
}
|
|||
|
|
|||
|
#' @export
|
|||
|
#' @rdname parse-git-repo
|
|||
|
parse_github_repo_spec <- parse_repo_spec
|
|||
|
|
|||
|
#' @export
|
|||
|
#' @rdname parse-git-repo
|
|||
|
parse_github_url <- function(repo) {
|
|||
|
prefix_rx <- "(?:github[^/:]+[/:])"
|
|||
|
username_rx <- "(?:(?<username>[^/]+)/)"
|
|||
|
repo_rx <- "(?<repo>[^/@#]+)"
|
|||
|
ref_rx <- "(?:(?:tree|commit|releases/tag)/(?<ref>.+$))"
|
|||
|
pull_rx <- "(?:pull/(?<pull>.+$))"
|
|||
|
release_rx <- "(?:releases/)(?<release>.+$)"
|
|||
|
ref_or_pull_or_release_rx <- sprintf(
|
|||
|
"(?:/(%s|%s|%s))?", ref_rx, pull_rx, release_rx
|
|||
|
)
|
|||
|
url_rx <- sprintf(
|
|||
|
"%s%s%s%s",
|
|||
|
prefix_rx, username_rx, repo_rx, ref_or_pull_or_release_rx
|
|||
|
)
|
|||
|
params <- as.list(re_match(text = repo, pattern = url_rx))
|
|||
|
|
|||
|
if (is.na(params$.match)) {
|
|||
|
stop(sprintf("Invalid GitHub URL: '%s'", repo))
|
|||
|
}
|
|||
|
if (params$ref == "" && params$pull == "" && params$release == "") {
|
|||
|
params$repo <- gsub("\\.git$", "", params$repo)
|
|||
|
}
|
|||
|
if (params$release == "latest") {
|
|||
|
params$release <- "*release"
|
|||
|
}
|
|||
|
|
|||
|
params[grepl("^[^\\.]", names(params))]
|
|||
|
}
|
|||
|
|
|||
|
parse_git_repo <- function(repo) {
|
|||
|
|
|||
|
if (grepl("^https://github|^git@github", repo)) {
|
|||
|
params <- parse_github_url(repo)
|
|||
|
} else {
|
|||
|
params <- parse_repo_spec(repo)
|
|||
|
}
|
|||
|
params <- params[viapply(params, nchar) > 0]
|
|||
|
|
|||
|
if (!is.null(params$pull)) {
|
|||
|
params$ref <- github_pull(params$pull)
|
|||
|
params$pull <- NULL
|
|||
|
}
|
|||
|
|
|||
|
if (!is.null(params$release)) {
|
|||
|
params$ref <- github_release()
|
|||
|
params$release <- NULL
|
|||
|
}
|
|||
|
|
|||
|
params
|
|||
|
}
|
|||
|
|
|||
|
# Contents of R/remotes-package.R
|
|||
|
#' @keywords internal
|
|||
|
"_PACKAGE"
|
|||
|
|
|||
|
## usethis namespace: start
|
|||
|
## usethis namespace: end
|
|||
|
NULL
|
|||
|
# Contents of R/submodule.R
|
|||
|
parse_submodules <- function(file) {
|
|||
|
if (grepl("\n", file)) {
|
|||
|
x <- strsplit(file, "\n")[[1]]
|
|||
|
} else {
|
|||
|
x <- readLines(file)
|
|||
|
}
|
|||
|
|
|||
|
# https://git-scm.com/docs/git-config#_syntax
|
|||
|
# Subsection names are case sensitive and can contain any characters except
|
|||
|
# newline and the null byte. Doublequote " and backslash can be included by
|
|||
|
# escaping them as \" and \\
|
|||
|
double_quoted_string_with_escapes <- '(?:\\\\.|[^"])*'
|
|||
|
|
|||
|
# Otherwise extract section names
|
|||
|
section_names <- re_match(
|
|||
|
x,
|
|||
|
sprintf('^[[:space:]]*\\[submodule "(?<submodule>%s)"\\][[:space:]]*$', double_quoted_string_with_escapes)
|
|||
|
)$submodule
|
|||
|
|
|||
|
# If no sections found return the empty list
|
|||
|
if (all(is.na(section_names))) {
|
|||
|
return(list())
|
|||
|
}
|
|||
|
|
|||
|
# Extract name = value
|
|||
|
# The variable names are case-insensitive, allow only alphanumeric characters
|
|||
|
# and -, and must start with an alphabetic character.
|
|||
|
variable_name <- "[[:alpha:]][[:alnum:]\\-]*"
|
|||
|
mapping_values <- re_match(
|
|||
|
x,
|
|||
|
sprintf('^[[:space:]]*(?<name>%s)[[:space:]]*=[[:space:]]*(?<value>.*)[[:space:]]*$', variable_name),
|
|||
|
)
|
|||
|
|
|||
|
values <- cbind(submodule = fill(section_names), mapping_values[c("name", "value")], stringsAsFactors = FALSE)
|
|||
|
values <- values[!is.na(mapping_values$.match), ]
|
|||
|
|
|||
|
# path and valid url are required
|
|||
|
if (!all(c("path", "url") %in% values$name)) {
|
|||
|
warning("Invalid submodule definition, skipping submodule installation", immediate. = TRUE, call. = FALSE)
|
|||
|
return(list())
|
|||
|
}
|
|||
|
|
|||
|
# Roughly equivalent to tidyr::spread(values, name, value)
|
|||
|
res <- stats::reshape(values, idvar = "submodule", timevar = "name", v.name = "value", direction = "wide")
|
|||
|
|
|||
|
# Set the column names, reshape prepends `value.` to path, url and branch
|
|||
|
colnames(res) <- gsub("value[.]", "", colnames(res))
|
|||
|
|
|||
|
# path and valid url are required
|
|||
|
if (any(is.na(res$url), is.na(res$path))) {
|
|||
|
warning("Invalid submodule definition, skipping submodule installation", immediate. = TRUE, call. = FALSE)
|
|||
|
return(list())
|
|||
|
}
|
|||
|
|
|||
|
# branch is optional
|
|||
|
if (!exists("branch", res)) {
|
|||
|
res$branch <- NA_character_
|
|||
|
}
|
|||
|
|
|||
|
# Remove unneeded attribute
|
|||
|
attr(res, "reshapeWide") <- NULL
|
|||
|
|
|||
|
# Remove rownames
|
|||
|
rownames(res) <- NULL
|
|||
|
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
# Adapted from https://stackoverflow.com/a/9517731/2055486
|
|||
|
fill <- function(x) {
|
|||
|
not_missing <- !is.na(x)
|
|||
|
|
|||
|
res <- x[not_missing]
|
|||
|
res[cumsum(not_missing)]
|
|||
|
}
|
|||
|
|
|||
|
update_submodule <- function(url, path, branch, quiet) {
|
|||
|
args <- c('clone', '--depth', '1', '--no-hardlinks --recurse-submodules')
|
|||
|
if (length(branch) > 0 && !is.na(branch) && branch != 'HEAD') {
|
|||
|
args <- c(args, "--branch", branch)
|
|||
|
}
|
|||
|
args <- c(args, url, path)
|
|||
|
|
|||
|
git(paste0(args, collapse = " "), quiet = quiet)
|
|||
|
}
|
|||
|
|
|||
|
update_submodules <- function(source, subdir, quiet) {
|
|||
|
file <- file.path(source, ".gitmodules")
|
|||
|
|
|||
|
if (!file.exists(file)) {
|
|||
|
|
|||
|
if (!is.null(subdir)) {
|
|||
|
nb_sub_folders <- lengths(strsplit(subdir, "/"))
|
|||
|
source <- do.call(file.path, as.list(c(source, rep("..", nb_sub_folders))))
|
|||
|
}
|
|||
|
|
|||
|
file <- file.path(source, ".gitmodules")
|
|||
|
if (!file.exists(file)) {
|
|||
|
return()
|
|||
|
}
|
|||
|
}
|
|||
|
info <- parse_submodules(file)
|
|||
|
|
|||
|
# Fixes #234
|
|||
|
if (length(info) == 0) {
|
|||
|
return()
|
|||
|
}
|
|||
|
to_ignore <- in_r_build_ignore(info$path, file.path(source, ".Rbuildignore"))
|
|||
|
if (!(length(info) > 0)) {
|
|||
|
return()
|
|||
|
}
|
|||
|
info <- info[!to_ignore, ]
|
|||
|
|
|||
|
for (i in seq_len(NROW(info))) {
|
|||
|
update_submodule(info$url[[i]], file.path(source, info$path[[i]]), info$branch[[i]], quiet)
|
|||
|
}
|
|||
|
}
|
|||
|
# Contents of R/system.R
|
|||
|
|
|||
|
system_check <- function(command, args = character(), quiet = TRUE,
|
|||
|
error = TRUE, path = ".") {
|
|||
|
|
|||
|
out <- tempfile()
|
|||
|
err <- tempfile()
|
|||
|
on.exit(unlink(out), add = TRUE)
|
|||
|
on.exit(unlink(err), add = TRUE)
|
|||
|
|
|||
|
## We suppress warnings, they are given if the command
|
|||
|
## exits with a non-zero status
|
|||
|
res <- in_dir(
|
|||
|
path,
|
|||
|
suppressWarnings(
|
|||
|
system2(command, args = args, stdout = out, stderr = err)
|
|||
|
)
|
|||
|
)
|
|||
|
|
|||
|
res <- list(
|
|||
|
stdout = tryCatch(
|
|||
|
suppressWarnings(win2unix(read_char(out))),
|
|||
|
error = function(e) ""
|
|||
|
),
|
|||
|
stderr = tryCatch(
|
|||
|
suppressWarnings(win2unix(read_char(err))),
|
|||
|
error = function(e) ""
|
|||
|
),
|
|||
|
status = res
|
|||
|
)
|
|||
|
|
|||
|
if (error && res$status != 0) {
|
|||
|
stop("Command ", command, " failed ", res$stderr)
|
|||
|
}
|
|||
|
|
|||
|
if (! quiet) {
|
|||
|
if (! identical(res$stdout, NA_character_)) cat(res$stdout)
|
|||
|
if (! identical(res$stderr, NA_character_)) cat(res$stderr)
|
|||
|
}
|
|||
|
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
win2unix <- function(str) {
|
|||
|
gsub("\r\n", "\n", str, fixed = TRUE)
|
|||
|
}
|
|||
|
|
|||
|
read_char <- function(path, ...) {
|
|||
|
readChar(path, nchars = file.info(path)$size, ...)
|
|||
|
}
|
|||
|
# Contents of R/system_requirements.R
|
|||
|
DEFAULT_RSPM_REPO_ID <- "1" # cran
|
|||
|
DEFAULT_RSPM <- "https://packagemanager.posit.co"
|
|||
|
|
|||
|
#' Query the system requirements for a package (and its dependencies)
|
|||
|
#'
|
|||
|
#' Returns a character vector of commands to run that will install system
|
|||
|
#' requirements for the queried operating system.
|
|||
|
#'
|
|||
|
#' @param os,os_release The operating system and operating system release version, see
|
|||
|
#' <https://github.com/rstudio/r-system-requirements#operating-systems> for the
|
|||
|
#' list of supported operating systems.
|
|||
|
#'
|
|||
|
#' If `os_release` is `NULL`, `os` must consist of the operating system
|
|||
|
#' and the version separated by a dash, e.g. `"ubuntu-18.04"`.
|
|||
|
#' @param path The path to the dev package's root directory.
|
|||
|
#' @param package CRAN package name(s) to lookup system requirements for. If not
|
|||
|
#' `NULL`, this is used and `path` is ignored.
|
|||
|
#' @param curl The location of the curl binary on your system.
|
|||
|
#' @return A character vector of commands needed to install the system requirements for the package.
|
|||
|
#' @export
|
|||
|
system_requirements <- function(os, os_release = NULL, path = ".", package = NULL, curl = Sys.which("curl")) {
|
|||
|
if (is.null(os_release)) {
|
|||
|
os_release <- strsplit(os, "-", fixed = TRUE)[[1]]
|
|||
|
if (length(os_release) != 2) {
|
|||
|
stop("If os_release is missing, os must consist of name and release.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
os <- os_release[[1]]
|
|||
|
os_release <- os_release[[2]]
|
|||
|
}
|
|||
|
|
|||
|
os_versions <- supported_os_versions()
|
|||
|
|
|||
|
os <- match.arg(os, names(os_versions))
|
|||
|
|
|||
|
os_release <- match.arg(os_release, os_versions[[os]])
|
|||
|
|
|||
|
if (!nzchar(curl)) {
|
|||
|
stop("`curl` must be on the `PATH`.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
rspm <- Sys.getenv("RSPM_ROOT", DEFAULT_RSPM)
|
|||
|
rspm_repo_id <- Sys.getenv("RSPM_REPO_ID", DEFAULT_RSPM_REPO_ID)
|
|||
|
rspm_repo_url <- sprintf("%s/__api__/repos/%s", rspm, rspm_repo_id)
|
|||
|
|
|||
|
if (!is.null(package)) {
|
|||
|
res <- system2(
|
|||
|
curl,
|
|||
|
args = c(
|
|||
|
"--silent",
|
|||
|
"-L",
|
|||
|
shQuote(sprintf("%s/sysreqs?all=false&pkgname=%s&distribution=%s&release=%s",
|
|||
|
rspm_repo_url,
|
|||
|
paste(package, collapse = "&pkgname="),
|
|||
|
os,
|
|||
|
os_release)
|
|||
|
)),
|
|||
|
stdout = TRUE
|
|||
|
)
|
|||
|
res <- json$parse(res)
|
|||
|
if (!is.null(res$error)) {
|
|||
|
stop(res$error)
|
|||
|
}
|
|||
|
pre_install <- unique(unlist(c(res[["pre_install"]], lapply(res[["requirements"]], `[[`, c("requirements", "pre_install")))))
|
|||
|
install_scripts <- unique(unlist(c(res[["install_scripts"]], lapply(res[["requirements"]], `[[`, c("requirements", "install_scripts")))))
|
|||
|
} else {
|
|||
|
desc_file <- normalizePath(file.path(path, "DESCRIPTION"), mustWork = FALSE)
|
|||
|
if (!file.exists(desc_file)) {
|
|||
|
stop("`", path, "` must contain a package.", call. = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
res <- system2(
|
|||
|
curl,
|
|||
|
args = c(
|
|||
|
"--silent",
|
|||
|
"-L",
|
|||
|
"--data-binary",
|
|||
|
shQuote(paste0("@", desc_file)),
|
|||
|
shQuote(sprintf("%s/sysreqs?distribution=%s&release=%s&suggests=true",
|
|||
|
rspm_repo_url,
|
|||
|
os,
|
|||
|
os_release)
|
|||
|
)
|
|||
|
),
|
|||
|
stdout = TRUE
|
|||
|
)
|
|||
|
res <- json$parse(res)
|
|||
|
if (!is.null(res$error)) {
|
|||
|
stop(res$error)
|
|||
|
}
|
|||
|
|
|||
|
pre_install <- unique(unlist(c(res[["pre_install"]], lapply(res[["dependencies"]], `[[`, "pre_install"))))
|
|||
|
install_scripts <- unique(unlist(c(res[["install_scripts"]], lapply(res[["dependencies"]], `[[`, "install_scripts"))))
|
|||
|
}
|
|||
|
|
|||
|
as.character(c(pre_install, install_scripts))
|
|||
|
}
|
|||
|
|
|||
|
# Adapted from https://github.com/rstudio/r-system-requirements/blob/master/systems.json
|
|||
|
# OSs commented out are not currently supported by the API
|
|||
|
supported_os_versions <- function() {
|
|||
|
list(
|
|||
|
#"debian" = c("8", "9"),
|
|||
|
"ubuntu" = c("14.04", "16.04", "18.04", "20.04", "22.04"),
|
|||
|
"centos" = c("6", "7", "8"),
|
|||
|
"redhat" = c("6", "7", "8"),
|
|||
|
"opensuse" = c("42.3", "15.0"),
|
|||
|
"sle" = c("12.3", "15.0")
|
|||
|
#"windows" = c("")
|
|||
|
)
|
|||
|
}
|
|||
|
# Contents of R/utils.R
|
|||
|
|
|||
|
`%||%` <- function (a, b) if (!is.null(a)) a else b
|
|||
|
|
|||
|
`%:::%` <- function (p, f) get(f, envir = asNamespace(p))
|
|||
|
|
|||
|
`%::%` <- function (p, f) get(f, envir = asNamespace(p))
|
|||
|
|
|||
|
viapply <- function(X, FUN, ..., USE.NAMES = TRUE) {
|
|||
|
vapply(X, FUN, integer(1L), ..., USE.NAMES = USE.NAMES)
|
|||
|
}
|
|||
|
|
|||
|
vlapply <- function(X, FUN, ..., USE.NAMES = TRUE) {
|
|||
|
vapply(X, FUN, logical(1L), ..., USE.NAMES = USE.NAMES)
|
|||
|
}
|
|||
|
|
|||
|
rcmd <- function(cmd, args, path = R.home("bin"), quiet, fail_on_status = TRUE) {
|
|||
|
if (os_type() == "windows") {
|
|||
|
real_cmd <- file.path(path, "Rcmd.exe")
|
|||
|
args <- c(cmd, args)
|
|||
|
} else {
|
|||
|
real_cmd <- file.path(path, "R")
|
|||
|
args <- c("CMD", cmd, args)
|
|||
|
}
|
|||
|
|
|||
|
stdoutfile <- tempfile()
|
|||
|
stderrfile <- tempfile()
|
|||
|
on.exit(unlink(c(stdoutfile, stderrfile), recursive = TRUE), add = TRUE)
|
|||
|
status <- system2(real_cmd, args, stderr = stderrfile, stdout = stdoutfile)
|
|||
|
out <- tryCatch(readLines(stdoutfile, warn = FALSE), error = function(x) "")
|
|||
|
err <- tryCatch(readLines(stderrfile, warn = FALSE), error = function(x) "")
|
|||
|
|
|||
|
if (fail_on_status && status != 0) {
|
|||
|
cat("STDOUT:\n")
|
|||
|
cat(out, sep = "\n")
|
|||
|
cat("STDERR:\n")
|
|||
|
cat(err, sep = "\n")
|
|||
|
stop(sprintf("Error running '%s' (status '%i')", cmd, status), call. = FALSE)
|
|||
|
}
|
|||
|
if (!quiet) {
|
|||
|
cat(out, sep = "\n")
|
|||
|
}
|
|||
|
|
|||
|
list(stdout = out, stderr = err, status = status)
|
|||
|
}
|
|||
|
|
|||
|
is_bioconductor <- function(x) {
|
|||
|
!is.null(x$biocviews)
|
|||
|
}
|
|||
|
|
|||
|
trim_ws <- function(x) {
|
|||
|
gsub("^[[:space:]]+|[[:space:]]+$", "", x)
|
|||
|
}
|
|||
|
|
|||
|
set_envvar <- function(envs) {
|
|||
|
if (length(envs) == 0) return()
|
|||
|
|
|||
|
stopifnot(is.named(envs))
|
|||
|
|
|||
|
old <- Sys.getenv(names(envs), names = TRUE, unset = NA)
|
|||
|
set <- !is.na(envs)
|
|||
|
|
|||
|
both_set <- set & !is.na(old)
|
|||
|
|
|||
|
if (any(set)) do.call("Sys.setenv", as.list(envs[set]))
|
|||
|
if (any(!set)) Sys.unsetenv(names(envs)[!set])
|
|||
|
|
|||
|
invisible(old)
|
|||
|
}
|
|||
|
|
|||
|
with_envvar <- function(new, code) {
|
|||
|
old <- set_envvar(new)
|
|||
|
on.exit(set_envvar(old))
|
|||
|
force(code)
|
|||
|
}
|
|||
|
|
|||
|
is.named <- function(x) {
|
|||
|
!is.null(names(x)) && all(names(x) != "")
|
|||
|
}
|
|||
|
|
|||
|
pkg_installed <- function(pkg) {
|
|||
|
|
|||
|
if (pkg %in% loadedNamespaces()) {
|
|||
|
TRUE
|
|||
|
} else if (requireNamespace(pkg, quietly = TRUE)) {
|
|||
|
try(unloadNamespace(pkg))
|
|||
|
TRUE
|
|||
|
} else {
|
|||
|
FALSE
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
has_package <- function(pkg) {
|
|||
|
if (pkg %in% loadedNamespaces()) {
|
|||
|
TRUE
|
|||
|
} else {
|
|||
|
requireNamespace(pkg, quietly = TRUE)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
with_something <- function(set, reset = set) {
|
|||
|
function(new, code) {
|
|||
|
old <- set(new)
|
|||
|
on.exit(reset(old))
|
|||
|
force(code)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
in_dir <- with_something(setwd)
|
|||
|
|
|||
|
get_r_version <- function() {
|
|||
|
paste(R.version$major, sep = ".", R.version$minor)
|
|||
|
}
|
|||
|
|
|||
|
set_options <- function(x) {
|
|||
|
do.call(options, as.list(x))
|
|||
|
}
|
|||
|
|
|||
|
with_options <- with_something(set_options)
|
|||
|
|
|||
|
# Read the current user .Rprofile. Here is the order it is searched, from
|
|||
|
# ?Startup
|
|||
|
#
|
|||
|
# 'R_PROFILE_USER’ environment variable (and tilde expansion
|
|||
|
# will be performed). If this is unset, a file called ‘.Rprofile’
|
|||
|
# is searched for in the current directory or in the user's home
|
|||
|
# directory (in that order). The user profile file is sourced into
|
|||
|
# the workspace.
|
|||
|
read_rprofile_user <- function() {
|
|||
|
f <- normalizePath(Sys.getenv("R_PROFILE_USER", ""), mustWork = FALSE)
|
|||
|
if (file.exists(f)) {
|
|||
|
return(readLines(f))
|
|||
|
}
|
|||
|
|
|||
|
f <- normalizePath("~/.Rprofile", mustWork = FALSE)
|
|||
|
if (file.exists(f)) {
|
|||
|
return(readLines(f))
|
|||
|
}
|
|||
|
|
|||
|
character()
|
|||
|
}
|
|||
|
|
|||
|
with_rprofile_user <- function(new, code) {
|
|||
|
temp_rprofile <- tempfile()
|
|||
|
on.exit(unlink(temp_rprofile), add = TRUE)
|
|||
|
|
|||
|
writeLines(c(read_rprofile_user(), new), temp_rprofile)
|
|||
|
with_envvar(c("R_PROFILE_USER" = temp_rprofile), {
|
|||
|
force(code)
|
|||
|
})
|
|||
|
}
|
|||
|
|
|||
|
## There are two kinds of tar on windows, one needs --force-local
|
|||
|
## not to interpret : characters, the other does not. We try both ways.
|
|||
|
|
|||
|
untar <- function(tarfile, ...) {
|
|||
|
if (os_type() == "windows") {
|
|||
|
|
|||
|
tarhelp <- tryCatch(
|
|||
|
system2("tar", "--help", stdout = TRUE, stderr = TRUE),
|
|||
|
error = function(x) "")
|
|||
|
|
|||
|
if (any(grepl("--force-local", tarhelp))) {
|
|||
|
status <- try(
|
|||
|
suppressWarnings(utils::untar(tarfile, extras = "--force-local", ...)),
|
|||
|
silent = TRUE)
|
|||
|
if (! is_tar_error(status)) {
|
|||
|
return(status)
|
|||
|
|
|||
|
} else {
|
|||
|
message("External tar failed with `--force-local`, trying without")
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
utils::untar(tarfile, ...)
|
|||
|
}
|
|||
|
|
|||
|
is_tar_error <- function(status) {
|
|||
|
inherits(status, "try-error") ||
|
|||
|
is_error_status(status) ||
|
|||
|
is_error_status(attr(status, "status"))
|
|||
|
}
|
|||
|
|
|||
|
is_error_status <- function(x) {
|
|||
|
is.numeric(x) && length(x) > 0 && !is.na(x) && x != 0
|
|||
|
}
|
|||
|
|
|||
|
os_type <- function() {
|
|||
|
.Platform$OS.type
|
|||
|
}
|
|||
|
|
|||
|
sys_type <- function() {
|
|||
|
if (.Platform$OS.type == "windows") {
|
|||
|
"windows"
|
|||
|
} else if (Sys.info()["sysname"] == "Darwin") {
|
|||
|
"macos"
|
|||
|
} else if (Sys.info()["sysname"] == "Linux") {
|
|||
|
"linux"
|
|||
|
} else if (.Platform$OS.type == "unix") {
|
|||
|
"unix"
|
|||
|
} else {
|
|||
|
stop("Unknown OS")
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
is_dir <- function(path) {
|
|||
|
file.info(path)$isdir
|
|||
|
}
|
|||
|
|
|||
|
untar_description <- function(tarball, dir = tempfile()) {
|
|||
|
files <- untar(tarball, list = TRUE)
|
|||
|
desc <- grep("^[^/]+/DESCRIPTION$", files, value = TRUE)
|
|||
|
if (length(desc) < 1) stop("No 'DESCRIPTION' file in package")
|
|||
|
untar(tarball, desc, exdir = dir)
|
|||
|
file.path(dir, desc)
|
|||
|
}
|
|||
|
|
|||
|
## copied from rematch2@180fb61
|
|||
|
re_match <- function(text, pattern, perl = TRUE, ...) {
|
|||
|
|
|||
|
stopifnot(is.character(pattern), length(pattern) == 1, !is.na(pattern))
|
|||
|
text <- as.character(text)
|
|||
|
|
|||
|
match <- regexpr(pattern, text, perl = perl, ...)
|
|||
|
|
|||
|
start <- as.vector(match)
|
|||
|
length <- attr(match, "match.length")
|
|||
|
end <- start + length - 1L
|
|||
|
|
|||
|
matchstr <- substring(text, start, end)
|
|||
|
matchstr[ start == -1 ] <- NA_character_
|
|||
|
|
|||
|
res <- data.frame(
|
|||
|
stringsAsFactors = FALSE,
|
|||
|
.text = text,
|
|||
|
.match = matchstr
|
|||
|
)
|
|||
|
|
|||
|
if (!is.null(attr(match, "capture.start"))) {
|
|||
|
|
|||
|
gstart <- attr(match, "capture.start")
|
|||
|
glength <- attr(match, "capture.length")
|
|||
|
gend <- gstart + glength - 1L
|
|||
|
|
|||
|
groupstr <- substring(text, gstart, gend)
|
|||
|
groupstr[ gstart == -1 ] <- NA_character_
|
|||
|
dim(groupstr) <- dim(gstart)
|
|||
|
|
|||
|
res <- cbind(groupstr, res, stringsAsFactors = FALSE)
|
|||
|
}
|
|||
|
|
|||
|
names(res) <- c(attr(match, "capture.names"), ".text", ".match")
|
|||
|
class(res) <- c("tbl_df", "tbl", class(res))
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
is_standalone <- function() {
|
|||
|
isTRUE(config_val_to_logical(Sys.getenv("R_REMOTES_STANDALONE", "false")))
|
|||
|
}
|
|||
|
|
|||
|
# This code is adapted from the perl MIME::Base64 module https://perldoc.perl.org/MIME/Base64.html
|
|||
|
# https://github.com/gisle/mime-base64/blob/cf23d49e517c6ed8f4b24295f63721e8c9935010/Base64.xs#L197
|
|||
|
|
|||
|
XX <- 255L
|
|||
|
EQ <- 254L
|
|||
|
INVALID <- XX
|
|||
|
|
|||
|
index_64 <- as.integer(c(
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,62, XX,XX,XX,63,
|
|||
|
52,53,54,55, 56,57,58,59, 60,61,XX,XX, XX,EQ,XX,XX,
|
|||
|
XX, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14,
|
|||
|
15,16,17,18, 19,20,21,22, 23,24,25,XX, XX,XX,XX,XX,
|
|||
|
XX,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
|
|||
|
41,42,43,44, 45,46,47,48, 49,50,51,XX, XX,XX,XX,XX,
|
|||
|
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX,
|
|||
|
XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX, XX,XX,XX,XX
|
|||
|
))
|
|||
|
|
|||
|
base64_decode <- function(x) {
|
|||
|
if (is.character(x)) {
|
|||
|
x <- charToRaw(x)
|
|||
|
}
|
|||
|
|
|||
|
len <- length(x)
|
|||
|
idx <- 1
|
|||
|
c <- integer(4)
|
|||
|
out <- raw()
|
|||
|
while(idx <= len) {
|
|||
|
i <- 1
|
|||
|
while(i <= 4) {
|
|||
|
uc <- index_64[[as.integer(x[[idx]]) + 1L]]
|
|||
|
idx <- idx + 1
|
|||
|
if (uc != INVALID) {
|
|||
|
c[[i]] <- uc
|
|||
|
i <- i + 1
|
|||
|
}
|
|||
|
if (idx > len) {
|
|||
|
if (i <= 4) {
|
|||
|
if (i <= 2) return(rawToChar(out))
|
|||
|
if (i == 3) {
|
|||
|
c[[3]] <- EQ
|
|||
|
c[[4]] <- EQ
|
|||
|
}
|
|||
|
break
|
|||
|
}
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
if (c[[1]] == EQ || c[[2]] == EQ) {
|
|||
|
break
|
|||
|
}
|
|||
|
|
|||
|
#print(sprintf("c1=%d,c2=%d,c3=%d,c4=%d\n", c[1],c[2],c[3],c[4]))
|
|||
|
|
|||
|
out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(c[[1]], 2L), bitwShiftR(bitwAnd(c[[2]], 0x30), 4L)))
|
|||
|
|
|||
|
if (c[[3]] == EQ) {
|
|||
|
break
|
|||
|
}
|
|||
|
|
|||
|
out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[2]], 0x0F), 4L), bitwShiftR(bitwAnd(c[[3]], 0x3C), 2L)))
|
|||
|
|
|||
|
if (c[[4]] == EQ) {
|
|||
|
break
|
|||
|
}
|
|||
|
|
|||
|
out[[length(out) + 1]] <- as.raw(bitwOr(bitwShiftL(bitwAnd(c[[3]], 0x03), 6L), c[[4]]))
|
|||
|
}
|
|||
|
rawToChar(out)
|
|||
|
}
|
|||
|
|
|||
|
basis64 <- charToRaw(paste(c(LETTERS, letters, 0:9, "+", "/"),
|
|||
|
collapse = ""))
|
|||
|
|
|||
|
base64_encode <- function(x) {
|
|||
|
if (is.character(x)) {
|
|||
|
x <- charToRaw(x)
|
|||
|
}
|
|||
|
|
|||
|
len <- length(x)
|
|||
|
rlen <- floor((len + 2L) / 3L) * 4L
|
|||
|
out <- raw(rlen)
|
|||
|
ip <- op <- 1L
|
|||
|
c <- integer(4)
|
|||
|
|
|||
|
while (len > 0L) {
|
|||
|
c[[1]] <- as.integer(x[[ip]])
|
|||
|
ip <- ip + 1L
|
|||
|
if (len > 1L) {
|
|||
|
c[[2]] <- as.integer(x[ip])
|
|||
|
ip <- ip + 1L
|
|||
|
} else {
|
|||
|
c[[2]] <- 0L
|
|||
|
}
|
|||
|
out[op] <- basis64[1 + bitwShiftR(c[[1]], 2L)]
|
|||
|
op <- op + 1L
|
|||
|
out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[1]], 3L), 4L),
|
|||
|
bitwShiftR(bitwAnd(c[[2]], 240L), 4L))]
|
|||
|
op <- op + 1L
|
|||
|
|
|||
|
if (len > 2) {
|
|||
|
c[[3]] <- as.integer(x[ip])
|
|||
|
ip <- ip + 1L
|
|||
|
out[op] <- basis64[1 + bitwOr(bitwShiftL(bitwAnd(c[[2]], 15L), 2L),
|
|||
|
bitwShiftR(bitwAnd(c[[3]], 192L), 6L))]
|
|||
|
op <- op + 1L
|
|||
|
out[op] <- basis64[1 + bitwAnd(c[[3]], 63)]
|
|||
|
op <- op + 1L
|
|||
|
|
|||
|
} else if (len == 2) {
|
|||
|
out[op] <- basis64[1 + bitwShiftL(bitwAnd(c[[2]], 15L), 2L)]
|
|||
|
op <- op + 1L
|
|||
|
out[op] <- charToRaw("=")
|
|||
|
op <- op + 1L
|
|||
|
|
|||
|
} else { ## len == 1
|
|||
|
out[op] <- charToRaw("=")
|
|||
|
op <- op + 1L
|
|||
|
out[op] <- charToRaw("=")
|
|||
|
op <- op + 1L
|
|||
|
|
|||
|
}
|
|||
|
len <- len - 3L
|
|||
|
}
|
|||
|
|
|||
|
rawToChar(out)
|
|||
|
}
|
|||
|
|
|||
|
build_url <- function(host, ...) {
|
|||
|
download_url(do.call(file.path, as.list(c(host, ...))))
|
|||
|
}
|
|||
|
|
|||
|
download_url <- function(url) {
|
|||
|
if (!grepl("^[[:alpha:]]+://", url)) {
|
|||
|
scheme <- if (download_method_secure()) "https://" else "http://"
|
|||
|
return(paste0(scheme, url))
|
|||
|
}
|
|||
|
url
|
|||
|
}
|
|||
|
|
|||
|
is_na <- function(x) {
|
|||
|
length(x) == 1 && is.na(x)
|
|||
|
}
|
|||
|
|
|||
|
dir.exists <- function(paths) {
|
|||
|
if (getRversion() < "3.2") {
|
|||
|
x <- base::file.info(paths)$isdir
|
|||
|
!is.na(x) & x
|
|||
|
} else {
|
|||
|
("base" %::% "dir.exists")(paths)
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
is_binary_pkg <- function(x) {
|
|||
|
file_ext(x) %in% c("tgz", "zip")
|
|||
|
}
|
|||
|
|
|||
|
format_str <- function(x, width = Inf, trim = TRUE, justify = "none", ...) {
|
|||
|
x <- format(x, trim = trim, justify = justify, ...)
|
|||
|
|
|||
|
if (width < Inf) {
|
|||
|
x_width <- nchar(x, "width")
|
|||
|
too_wide <- x_width > width
|
|||
|
if (any(too_wide)) {
|
|||
|
x[too_wide] <- paste0(substr(x[too_wide], 1, width - 3), "...")
|
|||
|
}
|
|||
|
}
|
|||
|
x
|
|||
|
}
|
|||
|
|
|||
|
warn_for_potential_errors <- function() {
|
|||
|
if (sys_type() == "windows" && grepl(" ", R.home()) &&
|
|||
|
getRversion() <= "3.4.2") {
|
|||
|
warning(immediate. = TRUE,
|
|||
|
"\n!!! Installation will probably fail!\n",
|
|||
|
"This version of R has trouble with building and installing packages if\n",
|
|||
|
"the R HOME directory (currently '", R.home(), "')\n",
|
|||
|
"has space characters. Possible workarounds include:\n",
|
|||
|
"- installing R to the C: drive,\n",
|
|||
|
"- installing it into a path without a space, or\n",
|
|||
|
"- creating a drive letter for R HOME via the `subst` windows command, and\n",
|
|||
|
" starting R from the new drive.\n",
|
|||
|
"See also https://github.com/r-lib/remotes/issues/98\n")
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
# Return all directories in the input paths
|
|||
|
directories <- function(paths) {
|
|||
|
dirs <- unique(dirname(paths))
|
|||
|
out <- dirs[dirs != "."]
|
|||
|
while(length(dirs) > 0 && any(dirs != ".")) {
|
|||
|
out <- unique(c(out, dirs[dirs != "."]))
|
|||
|
dirs <- unique(dirname(dirs))
|
|||
|
}
|
|||
|
sort(out)
|
|||
|
}
|
|||
|
|
|||
|
in_r_build_ignore <- function(paths, ignore_file) {
|
|||
|
ignore <- ("tools" %:::% "get_exclude_patterns")()
|
|||
|
|
|||
|
if (file.exists(ignore_file)) {
|
|||
|
ignore <- c(ignore, readLines(ignore_file, warn = FALSE))
|
|||
|
}
|
|||
|
|
|||
|
matches_ignores <- function(x) {
|
|||
|
any(vlapply(ignore, grepl, x, perl = TRUE, ignore.case = TRUE))
|
|||
|
}
|
|||
|
|
|||
|
# We need to search for the paths as well as directories in the path, so
|
|||
|
# `^foo$` matches `foo/bar`
|
|||
|
should_ignore <- function(path) {
|
|||
|
any(vlapply(c(path, directories(path)), matches_ignores))
|
|||
|
}
|
|||
|
|
|||
|
vlapply(paths, should_ignore)
|
|||
|
}
|
|||
|
|
|||
|
dev_split_ref <- function(x) {
|
|||
|
re_match(x, "^(?<pkg>[^@#]+)(?<ref>[@#].*)?$")
|
|||
|
}
|
|||
|
|
|||
|
get_json_sha <- function(text) {
|
|||
|
m <- regexpr(paste0('"sha"\\s*:\\s*"(\\w+)"'), text, perl = TRUE)
|
|||
|
if (all(m == -1)) {
|
|||
|
return(json$parse(text)$sha %||% NA_character_)
|
|||
|
}
|
|||
|
|
|||
|
start <- attr(m, "capture.start")
|
|||
|
end <- start + attr(m, "capture.length") - 1L
|
|||
|
substring(text, start, end)
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
# from tools:::config_val_to_logical
|
|||
|
config_val_to_logical <- function (val) {
|
|||
|
v <- tolower(val)
|
|||
|
if (v %in% c("1", "yes", "true"))
|
|||
|
TRUE
|
|||
|
else if (v %in% c("0", "no", "false"))
|
|||
|
FALSE
|
|||
|
else {
|
|||
|
NA
|
|||
|
}
|
|||
|
}
|
|||
|
|
|||
|
raw_to_char_utf8 <- function(x) {
|
|||
|
res <- rawToChar(x)
|
|||
|
Encoding(res) <- "UTF-8"
|
|||
|
res
|
|||
|
}
|
|||
|
|
|||
|
|
|||
|
## Standalone mode, make sure that we restore the env var on exit
|
|||
|
old <- Sys.getenv("R_REMOTES_STANDALONE", NA_character_)
|
|||
|
Sys.setenv("R_REMOTES_STANDALONE" = "true")
|
|||
|
if (is.na(old)) {
|
|||
|
on.exit(Sys.unsetenv("R_REMOTES_STANDALONE"), add = TRUE)
|
|||
|
} else {
|
|||
|
on.exit(Sys.setenv("R_REMOTES_STANDALONE" = old), add = TRUE)
|
|||
|
}
|
|||
|
|
|||
|
install_github(...)
|
|||
|
|
|||
|
}
|