add support for Bioconductor #58 #59

Merged
merged 29 commits on Feb 21, 2023
Commits
29 commits
1c6331b
added bioc versions
schochastics Feb 18, 2023
8c34a79
fixed ordering error in cahced_biocver
schochastics Feb 18, 2023
bd37ca5
added query_biocver
schochastics Feb 19, 2023
0907a90
added bioc_package_history
schochastics Feb 19, 2023
160d381
cached_biocver to data.frame
schochastics Feb 19, 2023
397c891
fixed missing supported_os
schochastics Feb 19, 2023
80215bb
added biocver generation
schochastics Feb 19, 2023
7f49b93
check if pkg is bioc
schochastics Feb 19, 2023
2c4da10
fix indention
schochastics Feb 19, 2023
dce1271
added is.bioc and adjusted normalize_pkg
schochastics Feb 19, 2023
db5b2f2
added query_snapshot_dependencies_bioc
schochastics Feb 19, 2023
bceff5a
finalized resolve
schochastics Feb 19, 2023
2a4feb9
Merge remote-tracking branch 'upstream/v0.2' into bioc
schochastics Feb 19, 2023
bcab080
added bioc support for export_rang
schochastics Feb 19, 2023
06564a1
probably fixed sysreqs for bioc
schochastics Feb 20, 2023
93ece47
fixed sysreq return value
schochastics Feb 20, 2023
9f3cd88
fixed typo
schochastics Feb 20, 2023
bbdde48
added resolve test for bioc
schochastics Feb 20, 2023
37defb3
added test for bioc version
schochastics Feb 20, 2023
957fc7d
missed the dot
schochastics Feb 20, 2023
9709762
added test for new bioc release
schochastics Feb 20, 2023
5a2c69d
test dockerize with bioc
schochastics Feb 20, 2023
04c70ad
better bioc test data
schochastics Feb 20, 2023
e5997a2
added package caching for bioc
schochastics Feb 20, 2023
29e3b7f
set query_bioc TRUE if bioc detected
schochastics Feb 20, 2023
1a04a3b
added cache test for bioc
schochastics Feb 20, 2023
0c5d1d1
cache testing,fixed header/footer, always write bioc version
schochastics Feb 20, 2023
15ab0a0
integrated bioc into as_pkgref
schochastics Feb 21, 2023
b723ed6
get bioc versions from online
schochastics Feb 21, 2023
3 changes: 3 additions & 0 deletions R/as_pkgrefs.R
@@ -39,6 +39,9 @@ as_pkgrefs.sessionInfo <- function(x, ...) {
if ("GithubRepo" %in% names(packageDescription)) {
return(paste0("github::", packageDescription[["GithubUsername"]], "/", packageDescription[["GithubRepo"]]))
}
if (isTRUE(grepl("bioconductor", packageDescription[["URL"]]))) {
return(paste0("bioc::", handle))
}
## uncomment this when #57 is implemented
##if (basename(attr(packageDescription, "file")) == "DESCRIPTION") {
## probably load via devtools::load_all
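
A minimal usage sketch (not part of the diff) of how the URL-based detection above is expected to classify an installed package; the package name "limma" and its DESCRIPTION URL value are assumptions:

## illustrative only: classify a package handle via its DESCRIPTION URL field
desc <- utils::packageDescription("limma")  # assumes limma is installed locally
if (isTRUE(grepl("bioconductor", desc[["URL"]]))) {
  ref <- paste0("bioc::", "limma")  # expected: "bioc::limma"
} else {
  ref <- paste0("cran::", "limma")
}
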
51 changes: 39 additions & 12 deletions R/installation.R
@@ -117,7 +117,7 @@
}

.write_rang_as_comment <- function(rang, con, path, verbose, lib,
cran_mirror, check_cran_mirror) {
cran_mirror, check_cran_mirror, bioc_mirror) {
cat("## ## To reconstruct this file, please install version",
as.character(utils::packageVersion("rang")), "of `rang` and run:\n", file = con)
cat("## rang <- \n", file = con)
@@ -130,10 +130,15 @@
} else {
lib_as_character <- paste0("\"", lib, "\"")
}
if (!is.null(rang$bioc_version)) {
bioc_txt <- paste0(", bioc_mirror = \"", bioc_mirror, rang$bioc_version, "/\"")
} else {
bioc_txt <- NULL
}
writeLines(paste0("## rang::export_rang(rang = rang, path = \"", path, "\", verbose = ",
as.character(verbose), ", lib = ", lib_as_character,
", cran_mirror = \"", cran_mirror, "\", check_cran_mirror = ",
as.character(check_cran_mirror), ")"), con = con)
as.character(check_cran_mirror), bioc_txt, ")"), con = con)
}

.query_mirror_validity <- function(mirror) {
@@ -179,6 +184,15 @@
}
}

.cache_pkg_bioc <- function(x, version, cache_dir, bioc_mirror, bioc_version, verbose) {
url <- paste(bioc_mirror, bioc_version, "/bioc/src/contrib/", x, "_", version, ".tar.gz", sep = "")
tarball_path <- file.path(cache_dir, paste(x, "_", version, ".tar.gz", sep = ""))
suppressWarnings(utils::download.file(url, destfile = tarball_path, quiet = !verbose))
if (!file.exists(tarball_path)) {
warning(names(x), " (", x, ") can't be cached.")
}
}
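
A hedged sketch of the tarball URL that .cache_pkg_bioc() builds, assuming the standard Bioconductor repository layout; the package name and version are illustrative:

## illustrative only: x = "limma", version = "3.54.0",
## bioc_mirror = "https://bioconductor.org/packages/", bioc_version = "3.16"
paste0("https://bioconductor.org/packages/", "3.16", "/bioc/src/contrib/",
       "limma", "_", "3.54.0", ".tar.gz")
## -> "https://bioconductor.org/packages/3.16/bioc/src/contrib/limma_3.54.0.tar.gz"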

.cache_pkg_github <- function(x, version, handle, source, uid, cache_dir, verbose) {
sha <- uid
tarball_path <- file.path(cache_dir, paste("raw_", x, "_", version, ".tar.gz", sep = ""))
@@ -189,7 +203,7 @@
}
}

.cache_pkgs <- function(rang, output_dir, cran_mirror, verbose) {
.cache_pkgs <- function(rang, output_dir, cran_mirror, bioc_mirror, verbose) {
installation_order <- .generate_installation_order(rang)
cache_dir <- file.path(output_dir, "cache")
if (!dir.exists(cache_dir)) {
@@ -211,6 +225,10 @@
source = source, uid = uid,
cache_dir = cache_dir, verbose = verbose)
}
if (source == "bioc") {
.cache_pkg_bioc(x = x, version = version, cache_dir = cache_dir,
bioc_mirror = bioc_mirror, bioc_version = rang$bioc_version, verbose = verbose)
}
}
## For #14, cache R source in the future here
invisible(output_dir)
@@ -273,6 +291,7 @@
#' @param lib character, pass to [install.packages()]. By default, it is NA (to install the packages to the default location)
#' @param cran_mirror character, which CRAN mirror to use
#' @param check_cran_mirror logical, whether to check the CRAN mirror
#' @param bioc_mirror character, which Bioconductor mirror to use
#' @return `path`, invisibly
#' @details The idea behind this is to determine the installation order of R packages locally. Then, the installation script can be deployed to another
#' fresh R session to install R packages. [dockerize()] is a more reasonable way because a fresh R session with all system requirements
@@ -289,7 +308,8 @@
#' }
#' }
export_rang <- function(rang, path, rang_as_comment = TRUE, verbose = TRUE, lib = NA,
cran_mirror = "https://cran.r-project.org/", check_cran_mirror = TRUE) {
cran_mirror = "https://cran.r-project.org/", check_cran_mirror = TRUE,
bioc_mirror = "https://bioconductor.org/packages/") {
if (utils::compareVersion(rang$r_version, "2.1") == -1) {
stop("`export_rang` doesn't support this R version (yet).")
}
@@ -315,12 +335,16 @@ export_rang <- function(rang, path, rang_as_comment = TRUE, verbose = TRUE, lib
} else {
cat(paste0("lib <- \"", as.character(lib), "\"\n"), file = con)
}
cat(paste0("cran_mirror <- \"", cran_mirror, "\"\n"), file = con)
if (!is.null(rang$bioc_version)) {
cat(paste0("bioc_mirror <- \"", bioc_mirror, rang$bioc_version, "/\"\n"), file = con)
}

writeLines(readLines(system.file("footer.R", package = "rang")), con = con)
if (isTRUE(rang_as_comment)) {
.write_rang_as_comment(rang = rang, con = con, path = path, verbose = verbose,
lib = lib, cran_mirror = cran_mirror,
check_cran_mirror = check_cran_mirror)
check_cran_mirror = check_cran_mirror, bioc_mirror = bioc_mirror)
}
close(con)
invisible(path)
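
For orientation, a hedged illustration of what the exported script gains when rang$bioc_version is present; the version "3.16", the path, and the default arguments are assumptions, not output produced here:

## extra mirror line written into the generated rang.R
cran_mirror <- "https://cran.r-project.org/"
bioc_mirror <- "https://bioconductor.org/packages/3.16/"
## reconstruction comment appended by .write_rang_as_comment()
## rang::export_rang(rang = rang, path = "rang.R", verbose = TRUE, lib = NA, cran_mirror = "https://cran.r-project.org/", check_cran_mirror = TRUE, bioc_mirror = "https://bioconductor.org/packages/3.16/")
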
@@ -354,7 +378,8 @@
#' @export
dockerize <- function(rang, output_dir, materials_dir = NULL, image = c("r-ver", "rstudio", "tidyverse", "verse", "geospatial"),
rang_as_comment = TRUE, cache = FALSE, verbose = TRUE, lib = NA,
cran_mirror = "https://cran.r-project.org/", check_cran_mirror = TRUE) {
cran_mirror = "https://cran.r-project.org/", check_cran_mirror = TRUE,
bioc_mirror = "https://bioconductor.org/packages/") {
if (missing(output_dir)) {
stop("You must provide `output_dir`.", call. = FALSE)
}
@@ -367,9 +392,11 @@ dockerize <- function(rang, output_dir, materials_dir = NULL, image = c("r-ver",
if (!is.null(materials_dir) && !(dir.exists(materials_dir))) {
stop(paste0("The folder ", materials_dir, " does not exist"), call. = FALSE)
}
if (isFALSE(all(grepl("^cran::", .extract_pkgrefs(rang)))) &&
utils::compareVersion(rang$r_version, "3.1") == -1 &&
isFALSE(cache)) {
need_cache <- (isTRUE(any(grepl("^github::", .extract_pkgrefs(rang)))) &&
utils::compareVersion(rang$r_version, "3.1") == -1) ||
(isTRUE(any(grepl("^bioc::", .extract_pkgrefs(rang)))) &&
utils::compareVersion(rang$r_version, "3.3") == -1)
if (isTRUE(need_cache) && isFALSE(cache)) {
stop("Non-CRAN packages must be cached for this R version: ", rang$r_version, ". Please set `cache` = TRUE.", call. = FALSE)
}
image <- match.arg(image)
@@ -380,9 +407,9 @@
rang_path <- file.path(output_dir, "rang.R")
export_rang(rang = rang, path = rang_path, rang_as_comment = rang_as_comment,
verbose = verbose, lib = lib, cran_mirror = cran_mirror,
check_cran_mirror = check_cran_mirror)
check_cran_mirror = check_cran_mirror, bioc_mirror = bioc_mirror)
if (isTRUE(cache)) {
.cache_pkgs(rang, output_dir, cran_mirror, verbose)
.cache_pkgs(rang, output_dir, cran_mirror, bioc_mirror, verbose)
}
if (utils::compareVersion(rang$r_version, "3.1") == -1) {
file.copy(system.file("compile_r.sh", package = "rang"), file.path(output_dir, "compile_r.sh"),
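
An end-to-end usage sketch of the Bioconductor path added in this pull request; the package names, snapshot date, and output directory are assumptions, and cache = TRUE additionally stores the resolved tarballs via .cache_pkgs():

## illustrative only, not part of the diff
library(rang)
x <- resolve(pkgs = c("cran::dplyr", "bioc::limma"), snapshot_date = "2022-12-01")
dockerize(x, output_dir = "rang_bioc_example", cache = TRUE)
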
50 changes: 49 additions & 1 deletion R/memo_misc.R
@@ -5,14 +5,51 @@ NULL

## one hr

.memo_search<- memoise::memoise(pkgsearch::cran_package_history, cache = cachem::cache_mem(max_age = 60 * 60))
.memo_search <- memoise::memoise(pkgsearch::cran_package_history, cache = cachem::cache_mem(max_age = 60 * 60))

.rver <- function() {
suppressWarnings(jsonlite::fromJSON(readLines("https://api.r-hub.io/rversions/r-versions"), simplifyVector = TRUE))
}

.memo_rver <- memoise::memoise(.rver, cache = cachem::cache_mem(max_age = 120 * 60))

.biocver <- function() {
url <- "https://bioconductor.org/config.yaml"
tag <- "release_dates"
txt <- readLines(url)
grps <- grep("^[^[:blank:]]", txt)

start <- match(grep(tag, txt), grps)
end <- ifelse(length(grps) < start + 1, length(txt), grps[start + 1] - 1)
map <- txt[seq(grps[start] + 1, end)]
map <- trimws(gsub("\"", "", sub(" #.*", "", map)))
pattern <- "(.*): (.*)"
bioc_ver <- sub(pattern, "\\1", map)
bioc_date <- anytime::anytime(sub(pattern, "\\2", map), tz = "UTC", asUTC = TRUE)
data.frame(version = bioc_ver, date = bioc_date)
}

.memo_biocver <- memoise::memoise(.biocver, cache = cachem::cache_mem(max_age = 120 * 60))
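
As a hedged sketch of how the memoised release table might be consumed elsewhere (the helper name and the example date are illustrative, not part of the diff), the newest Bioconductor release on or before a snapshot date could be looked up like this:

## illustrative only
.bioc_version_at <- function(snapshot_date) {
  biocver <- .memo_biocver()
  snapshot_date <- anytime::anytime(snapshot_date, tz = "UTC", asUTC = TRUE)
  candidates <- biocver[biocver$date <= snapshot_date, ]
  candidates$version[which.max(candidates$date)]
}
## .bioc_version_at("2022-12-01")  # the release current at that date, e.g. "3.16"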


.bioc_package_history <- function(bioc_version) {
if (bioc_version >= "2.0") {
con <- url(paste0("http://bioconductor.org/packages/", bioc_version, "/bioc/VIEWS"))
pkgs <- read.dcf(con)
close(con)
} else {
stop("Bioconductor versions <2.0 are not supported")
}
as.data.frame(pkgs)
}

.memo_search_bioc <- memoise::memoise(.bioc_package_history, cache = cachem::cache_mem(max_age = 60 * 60))

.query_sysreqs_rhub <- function() {
jsonlite::fromJSON("https://sysreqs.r-hub.io/list")
}

.memo_query_sysreqs_rhub <- memoise::memoise(.query_sysreqs_rhub, cache = cachem::cache_mem(max_age = 60 * 60))

## internal data generation
## ---
## os <- names(remotes:::supported_os_versions())
@@ -22,6 +59,17 @@
## attr(cached_rver, "newest_date") <- anytime::anytime(tail(cached_rver, n = 1)$date, tz = "UTC", asUTC = TRUE)
## usethis::use_data(supported_os, cached_rver, internal = TRUE, overwrite = TRUE)

## library(rvest)
## doc <- read_html("https://www.bioconductor.org/about/release-announcements/")
## cached_biocver <- html_table(doc)[[1]]
## cached_biocver$Date <- anytime::anytime(cached_biocver$Date, tz = "UTC", asUTC = TRUE)
## cached_biocver$`Software packages` <- NULL
## names(cached_biocver) <- c("version", "date", "rver")
## cached_biocver <- cached_biocver[order(cached_biocver$date), ]
## cached_biocver <- as.data.frame(cached_biocver)

## test data upgrade
## ---
## devtools::load_all()
41 changes: 35 additions & 6 deletions R/pkgref.R
@@ -58,6 +58,11 @@
grepl("/", pkg)
}

.is_bioc <- function(pkg, bioc_version) {
schochastics (Member Author) commented:

@chainsawriot This function requires the Bioconductor version to check whether the package is in a specific release. I couldn't find any other way to check this thus far, but it also requires adding bioc_version as a parameter to .normalize_pkg.

schochastics (Member Author) commented on Feb 19, 2023:

Suggestion for .normalize_pkg(): make bioc_version optional. If it is omitted, the package is assumed to be a CRAN package; if present, check whether it is a Bioconductor package:

.normalize_pkg <- function(pkg,bioc_version = NULL) {
  if (pkg == "" || is.na(pkg)) {
    stop("Invalid `pkg`.", call. = FALSE)
  }
  if (isTRUE(.is_github(pkg))) {
    if (isTRUE(grepl("github\\.com", pkg))) {
      pkg <- .extract_github_handle(pkg)
    }
  }
  if (isTRUE(.is_pkgref(pkg))) {
    return(.clean_suffixes(pkg))
  }
  if (isTRUE(.is_github(pkg))) {
    return(paste0("github::", .clean_suffixes(pkg)))
  }
  if(is.null(bioc_version)){
    return(paste0("cran::", .clean_suffixes(pkg)))
  } else{
    if(isTRUE(.is_bioc(pkg,bioc_version))){
      return(paste0("bioc::", .clean_suffixes(pkg)))
    } else{
      return(paste0("cran::", .clean_suffixes(pkg)))
    }
  }
}

bioc_pkgs <- .memo_search_bioc(bioc_version)
pkg %in% bioc_pkgs$Package
}
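
A hedged illustration of the expected behaviour; the package names and the release number are assumptions (limma is presumed to be listed in that release's VIEWS file):

## illustrative only
## .is_bioc("limma", bioc_version = "3.16")  # TRUE if limma appears in the 3.16 VIEWS
## .is_bioc("dplyr", bioc_version = "3.16")  # FALSE; dplyr is a CRAN package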

.is_pkgref <- function(pkg) {
grepl("::", pkg)
}
@@ -79,13 +84,30 @@
}

## to normalize a pkg to pkgref
.normalize_pkg <- function(pkg) {
# .normalize_pkg <- function(pkg) {
# if (pkg == "" || is.na(pkg)) {
# stop("Invalid `pkg`.", call. = FALSE)
# }
# if (isTRUE(.is_github(pkg))) {
# if (isTRUE(grepl("github\\.com", pkg))) {
# pkg <- .extract_github_handle(pkg)
# }
# }
# if (isTRUE(.is_pkgref(pkg))) {
# return(.clean_suffixes(pkg))
# }
# if (isTRUE(.is_github(pkg))) {
# return(paste0("github::", .clean_suffixes(pkg)))
# }
# return(paste0("cran::", .clean_suffixes(pkg)))
# }
.normalize_pkg <- function(pkg, bioc_version = NULL) {
if (pkg == "" || is.na(pkg)) {
stop("Invalid `pkg`.", call. = FALSE)
}
if (isTRUE(.is_github(pkg))) {
if (isTRUE(grepl("github\\.com", pkg))) {
pkg <- .extract_github_handle(pkg)
}
}
if (isTRUE(.is_pkgref(pkg))) {
@@ -94,10 +116,17 @@
if (isTRUE(.is_github(pkg))) {
return(paste0("github::", .clean_suffixes(pkg)))
}
return(paste0("cran::", .clean_suffixes(pkg)))
if (is.null(bioc_version)) {
return(paste0("cran::", .clean_suffixes(pkg)))
} else {
if (isTRUE(.is_bioc(pkg, bioc_version))) {
return(paste0("bioc::", .clean_suffixes(pkg)))
} else {
return(paste0("cran::", .clean_suffixes(pkg)))
}
}
}

## vectorize
.normalize_pkgs <- function(pkgs) {
vapply(X = pkgs, FUN = .normalize_pkg, FUN.VALUE = character(1), USE.NAMES = FALSE)
.normalize_pkgs <- function(pkgs, bioc_version = NULL) {
vapply(X = pkgs, FUN = .normalize_pkg, FUN.VALUE = character(1), bioc_version = bioc_version, USE.NAMES = FALSE)
}
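
A hedged sketch of the vectorised behaviour with the new optional argument; the expected outputs are assumptions that follow from the logic above:

## illustrative only
## .normalize_pkgs(c("limma", "dplyr"), bioc_version = "3.16")
## #> "bioc::limma" "cran::dplyr"
## .normalize_pkgs(c("limma", "dplyr"))
## #> "cran::limma" "cran::dplyr"  (without bioc_version, everything defaults to cran::)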