├── .Rbuildignore ├── tests ├── testthat.R └── testthat │ ├── test-clean_data_package.R │ ├── bioconductor.authz.orig │ ├── bioc-data.authz.orig │ └── test-permissions.R ├── inst ├── testpackages │ ├── AnnotationHub_1.3.18.tar.gz │ └── hgu95av2.db_2.10.1.tar.gz └── extdata │ ├── svn_credentials_request.txt │ ├── tracker.txt │ └── maintainerAcceptance.txt ├── R ├── zzz.R ├── AAAproj_path.R ├── reporting.R ├── gitolite_pubkey_update.R ├── bioc_views.R ├── installDependencies.R ├── utils.R ├── svn_auth_text.R ├── convertToBin.R ├── tallyManifests.R ├── svn.R ├── workflow_standard.R ├── getPackageRange.R ├── email_maintainers.R ├── users.R ├── github.R ├── gh.R ├── addPackage.R ├── permissions.R ├── email.R └── tracker.R ├── NAMESPACE ├── man ├── the.Rd ├── users.Rd ├── devteam.Rd ├── status_map.Rd ├── as.issue.Rd ├── add_packages.Rd ├── github_usernames.Rd ├── gh_unassigned_packages.Rd ├── issue.Rd ├── gh_pre_accepted_packages.Rd ├── read_permissions.Rd ├── standard_commit_message.Rd ├── gh_issue.Rd ├── write_permissions.Rd ├── svn.Rd ├── assign_new_packages.Rd ├── bioc_views_classification.Rd ├── tracker_login.Rd ├── package_name.Rd ├── match_user.Rd ├── gh_packages_assigned_to.Rd ├── maintainers.Rd ├── tabulate_activity.Rd ├── assign_package.Rd ├── clean.Rd ├── proj_path.Rd ├── add_software_permisions.Rd ├── emailMaintainer.Rd ├── package_assignment_email.Rd ├── add_data_experiment_permisions.Rd ├── add_to_spreadsheet.Rd ├── make_github_request.Rd ├── accept_package.Rd ├── post.Rd ├── request_credentials.Rd ├── download.Rd ├── gh_tracker_search.Rd ├── svn_auth_text.Rd ├── user_db.Rd ├── clean_data_package.Rd ├── email.Rd ├── run_commands.Rd ├── edit_software_permissions.Rd ├── edit_data_experiment_permissions.Rd ├── workflow_standard.Rd ├── tracker_search.Rd └── utilFunctions.Rd ├── BiocContributions-github.Rproj ├── .gitignore ├── README.md ├── github-tracker.R ├── DESCRIPTION └── vignettes └── workflow.Rmd /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^\.httr-oauth$ 4 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(BiocContributions) 3 | 4 | test_check("BiocContributions") 5 | -------------------------------------------------------------------------------- /inst/testpackages/AnnotationHub_1.3.18.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bioconductor/BiocContributions/master/inst/testpackages/AnnotationHub_1.3.18.tar.gz -------------------------------------------------------------------------------- /inst/testpackages/hgu95av2.db_2.10.1.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Bioconductor/BiocContributions/master/inst/testpackages/hgu95av2.db_2.10.1.tar.gz -------------------------------------------------------------------------------- /inst/extdata/svn_credentials_request.txt: -------------------------------------------------------------------------------- 1 | Hi, 2 | 3 | Can you please create new SVN account(s) on Hedgehog for 4 | 5 | {{{newusers}}} 6 | 7 | Thanks, 8 | 9 | {{from}} 10 | -------------------------------------------------------------------------------- /R/zzz.R: -------------------------------------------------------------------------------- 1 | ## Here we 
set things up so that when the package loads we try to have 2 | ## a users file that we can access later on 3 | 4 | ## need a place to put my secret tempFile 5 | stash <- new.env(parent=emptyenv()) 6 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | export(github_accept) 4 | export(github_svn_credentials_draft_to_user) 5 | export(github_svn_credentials_request_from_carl) 6 | export(proj_path) 7 | export(svn_data_experiment_auth_text) 8 | export(svn_software_auth_text) 9 | import(httr) 10 | -------------------------------------------------------------------------------- /man/the.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \docType{data} 4 | \name{the} 5 | \alias{the} 6 | \title{storage object} 7 | \format{An object of class \code{environment} of length 0.} 8 | \usage{ 9 | the 10 | } 11 | \description{ 12 | storage object 13 | } 14 | \keyword{datasets} 15 | 16 | -------------------------------------------------------------------------------- /man/users.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{users} 4 | \alias{users} 5 | \title{Retrieve the user list} 6 | \usage{ 7 | users(session = tracker_login()) 8 | } 9 | \arguments{ 10 | \item{session}{the HTTP session to use} 11 | } 12 | \description{ 13 | Retrieve the user list 14 | } 15 | 16 | -------------------------------------------------------------------------------- /man/devteam.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \docType{data} 4 | \name{devteam} 5 | \alias{devteam} 6 | \title{Members of the devteam} 7 | \format{An object of class \code{character} of length 4.} 8 | \usage{ 9 | devteam 10 | } 11 | \description{ 12 | Members of the devteam 13 | } 14 | \keyword{datasets} 15 | 16 | -------------------------------------------------------------------------------- /man/status_map.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \docType{data} 4 | \name{status_map} 5 | \alias{status_map} 6 | \title{Status code to name mapping} 7 | \format{An object of class \code{character} of length 10.} 8 | \usage{ 9 | status_map 10 | } 11 | \description{ 12 | Status code to name mapping 13 | } 14 | \keyword{datasets} 15 | 16 | -------------------------------------------------------------------------------- /man/as.issue.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{as.issue} 4 | \alias{as.issue} 5 | \title{Coerce to an issue object} 6 | \usage{ 7 | as.issue(x, ...) 
8 | } 9 | \arguments{ 10 | \item{x}{object to be coerced} 11 | 12 | \item{...}{Additional arguments passed to methods} 13 | } 14 | \description{ 15 | Coerce to an issue object 16 | } 17 | 18 | -------------------------------------------------------------------------------- /BiocContributions-github.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 4 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Package 16 | PackageUseDevtools: Yes 17 | PackageInstallArgs: --no-multiarch --with-keep.source --no-test-load 18 | -------------------------------------------------------------------------------- /man/add_packages.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/svn.R 3 | \name{add_packages} 4 | \alias{add_packages} 5 | \title{Add packages to SVN} 6 | \arguments{ 7 | \item{x}{package tarballs to add.} 8 | 9 | \item{svn_location}{location of the SVN repository} 10 | 11 | \item{manifest}{name of the manifest file} 12 | } 13 | \description{ 14 | Add packages to SVN 15 | } 16 | 17 | -------------------------------------------------------------------------------- /man/github_usernames.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \docType{data} 4 | \name{github_usernames} 5 | \alias{github_usernames} 6 | \title{and their github usernames} 7 | \format{An object of class \code{character} of length 4.} 8 | \usage{ 9 | github_usernames 10 | } 11 | \description{ 12 | and their github usernames 13 | } 14 | \keyword{datasets} 15 | 16 | -------------------------------------------------------------------------------- /man/gh_unassigned_packages.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{gh_unassigned_packages} 4 | \alias{gh_unassigned_packages} 5 | \title{Show unassigned packages} 6 | \usage{ 7 | gh_unassigned_packages() 8 | } 9 | \value{ 10 | a data frame of unassigned packages or NULL if none. 
11 | } 12 | \description{ 13 | Show unassigned packages 14 | } 15 | \examples{ 16 | gh_unassigned_packages() 17 | } 18 | 19 | -------------------------------------------------------------------------------- /man/issue.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{issue} 4 | \alias{issue} 5 | \title{Retrieve all of the messages from an issue} 6 | \usage{ 7 | issue(number, session = tracker_login()) 8 | } 9 | \arguments{ 10 | \item{number}{the issue number to retrieve} 11 | 12 | \item{session}{the HTTP session to use} 13 | } 14 | \description{ 15 | Retrieve all of the messages from an issue 16 | } 17 | 18 | -------------------------------------------------------------------------------- /man/gh_pre_accepted_packages.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{gh_pre_accepted_packages} 4 | \alias{gh_pre_accepted_packages} 5 | \title{Show pre-accepted packages} 6 | \usage{ 7 | gh_pre_accepted_packages() 8 | } 9 | \value{ 10 | a data frame of pre-accepted packages or NULL if none. 11 | } 12 | \description{ 13 | Show pre-accepted packages 14 | } 15 | \examples{ 16 | gh_pre_accepted_packages() 17 | } 18 | 19 | -------------------------------------------------------------------------------- /man/read_permissions.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{read_permissions} 4 | \alias{read_permissions} 5 | \title{Read authz permission file} 6 | \usage{ 7 | 8 | read_permissions(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", 9 | quiet = TRUE) 10 | } 11 | \arguments{ 12 | \item{file}{location passed to rsync} 13 | } 14 | \description{ 15 | Read authz permission file 16 | } 17 | 18 | -------------------------------------------------------------------------------- /man/standard_commit_message.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{standard_commit_message} 4 | \alias{standard_commit_message} 5 | \title{Generate a standard commit message for permission edits} 6 | \usage{ 7 | standard_commit_message(x) 8 | } 9 | \arguments{ 10 | \item{x}{the edits to make, if a data.frame will be coerced to a named list.} 11 | } 12 | \description{ 13 | Generate a standard commit message for permission edits 14 | } 15 | 16 | -------------------------------------------------------------------------------- /man/gh_issue.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{gh_issue} 4 | \alias{gh_issue} 5 | \title{Retrieve all messages associated with an issue} 6 | \usage{ 7 | gh_issue(number) 8 | } 9 | \arguments{ 10 | \item{number}{The issue number} 11 | } 12 | \value{ 13 | a data frame/issue of messages associated with issue \code{number} 14 | } 15 | \description{ 16 | Retrieve all messages associated with an issue 17 | } 18 | \examples{ 19 | gh_issue(21) 20 | } 21 | 22 | -------------------------------------------------------------------------------- /man/write_permissions.Rd: 
-------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{write_permissions} 4 | \alias{write_permissions} 5 | \title{Write authz permission file} 6 | \usage{ 7 | write_permissions(x, 8 | file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", 9 | ...) 10 | } 11 | \arguments{ 12 | \item{x}{object to write} 13 | 14 | \item{file}{location passed to rsync} 15 | } 16 | \description{ 17 | Write authz permission file 18 | } 19 | 20 | -------------------------------------------------------------------------------- /man/svn.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/svn.R 3 | \name{svn} 4 | \alias{svn} 5 | \title{Declare an SVN instance} 6 | \usage{ 7 | svn(dir = getwd()) 8 | } 9 | \arguments{ 10 | \item{dir}{The SVN directory} 11 | } 12 | \description{ 13 | This is primarily a helper function for using svn programmatically. 14 | } 15 | \examples{ 16 | \dontrun{ 17 | s <- svn("my/svn/location") 18 | s$status() 19 | s$update() 20 | s$add("file3") 21 | s$commit("adding file3") 22 | } 23 | } 24 | 25 | -------------------------------------------------------------------------------- /man/assign_new_packages.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{assign_new_packages} 4 | \alias{assign_new_packages} 5 | \title{Assign new packages} 6 | \usage{ 7 | assign_new_packages(pkgs = unassigned_packages(session), team = devteam) 8 | } 9 | \arguments{ 10 | \item{pkgs}{the packages to assign} 11 | 12 | \item{team}{team members to assign to} 13 | } 14 | \description{ 15 | This method uses a hash digest to assign the packages based on the package 16 | name.
17 | } 18 | 19 | -------------------------------------------------------------------------------- /man/bioc_views_classification.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/bioc_views.R 3 | \name{bioc_views_classification} 4 | \alias{bioc_views_classification} 5 | \title{Extract and validate biocViews terms from tarball} 6 | \usage{ 7 | bioc_views_classification(files) 8 | } 9 | \arguments{ 10 | \item{files}{tar.gz file locations} 11 | } 12 | \value{ 13 | named vector classifying each tar ball to biocViews hierarchy 14 | } 15 | \description{ 16 | Extract and validate biocViews terms from tarball 17 | } 18 | 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .httr-oauth 3 | # Created by https://www.gitignore.io/api/r 4 | 5 | ### R ### 6 | # History files 7 | .Rhistory 8 | .RData 9 | .Rapp.history 10 | 11 | # Example code in package build process 12 | *-Ex.R 13 | 14 | # RStudio files 15 | .Rproj.user/ 16 | 17 | # produced vignettes 18 | vignettes/*.html 19 | vignettes/*.pdf 20 | 21 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 22 | .httr-oauth 23 | 24 | # Output files from R CMD build 25 | /*.tar.gz 26 | 27 | # Output files from R CMD check 28 | /*.Rcheck/ 29 | 30 | .tags* 31 | -------------------------------------------------------------------------------- /man/tracker_login.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{tracker_login} 4 | \alias{tracker_login} 5 | \title{Login to the issue tracker} 6 | \usage{ 7 | tracker_login(url = "https://tracker.bioconductor.org", 8 | user = getOption("tracker_user"), 9 | password = getOption("tracker_password")) 10 | } 11 | \arguments{ 12 | \item{url}{tracker url} 13 | 14 | \item{user}{username used to login} 15 | 16 | \item{password}{password used to login} 17 | } 18 | \description{ 19 | Login to the issue tracker 20 | } 21 | 22 | -------------------------------------------------------------------------------- /man/package_name.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/addPackage.R 3 | \name{package_name} 4 | \alias{package_name} 5 | \title{Extract a package's name from a tarball} 6 | \usage{ 7 | package_name(tarball) 8 | } 9 | \arguments{ 10 | \item{tarball}{package tarball} 11 | } 12 | \value{ 13 | the package name 14 | } 15 | \description{ 16 | Extract a package's name from a tarball 17 | } 18 | \examples{ 19 | pkg <- system.file(package="BiocContributions", 20 | "testpackages", "RNASeqPower_1.11.0.tar.gz") 21 | package_name(pkg) 22 | } 23 | 24 | -------------------------------------------------------------------------------- /man/match_user.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/users.R 3 | \name{match_user} 4 | \alias{match_user} 5 | \title{See if a person already exists in the user db} 6 | \usage{ 7 | match_user(x) 8 | } 9 | \arguments{ 10 | \item{x}{a person object to look up} 11 | } 12 | \description{ 13 | See if a person already exists in the user
db 14 | } 15 | \examples{ 16 | pkg <- system.file(package="BiocContributions", 17 | "testpackages", "RNASeqPower_1.11.0.tar.gz") 18 | 19 | maintainer <- maintainers(pkg) 20 | match_user(maintainer) 21 | } 22 | 23 | -------------------------------------------------------------------------------- /man/gh_packages_assigned_to.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{gh_packages_assigned_to} 4 | \alias{gh_packages_assigned_to} 5 | \title{Show packages assigned to a github user} 6 | \usage{ 7 | gh_packages_assigned_to(github_username) 8 | } 9 | \arguments{ 10 | \item{github_username}{The github username} 11 | } 12 | \value{ 13 | a data frame of packages assigned to \code{github_username} 14 | } 15 | \description{ 16 | Show packages assigned to a github user 17 | } 18 | \examples{ 19 | gh_packages_assigned_to("dtenenba") 20 | } 21 | 22 | -------------------------------------------------------------------------------- /man/maintainers.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/users.R 3 | \name{maintainers} 4 | \alias{maintainers} 5 | \title{Retrieve the maintainers from a tarball} 6 | \usage{ 7 | maintainers(tarball) 8 | } 9 | \arguments{ 10 | \item{tarball}{the package tarball to read} 11 | } 12 | \value{ 13 | each maintainer as a 'person' object. 14 | } 15 | \description{ 16 | Retrieve the maintainers from a tarball 17 | } 18 | \examples{ 19 | pkg <- system.file(package="BiocContributions", 20 | "testpackages", "RNASeqPower_1.11.0.tar.gz") 21 | maintainers(pkg) 22 | } 23 | 24 | -------------------------------------------------------------------------------- /man/tabulate_activity.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/trackerCode.R 3 | \name{tabulate_activity} 4 | \alias{tabulate_activity} 5 | \title{Tabulate tracker activity} 6 | \usage{ 7 | tabulate_activity(issues = tracker_search(columns = c("id", "activity", 8 | "title", "creator", "status", "assignedto", "actor")), date = Sys.Date() - 9 | 30, users = devteam) 10 | } 11 | \arguments{ 12 | \item{users}{people to tabulate about, default is the devteam.} 13 | 14 | \item{Date}{Date to tabulate from, default is the last thirty days.} 15 | } 16 | \description{ 17 | Tabulate tracker activity 18 | } 19 | 20 | -------------------------------------------------------------------------------- /man/assign_package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{assign_packages} 4 | \alias{assign_package} 5 | \alias{assign_packages} 6 | \title{Assign a specific package} 7 | \usage{ 8 | assign_packages(pkgs, code = assign_new_packages(...), ...) 9 | 10 | assign_package(issue, assignee, ...) 
11 | } 12 | \arguments{ 13 | \item{issue}{an issue object or issue number} 14 | 15 | \item{assignee}{assignee, looked up by name} 16 | } 17 | \description{ 18 | Assign a specific package 19 | } 20 | \section{Functions}{ 21 | \itemize{ 22 | \item \code{assign_packages}: assign multiple packages with assignment code 23 | }} 24 | 25 | -------------------------------------------------------------------------------- /man/clean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/addPackage.R 3 | \name{clean} 4 | \alias{clean} 5 | \title{Clean a Software Package} 6 | \usage{ 7 | clean(tarball, svnDir = proj_path("Rpacks"), copyToSvnDir = TRUE, 8 | svnAccountExists = FALSE) 9 | } 10 | \arguments{ 11 | \item{tarball}{package tarball} 12 | 13 | \item{svnDir}{Directory of the Rpacks checkout} 14 | 15 | \item{copyToSvnDir}{whether to copy the files to the SVN directory} 16 | } 17 | \description{ 18 | This is for cleaning up build tarballs, and then putting them into 19 | svn (and emailing the authors to let them know this - when they 20 | already have an account) 21 | } 22 | 23 | -------------------------------------------------------------------------------- /man/proj_path.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/AAAproj_path.R 3 | \name{proj_path} 4 | \alias{proj_path} 5 | \title{Path to file (in the Unix sense) within the project} 6 | \usage{ 7 | proj_path(file = "", check.exists = TRUE) 8 | } 9 | \arguments{ 10 | \item{file}{character(1) file or directory name within the project.} 11 | 12 | \item{check.exists}{logical(1) indicating whether the file must 13 | exist} 14 | } 15 | \value{ 16 | character(1) file path 17 | } 18 | \description{ 19 | The path is composed of 20 | \code{getOption("bioc_contributions_project")} or, if NULL, the 21 | home directory "~", preceding the \code{file} argument.
22 | } 23 | 24 | -------------------------------------------------------------------------------- /man/add_software_permisions.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{add_software_permisions} 4 | \alias{add_software_permisions} 5 | \title{Helper function to Add Software Permissions} 6 | \usage{ 7 | add_software_permisions(x, message = standard_commit_message(x), 8 | file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz") 9 | } 10 | \arguments{ 11 | \item{x}{Permissions to add, can be a named \code{list} or \code{data.frame}.} 12 | 13 | \item{message}{Commit message to use} 14 | 15 | \item{file}{File containing the permissions to edit} 16 | } 17 | \description{ 18 | Helper function to Add Software Permissions 19 | } 20 | 21 | -------------------------------------------------------------------------------- /man/emailMaintainer.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/email.R 3 | \name{emailMaintainer} 4 | \alias{emailMaintainer} 5 | \title{Email a new user their credentials} 6 | \usage{ 7 | emailMaintainer(tarball, userId = "user.id", password = "password", 8 | senderName = getOption("bioc_contributions_signature", "Bioconductor")) 9 | } 10 | \arguments{ 11 | \item{tarball}{the package tarball to email about} 12 | 13 | \item{userId}{The SVN user ID for the maintainer} 14 | 15 | \item{password}{The SVN password for the maintainer} 16 | 17 | \item{senderName}{The name of the email sender for use in the signature} 18 | } 19 | \description{ 20 | Email a new user their credentials 21 | } 22 | 23 | -------------------------------------------------------------------------------- /man/package_assignment_email.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{package_assignment_email} 4 | \alias{package_assignment_email} 5 | \title{Generate the package assignments email given code to run} 6 | \usage{ 7 | package_assignment_email(pkgs = unassigned_packages(...), 8 | code = assign_new_packages(pkgs, ...), date = Sys.Date(), ...) 
9 | } 10 | \arguments{ 11 | \item{code}{code to run, output of \code{\link{assign_new_packages}()}} 12 | 13 | \item{date}{date to title the email} 14 | 15 | \item{additional}{arguments passed to \code{\link{assign_new_packages}()}} 16 | } 17 | \description{ 18 | Generate the package assignments email given code to run 19 | } 20 | 21 | -------------------------------------------------------------------------------- /man/add_data_experiment_permisions.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{add_data_experiment_permisions} 4 | \alias{add_data_experiment_permisions} 5 | \title{Helper function to Add Data Experiment Permissions} 6 | \usage{ 7 | add_data_experiment_permisions(x, message = standard_commit_message(x), 8 | file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz") 9 | } 10 | \arguments{ 11 | \item{x}{Permissions to add, can be a named \code{list} or \code{data.frame}.} 12 | 13 | \item{message}{Commit message to use} 14 | 15 | \item{file}{File containing the permissions to edit} 16 | } 17 | \description{ 18 | Helper function to Add Data Experiment Permissions 19 | } 20 | 21 | -------------------------------------------------------------------------------- /man/add_to_spreadsheet.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/reporting.R 3 | \name{add_to_spreadsheet} 4 | \alias{add_to_spreadsheet} 5 | \title{Add packages to the tracking worksheet} 6 | \usage{ 7 | add_to_spreadsheet(x, ..., ws = 1, ss = tracker_spreadsheet()) 8 | } 9 | \arguments{ 10 | \item{x}{issues to add} 11 | 12 | \item{...}{Additional arguments passed to \code{\link[googlesheets]{gs_add_row}}} 13 | 14 | \item{ws}{The worksheet number or name to append to.} 15 | } 16 | \description{ 17 | This function uses the googlesheets package to add data retrieved from the 18 | tracker. 
19 | } 20 | \examples{ 21 | \dontrun{ 22 | pkgs <- unassigned_packages() 23 | add_to_spreadsheet(pkgs) 24 | } 25 | } 26 | 27 | -------------------------------------------------------------------------------- /man/make_github_request.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{make_github_request} 4 | \alias{make_github_request} 5 | \title{Make a GitHub API request using an OAuth token} 6 | \usage{ 7 | make_github_request(uri, method = GET, postdata = NULL, 8 | include_message = FALSE) 9 | } 10 | \arguments{ 11 | \item{uri}{github uri such as '/orgs/octokit/repos'} 12 | 13 | \item{method}{httr verb function (GET, POST, etc)} 14 | 15 | \item{postdata}{data to POST (if method == POST)} 16 | 17 | \item{include_message}{Whether to include the message in the result} 18 | } 19 | \value{ 20 | a data frame based on the result from github 21 | } 22 | \description{ 23 | Make a GitHub API request using an OAuth token 24 | } 25 | 26 | -------------------------------------------------------------------------------- /man/accept_package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{accept_package} 4 | \alias{accept_package} 5 | \title{Accept a package on the tracker} 6 | \usage{ 7 | accept_package(issue = issue, tarball, note = accept_note(tarball), 8 | status = 6, ..., session = NULL) 9 | } 10 | \arguments{ 11 | \item{issue}{an issue object from \code{\link{issue}}} 12 | 13 | \item{note}{The acceptance note to post to the tracker.} 14 | 15 | \item{...}{Additional arguments passed to rvest::set_values} 16 | 17 | \item{session}{the session to use, if \code{NULL} the issue's session is used.} 18 | } 19 | \description{ 20 | Accept a package on the tracker 21 | } 22 | \examples{ 23 | \dontrun{ 24 | accept_package(1318, "transcriptR_0.99.4.tar.gz") 25 | } 26 | } 27 | 28 | -------------------------------------------------------------------------------- /man/post.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{post} 4 | \alias{post} 5 | \title{Post a message to an issue} 6 | \usage{ 7 | post(issue, session = NULL, note = edit(), file = NULL, ...) 
8 | } 9 | \arguments{ 10 | \item{issue}{an issue object from \code{\link{issue}}} 11 | 12 | \item{session}{the session to use, if \code{NULL} the issue's session is used.} 13 | 14 | \item{note}{a note to post to the issue, defaults to opening your editor, 15 | but you can also pass a character string.} 16 | 17 | \item{file}{a file to attach to the issue, if \code{TRUE} choose a file using 18 | \code{\link{file.choose}}} 19 | 20 | \item{...}{Additional arguments passed to rvest::set_values} 21 | } 22 | \description{ 23 | Post a message to an issue 24 | } 25 | 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | The canonical location for this code is https://hedgehog.fhcrc.org/bioconductor/trunk/madman/Rpacks/BiocContributions 2 | 3 | You can set up git-svn on it by cloning this repository and running 4 | 5 | ```bash 6 | git svn init https://hedgehog.fhcrc.org/bioconductor/trunk/madman/Rpacks/BiocContributions 7 | git svn fetch 8 | git update-ref refs/remotes/git-svn refs/remotes/origin/master 9 | ``` 10 | 11 | Then, after committing code locally, run the following to commit the changes to SVN and push the commits back to GitHub. 12 | 13 | ```bash 14 | # Get any changes from SVN 15 | git svn rebase 16 | 17 | # commit code to svn 18 | git svn dcommit 19 | 20 | # push code to github 21 | git push 22 | ``` 23 | 24 | There is no need to mess with any branches in this setup; just do everything on the master branch. 25 | -------------------------------------------------------------------------------- /man/request_credentials.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/users.R 3 | \name{request_credentials} 4 | \alias{request_credentials} 5 | \title{Request new credentials for users} 6 | \usage{ 7 | request_credentials(x, sender = getOption("bioc_contributions_signature")) 8 | } 9 | \arguments{ 10 | \item{x}{an object with an \code{\link{email}} method defined.} 11 | 12 | \item{sender}{The name to use in the signature} 13 | } 14 | \value{ 15 | A \code{\link[gmailr]{mime}} object.
16 | } 17 | \description{ 18 | Request new credentials for users 19 | } 20 | \examples{ 21 | pkg <- system.file(package="BiocContributions", 22 | "testpackages", "RNASeqPower_1.11.0.tar.gz") 23 | request_credentials(match_user(maintainers(pkg))) 24 | request_credentials("asdf@asd.com") 25 | } 26 | 27 | -------------------------------------------------------------------------------- /github-tracker.R: -------------------------------------------------------------------------------- 1 | ## workflow 2 | 3 | ## library(jsonlite) 4 | ## https://developer.github.com/v3/ 5 | 6 | library(BiocContributions) 7 | 8 | options( 9 | bioc_contributions_github_user="mtmorgan", 10 | bioc_contributions_github_auth=readLines("~/.git0Auth"), 11 | bioc_contributions_manifest_version="3.6", # manifest update 12 | bioc_contributions_release_version="3.5" # svn_*_auth_text; trails devel 13 | ) 14 | repository <- "https://api.github.com/Bioconductor/Contributions" 15 | 16 | types <- github_accept() 17 | github_svn_credentials_request_from_carl(types$Software) 18 | github_svn_credentials_request_from_carl(types$ExperimentData) 19 | 20 | github_svn_credentials_draft_to_user(types$Software) 21 | github_svn_credentials_draft_to_user(types$ExperimentData) 22 | 23 | svn_software_auth_text(types$Software) 24 | svn_data_experiment_auth_text(types$ExperimentData) 25 | 26 | -------------------------------------------------------------------------------- /R/AAAproj_path.R: -------------------------------------------------------------------------------- 1 | #' Path to file (in the Unix sense) within the project 2 | #' 3 | #' The path is composed of 4 | #' \code{getOption("bioc_contributions_project")} or, if NULL, the 5 | #' home directory "~", preceding the \code{file} argument. 6 | #' 7 | #' @param file character(1) file or directory name within the project.
8 | #' @param check.exists logical(1) indicating whether the file must 9 | #' exist 10 | #' 11 | #' @return character(1) file path 12 | #' @export 13 | proj_path <- 14 | function(file="", check.exists=TRUE) 15 | { 16 | stopifnot(is.logical(check.exists), length(check.exists) == 1L, 17 | !is.na(check.exists)) 18 | path <- file.path(getOption("bioc_contributions_project", "~"), file) 19 | if (check.exists && !all(file.exists(path))) 20 | stop("path does not exist (set option 'bioc_contributions_project'?):", 21 | "\n '", path, "'") 22 | path 23 | } 24 | -------------------------------------------------------------------------------- /man/download.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{download} 4 | \alias{download} 5 | \title{Download attachments from an issue} 6 | \usage{ 7 | download(issue, dir = proj_path(), last_only = TRUE, 8 | pattern = "[.]tar[.]gz$", overwrite = FALSE, ..., 9 | session = tracker_login()) 10 | } 11 | \arguments{ 12 | \item{issue}{Issue object, or issue number to download files from} 13 | 14 | \item{dir}{Location to store the files} 15 | 16 | \item{last_only}{If \code{TRUE} only download the last submitted tarball.} 17 | 18 | \item{pattern}{Regular expression for files to download.} 19 | 20 | \item{overwrite}{Will only overwrite existing \code{path} if TRUE.} 21 | 22 | \item{...}{Additional Arguments passed to \code{\link[rvest]{jump_to}}.} 23 | 24 | \item{session}{the HTTP session to use} 25 | } 26 | \description{ 27 | Download attachments from an issue 28 | } 29 | 30 | -------------------------------------------------------------------------------- /man/gh_tracker_search.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/gh.R 3 | \name{gh_tracker_search} 4 | \alias{gh_tracker_search} 5 | \title{Query the issue tracker} 6 | \usage{ 7 | gh_tracker_search(columns = c("id", "activity", "title", "creator", "status"), 8 | sort = desc("activity"), filter = c("status", "assignedto"), 9 | status = c(-1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), ..., 10 | session = tracker_login()) 11 | } 12 | \arguments{ 13 | \item{columns}{which columns to return} 14 | 15 | \item{sort}{A column to sort the data by} 16 | 17 | \item{filter}{what columns are used to filter} 18 | 19 | \item{status}{the status codes used to filter} 20 | 21 | \item{...}{Additional query parameters} 22 | 23 | \item{session}{the HTTP session to use} 24 | } 25 | \description{ 26 | Query the issue tracker 27 | } 28 | \examples{ 29 | tracker_search("@search_text" = "normalize450k") 30 | } 31 | 32 | -------------------------------------------------------------------------------- /man/svn_auth_text.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/svn_auth_text.R 3 | \name{svn_auth_text} 4 | \alias{svn_auth_text} 5 | \alias{svn_data_experiment_auth_text} 6 | \alias{svn_software_auth_text} 7 | \title{Update svn authz files} 8 | \usage{ 9 | svn_software_auth_text(filenames, 10 | version = getOption("bioc_contributions_devel_version", "3.4")) 11 | 12 | svn_data_experiment_auth_text(filenames, 13 | version = getOption("bioc_contributions_devel_version", "3.4")) 14 | } 15 | \arguments{ 16 | \item{filenames}{character() vector of (full) paths to 
package tar balls.} 17 | 18 | \item{version}{character(1) version string as it appears in svn 19 | 'RELEASE' branch, e.g., \code{"3_2"} corresponding to 20 | RELEASE_3_2.} 21 | } 22 | \description{ 23 | These functions generate text to copy / paste into software svn 24 | file bioconductor.authz or data experiment svn file 25 | bioc-data.authz 26 | } 27 | 28 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: BiocContributions 2 | Type: Package 3 | Title: Convenience functions for adding packages to the svn repository 4 | Version: 0.99.78 5 | Author: Marc Carlson, Sonali Arora, Jim Hester 6 | Maintainer: Bioconductor Package Maintainer 7 | Description: A set of tools for cleaning up, post-processing, adding packages to 8 | the repository, and for emailing people etc. 9 | Depends: R (>= 3.3), devtools, gmailr 10 | Imports: 11 | S4Vectors, 12 | XML, 13 | httr, 14 | whisker, 15 | rvest, 16 | crayon, 17 | digest, 18 | memoise, 19 | withr, 20 | googlesheets, 21 | knitr, 22 | rex, 23 | xml2, 24 | BiocInstaller, 25 | biocViews 26 | Suggests: 27 | RMySQL, httr, mailR, 28 | sendmailR, 29 | testthat, 30 | BiocStyle 31 | VignetteBuilder: knitr 32 | License: Artistic-2.0 33 | biocViews: Infrastructure 34 | LazyLoad: yes 35 | RoxygenNote: 5.0.1 36 | 37 | -------------------------------------------------------------------------------- /man/user_db.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/users.R 3 | \name{user_db} 4 | \alias{user_db} 5 | \title{Retrieve the remote user database} 6 | \usage{ 7 | user_db() 8 | } 9 | \value{ 10 | the result is a data.frame of the data 11 | } 12 | \description{ 13 | This is an internal function which retrieves the user database file 14 | 'user_db.csv', which holds user information for users with SVN 15 | credentials. this assumes 'rsync' is available on your path, which 16 | is true by default for linux and OSX machines, but probably not for 17 | windows. 18 | } 19 | \details{ 20 | The information is cached so calling this function repeatably will 21 | result in the same information being returned. Use 22 | \code{memoise::forget(user_db)} to reset the cache if needed. 23 | } 24 | \examples{ 25 | user_db() 26 | # second call will be nearly instantaneous 27 | user_db() 28 | # clear the cache 29 | memoise::forget(user_db) 30 | user_db() 31 | } 32 | 33 | -------------------------------------------------------------------------------- /man/clean_data_package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/addPackage.R 3 | \name{clean_data_package} 4 | \alias{clean_data_package} 5 | \title{Clean and copy a Data Experiment package} 6 | \usage{ 7 | clean_data_package(tarball, svn_pkgs = proj_path("experiment/pkgs"), 8 | svn_data_store = proj_path("experiment/data_store"), data_dirs = c("data", 9 | "inst/extdata")) 10 | } 11 | \arguments{ 12 | \item{tarball}{package tarball} 13 | 14 | \item{svn_pkgs}{the location of Data Experiment \sQuote{pkgs} checkout.} 15 | 16 | \item{svn_data_store}{the location of Data Experiment 17 | \sQuote{data_store} checkout.} 18 | } 19 | \value{ 20 | File paths to the copied locations (invisibly). 
21 | } 22 | \description{ 23 | Clean and copy a Data Experiment package 24 | } 25 | \examples{ 26 | \dontrun{ 27 | pkg <- system.file(package="BiocContributions", 28 | "testpackages", "RNASeqPower_1.11.0.tar.gz") 29 | clean_data_package(pkg) 30 | } 31 | } 32 | 33 | -------------------------------------------------------------------------------- /man/email.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/users.R 3 | \name{email} 4 | \alias{email} 5 | \alias{email.character} 6 | \alias{email.list} 7 | \alias{email.person} 8 | \alias{email.user_matches} 9 | \title{Extract the email from an object} 10 | \usage{ 11 | email(x, ...) 12 | 13 | \method{email}{person}(x, ...) 14 | 15 | \method{email}{user_matches}(x, ...) 16 | 17 | \method{email}{list}(x, ...) 18 | 19 | \method{email}{character}(x, ...) 20 | } 21 | \arguments{ 22 | \item{x}{the object to extract email from} 23 | 24 | \item{...}{Additional arguments passed to methods.} 25 | } 26 | \description{ 27 | Extract the email from an object 28 | } 29 | \section{Methods (by class)}{ 30 | \itemize{ 31 | \item \code{person}: person object 32 | 33 | \item \code{user_matches}: user match object from \code{\link{match_user}} 34 | 35 | \item \code{list}: list - calls \code{\link{email}} on every item in the list. 36 | 37 | \item \code{character}: - simply returns the character vector unchanged. 38 | }} 39 | 40 | -------------------------------------------------------------------------------- /man/run_commands.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{run_commands} 4 | \alias{rcs_check_in} 5 | \alias{rcs_check_out} 6 | \alias{run_commands} 7 | \title{Run commands on a file, possibly remote.} 8 | \usage{ 9 | 10 | rcs_check_out(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", 11 | args = NULL) 12 | 13 | 14 | rcs_check_in(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", 15 | message, args = NULL) 16 | } 17 | \arguments{ 18 | \item{args}{Additional arguments passed to the command.} 19 | 20 | \item{message}{commit message for the check in} 21 | 22 | \item{inheritParams}{read_permissions} 23 | } 24 | \description{ 25 | Run commands on a file, possibly remote. 26 | } 27 | \details{ 28 | if the file is a remote location (server:path) the command is run 29 | remotely. 30 | } 31 | \section{Functions}{ 32 | \itemize{ 33 | \item \code{rcs_check_out}: Check out a RCS tracked file 34 | 35 | \item \code{rcs_check_in}: Check in a RCS tracked file 36 | }} 37 | 38 | -------------------------------------------------------------------------------- /inst/extdata/tracker.txt: -------------------------------------------------------------------------------- 1 | Hi {{author}}, 2 | 3 | {{tarball}} has been added to the Bioconductor repository and nightly 4 | build system. 5 | 6 | Next Steps: 7 | 8 | You have or will in the next 48 hours receive an email from me with 9 | your SVN credentials, as well as additional instructions. Please 10 | ensure that you have read / write access to your package. 11 | 12 | Your package will be built during the next nightly build 13 | cycle. {{when}} at around 5pm PST, the build system takes a snapshot 14 | of all the packages inside Bioconductor. 
The next day after 2pm PST, a 15 | build report located at [1] is created containing the output of R CMD 16 | build and check on all platforms. The build report for your package 17 | will appear at [2]. When reading [1] and [2], please pay attention to 18 | the date displayed next to - "Snapshot Date:" and "This page was 19 | generated on" to verify what date the report is targeting. 20 | 21 | Thanks, 22 | 23 | {{senderName}} 24 | 25 | [1] http://bioconductor.org/checkResults/3.4/{{type}}/ 26 | 27 | [2] http://bioconductor.org/checkResults/3.4/{{type}}/{{package}} 28 | -------------------------------------------------------------------------------- /man/edit_software_permissions.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{edit_software_permissions} 4 | \alias{edit_software_permissions} 5 | \alias{edit_software_permissions.data.frame} 6 | \alias{edit_software_permissions.list} 7 | \title{Edit the software permissions} 8 | \usage{ 9 | edit_software_permissions(x, ...) 10 | 11 | \method{edit_software_permissions}{data.frame}(x, data = read_permissions(), 12 | version = 3.2, ...) 13 | 14 | \method{edit_software_permissions}{list}(x, data = read_permissions(), 15 | version = "3.2") 16 | } 17 | \arguments{ 18 | \item{x}{The edits to perform} 19 | 20 | \item{...}{Additional arguments passed to methods} 21 | 22 | \item{data}{a authz data file} 23 | 24 | \item{version}{The release version number} 25 | } 26 | \description{ 27 | Edit the software permissions 28 | } 29 | \section{Methods (by class)}{ 30 | \itemize{ 31 | \item \code{data.frame}: data.frame input, expects columns \sQuote{package} and \sQuote{user} 32 | 33 | \item \code{list}: list input, expects a named list of packages and users 34 | }} 35 | 36 | -------------------------------------------------------------------------------- /R/reporting.R: -------------------------------------------------------------------------------- 1 | tracker_spreadsheet <- function(x) { 2 | if (is.null(the$ss)) { 3 | the$ss <- googlesheets::gs_title("Bioconductor Pkg Review") 4 | } 5 | the$ss 6 | } 7 | 8 | #' Add packages to the tracking worksheet 9 | #' 10 | #' This function uses the googlesheets package to add data retrieved from the 11 | #' tracker. 12 | #' @param x issues to add 13 | #' @param ws The worksheet number or name to append to. 14 | #' @param ... Additional arguments passed to \code{\link[googlesheets]{gs_add_row}} 15 | #' @examples 16 | #' \dontrun{ 17 | #' pkgs <- unassigned_packages() 18 | #' add_to_spreadsheet(pkgs) 19 | #' } 20 | add_to_spreadsheet <- function(x, ..., ws = 1, ss = tracker_spreadsheet()) { 21 | data <- data.frame("Package to be added" = x$title, 22 | "Issue" = paste0("https://tracker.bioconductor.org/issue", x$id), 23 | "Github" = "", 24 | "accepted on tracker" = x$activity, 25 | "type of package" = vapply(x$keyword, paste, collapse = ", ", character(1)), 26 | check.names = FALSE, 27 | stringsAsFactors = FALSE) 28 | apply(data, 1, googlesheets::gs_add_row, ss = tracker_spreadsheet(), ws = ws, ...) 
29 | } 30 | -------------------------------------------------------------------------------- /tests/testthat/test-clean_data_package.R: -------------------------------------------------------------------------------- 1 | context("clean_data_package") 2 | test_that("it splits the package properly", { 3 | tmp_pkgs <- tempfile() 4 | dir.create(tmp_pkgs) 5 | 6 | tmp_data_store <- tempfile() 7 | dir.create(tmp_data_store) 8 | 9 | on.exit(unlink(c(tmp_pkgs, tmp_data_store), recursive = TRUE)) 10 | 11 | pkg <- system.file(package="BiocContributions", 12 | "testpackages", "hgu95av2.db_2.10.1.tar.gz") 13 | 14 | clean_data_package(pkg, 15 | svn_pkgs = tmp_pkgs, 16 | svn_data_store = tmp_data_store) 17 | 18 | expect_equal(list.files(tmp_pkgs), "hgu95av2.db") 19 | 20 | expect_equal(list.files(file.path(tmp_pkgs, "hgu95av2.db")), 21 | c("DESCRIPTION", "external_data_store.txt", "man", "NAMESPACE", "R", "tests")) 22 | 23 | expect_equal(readLines(file.path(tmp_pkgs, "hgu95av2.db", "external_data_store.txt")), 24 | "inst/extdata") 25 | 26 | expect_equal(list.files(tmp_data_store), "hgu95av2.db") 27 | 28 | expect_equal(list.files(file.path(tmp_data_store, "hgu95av2.db")), 29 | c("inst")) 30 | 31 | expect_equal(list.files(file.path(tmp_data_store, "hgu95av2.db", "inst")), 32 | c("extdata")) 33 | }) 34 | -------------------------------------------------------------------------------- /man/edit_data_experiment_permissions.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/permissions.R 3 | \name{edit_data_experiment_permissions} 4 | \alias{edit_data_experiment_permissions} 5 | \alias{edit_data_experiment_permissions.data.frame} 6 | \alias{edit_data_experiment_permissions.list} 7 | \title{Edit the data experiment permissions} 8 | \usage{ 9 | edit_data_experiment_permissions(x, ...) 10 | 11 | \method{edit_data_experiment_permissions}{data.frame}(x, 12 | data = read_permissions("hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz"), 13 | version = 3.2, ...) 
14 | 15 | \method{edit_data_experiment_permissions}{list}(x, 16 | data = read_permissions("hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz"), 17 | version = "3.2") 18 | } 19 | \arguments{ 20 | \item{x}{The edits to perform} 21 | 22 | \item{...}{Additional arguments passed to methods} 23 | 24 | \item{data}{a authz data file} 25 | 26 | \item{version}{The release version number} 27 | } 28 | \description{ 29 | Edit the data experiment permissions 30 | } 31 | \section{Methods (by class)}{ 32 | \itemize{ 33 | \item \code{data.frame}: data.frame input, expects columns \sQuote{package} and \sQuote{user} 34 | 35 | \item \code{list}: list input, expects a named list of packages and users 36 | }} 37 | 38 | -------------------------------------------------------------------------------- /R/gitolite_pubkey_update.R: -------------------------------------------------------------------------------- 1 | gitolite_pubkey_update <- 2 | function(gitid, svnid=gitid, 3 | keydir = "/home/mtmorgan/a/gitolite-admin/keydir") 4 | { 5 | stopifnot( 6 | is.character(gitid), length(gitid) == 1L, !is.na(gitid), 7 | is.character(svnid), length(svnid) == 1L, !is.na(svnid), 8 | is.character(keydir), length(keydir) == 1L, dir.exists(keydir) 9 | ) 10 | 11 | ## get keys from github 12 | response <- httr::GET(sprintf("https://github.com/%s.keys", gitid)) 13 | stop_for_status(response) 14 | keys <- unique(trimws(strsplit(as.character(response), "\n")[[1]])) 15 | 16 | ## find existing keys 17 | pubfile <- sprintf("%s.pub", svnid) 18 | pub <- dir(keydir, pattern=pubfile, recursive=TRUE, full=TRUE) 19 | pub_keydir <- basename(dirname(pub)) 20 | existing <- trimws(vapply(pub, readLines, character(1))) 21 | keys <- keys[!keys %in% existing] 22 | 23 | possible_keydir <- sprintf("key-%d", 1:100) 24 | need_keydir <- file.path( 25 | keydir, 26 | head(setdiff(possible_keydir, pub_keydir), length(keys)) 27 | ) 28 | 29 | ## write new keys 30 | for (key in seq_along(keys)) { 31 | if (!dir.exists(need_keydir[[key]])) 32 | dir.create(need_keydir[[key]]) 33 | writeLines(keys[[key]], file.path(need_keydir[[key]], pubfile)) 34 | } 35 | 36 | length(keys) 37 | } 38 | -------------------------------------------------------------------------------- /man/workflow_standard.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/workflow_standard.R 3 | \name{workflow_standard} 4 | \alias{CreatePackageAssignmentEmail} 5 | \alias{DownloadNewPackageTarballs} 6 | \alias{DraftWeeklySummaryEmail} 7 | \alias{ManageNewPackagesCredentials} 8 | \alias{workflow_standard} 9 | \title{Workflow steps} 10 | \usage{ 11 | CreatePackageAssignmentEmail(assignInTracker = FALSE, 12 | secret = proj_path("bioconductorseattle-gmail.json")) 13 | 14 | DownloadNewPackageTarballs(pre = pre_accepted_packages()) 15 | 16 | ManageNewPackagesCredentials(metadata, createDraft = TRUE) 17 | 18 | DraftWeeklySummaryEmail() 19 | } 20 | \arguments{ 21 | \item{assignInTracker}{logical(1) indicating whether package 22 | assignments should be updated in the tracker.} 23 | 24 | \item{secret}{character(1) path to JSON API client access secret.} 25 | 26 | \item{pre}{data.frame() returned by \code{pre_accepted_packages()}, 27 | retrieved from the tracker and indicating packages tagged as 28 | 'pre-accepted'} 29 | 30 | \item{metadata}{Return value from 31 | \code{DownloadNewPackageTarballs}. 
If missing, search 32 | \code{proj_path()} for most recent saved version as RData with 33 | format "new-packages-metadata_20160211.RData"} 34 | 35 | \item{createDraft}{logical(1) draft email to FHCRC for SVN new user 36 | credentials} 37 | } 38 | \description{ 39 | Workflow steps 40 | } 41 | 42 | -------------------------------------------------------------------------------- /man/tracker_search.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/tracker.R 3 | \name{tracker_search} 4 | \alias{my_issues} 5 | \alias{pre_accepted_packages} 6 | \alias{tracker_search} 7 | \alias{unassigned_packages} 8 | \title{Query the issue tracker} 9 | \usage{ 10 | tracker_search(columns = c("id", "activity", "title", "creator", "status"), 11 | sort = desc("activity"), filter = c("status", "assignedto"), 12 | status = c(-1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), ..., 13 | session = tracker_login()) 14 | 15 | unassigned_packages(status = c(-1, 1, 2, 3, 4, 5, 9), ..., 16 | session = tracker_login()) 17 | 18 | pre_accepted_packages(status = 9, ..., session = tracker_login()) 19 | 20 | my_issues(user = NULL, status = c(-1, 1, 2, 3, 4, 5, 9, 10), ..., 21 | session = tracker_login()) 22 | } 23 | \arguments{ 24 | \item{columns}{which columns to return} 25 | 26 | \item{sort}{A column to sort the data by} 27 | 28 | \item{filter}{what columns are used to filter} 29 | 30 | \item{status}{the status codes used to filter} 31 | 32 | \item{...}{Additional query parameters} 33 | 34 | \item{session}{the HTTP session to use} 35 | } 36 | \description{ 37 | Query the issue tracker 38 | } 39 | \section{Functions}{ 40 | \itemize{ 41 | \item \code{unassigned_packages}: retrieve unassigned packages 42 | 43 | \item \code{pre_accepted_packages}: retrieve pre-accepted packages 44 | 45 | \item \code{my_issues}: retrieve the logged in users packages 46 | }} 47 | \examples{ 48 | tracker_search("@search_text" = "normalize450k") 49 | } 50 | 51 | -------------------------------------------------------------------------------- /R/bioc_views.R: -------------------------------------------------------------------------------- 1 | bioc_views_from_files <- function(files) { 2 | names(files) <- basename(files) 3 | descriptions <- lapply(files, readDESCRIPTION) 4 | views <- vapply(descriptions, `[[`, character(1), "biocViews") 5 | strsplit(trimws(views), "[[:blank:],\n]+") 6 | } 7 | 8 | #' Extract and validiate biocViews terms from tarball 9 | #' 10 | #' @param files tar.gz file locations 11 | #' @return named vector classifying each tar ball to biocViews hierarchy 12 | bioc_views_classification <- function(files) { 13 | names(files) <- basename(files) 14 | views <- bioc_views_from_files (files) 15 | curr <- biocViews::getCurrentbiocViews() 16 | curr <- setNames(unlist(curr, use.names=FALSE), 17 | rep(names(curr), lengths(curr))) 18 | 19 | idx <- match(unlist(views), curr) 20 | if (anyNA(idx)) { 21 | msg <- paste0(unlist(views)[is.na(idx)], " (", 22 | rep(names(views), lengths(views))[is.na(idx)], 23 | ")", collapse="\n ") 24 | stop("invalid biocViews:\n ", msg) 25 | } 26 | 27 | class <- relist(names(curr)[idx], views) 28 | class1 <- lapply(class, unique) 29 | 30 | if (!all(lengths(class1) == 1L)) { 31 | ok <- lengths(class1) == 1L 32 | msg <- paste(Map(function(nm, v, c) { 33 | sprintf("%s: %s", nm, paste0(v, " (", c, ")", collapse=", ")) 34 | }, names(views)[!ok], views[!ok], class[!ok]), collapse="\n ") 35 | stop("conflicting biocViews 
subgraphs:\n ", msg) 36 | } 37 | 38 | split(unname(files[names(class1)]), unlist(unname(class1))) 39 | } 40 | -------------------------------------------------------------------------------- /tests/testthat/bioconductor.authz.orig: -------------------------------------------------------------------------------- 1 | [groups] 2 | # Global BioC Repos Groups 3 | bioconductor-readers = a.rauschenberger, n.lawlor, p.freire-pritchett, j.fan, jd.zhang, j.sun2, j.heiss, p.westermark, a.karapetyan, m.zingaretti, v.kim 4 | 5 | bioconductor-write0 = j.hester, b.long, j.java 6 | 7 | # Misc BioC Groups 8 | Brazil-Oct-2013 = j.gagneur, e.turro, bcarvalh@jhsph.edu, m.love 9 | 10 | # a group for anonymous use, readonly is a shared username 11 | # the password is 'readonly' 12 | anon = readonly 13 | 14 | # BioC Package Groups 15 | DEDS = yxiao@itsa.ucsf.edu 16 | 17 | bgafun = i.wallace 18 | idiogram = Karl.Dykema@vai.org 19 | sagenhaft = beissbar 20 | FamAgg = j.rainer 21 | iCOBRA = c.soneson 22 | Chicago = m.spivakov, j.cairns, p.freire-pritchett 23 | globalSeq = a.rauschenberger 24 | multiClust = n.lawlor 25 | scde = j.fan 26 | biomformat = p.mcmurdie 27 | BioQC = jd.zhang 28 | dcGSA = j.sun, j.sun2 29 | normalize450K = j.heiss 30 | profileScoreDist = p.westermark 31 | transcriptR = a.karapetyan 32 | kimod = m.zingaretti 33 | lpsymphony = v.kim 34 | splineTCDiffExpr = h.braselmann 35 | 36 | # No one gets any access by default. 37 | [/] 38 | * = 39 | @bioconductor-readers = r 40 | @bioconductor-write0 = rw 41 | 42 | [/trunk/bioconductor.org] 43 | @anon = r 44 | 45 | [/trunk/madman/Rpacks/normalize450K] 46 | @normalize450K = rw 47 | 48 | [/branches/RELEASE_3_2/madman/Rpacks/normalize450K] 49 | @normalize450K = rw 50 | 51 | [/trunk/madman/Rpacks/profileScoreDist] 52 | @profileScoreDist = rw 53 | 54 | [/branches/RELEASE_3_2/madman/Rpacks/profileScoreDist] 55 | @profileScoreDist = rw 56 | 57 | [/trunk/madman/Rpacks/transcriptR] 58 | @transcriptR = rw 59 | 60 | [/branches/RELEASE_3_2/madman/Rpacks/transcriptR] 61 | @transcriptR = rw 62 | 63 | [/trunk/madman/Rpacks/kimod] 64 | @kimod = rw 65 | 66 | [/branches/RELEASE_3_2/madman/Rpacks/kimod] 67 | @kimod = rw 68 | 69 | [/trunk/madman/Rpacks/lpsymphony] 70 | @lpsymphony = rw 71 | 72 | [/branches/RELEASE_3_2/madman/Rpacks/lpsymphony] 73 | @lpsymphony = rw 74 | 75 | [/trunk/madman/Rpacks/splineTCDiffExpr] 76 | @splineTCDiffExpr = rw 77 | 78 | [/branches/RELEASE_3_2/madman/Rpacks/splineTCDiffExpr] 79 | @splineTCDiffExpr = rw 80 | 81 | -------------------------------------------------------------------------------- /R/installDependencies.R: -------------------------------------------------------------------------------- 1 | ## Code to install dependencies even if it's a new tarball that is not 2 | ## yet in the project manifest/biocLite. 
3 | 4 | 5 | ## Helper based on code that Dan originally needed for BiocCheck 6 | .depToCharacter <- function(input){ 7 | if (is.null(input)) return(NULL) 8 | output <- gsub("\\s", "", input) 9 | output <- gsub("\\([^)]*\\)", "", output) 10 | if(dim(output)[2] ==0){ 11 | return(NULL) 12 | }else{ 13 | res <- strsplit(output, ",")[[1]] 14 | res[which(res != "R")] 15 | } 16 | } 17 | 18 | 19 | ## Helper to extract all dependencies and the return them as a character vector 20 | .extractDependencies <- function(dir){ 21 | dirPath <- file.path(dir, "DESCRIPTION") 22 | DESC <- read.dcf(dirPath) 23 | deps <- .depToCharacter(DESC[,grepl("Depends",colnames(DESC)),drop=FALSE]) 24 | sugs <- .depToCharacter(DESC[,grepl("Suggests",colnames(DESC)),drop=FALSE]) 25 | imps <- .depToCharacter(DESC[,grepl("Imports",colnames(DESC)),drop=FALSE]) 26 | enhs <- .depToCharacter(DESC[,grepl("Enhances",colnames(DESC)),drop=FALSE]) 27 | lnkt <- .depToCharacter(DESC[,grepl("LinkingTo",colnames(DESC)),drop=FALSE]) 28 | res <- c(deps, sugs, imps, enhs, lnkt) 29 | ip <- rownames(installed.packages()) 30 | res <- res[!res %in% ip] 31 | if(length(res) == 0){ 32 | stop("there are no dependencies to install.") 33 | } 34 | res 35 | } 36 | 37 | installDeps <- function(tarball){ 38 | if(!file.info(tarball)$isdir) 39 | untar(tarball) 40 | dir <- .getShortPkgName(tarball) 41 | dep <- .extractDependencies(dir) 42 | require(BiocInstaller) 43 | biocLite(dep) 44 | } 45 | 46 | 47 | ## library(BiocContributions); tarball <- system.file("testpackages", "AnnotationHub_1.3.18.tar.gz", package="BiocContributions"); 48 | ## installDeps(tarball) 49 | 50 | ## library(BiocContributions); installDeps('genomationData_0.99.tar.gz') 51 | 52 | 53 | 54 | ## Just a helper to unpack a directory of tarballs for indiv changes 55 | ## (version bumps etc.) 56 | untarAllTarballs <- function(tarballsPath=".", suffix=".tar.gz$"){ 57 | tars <- .getTars(path=tarballsPath, suffix=suffix) 58 | lapply(tars, function(x){ system(paste0('tar zxvf ',x))}) 59 | } 60 | -------------------------------------------------------------------------------- /tests/testthat/bioc-data.authz.orig: -------------------------------------------------------------------------------- 1 | [groups] 2 | bioc-data-readers = a.kauffmann, y.taguchi, j.barry, r.carlos, p.freire-pritchett, j.cairns, m.spivakov, h.braselmann, g.bhatti, m.smith 3 | 4 | 5 | anon = readonly 6 | 7 | bioc-data-writers = s.arora, n.hayden, j.hester 8 | 9 | 10 | # BioC Data Package Groups 11 | lumiBarnes = dupan@northwestern.edu 12 | CCl4 = a.kauffmann 13 | RTCGA.mutations = m.kosinski 14 | PGPC = f.klein 15 | MEALData = r.carlos 16 | PCHiCdata = p.freire-pritchett, j.cairns, m.spivakov 17 | furrowSeg = j.barry 18 | FIs = h.braselmann 19 | 20 | # No one gets any access by default. 21 | [/] 22 | * = 23 | @bioc-data-readers = r 24 | @bioc-data-writers = rw 25 | 26 | # Everyone should get read-only access (?) 
27 | [/trunk/experiment] 28 | @anon = r 29 | 30 | [/branches/RELEASE_3_2/experiment] 31 | @anon = r 32 | 33 | [/branches/RELEASE_3_2/experiment] 34 | @anon = r 35 | 36 | [/trunk/experiment/pkgs/lumiBarnes] 37 | @lumiBarnes = rw 38 | 39 | [/trunk/experiment/data_store/lumiBarnes] 40 | @lumiBarnes = rw 41 | 42 | [/trunk/experiment/pkgs/PGPC] 43 | @PGPC = rw 44 | 45 | [/trunk/experiment/data_store/PGPC] 46 | @PGPC = rw 47 | 48 | [/branches/RELEASE_3_2/experiment/pkgs/PGPC] 49 | @PGPC = rw 50 | 51 | [/branches/RELEASE_3_2/experiment/data_store/PGPC] 52 | @PGPC = rw 53 | 54 | [/trunk/experiment/pkgs/MEALData] 55 | @MEALData = rw 56 | 57 | [/trunk/experiment/data_store/MEALData] 58 | @MEALData = rw 59 | 60 | [/branches/RELEASE_3_2/experiment/pkgs/MEALData] 61 | @MEALData = rw 62 | 63 | [/branches/RELEASE_3_2/experiment/data_store/MEALData] 64 | @MEALData = rw 65 | 66 | [/trunk/experiment/pkgs/PCHiCdata] 67 | @PCHiCdata = rw 68 | 69 | [/trunk/experiment/data_store/PCHiCdata] 70 | @PCHiCdata = rw 71 | 72 | [/branches/RELEASE_3_2/experiment/pkgs/PCHiCdata] 73 | @PCHiCdata = rw 74 | 75 | [/branches/RELEASE_3_2/experiment/data_store/PCHiCdata] 76 | @PCHiCdata = rw 77 | [/trunk/experiment/pkgs/furrowSeg] 78 | @furrowSeg = rw 79 | 80 | [/trunk/experiment/data_store/furrowSeg] 81 | @furrowSeg = rw 82 | 83 | [/branches/RELEASE_3_2/experiment/pkgs/furrowSeg] 84 | @furrowSeg = rw 85 | 86 | [/branches/RELEASE_3_2/experiment/data_store/furrowSeg] 87 | @furrowSeg = rw 88 | 89 | [/trunk/experiment/pkgs/FIs] 90 | @FIs = rw 91 | 92 | [/trunk/experiment/data_store/FIs] 93 | @FIs = rw 94 | 95 | [/branches/RELEASE_3_2/experiment/pkgs/FIs] 96 | @FIs = rw 97 | 98 | [/branches/RELEASE_3_2/experiment/data_store/FIs] 99 | @FIs = rw 100 | 101 | -------------------------------------------------------------------------------- /R/utils.R: -------------------------------------------------------------------------------- 1 | # simple implementation of plyr::ddply 2 | # subset a data.frame by grouping variables and apply a function to each group 3 | ddply <- function(x, by, fun, ...) { 4 | do.call(rbind, lapply(split(x, x[[by]]), fun, ...)) 5 | } 6 | 7 | 8 | rows <- function(x, ...) UseMethod("rows") 9 | rows.data.frame <- function(x, ...) { 10 | by(x, seq_len(NROW(x)), ...) 11 | } 12 | 13 | # use findInterval to merge x and y by the closest type 14 | # @param by the column to merge by 15 | # @param decreasing to sort results increasing or decreasing 16 | merge_closest <- function(x, y, fun, ...) { 17 | 18 | # generate idx columns for both datasets 19 | x$idx <- seq_len(NROW(x)) 20 | single_bracket <- function(x, i, j, ..., drop = TRUE) { 21 | x[i, j, ..., drop = drop] 22 | } 23 | row_apply <- function(x, fun, ...) { 24 | lapply( 25 | lapply(seq_len(NROW(x)), single_bracket, x = x), 26 | fun, ...) 
27 | } 28 | y$idx <- vapply(row_apply(y, fun), function(x) { x <- which.min(x); if (length(x)) x else NA }, integer(1)) 29 | 30 | res <- merge(x, y, by = "idx", all.x = TRUE) 31 | 32 | # remove the idx column from result 33 | res <- res[names(res) != "idx"] 34 | 35 | res 36 | } 37 | 38 | deduplicate <- function(x) { 39 | # this is quadratic in time complexity, but shouldn't matter in practice 40 | while(anyDuplicated(x)) { 41 | is_dup <- duplicated(x) 42 | dups <- x[is_dup] 43 | 44 | m <- regexpr("\\.[[:digit:]]+$", dups) 45 | regmatches(dups, m) <- Map(function(x) paste0(".", as.numeric(substr(x, 2, nchar(x))) + 1), 46 | regmatches(dups, m)) 47 | dups[m == -1] <- paste0(dups, ".2") 48 | x[is_dup] <- dups 49 | } 50 | x 51 | } 52 | 53 | desc <- function(x) { 54 | if (is.numeric(x)) { 55 | -x 56 | } else { 57 | paste0("-", x) 58 | } 59 | } 60 | 61 | roundup_datetime <- function(x, ...) { 62 | as.POSIXct(format = "%Y-%m-%d.%H:%M:%S", tz = "US/Pacific", x, ...) 63 | } 64 | 65 | fmt <- whisker::whisker.render 66 | 67 | assert <- function(x, msg) { 68 | if (!isTRUE(x)) { 69 | stop(msg, call. = FALSE) 70 | } 71 | } 72 | 73 | compact <- function(x) { 74 | is_empty <- vapply(x, function(x) length(x) == 0, logical(1)) 75 | x[!is_empty] 76 | } 77 | 78 | is_named <- function(x) { 79 | !is.null(names(x)) && all(nzchar(names(x))) 80 | } 81 | 82 | cards <- function(x, asMatrix=FALSE, skip=1, ...) 83 | { 84 | zz <- textConnection(x) 85 | m <- read.table(zz, skip=skip, ...) 86 | if (asMatrix) { 87 | m <- as.matrix(m) 88 | dimnames(m) <- NULL 89 | } 90 | close(zz) 91 | 92 | return (m) 93 | } 94 | -------------------------------------------------------------------------------- /R/svn_auth_text.R: -------------------------------------------------------------------------------- 1 | ##' Update svn authz files 2 | ##' 3 | ##' These functions generate text to copy / paste into software svn 4 | ##' file bioconductor.authz or data experiment svn file 5 | ##' bioc-data.authz 6 | ##' 7 | ##' @param filenames character() vector of (full) paths to package tar balls. 8 | ##' 9 | ##' @param version character(1) version string as it appears in svn 10 | ##' 'RELEASE' branch, e.g., \code{"3_2"} corresponding to 11 | ##' RELEASE_3_2. 
12 | ##' 13 | ##' @name svn_auth_text 14 | NULL 15 | 16 | ##' @rdname svn_auth_text 17 | ##' @export 18 | svn_software_auth_text <- 19 | function(filenames, 20 | version=getOption("bioc_contributions_release_version")) 21 | { 22 | version <- sprintf("%d_%d", 23 | package_version(version)$major, 24 | package_version(version)$minor) 25 | m <- lapply(filenames, maintainers) 26 | userid <- tolower(vapply(m, function(elt) { 27 | elt <- elt[[1]] # first maintainer only 28 | sprintf("%s.%s", substr(elt$given[1], 1, 1), elt$family) 29 | }, "")) 30 | package <- sub("_0.99.*", "", basename(filenames)) 31 | 32 | ## read permission 33 | cat(paste(c("", userid), collapse=", "), "\n\n") 34 | 35 | ## groups 36 | cat(paste(package, userid, sep=" = ", collapse="\n"), "\n\n") 37 | 38 | ## permission 39 | for (p in package) 40 | cat(paste0( 41 | "[/trunk/madman/Rpacks/", p, "]\n", 42 | "@", p, " = rw\n", 43 | "\n", 44 | "[/branches/RELEASE_", version, "/madman/Rpacks/", p, "]\n", 45 | "@", p, " = rw\n"), 46 | "\n") 47 | } 48 | 49 | 50 | ##' @rdname svn_auth_text 51 | ##' @export 52 | svn_data_experiment_auth_text <- 53 | function(filenames, 54 | version=getOption("bioc_contributions_release_version")) 55 | { 56 | version <- sprintf("%d_%d", 57 | package_version(version)$major, 58 | package_version(version)$minor) 59 | m <- lapply(filenames, maintainers) 60 | userid <- tolower(vapply(m, function(elt) { 61 | elt <- elt[[1]] # first maintainer only 62 | sprintf("%s.%s", substr(elt$given[1], 1, 1), elt$family) 63 | }, "")) 64 | package <- sub("_0.99.*", "", basename(filenames)) 65 | 66 | ## read permission 67 | cat(paste(c("", userid), collapse=", "), "\n\n") 68 | 69 | ## groups 70 | cat(paste(package, userid, sep=" = ", collapse="\n"), "\n\n") 71 | 72 | ## permission 73 | for (p in package) 74 | cat(paste0( 75 | "[/trunk/experiment/pkgs/", p, "]\n", 76 | "@", p, " = rw\n", 77 | "\n", 78 | "[/trunk/experiment/data_store/", p, "]\n", 79 | "@", p, " = rw\n", 80 | "\n", 81 | "[/branches/RELEASE_", version, "/experiment/pkgs/", p, "]\n", 82 | "@", p, " = rw\n", 83 | "\n", 84 | "[/branches/RELEASE_", version, "/experiment/data_store/", p, "]\n", 85 | "@", p, " = rw\n"), 86 | "\n") 87 | } 88 | -------------------------------------------------------------------------------- /R/convertToBin.R: -------------------------------------------------------------------------------- 1 | ## Here I aim to formalize my code that currently processes any 2 | ## package that does not contain source code into mac and windows 3 | ## packages. 
4 | 5 | ## Basically the R version of these three shell loops: 6 | ## for pkg in `ls ./fixed/*.tar.gz` 7 | ## do 8 | ## echo $pkg 9 | ## /home/mcarlson/RInstalls/R-302/bin/R CMD INSTALL --build $pkg 10 | ## # R CMD INSTALL --build $pkg 11 | ## done 12 | 13 | ## for pkg in `ls ./*.tar.gz` 14 | ## do 15 | ## echo $pkg 16 | ## pkgName=${pkg%_R_x86_64-unknown-linux-gnu.tar.gz} 17 | ## cp $pkg ${pkgName}.tgz 18 | ## tar zxf $pkg 19 | ## ##change Built: line in DESCRIPTION to say windows instead of unix 20 | ## sed -i -e "s/unix/windows/g" */DESCRIPTION 21 | ## done 22 | 23 | ## for pkg in `ls ./*.tar.gz` ## lists dirs 24 | ## do 25 | ## pkgName=${pkg%_R_x86_64-unknown-linux-gnu.tar.gz} ## ./savR_0.99.1 26 | ## pkgReal=`echo $pkg | cut -d _ -f 1` ## ./savR 27 | ## # echo $pkgName 28 | ## # echo $pkgReal 29 | ## rm -f ${pkgReal}/MD5 ## new line 30 | ## zip -r ${pkgName}.zip $pkgReal 31 | ## rm -rf $pkgReal 32 | ## rm -rf $pkg 33 | ## done 34 | 35 | 36 | 37 | .getLongPkgName <- function(tarball){ 38 | sep <- .Platform$file.sep 39 | notTar <- paste("^",sep,".*",sep, sep="") 40 | tar <- sub(notTar,"",tarball, perl=TRUE) 41 | sub(".tar.gz","", tar, perl=TRUE) 42 | } 43 | 44 | .windowIzeDESCRIPTION <- function(dir){ 45 | dirPath <- file.path(dir, "DESCRIPTION") 46 | DESC <- read.dcf(dirPath) 47 | DESC[,'Built'] <- sub('unix', 'windows', DESC[,'Built']) 48 | write.dcf(DESC, file=dirPath) 49 | } 50 | 51 | 52 | ## So 1st thing is that this function is implicitly a unix only command. 53 | makeBins <- function(tarball){ 54 | if(.Platform$OS.type != "unix"){ 55 | stop("Sorry this function is only available from Unix")} 56 | 57 | ############################################### 58 | ## use system to call R CMD INSTALL -- build 59 | cmd <- paste("R CMD INSTALL --build", tarball) 60 | system(cmd) 61 | 62 | ############################################### 63 | ## make the Mac binary: 64 | ## now get the unmangled package name 65 | pkg <- .getLongPkgName(tarball) 66 | builtPkg <- paste(pkg, "_R_x86_64-unknown-linux-gnu.tar.gz", sep="") 67 | macPkg <- paste(pkg,".tgz",sep="") 68 | file.copy(builtPkg, to=macPkg) 69 | 70 | ############################################### 71 | ## make the Windows Binary 72 | ## untar the built package ### tar zxf $pkg 73 | untar(builtPkg) 74 | 75 | ## also need the 'true' packagename (aka the dir name) 76 | pkgDir <- .getShortPkgName(tarball) 77 | 78 | ## and change important line in DESCRIPTION to be windows. 79 | .windowIzeDESCRIPTION(dir=pkgDir) 80 | 81 | ## remove any MD5 files 82 | MD5File <- file.path(pkgDir,"MD5") 83 | if(file.exists(MD5File)){ 84 | unlink(MD5File) 85 | } 86 | ## zip up the tarball 87 | zip(paste(pkg,".zip",sep=""),files=pkgDir) 88 | 89 | ## remove the unpacked pkgDir along with the builtPkg 90 | if(file.exists(pkgDir)){ 91 | unlink(pkgDir, recursive=TRUE) 92 | } 93 | if(file.exists(builtPkg)){ 94 | unlink(builtPkg) 95 | } 96 | } 97 | 98 | 99 | ## library(BiocContributions); tarball <- system.file("testpackages", "hgu95av2.db_2.10.1.tar.gz", package="BiocContributions"); 100 | 101 | ## makeBins(tarball) 102 | 103 | -------------------------------------------------------------------------------- /R/tallyManifests.R: -------------------------------------------------------------------------------- 1 | ## Used to count and plot the amount of packages in the respos. 2 | 3 | ## helper that generates names for manifests. You have to update this 4 | ## each release in order to get the current totals! 
5 | .makeManifestNames <- function(path, appendPath=TRUE){ 6 | files <- dir(path)[grepl(glob2rx("bioc_*.manifest"),dir(path))] 7 | if(appendPath==TRUE){ 8 | paste0(path,files) 9 | }else{ 10 | files 11 | } 12 | } 13 | 14 | .makeExpManifestNames <- function(path, appendPath=TRUE){ 15 | files <- dir(path)[grepl(glob2rx("bioc-data-experiment.*.manifest"), 16 | dir(path))] 17 | if(appendPath==TRUE){ 18 | paste0(path,files) 19 | }else{ 20 | files 21 | } 22 | } 23 | 24 | .getManifestFilenameFromVersion <- function(path, version) 25 | { 26 | file.path(path, paste0("bioc_", version, ".manifest")) 27 | } 28 | 29 | 30 | ## Helper to just read in one manifest and get the total number of packages. 31 | .scanMani <- function(file){ 32 | res <- scan(file, what="character",skip=1, quiet=TRUE) 33 | table(grepl("Package", res))[["TRUE"]] 34 | } 35 | 36 | .getPkgs <- function(file) 37 | { 38 | lines <- readLines(file) 39 | lines <- lines[grepl("^Package:", lines)] 40 | lines <- gsub("^Package:", "", lines) 41 | lines <- trimws(lines) 42 | } 43 | 44 | 45 | ## This extracts the package totals based on existing manifests 46 | getPackageTotals <- 47 | function(path = proj_path("Rpacks")) 48 | { 49 | manis <- .makeManifestNames(path) 50 | maniNames <- .makeManifestNames(path, appendPath=FALSE) 51 | ## Always update the most recent manifest file (at the very least) 52 | lastMani <- manis[length(manis)] 53 | system(paste0("svn up ", lastMani)) 54 | res <- setNames(unlist(lapply(manis, .scanMani)), maniNames) 55 | res[order(res)] ## assumption: that the size will always grow... 56 | } 57 | 58 | ## getPackageTotals() 59 | 60 | 61 | ## And this plots the package totals based on existing manifests 62 | plotPackageTotals <- 63 | function(path = proj_path("Rpacks")) 64 | { 65 | totals <- getPackageTotals(path) 66 | plot(totals) 67 | abline(a=100,b=20,col="red") 68 | } 69 | 70 | ## plotPackageTotals() 71 | 72 | 73 | getPackageDeltas <- 74 | function(path = proj_path("Rpacks")) 75 | { 76 | tots <- getPackageTotals(path) 77 | res <- integer() 78 | names <- character() 79 | for(i in seq_along(tots)){ 80 | res[i] <- tots[i+1] - tots[i] 81 | names[i] <- paste0(names(tots[i]),"_TO_",names(tots[i+1])) 82 | names(res) <- names 83 | } 84 | res <- res[1:(length(res)-1)] 85 | res 86 | } 87 | 88 | compareReleases <- 89 | function(path = proj_path("Rpacks"), oldRel="3.0", newRel="3.1") 90 | { 91 | oldPkgs <- .getPkgs(.getManifestFilenameFromVersion(path, oldRel)) 92 | newPkgs <- .getPkgs(.getManifestFilenameFromVersion(path, newRel)) 93 | list(removed=sort(setdiff(oldPkgs, newPkgs)), 94 | added=sort(setdiff(newPkgs, oldPkgs))) 95 | } 96 | 97 | getDescriptions <- 98 | function(path = proj_path("Rpacks", pkgs)) 99 | { 100 | ret <- "" 101 | for (pkg in pkgs) 102 | { 103 | desc <- file.path(path, pkg, "DESCRIPTION") 104 | dcf <- read.dcf(desc) 105 | if ("Description" %in% colnames(dcf)) 106 | { 107 | val <- unname(dcf[, "Description"]) 108 | val <- gsub("\n", " ", val) 109 | val <- paste0(pkg, " - ", val) 110 | val <- paste(strwrap(val, width=60), collapse="\n") 111 | ret <- paste0(ret, val, "\n\n") 112 | } 113 | } 114 | ret 115 | } 116 | -------------------------------------------------------------------------------- /R/svn.R: -------------------------------------------------------------------------------- 1 | #' Declare an SVN instance 2 | #' 3 | #' This is primarily a helper function for using svn progromatically. 
4 | #' @param dir The SVN directory 5 | #' @examples 6 | #' \dontrun{ 7 | #' s <- svn("my/svn/location") 8 | #' s$status() 9 | #' s$update() 10 | #' s$add("file3") 11 | #' s$commit("adding file3") 12 | #' } 13 | svn <- function(dir = getwd()) { 14 | 15 | parse_xml <- function(x) { 16 | xml2::read_xml(paste(collapse = "\n", x)) 17 | } 18 | status <- function(args) { 19 | files <- parse_xml(system2("svn", args = c("status", "--xml", args), 20 | stdout = TRUE)) 21 | entries <- xml2::xml_find_all(files, "//entry") 22 | data.frame(filename = xml2::xml_attr(entries, "path"), 23 | type = xml2::xml_attr(xml2::xml_children(entries), "item")) 24 | } 25 | 26 | list( 27 | add = function(files, args = NULL) { 28 | withr::with_dir(dir, system2("svn", args = c("add", files, args))) 29 | }, 30 | 31 | commit = function(message, args = NULL) { 32 | tmp <- tempfile() 33 | on.exit(unlink(tmp)) 34 | writeLines(con = tmp, message) 35 | withr::with_dir(dir, 36 | system2("svn", 37 | args = c("commit", "--file", tmp, args))) 38 | }, 39 | 40 | status = function(args = NULL) { 41 | withr::with_dir(dir, status(args)) 42 | }, 43 | log = function(args = NULL) { 44 | withr::with_dir(dir, 45 | xml2::read_xml( 46 | paste(collapse = "\n", 47 | system2("svn", 48 | args = c("log", "--xml", args), 49 | stdout = TRUE)))) 50 | }, 51 | remove_untracked = function(args = NULL) { 52 | withr::with_dir(dir, { 53 | files <- status(args) 54 | unlink(subset(files, type == "unversioned")$filename, 55 | recursive = TRUE) 56 | }) 57 | }, 58 | update = function(args = NULL) { 59 | withr::with_dir(dir, { 60 | system2("svn", args = c("up", args)) 61 | }) 62 | }, 63 | read = function(filename) { 64 | withr::with_dir(dir, { 65 | if (!(is.character(filename) && length(filename) == 1)) { 66 | stop("Only read one file at a time", call. = FALSE) 67 | } 68 | readLines(con = filename) 69 | }) 70 | }, 71 | write = function(filename, content) { 72 | content <- force(content) 73 | withr::with_dir(dir, { 74 | if (!(is.character(filename) && length(filename) == 1)) { 75 | stop("Only write one file at a time", call. = FALSE) 76 | } 77 | con <- file(filename, "wb") 78 | writeLines(content, con, sep="\n") 79 | close(con) 80 | }) 81 | }, 82 | ls = function(args = NULL) { 83 | withr::with_dir(dir, { 84 | system2("svn", args = c("ls", args)) 85 | }) 86 | }, 87 | diff = function(args = NULL) { 88 | withr::with_dir(dir, { 89 | system2("svn", args = c("diff", args)) 90 | }) 91 | } 92 | ) 93 | } 94 | 95 | check_manifest <- function(x, pkgs) { 96 | match <- compact(Map(function(pkg) { 97 | grep(paste0("Package:[[:space:]]+", pkg, "\\b"), x) 98 | }, pkgs)) 99 | if (length(match) > 0) { 100 | stop(paste0(sQuote(names(match)), collapse = ", "), 101 | " already in manifest line(s): ", 102 | paste0(collapse = ", ", unlist(match)), call. 
= FALSE) 103 | } 104 | TRUE 105 | } 106 | 107 | add_package_type <- function(svn_location, manifest, clean_function, 108 | adding_code) 109 | { 110 | eval(bquote( 111 | function(x, svn_location = .(svn_location), manifest = .(manifest)) { 112 | #lapply(x, .(clean_function), .(svn_location)) 113 | lapply(x, .(clean_function)) 114 | s <- svn(svn_location) 115 | s$update() 116 | 117 | pkg_names <- .getShortPkgName(x) 118 | s$status() 119 | 120 | current <- s$read(manifest) 121 | if (check_manifest(current, pkg_names)) { 122 | .(adding_code) 123 | s$write(manifest, 124 | append(current, paste0("Package: ", pkg_names, "\n"))) 125 | s$status() 126 | s$commit(paste0("Adding ", paste(collapse = ", ", pkg_names))) 127 | } 128 | })) 129 | } 130 | 131 | #' Add packages to SVN 132 | #' 133 | #' @param x package tarballs to add. 134 | #' @param svn_location location of the SVN repository 135 | #' @param manifest name of the manifest file 136 | #' @name add_packages 137 | NULL 138 | 139 | print.svn_logentry <- function(x, ...) { 140 | cat(sep = "\n", 141 | paste(sep = " | ", 142 | xml_attr(x, "revision"), 143 | xml_text(xml_find_one(x, ".//author")), 144 | xml_text(xml_find_one(x, ".//date")) 145 | ), 146 | xml_text(xml_find_one(x, ".//msg"))) 147 | } 148 | -------------------------------------------------------------------------------- /R/workflow_standard.R: -------------------------------------------------------------------------------- 1 | #' Workflow steps 2 | #' 3 | #' @name workflow_standard 4 | NULL 5 | #> NULL 6 | 7 | #' @rdname workflow_standard 8 | #' @param assignInTracker logical(1) indicating whether package 9 | #' assignments should be updated in the tracker. 10 | #' @param secret character(1) path to JSON API client access secret. 11 | CreatePackageAssignmentEmail <- 12 | function(assignInTracker=FALSE, 13 | secret=proj_path("bioconductorseattle-gmail.json")) 14 | { 15 | 16 | ## Retrieve unassigned packages 17 | pkgs <- unassigned_packages() 18 | 19 | ## Generate code used for assigning packages 20 | code <- assign_new_packages(pkgs) 21 | 22 | ## Generate package assignment email 23 | email <- package_assignment_email(pkgs, code) 24 | 25 | ## Create a new draft email with assignment content (in Gmail drafts) 26 | gmailr::gmail_auth(scope="compose", secret_file=secret) 27 | gmailr::create_draft(email) 28 | 29 | if (assignInTracker) { 30 | assign_packages(pkgs, code) 31 | return (invisible(NULL)) 32 | } else 33 | return (list(pkgs=pkgs, code=code)) 34 | } 35 | 36 | #' @rdname workflow_standard 37 | #' @param pre data.frame() returned by \code{pre_accepted_packages()}, 38 | #' retrieved from the tracker and indicating packages tagged as 39 | #' 'pre-accepted' 40 | DownloadNewPackageTarballs <- 41 | function(pre=pre_accepted_packages()) 42 | { 43 | ## Download tarballs: 44 | files <- unlist(lapply(pre$id, download, overwrite=TRUE), recursive=FALSE) 45 | filenames <- proj_path(basename(names(files))) 46 | list(pre=pre, files=files, filenames=filenames) 47 | } 48 | 49 | 50 | .LoadNewPackagesMetadata <- 51 | function(metadata.dir=proj_path(), filename.base="new-packages-metadata_") 52 | { 53 | filenames = sort(dir(metadata.dir, filename.base, full.names=TRUE), 54 | decreasing=TRUE) 55 | if (!length(filenames)) 56 | stop(".LoadNewPackagesMetadata() did not find any saved metadata") 57 | local({ 58 | load(filenames[1]) 59 | if (length(ls()) != 1L) 60 | stop(".LoadNewPackagesMetadata() metadata must have one object", 61 | "\n found: ", paste(sQuote(ls()), collapse=", ")) 62 | get(ls()) 63 | }) 64 
| } 65 | 66 | 67 | .CheckUsersCredentials <- 68 | function(metadata, credPath=proj_path("bioconductor.authz")) 69 | { 70 | d <- readLines(credPath, 3)[[3]] 71 | d <- strsplit(d, ", *")[[1]] 72 | 73 | us <- tolower(sapply(metadata$filenames, function(x) { 74 | ms <- maintainers(x)[[1]] 75 | sprintf("%s.%s", substr(ms$given[1], 1, 1), ms$family) 76 | })) 77 | 78 | list(usernames=us[!us %in% d], existing=us[us %in% d]) 79 | } 80 | 81 | #' @rdname workflow_standard 82 | #' 83 | #' @param metadata Return value from 84 | #' \code{DownloadNewPackageTarballs}. If missing, search 85 | #' \code{proj_path()} for most recent saved version as RData with 86 | #' format "new-packages-metadata_20160211.RData" 87 | #' @param createDraft logical(1) draft email to FHCRC for SVN new user 88 | #' credentials 89 | ManageNewPackagesCredentials <- 90 | function(metadata, createDraft=TRUE) 91 | { 92 | if (missing(metadata)) 93 | metadata <- .LoadNewPackagesMetadata() 94 | 95 | cat('\n', "##### Check authorization file for existing users.", '\n\n', 96 | sep='') 97 | 98 | creds = .CheckUsersCredentials(metadata) 99 | print(creds) 100 | 101 | cat("##### Gmail draft to scicomp ", '\n\n', sep='') 102 | 103 | mimeDetails <- list( 104 | From = "packages@bioconductor.org", 105 | To = "scicomp@fhcrc.org", 106 | Subject = "New SVN users for Hedgehog" 107 | ) 108 | 109 | email <- c("Hi scicomp,", "", 110 | "Can you please create new SVN account(s) on Hedgehog for", "", 111 | "@@NEWUSERS@@", "", 112 | "Thanks,", "", 113 | "Martin", "") 114 | 115 | maints <- vapply(names(creds$usernames), function(x) { 116 | as.character(maintainers(x))[[1]] 117 | }, character(1)) 118 | 119 | email <- paste(sub("@@NEWUSERS@@", paste(maints, collapse='\n'), email), 120 | collapse='\n') 121 | 122 | for (i in seq_along(mimeDetails)) 123 | cat(names(mimeDetails)[i], ": ", mimeDetails[[i]], "\n", sep="") 124 | cat(email, sep="\n") 125 | 126 | ## Create draft gmail. 
127 | if (createDraft) { 128 | gmail_auth(scope="compose") 129 | gmailr::create_draft(mime(From=mimeDetails$From, To=mimeDetails$To, 130 | Subject=mimeDetails$Subject, body=email)) 131 | } 132 | 133 | cat('\n', "##### Create draft e-mails to maintainers", '\n\n', sep='') 134 | cat("gmailr::gmail_auth(scope='compose')\n") 135 | pkgs <- unlist(unname(creds), recursive=FALSE) 136 | for (package in names(pkgs)) 137 | for (username in pkgs[[package]]) 138 | cat("gmailr::create_draft(emailMaintainer('", package, 139 | "', userId='", username, 140 | "', password='XXXXXXXXX'))\n", 141 | sep='') 142 | 143 | cat('\n', "##### Accept packages", '\n\n', sep='') 144 | 145 | for (i in seq_along(metadata$filenames)) 146 | cat("accept_package(", metadata$pre$id[[i]], ", '", 147 | metadata$filenames[[i]], "')\n", sep='') 148 | } 149 | 150 | 151 | #' @rdname workflow_standard 152 | DraftWeeklySummaryEmail <- function() 153 | { 154 | gmailr::gmail_auth(scope="compose") 155 | gmailr::create_draft(weeklyEmailPackagesOverview()) 156 | } 157 | -------------------------------------------------------------------------------- /R/getPackageRange.R: -------------------------------------------------------------------------------- 1 | .trim <- function (x) gsub("^\\s+|\\s+$", "", x) 2 | 3 | .getPackageContents_txtfile <- function(biocVersion='3.2') { 4 | url <- paste0("http://bioconductor.org/checkResults/", biocVersion, 5 | "/bioc-LATEST/STATUS_DB.txt") 6 | temp <- GET(url) 7 | html <- content(temp) 8 | unlist(strsplit(html, "\n")) 9 | } 10 | 11 | 12 | 13 | .getPageContents <- function(biocVersion="3.2") { 14 | theurl <- paste0("http://www.bioconductor.org/checkResults/",biocVersion, 15 | "/bioc-LATEST/") 16 | temp <- GET(theurl) 17 | html <- content(temp) 18 | html2 <- lapply(html["//tr"], xmlValue) 19 | html3 <- unlist(html2) 20 | stats <- head(html3,44) 21 | message(stats[1]) 22 | html3 <- html3[-(1:44)] 23 | 24 | pkgInd <- grep("Package", html3) 25 | 26 | result <- lapply(pkgInd, function(x) { 27 | start <- x 28 | end <- x+7 29 | temp <- html3[start:end] 30 | p <- gsub( " .*$", "", temp[2] ) 31 | moscato2 <- strsplit(grep("Windows", temp,value=TRUE),"x64")[[1]][2] 32 | morelia <- strsplit(grep("Mavericks", temp,value=TRUE),"x86_64")[[1]][2] 33 | petty <- strsplit(grep("Snow", temp,value=TRUE),"x86_64")[[1]][2] 34 | linux <- strsplit(grep("Linux", temp,value=TRUE),"x86_64")[[1]][2] 35 | 36 | list( pkgDetails = p, moscato2= .trim(moscato2), 37 | morelia=.trim(morelia), petty=.trim(petty), 38 | linux=.trim(linux)) 39 | }) 40 | pkgInf <- vapply(result, "[[", "", "pkgDetails") 41 | ## Some cleanup 42 | pkgInf <- gsub("\\.", "_", pkgInf) 43 | ## pkgInf <- strsplit(sub("\\W", "|", pkgInf),"\\|") 44 | pkgInf <- sub("\\W", "|", pkgInf) 45 | pkgInf <- gsub("_", "\\.", pkgInf) 46 | pkgInf <- strsplit(pkgInf,"\\|") 47 | 48 | pkg <- vapply(pkgInf, function(x){x[1]}, 'character') 49 | author <- vapply(pkgInf, function(x){x[2]}, 'character') 50 | 51 | linux <- vapply(result, "[[", "", "linux") 52 | petty <- vapply(result, "[[", "", "petty") 53 | morelia <- vapply(result, "[[", "", "morelia") 54 | moscato2 <- vapply(result, "[[", "", "moscato2") 55 | 56 | data.frame(pkg=pkg, linux=linux, mavericks=morelia, 57 | windows=moscato2, snow=petty, 58 | stringsAsFactors=FALSE) 59 | } 60 | 61 | 62 | .getIndiList <- function(start, end, df) { 63 | m1 <- min(grep(start, df[,1])) 64 | m2 <- max(grep(end, df[,1])) 65 | df[m1:m2, ] 66 | } 67 | 68 | .getErrorWarning <- function(reviewerPkgList, msg=c("ERROR","WARNINGS")) { 69 | str <- 
c("windows", "linux", "snow","mavericks") 70 | df <- data.frame(platform=character(0), 71 | pkgName=character(0), email=character(0)) 72 | for (i in 1:length(str)){ 73 | s1 <- str[i] 74 | ind <- grep(msg, reviewerPkgList[,s1]) 75 | if(length(ind)!=0){ 76 | pkgName <- reviewerPkgList[ind,1] 77 | email <- .getEmail(pkgName) 78 | names(email) <- NULL 79 | tedf <- data.frame(platform=rep(as.character(s1),length(pkgName)), 80 | pkgName=pkgName, email=email) 81 | df <- rbind(df, tedf) 82 | } 83 | } 84 | df 85 | } 86 | 87 | .getEmail <- function(pkgName) { 88 | sapply(pkgName, function(p){ 89 | url <- paste0("http://bioconductor.org/packages/3.2/bioc/html/", 90 | p,".html") 91 | result <- GET(url) 92 | html <- content(result) 93 | html2 <- sapply(html["//p"], xmlValue) 94 | email <- grep("Maintainer:", html2, value=TRUE) 95 | .trim(sub("Maintainer:","", email)) 96 | }) 97 | } 98 | 99 | getPackageRange <- 100 | function(userName="sarora", biocVersion ="3.2") { 101 | reviewer <- userName 102 | df <- .getPageContents(biocVersion) 103 | df <- df[-(which(df$pkg=="Last")),] 104 | 105 | start <- switch(reviewer, 106 | dtenenba= "a4", herve="BUS" , 107 | jhester="deltaGseg", mcarlson="GeneRegionScan", 108 | mtmorgan="IsoGeneGUI", nhayden="MSnID", 109 | sarora="qpcrNorm", vobencha="seqPattern") 110 | end <- switch(reviewer, 111 | dtenenba= "bumphunter", herve="DEGseq" , 112 | jhester="geneRecommender", mcarlson="isobar", 113 | mtmorgan="MSnbase", nhayden="QDNAseq", 114 | sarora="seqLogo", vobencha="zlibbioc") 115 | 116 | reviewerPkgList <- .getIndiList(start, end, df) 117 | 118 | errorlist <- .getErrorWarning(reviewerPkgList, msg="ERROR") 119 | warnlist <- .getErrorWarning(reviewerPkgList, msg="WARNING") 120 | 121 | message("Total no of packages assigned to ", reviewer," : ", 122 | nrow(reviewerPkgList)) 123 | message("No of Packages with Error:", length(unique(errorlist$pkgName))) 124 | message("No of Packages with Warnings:", length(unique(warnlist$pkgName))) 125 | 126 | list(reviewerPkgList=reviewerPkgList, errorlist=errorlist, 127 | warningslist=warnlist) 128 | } 129 | 130 | #rlist <- c("dtenenba","herve","jhester","mcarlson","mtmorgan","nhayden","sarora", "vobencha") 131 | #res= lapply(rlist, function(r) getPackageRange(r, "3.2")) 132 | 133 | 134 | 135 | 136 | ## helper to just export a nice email list of all the errors and warnings (in one unique string). 137 | 138 | getEmailAddressesToWarnPkgMaintainers <- function(userName){ 139 | ## call getPackageRange 140 | res <- getPackageRange(userName) 141 | errs <- unique(as.character(res[['errorlist']]$email)) 142 | warns <- unique(as.character(res[['warningslist']]$email)) 143 | emails <- sub(" at ","@",unique(c(errs, warns))) 144 | paste(emails, collapse=", ") 145 | } 146 | -------------------------------------------------------------------------------- /R/email_maintainers.R: -------------------------------------------------------------------------------- 1 | # Another set of utilities for emailing maintainers (mtrs) of 2 | # broken packages. Uses slightly different utility functions for 3 | # sending email than email.R. 
4 | 5 | .getMtr <- function(package, software=TRUE) 6 | { 7 | if (software) 8 | repos <- "bioc" 9 | else 10 | repos <- "data-experiment" 11 | url <- sprintf("http://master.bioconductor.org/checkResults/devel/%s-LATEST/meat-index.txt", 12 | repos) 13 | require(httr) 14 | txt <- content(GET(url)) 15 | lines <- strsplit(txt, "\n")[[1]] 16 | curkpkg <- NULL 17 | scanMode <- FALSE 18 | for (line in lines) 19 | { 20 | if (line == sprintf("Package: %s", package)) 21 | scanMode = TRUE 22 | if (scanMode && grepl("^MaintainerEmail:", line)) 23 | return(sub("^MaintainerEmail: ", "", line)) 24 | } 25 | stop("Couldn't find the maintainer!") 26 | } 27 | 28 | .getPackageFails <- function(package, software=TRUE) 29 | { 30 | require(httr) 31 | if (software) 32 | repos = "bioc" 33 | else 34 | repos = "data-experiment" 35 | ret <- list() 36 | notfound <- c() 37 | for (version in c("release", "devel")) 38 | { 39 | url <- paste0("http://master.bioconductor.org/checkResults/", 40 | version, "/", repos, "-LATEST/STATUS_DB.txt") 41 | status_txt <- content(GET(url)) 42 | lines <- strsplit(status_txt, "\n")[[1]] 43 | raw <- lines[grep(paste0("^", package, "#"), lines)] 44 | if (!length(raw)) 45 | notfound <- append(notfound, TRUE) 46 | j <- unlist(strsplit(raw, " ")) 47 | results <- unique(j[c(rep(FALSE,TRUE), TRUE)]) 48 | if (length(results)) 49 | results <- sort(results) 50 | results <- results[!grepl("NotNeeded|skipped|OK", results)] 51 | if (length(results) && !(length(results) == 1 && results == "OK")) 52 | ret[[version]] <- results 53 | } 54 | if (all(notfound) && (length(notfound) > 1)) 55 | stop(sprintf("Package %s not found.", package)) 56 | if(is.null(names(ret))) 57 | stop("This package has no issues!") 58 | ret 59 | } 60 | 61 | ## TODO - add special text (and arg to activate it) 62 | ## when release date is approaching 63 | ## and error/warning MUST be fixed by some date. 64 | failmail <- function(package, software=TRUE, from=getOption("fromEmail", 65 | "dtenenba@fredhutch.org"), sig=getOption("mail.sig", "Dan"), 66 | subject=sprintf("%s build problem", package), preview=TRUE, 67 | bccme=TRUE) 68 | { 69 | if (!require(mailR)) 70 | { 71 | message("Installing required custom version of mailR package...") 72 | library(devtools) 73 | BiocInstaller::biocLite("dtenenba/mailR", 74 | ref="useWithBiocContributions") 75 | library(mailR) 76 | } 77 | if (is.null(getOption("email.options", NULL))) 78 | stop("Please set options(email.options) to a list, see ?failmail") 79 | package <- sub("\\/$", "", package) 80 | if (software) 81 | repos = "bioc" 82 | else 83 | repos = "data-experiment" 84 | results <- .getPackageFails(package, software) 85 | to <- .getMtr(package, software) 86 | msg <- sprintf("Hi,\n\nThere's an issue with %s on the build system.\n\n", package) 87 | for (version in c("release", "devel")) 88 | { 89 | if (!is.null(results[[version]])) 90 | { 91 | msg <- sprintf("%sIn %s, build results are %s on one or more platforms.\n\n", 92 | msg, version, paste(results[[version]], collapse=", ")) 93 | msg <- sprintf("%sSee http://bioconductor.org/checkResults/%s/%s-LATEST/%s/\n", 94 | msg, version, repos, package) 95 | msg <- paste0(msg, "for more information.\n\n") 96 | } 97 | } 98 | cat("Add custom message [(y)es/(N)o/use (e)ditor]? ") 99 | line <- readLines(n=1) 100 | if (tolower(line) == "y") 101 | { 102 | cat("Enter a custom message, . 
on a line by itself to end.\n") 103 | custom <- c() 104 | while(TRUE) 105 | { 106 | line <- readLines(n=1) 107 | if (line == ".") 108 | break 109 | custom <- append(custom, line) 110 | } 111 | if (length(custom)) 112 | msg <- paste0(msg, paste(custom, collapse="\n"), "\n\n") 113 | } else if (tolower(line) == "e") 114 | { 115 | tmpfile <- tempfile(package) 116 | # if (file.exists(tmpfile)) 117 | # unlink(tmpfile) 118 | file.edit(tmpfile) 119 | cat("Press ENTER when done editing. ") 120 | line <- readLines(n=1) 121 | if (file.exists(tmpfile) && file.size(tmpfile) > 0) 122 | { 123 | line <- readLines(n=1) 124 | msg <- paste0(msg, 125 | paste(readLines(tmpfile, warn=FALSE), 126 | collapse="\n"), "\n\n") 127 | } 128 | } 129 | 130 | 131 | msg <- paste0(msg, "Please take a look and fix this as soon as you can.\n") 132 | msg <- paste0(msg, "Let me know if you have any questions.\n\nThanks,\n", sig, "\n") 133 | if (preview) 134 | { 135 | cat ("Mesage preview:\n-------\n") 136 | cat(sprintf("From: %s\nTo: %s\nSubject: %s\n\n%s", 137 | from, to, subject, msg)) 138 | cat("---\nIs this ok (y/N)? ") 139 | ans <- readLines(n=1) 140 | if (!tolower(ans) == "y") 141 | return(invisible(NULL)) 142 | } 143 | msg <- paste0("
", msg, "
") 144 | bcc <- c() 145 | if (bccme) 146 | bcc <- from 147 | debug <- FALSE 148 | if (!is.null(getOption("email.options")$debug)) 149 | debug=getOption("email.options")$debug 150 | res <- send.mail(from, 151 | to=to, 152 | subject, msg, 153 | bcc = bcc, 154 | headers=list("X-BiocContributions"="TRUE"), 155 | authenticate=TRUE, 156 | html=TRUE, 157 | smtp=getOption("email.options"), 158 | debug=debug) 159 | if (getOption("email.options")[["port"]] == 1025) 160 | cat("Using a test email server, email not actually sent.") 161 | res 162 | } 163 | -------------------------------------------------------------------------------- /R/users.R: -------------------------------------------------------------------------------- 1 | #' See if a person already exists in the user db 2 | #' 3 | #' @param x a person object to lookup 4 | #' @examples 5 | #' pkg <- system.file(package="BiocContributions", 6 | #' "testpackages", "RNASeqPower_1.11.0.tar.gz") 7 | #' 8 | #' maintainer <- maintainers(pkg) 9 | #' match_user(maintainer) 10 | match_user <- function(x) { 11 | assert(inherits(x, "person"), "'x' must be a person object") 12 | 13 | db <- user_db() 14 | users <- db2person(db[match(x$email, db$`E-mail Address`), ]) 15 | 16 | attr(users, "input") <- x 17 | class(users) <- "user_matches" 18 | 19 | # Otherwise try to find name based matches 20 | unmatched <- lengths(users) == 0 21 | first_name_matches <- last_name_matches <- replicate(length(users), person()) 22 | 23 | match_column <- function(column) { 24 | function(x) db2person(db[x == tolower(db[[column]]), ]) 25 | } 26 | first_name_matches[unmatched] <- 27 | lapply(tolower(x[unmatched]$given), match_column("First Name")) 28 | last_name_matches[unmatched] <- 29 | lapply(tolower(x[unmatched]$family), match_column("Last Name")) 30 | attr(users, "first_matches") <- first_name_matches 31 | attr(users, "last_matches") <- last_name_matches 32 | 33 | users 34 | } 35 | 36 | db2person <- function(x) { 37 | Map(person, 38 | given = x$`First Name`, 39 | family = x$`Last Name`, 40 | email = x$`E-mail Address`, 41 | comment = x$`SVN User ID`, 42 | USE.NAMES = FALSE) 43 | } 44 | 45 | no_match <- function(x) { 46 | vapply(x, function(x) length(x[[1]]) == 0, logical(1)) 47 | } 48 | 49 | print.user_matches <- function(x, ...) { 50 | inputs <- lapply(attr(x, "input"), format) 51 | matches <- lapply(x, format) 52 | unmatched <- lengths(x) == 0 53 | if (any(unmatched)) { 54 | first_matches <- lapply(attr(x, "first_matches")[unmatched], 55 | function(x) paste0(collapse = "\n", " ", lapply(x, format))) 56 | last_matches <- lapply(attr(x, "last_matches")[unmatched], 57 | function(x) paste0(collapse = "\n", " ", lapply(x, format))) 58 | matches[unmatched] <- paste0("No Exact Match", "\n", 59 | "First Name Matches:\n", 60 | first_matches, "\n", 61 | "Last Name Matches:\n", 62 | last_matches) 63 | } 64 | cat(paste0(collapse = "\n", inputs, "\n", matches, "\n")) 65 | } 66 | 67 | #' Extract the email from an object 68 | #' 69 | #' @param x the object to extract email from 70 | #' @param ... Additional arguments passed to methods. 71 | email <- function(x, ...) { 72 | UseMethod("email") 73 | } 74 | 75 | #' @describeIn email person object 76 | email.person <- function(x, ...) { 77 | x$email 78 | } 79 | 80 | #' @describeIn email user match object from \code{\link{match_user}} 81 | email.user_matches <- function(x, ...) { 82 | format(attr(x, "input")) 83 | } 84 | 85 | #' @describeIn email list - calls \code{\link{email}} on every item in the list. 86 | email.list <- function(x, ...) 
{ 87 | unlist(lapply(x, email)) 88 | } 89 | 90 | #' @describeIn email - simply returns the character vector unchanged. 91 | email.character <- function(x, ...) { 92 | x 93 | } 94 | 95 | #' Retrieve the maintainers from a tarball 96 | #' 97 | #' @return each maintainer as a 'person' object. 98 | #' @param tarball the package tarball to read 99 | #' @examples 100 | #' pkg <- system.file(package="BiocContributions", 101 | #' "testpackages", "RNASeqPower_1.11.0.tar.gz") 102 | #' maintainers(pkg) 103 | maintainers <- function(tarball) { 104 | description <- readDESCRIPTION(tarball) 105 | .extractEmails(description) 106 | } 107 | 108 | #' Retrieve the remote user database 109 | #' 110 | #' This is an internal function which retrieves the user database file 111 | #' 'user_db.csv', which holds user information for users with SVN 112 | #' credentials. this assumes 'rsync' is available on your path, which 113 | #' is true by default for linux and OSX machines, but probably not for 114 | #' windows. 115 | #' 116 | #' The information is cached so calling this function repeatably will 117 | #' result in the same information being returned. Use 118 | #' \code{memoise::forget(user_db)} to reset the cache if needed. 119 | #' @return the result is a data.frame of the data 120 | #' @examples 121 | #' user_db() 122 | #' # second call will be nearly instantaneous 123 | #' user_db() 124 | #' # clear the cache 125 | #' memoise::forget(user_db) 126 | #' user_db() 127 | user_db <- memoise::memoise(function() { 128 | location <- getOption("userDbFile", NULL) 129 | if (is.null(location)) { 130 | stop("please set the userDbFile option to the location of 'user_db.csv'", call. = FALSE) 131 | } 132 | 133 | tmp <- tempfile() 134 | on.exit(unlink(tmp)) 135 | system2("rsync", args = c(location, tmp)) 136 | read.csv(tmp, header = TRUE, check.names = FALSE, stringsAsFactors = FALSE) 137 | }) 138 | 139 | #' Request new credentials for users 140 | #' 141 | #' @param x an object with an \code{\link{email}} method defined. 142 | #' @param sender The name to use in the signature 143 | #' @return A \code{\link[gmailr]{mime}} object. 144 | #' @examples 145 | #' pkg <- system.file(package="BiocContributions", 146 | #' "testpackages", "RNASeqPower_1.11.0.tar.gz") 147 | #' request_credentials(match_user(maintainers(pkg))) 148 | #' request_credentials("asdf@asd.com") 149 | request_credentials <- 150 | function(x, sender = getOption("bioc_contributions_signature")) 151 | { 152 | emails <- email(x) 153 | gmailr::mime(To = "scicomp@fhcrc.org", 154 | Subject = "New SVN users for Hedgehog", 155 | body = fmt(paste0( 156 | "Could you please create new SVN account(s) on Hedgehog for\n\n", 157 | "{{{users}}}\n\n", 158 | "Thanks,\n", 159 | " {{{sender}}}"), 160 | list(users = paste0(collapse = "\n", emails), 161 | sender = sender))) 162 | } 163 | 164 | username <- function(x, ...) { 165 | UseMethod("username") 166 | } 167 | 168 | username.person <- function(x, ...) { 169 | x$comment 170 | } 171 | 172 | username.list <- username.user_matches <- function(x, ...) 
{ 173 | unlist(lapply(x, username)) 174 | } 175 | -------------------------------------------------------------------------------- /vignettes/workflow.Rmd: -------------------------------------------------------------------------------- 1 | 2 | 3 | --- 4 | title: "Administrative tools for new package contributions" 5 | author: "Jim Hester, Jim Java, Martin Morgan" 6 | date: "`r BiocStyle::doc_date()`" 7 | package: "`r BiocStyle::pkg_ver('BiocContributions')`" 8 | abstract: > 9 | This package provides facilities for managing new package 10 | contributions. This vignette describes use high-level 'workflow' 11 | functions for accomplishing major tasks. The workflow steps 12 | reference useful functions for more specific management tasks. The 13 | document is relevant to the 'tracker-based' package submission 14 | scheme used until Bioc-3.4. 15 | vignette: > 16 | %\VignetteIndexEntry{Managing New Package Contributions} 17 | %\VignetteEngine{knitr::rmarkdown} 18 | output: 19 | BiocStyle::html_document 20 | --- 21 | 22 | # Setup 23 | 24 | Make a working directory hierarchy, e.g., 25 | 26 | mkdir -p ~/proj/Rpacks 27 | mkdir -p ~/proj/experiment 28 | 29 | Ensure the following options are set (`bioc_contributions_project` is 30 | `~/proj`, created above) 31 | 32 | options( 33 | ## tracker credentials 34 | tracker_user="mtmorgan", 35 | tracker_password="", 36 | ## local setup 37 | bioc_contributions_project="~/proj", 38 | ## signature in communications with users 39 | bioc_contributions_signature="Martin Morgan", 40 | bioc_contributions_email="martin.morgan@roswellpark.org") 41 | 42 | # Workflow 43 | 44 | ## Assign new packages 45 | 46 | 1. Load the package 47 | 48 | library(BiocContributions) 49 | 50 | 2. Create a new draft gmail with assignment content 51 | 52 | assignments = CreatePackageAssignmentEmail() 53 | 54 | 3. Manually check status in the tracker. 55 | 56 | 4. Assign packages to reviewers in the tracker 57 | 58 | result <- with(assignments, assign_packages(pkgs, code)) 59 | for (res in result) httr::stop_for_status(res) 60 | 61 | 5. Review and send draft gmail. 62 | 63 | ## Add pre-accepted packages 64 | 65 | The work flow typically starts by adding credentials and final 66 | acceptance of previously pre-accepted packages, and then adding the 67 | current pre-accepted packages to svn and asking for credentials for 68 | them. 69 | 70 | ### Add to SVN 71 | 72 | 1. Download package tarballs 73 | 74 | f = DownloadNewPackageTarballs() 75 | 76 | 2. Manually confirm that all tarballs have been downloaded, e.g., when 77 | one issue has both software and data packages 78 | 79 | 3. Separate software and data packages. This should be done by 80 | consulting biocViews terms but currently needs to be done manually. 81 | 82 | (bioc_class <- bioc_views_classification(f$filenames)) 83 | 84 | 4. Add experiment, software packages. 85 | 86 | add_data_experiment_packages(bioc_class$ExperimentData) 87 | add_software_packages(bioc_class$Software) 88 | 89 | 5. Save new-package info for follow-up tasks. 90 | 91 | path = paste0(proj_path(), 92 | "new-packages-metadata_", format(Sys.Date(), "%Y%m%d"), 93 | ".RData") 94 | save(f, file=path) 95 | 96 | ### Credentials 97 | 98 | After the 'save()' above, this function will find and use the most 99 | recently saved metadata to continue package acceptance after scicomp's 100 | reply. 101 | 102 | 1. Ask for svn accounts. 
The code is tries to guess svn user 103 | credentials and not ask for individuals who already have accounts, 104 | but these need to be reviewed manually. The response is usually 105 | quick, within a day. 106 | 107 | ManageNewPackagesCredentials(createDraft=TRUE) 108 | 109 | 2. The following generates instructions to be pasted into R to (1) 110 | send acceptance emails (these need to be reviewd and sent from 111 | gmail, including adding svn credentials manually); and (2) accepts 112 | packages in the tracker. 113 | 114 | ManageNewPackagesCredentials(createDraft=FALSE) 115 | 116 | 3. `svn_software_auth_text(f$filenames)` and 117 | `svn_data_experiment_auth_text(f$filenames)` generates text to edit 118 | the SVN authorization file; see `*_permissions()` functions for 119 | automation, but I have not trusted these. 120 | 121 | ## Annotation packages 122 | 123 | Here is a summary of the procedure: 124 | 125 | 0. You will need access to the biocadmin account on zin1 (for BioC 126 | release) and zin2 (for BioC devel). 127 | 128 | 1. rsync the packages to a temporary place on zin1 and/or zin2. 129 | 130 | 2. Login to zin1 and/or zin2 as biocadmin and drop the source packages 131 | in 132 | 133 | ~biocadmin/PACKAGES/3.3/data/annotation/src/contrib 134 | 135 | on zin1, and in 136 | 137 | ~biocadmin/PACKAGES/3.4/data/annotation/src/contrib 138 | 139 | on zin2. 140 | 141 | 3. If the packages are updated versions of existing packages, remove 142 | the old packages. To get a list of old packages, you can start R 143 | and run: 144 | 145 | source("~biocbuild/BBS/utils/list.old.pkgs.R") 146 | list.old.pkgs("~biocadmin/PACKAGES/3.3/data/annotation/src/contrib") 147 | # replace 3.2 with 3.3 on zin2 148 | 149 | 4. Run the prepareRepos/pushRepos sequence. That is: 150 | 151 | cd /home/biocadmin/manage-BioC-repos/3.4 152 | ./prepareRepos-data-annotation.sh 153 | ./pushRepos-data-annotation.sh 154 | 155 | (replace 3.2 with 3.3 on zin2) 156 | 157 | The prepareRepos script takes a long time (about 2 hours). 158 | 159 | Alternatively, uncomment the cron job called "Just a daily resync 160 | of the public 3.Y/data/annotation repo" in the crontab for 161 | biocadmin and tweak the time you want this to start. Don't forget 162 | to re-comment this once the job has started. 163 | 164 | 5. Once everything has completed, start R from another machine (e.g. 165 | your laptop) and check that the packages are available via 166 | biocLite(). Additionally you can have a look at the package 167 | landing pages and make sure that everything looks fine (remember 168 | there is a lag of 15-20 min. between pushRepos and the update of 169 | the package landing pages). You might see some broken shields if 170 | the packages are new but I've observed that they seem to auto-fix 171 | the day after (there might be a division by zero going on or 172 | something like that due to the fact that the package is 0 day old). 
173 | 174 | ## Weekly summaries 175 | 176 | DraftWeeklySummaryEmail() 177 | -------------------------------------------------------------------------------- /R/github.R: -------------------------------------------------------------------------------- 1 | .github_userpwd <- 2 | function() 3 | { 4 | paste( 5 | getOption("bioc_contributions_github_user"), 6 | getOption("bioc_contributions_github_auth"), 7 | sep=":") 8 | } 9 | 10 | .github_get <- 11 | function(path, api="https://api.github.com", 12 | path_root="/repos/Bioconductor/Contributions") 13 | { 14 | query <- sprintf("%s%s%s", api, path_root, path) 15 | response <- GET( 16 | query, 17 | config(userpwd=.github_userpwd(), httpauth=1L), 18 | accept("application/vnd.github.v3+json")) 19 | stop_for_status(response) 20 | content(response) 21 | } 22 | 23 | .github_patch <- 24 | function(path, body, ..., api="https://api.github.com", 25 | path_root="/repos/Bioconductor/Contributions", 26 | encode="json") 27 | { 28 | query <- sprintf("%s%s%s", api, path_root, path) 29 | response <- PATCH( 30 | query, 31 | config(userpwd=.github_userpwd(), httpauth=1L), 32 | accept("application/vnd.github.v3+json"), 33 | body=body, 34 | ..., 35 | encode=encode) 36 | stop_for_status(response) 37 | content(response) 38 | } 39 | 40 | .github_download <- function(issue) { 41 | message("downloading ", sQuote(issue$title)) 42 | repos <- sub(".*Repository: *([[:alnum:]/:\\.-]+).*", "\\1", 43 | issue$body) 44 | 45 | path <- sprintf("/issues/%d/comments", issue$number) 46 | comments <- .github_get(path) 47 | tag <- "AdditionalPackage: *([[:alnum:]/:\\.-]+).*" 48 | for (comment in comments) { 49 | if (!grepl(tag, comment$body)) 50 | next 51 | repos <- c(repos, 52 | sub(tag, "\\1", comment$body)) 53 | } 54 | 55 | for (repo in repos) 56 | system2("git", sprintf("clone --depth 1 %s", repo), 57 | stdout=TRUE, stderr=TRUE) 58 | setNames(basename(repos), rep(issue$title, length(repos))) 59 | } 60 | 61 | .github_close <- function(issue) { 62 | path <- sprintf("/issues/%d", issue$number) 63 | .github_patch(path, list(state="closed")) 64 | issue$state 65 | } 66 | 67 | .github_to_svn_Software <- 68 | function(pkgs, svn_location=proj_path("Rpacks")) 69 | { 70 | if (!length(pkgs)) 71 | return(pkgs) 72 | 73 | for (pkg in pkgs) 74 | clean(pkg) 75 | 76 | bioc_version <- getOption("bioc_contributions_manifest_version") 77 | stopifnot(!is.null(bioc_version)) 78 | svn_manifest <- 79 | file.path(svn_location, sprintf("bioc_%s.manifest", bioc_version)) 80 | 81 | s <- svn(svn_location) 82 | s$update() 83 | 84 | pkg_names <- basename(pkgs) 85 | s$status() 86 | 87 | current <- s$read(svn_manifest) 88 | if (check_manifest(current, pkg_names)) { 89 | s$add(pkg_names) 90 | s$write(svn_manifest, 91 | append(current, paste0("Package: ", pkg_names, "\n"))) 92 | s$status() 93 | s$commit(paste0("Adding ", paste(collapse = ", ", pkg_names))) 94 | } 95 | file.path("Rpacks", pkgs) 96 | } 97 | 98 | .github_to_svn_ExperimentData <- 99 | function(pkgs, svn_location=proj_path("experiment")) 100 | { 101 | if (!length(pkgs)) 102 | return(pkgs) 103 | 104 | for (pkg in pkgs) 105 | clean_data_package(pkg) 106 | 107 | bioc_version <- getOption("bioc_contributions_manifest_version") 108 | stopifnot(!is.null(bioc_version)) 109 | svn_manifest <- file.path( 110 | svn_location, 111 | sprintf("pkgs/bioc-data-experiment.%s.manifest", bioc_version)) 112 | 113 | s <- svn(svn_location) 114 | s$update() 115 | 116 | pkg_names <- .getShortPkgName(pkgs) 117 | s$status() 118 | 119 | current <- s$read(svn_manifest) 120 | if 
(check_manifest(current, pkg_names)) { 121 | { 122 | s$add(file.path("pkgs", pkg_names)) 123 | s$add(file.path("data_store", pkg_names)) 124 | } 125 | s$write(svn_manifest, append(current, paste0("Package: ", 126 | pkg_names, "\n"))) 127 | s$status() 128 | s$commit(paste0("Adding ", paste(collapse = ", ", pkg_names))) 129 | } 130 | file.path("pkgs", pkg_names) 131 | } 132 | 133 | #' @export 134 | github_accept <- function() { 135 | path <- "/issues?state=open&labels=3a.%20accepted" 136 | issues <- .github_get(path) 137 | 138 | owd <- setwd(proj_path()) 139 | on.exit(setwd(owd)) 140 | 141 | pkgs <- unlist(lapply(issues, .github_download)) 142 | types <- bioc_views_classification(pkgs) 143 | types$Software <- .github_to_svn_Software(types$Software) 144 | types$ExperimentData <- 145 | .github_to_svn_ExperimentData(types$ExperimentData) 146 | 147 | ## close issues 148 | state <- vapply(issues, .github_close, character(1)) 149 | 150 | types$ExperimentData <- file.path("experiment", types$ExperimentData) 151 | types 152 | } 153 | 154 | .user_id <- function(maintainers) { 155 | tolower(sapply(maintainers, function(maintainer) { 156 | sprintf("%s.%s", substr(maintainer$given[1], 1, 1), maintainer$family) 157 | })) 158 | } 159 | 160 | .credentials_required <- function(maintainers) { 161 | credentials <- proj_path("bioconductor.authz") 162 | if (!file.exists(credentials)) 163 | stop("local copy of 'bioconductor.authz' required at", 164 | "\n ", credentials) 165 | ids <- strsplit(readLines(credentials, 3)[[3]], ", *")[[1]] 166 | !.user_id(maintainers) %in% ids 167 | } 168 | 169 | #' @export 170 | github_svn_credentials_request_from_carl <- function(packages) { 171 | names(packages) <- basename(packages) 172 | maintainers <- lapply(packages, function(x) maintainers(x)[[1]]) 173 | maintainers <- maintainers[.credentials_required(maintainers)] 174 | 175 | if (length(maintainers)){ 176 | newusers <- paste(sapply(maintainers, as.character), collapse="\n") 177 | from <- getOption("bioc_contributions_signature", "Bioconductor") 178 | body <- template("svn_credentials_request.txt", 179 | newusers=newusers, 180 | from=from) 181 | 182 | mime <- mime(From="packages@bioconductor.org", 183 | To="scicomp@fhcrc.org", 184 | Subject="New SVN users for hedgehog", 185 | body=body) 186 | 187 | gmail_auth(scope = "compose") 188 | gmailr::create_draft(mime) 189 | } 190 | 191 | length(maintainers) 192 | } 193 | 194 | #' @export 195 | github_svn_credentials_draft_to_user <- function(packages) { 196 | packages <- file.path(proj_path(), packages) 197 | maintainers <- lapply(packages, function(x) maintainers(x)[[1]]) 198 | user_id <- .user_id(maintainers) 199 | 200 | letters <- Map(emailMaintainer, packages, user_id, 201 | MoreArgs=list(password="XXXXXXXXX")) 202 | 203 | gmailr::gmail_auth(scope='compose') 204 | for (letter in letters) 205 | gmailr::create_draft(letter) 206 | 207 | length(letters) 208 | } 209 | -------------------------------------------------------------------------------- /R/gh.R: -------------------------------------------------------------------------------- 1 | #' Make a GitHub API request using an OAuth token 2 | #' 3 | #' @param uri github uri such as '/orgs/octokit/repos' 4 | #' @param method httr verb function (GET, POST, etc) 5 | #' @param postdata data to POST (if method == POST) 6 | #' @param include_message Whether to include the message in the result 7 | #' @import httr 8 | #' @return a data frame based on the result from github 9 | 10 | # FIXME - currently not dealing with pagination at all! 
11 | # https://developer.github.com/v3/#pagination 12 | # https://developer.github.com/guides/traversing-with-pagination 13 | 14 | make_github_request <- function(uri, method=GET, postdata=NULL, 15 | include_message=FALSE) 16 | { 17 | stopifnot(nchar(Sys.getenv("GITHUB_TOKEN")) > 0) 18 | url <- sprintf("https://api.github.com%s", uri) 19 | token <- sprintf("token %s", Sys.getenv("GITHUB_TOKEN")) 20 | args <- list(url=url, add_headers(Authorization=token)) 21 | if (!is.null(postdata)) 22 | args$body <- postdata 23 | # args <- list(url="http://httpbin.org/headers", body=postdata, add_headers(Authorization="token haha")) 24 | response <- do.call(method, args) 25 | # FIXME - do error handling here based on status_code(response)? 26 | results_to_data_frame(content(response), include_message) 27 | } 28 | 29 | get_tracker_repos <- function() 30 | "dtenenba/settings" # change this when the time is right 31 | 32 | #' Query the issue tracker 33 | #' 34 | #' @param columns which columns to return 35 | #' @param sort A column to sort the data by 36 | #' @param filter what columns are used to filter 37 | #' @param status the status codes used to filter 38 | #' @param ... Additional query parameters 39 | #' @param session the HTTP session to use 40 | #' @examples 41 | #' tracker_search("@search_text" = "normalize450k") 42 | gh_tracker_search <- 43 | function(columns = c("id", "activity", "title", "creator", "status"), 44 | sort = desc("activity"), 45 | filter=c("status", "assignedto"), 46 | status = c(-1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 47 | ..., 48 | session = tracker_login()) 49 | { 50 | # see https://developer.github.com/v3/search/#search-issues 51 | # for documentation on searching issues via the api 52 | # here is an example search: 53 | # https://api.github.com/search/issues?q=+repo:dtenenba/settings+state:open&sort=created&order=asc 54 | } 55 | 56 | 57 | # Want to end up with something like this: 58 | # > unassigned_packages() 59 | # id activity title creator 60 | # 1 1437 2016-03-23 14:06:08 EGAD 881 61 | # 2 1432 2016-03-23 07:44:36 DAPARdata 837 62 | # 3 1434 2016-03-22 16:04:49 CHRONOS 943 63 | # 4 1436 2016-03-22 15:20:14 MultiDataSet 872 64 | # 5 1435 2016-03-22 13:26:07 BgeeDB 944 65 | # 6 1433 2016-03-22 10:08:17 pqsfinder 445 66 | # 7 1430 2016-03-21 20:53:12 ImmuneSpaceR 609 67 | # 8 1431 2016-03-21 01:48:26 FlowSorted.CordBloodNorway.450k 305 68 | # 9 1429 2016-03-18 16:23:07 pcaExplorer 669 69 | # status 70 | # 1 new-package 71 | # 2 new-package 72 | # 3 new-package 73 | # 4 new-package 74 | # 5 sent-back 75 | # 6 sent-back 76 | # 7 sent-back 77 | # 8 new-package 78 | # 9 preview-in-progress 79 | 80 | 81 | 82 | as.data.frame.search.results <- function(results, include_message) 83 | { 84 | rows <- results$total_count 85 | if (rows == 0) 86 | return(NULL) 87 | items <- results$items 88 | # there will be 5 columns (id, activity, title, creator, status) 89 | cols <- 5 90 | id <- integer(rows) 91 | activity <- seq( as.Date("1970-01-01"), by=1, len=rows) 92 | title <- character(rows) 93 | creator <- character(rows) 94 | status <- character(rows) 95 | message <- character(rows) 96 | for (i in 1:length(items)) 97 | { 98 | item <- items[[i]] 99 | id[i] <- item$number 100 | activity[i] <- strptime(item$updated_at, "%Y-%m-%dT%H:%M:%S", tz="UTC") 101 | title[i] <- item$title 102 | creator[i] <- item$user$login 103 | if (length(item$labels)) { 104 | labels <- sort(unlist(lapply(item$labels, function(x) x$name))) 105 | status[i] <- paste(labels, collapse=", ") 106 | } else { 107 | status[i] <- NA 
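# (assigning NA into a character vector yields NA_character_, so the
# 'status' column stays character)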
108 | } 109 | if (include_message) 110 | { 111 | message[i] <- item$body 112 | } 113 | } 114 | df <- data.frame(id, activity, title, creator, status) 115 | if (include_message) 116 | df <- cbind(df, message) 117 | df 118 | } 119 | 120 | as.data.frame.issue.results <- function(results, include_message) 121 | { 122 | # [1] "id" "message" "href" "filename" "filetype" "author" "time" 123 | l = list(id=results$number, href=results$html_url, #filename=..., 124 | author=results$user$login, time=results$created_at) 125 | if (include_message) 126 | l = append(l, list(message=results$body), 1) 127 | as.data.frame(l) 128 | } 129 | 130 | as.data.frame.issue.comments.results <- function(results, include_message) 131 | { 132 | 133 | } 134 | 135 | 136 | results_to_data_frame <- function(results, include_message=FALSE) 137 | { 138 | if (length(results) == 0) return(NULL) 139 | # what kind of results do we have? 140 | if (!is.null(results$total_count)) # search results 141 | { 142 | class(results) <- "search.results" 143 | # return(search_results_to_data_frame(results, include_message)) 144 | } else if (!is.null(results$number)) {# issue results 145 | class(results) <- "issue.results" 146 | # return(issue_results_to_data_frame(results, include_message)) 147 | } else if (is.list(results[[1]]) && !is.null(results[[1]]$id)) { # issue comments results 148 | class(results) <- "issue.comments.results" 149 | # return(issue_comments_results_to_data_frame(results, include_message)) 150 | } else { 151 | stop("unknown results") 152 | } 153 | as.data.frame(results, include_message) 154 | } 155 | 156 | 157 | 158 | github_search <- function(params) 159 | { 160 | url <- sprintf("/search/issues?q=+repo:%s+state:open+%s&sort=created&order=desc", 161 | get_tracker_repos(), params) 162 | make_github_request(url, "issue_search") 163 | } 164 | 165 | 166 | #' Show unassigned packages 167 | #' 168 | #' @return a data frame of unassigned packages or NULL if none. 169 | #' @examples 170 | #' gh_unassigned_packages() 171 | gh_unassigned_packages <- function() 172 | { 173 | github_search("no:assignee") 174 | } 175 | 176 | #' Show pre-accepted packages 177 | #' 178 | #' @return a data frame of pre-accepted packages or NULL if none. 179 | #' @examples 180 | #' gh_pre_accepted_packages() 181 | gh_pre_accepted_packages <- function() 182 | { 183 | github_search("label:pre-accepted") 184 | } 185 | 186 | #' Show packages assigned to a github user 187 | #' 188 | #' @param github_username The github username 189 | #' @return a data frame of packages assigned to \code{github_username} 190 | #' @examples 191 | #' gh_packages_assigned_to("dtenenba") 192 | 193 | gh_packages_assigned_to <- function(github_username) 194 | { 195 | github_search(sprintf("assignee:%s", github_username)) 196 | } 197 | 198 | 199 | #' Retrieve all messages associated with an issue 200 | #' 201 | #' @param number The issue number 202 | #' @return a data frame/issue of messages associated with issue \code{number} 203 | #' @examples 204 | #' gh_issue(21) 205 | 206 | 207 | # forget about fancy issue S3 class for now. 
the colnames returned by the old 208 | # version of this function are: 209 | # [1] "id" "message" "href" "filename" "filetype" "author" "time" 210 | gh_issue <- function(number) 211 | { 212 | make_github_request(sprintf("/repos/%s/issues/%s", get_tracker_repos(), 213 | number)) 214 | 215 | } 216 | -------------------------------------------------------------------------------- /inst/extdata/maintainerAcceptance.txt: -------------------------------------------------------------------------------- 1 | Hi {{authorName}}, 2 | 3 | Congratulations, {{packageName}} has been added to _Bioconductor_! 4 | Currently, the definitive location for your _Bioconductor_ package is 5 | in our SVN repository. The following information is to help you in 6 | your role as a package maintainer. You'll need the following 7 | credentials to maintain your package: 8 | 9 | Subversion user ID: {{userId}} 10 | Password: {{password}} 11 | 12 | # Package 'landing pages' 13 | 14 | Every package in _Bioconductor_ gets its own landing page. Contents 15 | from your DESCRIPTION file are pulled out to populate this page. Your 16 | package's permanent URL is 17 | 18 | https://bioconductor.org/packages/{{packageName}}/ 19 | 20 | This URL will redirect to the release landing page of your package 21 | (and until it's released, the devel landing page); this is the URL 22 | that should be used (in publications, etc.) to refer to your package. 23 | You can also refer specifically to the devel, release, or a specific 24 | numbered version of _Bioconductor_: 25 | 26 | https://bioconductor.org/packages/devel/{{packageName}}/ 27 | https://bioconductor.org/packages/release/{{packageName}}/ 28 | https://bioconductor.org/packages/{{biocVersion}}/{{packageName}}/ 29 | 30 | # Maintaining your package 31 | 32 | See 33 | http://bioconductor.org/developers/how-to/source-control#experiment-data-packages 34 | for special instructions relating to ExperimentData package 35 | maintenance. 36 | 37 | _Bioconductor_ currently maintains software packages in 'release' and 38 | 'devel' branches of a subversion (svn) repository. 39 | 40 | The release branch is meant for end-users. A new release branch is 41 | created once every 6 months, in April and October. At the release, the 42 | current devel version of your package becomes the release 43 | version. Only 'bug fixes' are made to the release branch. Since your 44 | package has not gone through a release cycle, you do not yet have a 45 | release branch -- your package is only available to users of Bioc 46 | 'devel'. 47 | 48 | The devel branch is where new packages are added, and where new 49 | features are added to existing packages. Your package has been added 50 | to the devel branch, and is available immediately to those 51 | _Bioconductor_ users who have chosen to 'use devel'. 52 | 53 | Make any changes to the devel branch, and watch the release schedule 54 | http://bioconductor.org/developers/release-schedule/ for details of 55 | the next release. 56 | 57 | At the next release, your package code in the devel branch will 58 | become the release version of your package. The release version number 59 | will be changed to 1.0.0. The code in the devel branch will continue, 60 | but with version 1.1.0. If necessary, you'll continue adding features 61 | or updating your package in the devel branch, creating versions 1.1.1, 62 | 1.1.2, ...; you'll port bug fixes (NOT new features, or any change to 63 | the 'API' seen by users!)
to the release branch, creating versions 64 | 1.0.1, 1.0.2, ... 65 | 66 | This process will repeat at the next release, where the version of 67 | your package available in devel will become version 1.2.0, and the 68 | devel branch will continue with version 1.3.0. 69 | 70 | ## Subversion 71 | 72 | _Bioconductor_ packages are maintained under Subversion source 73 | control. Use Subversion (or git, described below) to update your 74 | package; see our short svn guide: 75 | 76 | http://bioconductor.org/developers/how-to/source-control/ 77 | 78 | Your subversion account credentials are at the top of this email, or 79 | are already known to you. The credentials give you read access to the 80 | whole _Bioconductor_ repository and WRITE permissions to the devel 81 | (and eventually release) version of your package. 82 | 83 | To update your package in the devel branch, you need to do the 84 | following steps: 85 | 86 | a) Install subversion (svn) on your machine, if it is not already installed. 87 | 88 | b) Use the following command to check out your package's files from the 89 | _Bioconductor_ subversion repository. 90 | 91 | svn co --username {{userId}} --password {{password}} https://hedgehog.fhcrc.org/bioconductor/trunk/madman/Rpacks/{{packageName}} 92 | 93 | c) Make the necessary changes to your package. 94 | 95 | d) Bump the version from x.y.z to x.y.(z+1) in your package's 96 | DESCRIPTION file. If the version is not properly changed your 97 | changes will not be made available in the public repository. 98 | 99 | e) Build your package tarball 100 | 101 | R CMD build {{packageName}} 102 | 103 | f) Check that the changes have produced a package consistent with R's 104 | check facility 105 | 106 | R CMD check {{packageName}}_x.y.(z+1).tar.gz 107 | 108 | g) Fix any Warnings or Errors from steps (e) and (f) 109 | 110 | h) Check the updated source code in to Subversion 111 | 112 | svn ci {{packageName}} 113 | 114 | i) Check the build report the next day (see the 'Build report' section below for details) 115 | 116 | Please let me know if you have any questions or issues with your SVN 117 | access. 118 | 119 | Remember, all new features and bug fixes are made to the devel branch of 120 | your package; only tested bug fixes should be ported to the release 121 | branch. When testing your changes, be sure to use the 'devel' version 122 | of _Bioconductor_ (http://bioconductor.org/developers/how-to/useDevel) 123 | and the appropriate version of R. 124 | 125 | ## Git and github mirrors 126 | 127 | If you prefer to use Git and/or GitHub instead of Subversion, you 128 | can use the _Bioconductor_ Git mirrors, which are documented at 129 | http://bioconductor.org/developers/how-to/git-mirror/ 130 | 131 | # Build report 132 | 133 | When you make a change to the devel branch of your package, please 134 | remember to bump the version of your package in the DESCRIPTION file. 135 | Every day at around 5pm PST, the build system takes a snapshot of all 136 | the packages inside _Bioconductor_ and then the next day, after 12 noon 137 | PST, http://bioconductor.org/checkResults/ is created, containing the 138 | output of R CMD build and check on all platforms for each package. 139 | 140 | When reading the above, please pay attention to the date displayed 141 | next to “Snapshot Date:” and “This page was generated on”. Please 142 | keep an eye on the build/check daily reports for the _Bioconductor_ 143 | devel packages: http://bioconductor.org/checkResults/ and promptly 144 | address any warnings or errors from your package's build report.
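For example, if the devel version of your package currently has
'Version: 1.1.2' in its DESCRIPTION file, change it to 'Version: 1.1.3'
in the same commit as your code changes; as noted above, commits that do
not bump the version will not be made available in the public repository.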
145 | 146 | # RSS feeds: 147 | 148 | You can find the RSS feed for software packages at 149 | http://bioconductor.org/rss/build/packages/{{packageName}}.rss 150 | 151 | # Using the support site and bioc-devel mailing list 152 | 153 | Please be sure that you have registered on the support site 154 | https://support.bioconductor.org/accounts/login/. Subscribe to the tag 155 | corresponding to your package by editing your user profile to include 156 | the package name in the 'My Tags' field. This way, you will be 157 | notified when someone is asking a question about your package. Please 158 | respond promptly to bug reports or user questions on the support site. 159 | We recommend that you 'follow' tags that match your own package (such 160 | as your package name) so that you do not miss relevant posts. 161 | 162 | Please maintain your subscription to the Bioc-devel mailing list, so that 163 | you are aware of _Bioconductor_ project news and other developments, 164 | http://bioconductor.org/help/support/#bioc-devel. Also, after your 165 | package has passed the build report's CHECK test for the first time, 166 | you may send a note to Bioc-devel to announce its public availability 167 | (with a short description) so other developers are aware of it. 168 | 169 | # Updating maintainer status 170 | 171 | If, for some reason, your email address changes, please update the 172 | maintainer field in your DESCRIPTION file. We may need to reach you if 173 | there are issues building your package (this could happen as a result 174 | of changes to R or to packages you depend on). If we are unable to 175 | contact you for a period of time, we may be forced to remove your 176 | package from _Bioconductor_. 177 | 178 | If you want to add a new maintainer or transfer responsibility to 179 | someone else, please email us at packages@bioconductor.org and clearly 180 | state the new maintainer's name and email address, and CC them on the 181 | email. 182 | 183 | If you no longer want to maintain your package, please let us know and 184 | we will remove it from _Bioconductor_, or (with your permission) find 185 | a new maintainer for it. See 186 | http://bioconductor.org/developers/package-end-of-life/ 187 | 188 | # Helpful things to know about _Bioconductor_ 189 | 190 | Developer resources: http://bioconductor.org/developers 191 | 192 | _Bioconductor_ Newsletter: http://bioconductor.org/help/newsletters/ 193 | 194 | Upcoming Courses: http://bioconductor.org/help/events/ 195 | 196 | Course Material: http://bioconductor.org/help/course-materials/ 197 | 198 | Twitter: https://twitter.com/Bioconductor 199 | 200 | Thank you for contributing to the _Bioconductor_ project! 201 | 202 | {{senderName}} 203 | -------------------------------------------------------------------------------- /R/addPackage.R: -------------------------------------------------------------------------------- 1 | ## This is where code that is used for cleaning up a package and 2 | ## adding it to svn will live.
3 | 4 | ## I want to follow BiocCheck's lead here and do a cleanup command like: 5 | ## R CMD cleanup 6 | 7 | ## cleanup will then do the following: 8 | ## drop and lines from DESCRIPTION thet start with "packaged" 9 | ## rm -rf any build directories or /inst/doc 10 | 11 | 12 | ## cp -r the package to the svn repos 13 | ## put the package into the most recent manifest 14 | 15 | 16 | ## This retrieves the short name for a package or it's true name (so 17 | ## no version numbers or extensions it's basically what the source dir 18 | ## would be called) 19 | .getShortPkgName <- function(tarball){ 20 | sub("_.*","", basename(tarball)) 21 | } 22 | 23 | ## remove .git directories 24 | .cleanGIT <- function(dir) { 25 | gitdir <- sprintf("%s/.git", dir) 26 | stopifnot(unlink(gitdir, recursive=TRUE, force=TRUE) == 0L) 27 | } 28 | 29 | ## This throws away unwanted extra lines and junk from the DESCRIPTION file 30 | .cleanDESCRIPTION <- function(dir){ 31 | dirPath <- file.path(dir, "DESCRIPTION") 32 | DESC <- read.dcf(dirPath) 33 | DESC <- DESC[,!grepl("Packaged",colnames(DESC)),drop=FALSE] 34 | write.dcf(DESC, file=dirPath) 35 | } 36 | 37 | ## white-list '.' directories 38 | .cleanDotdirs <- function(dir) { 39 | ok <- c(".BBSoptions", ".Rbuildignore", ".Rinstignore", ".svnignore", 40 | ".gitignore", ".travis.yml") 41 | suspect <- dir(dir, pattern="^\\.", recursive=TRUE, all=TRUE, 42 | include.dirs=TRUE) 43 | drop <- file.path(dir, setdiff(suspect, ok)) 44 | stopifnot(unlink(drop, recursive=TRUE, force=TRUE) == 0L) 45 | } 46 | 47 | readDESCRIPTION <- function(tarball) { 48 | pkgdir <- tarball 49 | description <- "DESCRIPTION" 50 | if (endsWith(tarball, "tar.gz")) { 51 | ls <- untar(tarball, list = TRUE) 52 | fls <- ls[basename(ls) == description] 53 | if (length(fls) != 1L) 54 | stop("Could not find a unique DESCRIPTION file", 55 | "\n tarball: ", sQuote(tarball), 56 | "\n paths (if any): ", 57 | "\n ", paste(sQuote(fls), collapse="\n ")) 58 | description <- fls 59 | pkgdir <- tempdir() 60 | untar(tarball, files = description, exdir = pkgdir) 61 | } 62 | res <- read.dcf(file.path(pkgdir, description), all = TRUE) 63 | 64 | # generate a maintainer from Authors@R if none specified 65 | if (is.null(res$Maintainer)) { 66 | authors <- utils:::.read_authors_at_R_field(res$`Authors@R`) 67 | res$Maintainer <- Filter(function(x) "cre" %in% x$role, authors) 68 | } else { 69 | ## "'Ima Person' " --> "Ima Person " 70 | res$Maintainer <- sub("' *(.*) *' <", "\\1 <", res$Maintainer) 71 | } 72 | structure(res, class = c("description", "data.frame")) 73 | } 74 | 75 | print.description <- function(x, ...) { 76 | message(paste(names(x), x, sep = ": ", collapse = "\n")) 77 | } 78 | 79 | ## This throws away dirs that are inserted into the tarball by 'R CMD build.' 
80 | .removeUnwantedDirs <- function(dir){ 81 | instDoc <- file.path(dir, "inst", "doc") 82 | if(file.exists(instDoc)){ 83 | unlink(instDoc, recursive=TRUE) 84 | } 85 | buildDir <- file.path(dir, "build") 86 | if(file.exists(buildDir)){ 87 | unlink(buildDir, recursive=TRUE) 88 | } 89 | gitDir <- file.path(dir, ".git") 90 | if(file.exists(gitDir)){ 91 | unlink(gitDir, recursive=TRUE) 92 | } 93 | } 94 | 95 | .removeUnwantedFiles <- function(dir){ 96 | srcDir <- file.path(dir, "src") 97 | if(file.exists(srcDir)){ 98 | unlink(file.path(srcDir,"*.o")) 99 | } 100 | } 101 | 102 | #' Clean a Software Package 103 | #' 104 | #' This is for cleaning up build tarballs, and then putting them into 105 | #' svn (and emailing the authors to let them know this - when they 106 | #' already have an account) 107 | #' @inheritParams package_name 108 | #' @param svnDir Directory of the Rpacks checkout 109 | #' @param copyToSvnDir whether to copy the files to the SVN directory 110 | clean <- function(tarball, svnDir=proj_path("Rpacks"), copyToSvnDir=TRUE, 111 | svnAccountExists=FALSE) 112 | { 113 | ## 1st re-run the checker from Dan to make sure we have the right thing... 114 | ## TODO: call Dans checker here? 115 | 116 | ## access the tarball 117 | if (endsWith(tarball, ".tar.gz")) 118 | untar(tarball) 119 | ## get the name of the actual dir that tarball will unpack to 120 | dir <- .getShortPkgName(tarball) 121 | .cleanGIT(dir) 122 | ## ^\\. directories 123 | .cleanDotdirs(dir) 124 | ## clean up DESCRIPTION file 125 | .cleanDESCRIPTION(dir) 126 | ## remove build and inst/doc dirs 127 | .removeUnwantedDirs(dir) 128 | ## remove unwanted files 129 | .removeUnwantedFiles(dir) 130 | ## cp the dir to a default svn dir. 131 | if(copyToSvnDir){ 132 | file.copy(from=dir, to=svnDir, recursive=TRUE) 133 | } 134 | ## cleanup 135 | unlink(dir, recursive=TRUE) 136 | 137 | file.path(svnDir, dir) 138 | } 139 | 140 | 141 | 142 | ############################################################################ 143 | #### Test example for how I want this to work: 144 | ## library(BiocContributions); tarball <- system.file("testpackages", "AnnotationHub_1.3.18.tar.gz", package="BiocContributions"); 145 | 146 | ## use helper argument for testing... 147 | 148 | ## clean(tarball, copyToSvnDir=FALSE) 149 | 150 | ## if we know that the user has an svn account, then I can just email 151 | ## them at the same time that we add their code to the repos. 152 | ## clean(tarball, svnAccountExists=TRUE) 153 | 154 | 155 | ## helper for making paths 156 | .makeFullPaths <- function(x, name){ 157 | file.path(name, unlist(x)) 158 | } 159 | 160 | 161 | #' Extract a packages name from a tarball 162 | #' 163 | #' @param tarball package tarball 164 | #' @return the package name 165 | #' @examples 166 | #' pkg <- system.file(package="BiocContributions", 167 | #' "testpackages", "RNASeqPower_1.11.0.tar.gz") 168 | #' package_name(pkg) 169 | package_name <- function(tarball) { 170 | desc <- readDESCRIPTION(tarball) 171 | desc$Package 172 | } 173 | 174 | #' Clean and copy a Data Experiment package 175 | #' 176 | #' @param svn_pkgs the location of Data Experiment \sQuote{pkgs} checkout. 177 | #' @param svn_data_store the location of Data Experiment 178 | #' \sQuote{data_store} checkout. 179 | #' @inheritParams package_name 180 | #' @return File paths to the copied locations (invisibly). 
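#' @param data_dirs package sub-directories (relative to the package root)
#'   that are treated as data and copied to the \sQuote{data_store} checkout
#'   rather than the \sQuote{pkgs} checkout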
181 | #' @examples 182 | #' \dontrun{ 183 | #' pkg <- system.file(package="BiocContributions", 184 | #' "testpackages", "RNASeqPower_1.11.0.tar.gz") 185 | #' clean_data_package(pkg) 186 | #' } 187 | clean_data_package <- 188 | function(tarball, svn_pkgs = proj_path("experiment/pkgs"), 189 | svn_data_store = proj_path("experiment/data_store"), 190 | data_dirs = c("data", "inst/extdata")) 191 | { 192 | desc <- readDESCRIPTION(tarball) 193 | 194 | ## Remove the Packaged field 195 | desc$Packaged <- NULL 196 | 197 | ## Extract the tarball and cleanup afterwards 198 | if (endsWith(tarball, ".tar.gz")) { 199 | untar(tarball, exdir = tempdir()) 200 | pkg_dir <- file.path(tempdir(), desc$Package) 201 | on.exit(unlink(pkg_dir, recursive = TRUE)) 202 | } else { 203 | pkg_dir <- tarball 204 | } 205 | 206 | ## FIXME: clean(tarball) 207 | ## Get all extracted files 208 | files <- dir(pkg_dir, recursive = TRUE) 209 | 210 | object_files <- "src/.*\\.(o|sl|so|dylib|a|dll|def)$" 211 | 212 | ## find all unwanted files 213 | unwanted <- grepl( 214 | paste0("^", paste0(collapse = "|", 215 | c("DESCRIPTION", "inst/doc", "build", "\\.git", 216 | object_files))), 217 | files) 218 | 219 | files <- files[!unwanted] 220 | 221 | ## find data directories 222 | is_data <- grepl( 223 | paste0("^", paste0(collapse="|", data_dirs)), 224 | files) 225 | 226 | data_files <- file.path(desc$Package, files[is_data]) 227 | 228 | non_data_files <- file.path(desc$Package, files[!is_data]) 229 | 230 | copy_files <- function(from, to) { 231 | ## create all directories in the new location 232 | lapply(unique(dirname(to)), dir.create, recursive = TRUE, 233 | showWarnings = FALSE) 234 | 235 | Map(file.copy, from, to) 236 | } 237 | 238 | ## copy non data files to the pkgs 239 | copy_files(file.path(dirname(pkg_dir), non_data_files), 240 | file.path(svn_pkgs, non_data_files)) 241 | 242 | ## copy the data files to the data store 243 | copy_files(file.path(dirname(pkg_dir), data_files), 244 | file.path(svn_data_store, data_files)) 245 | 246 | data_path <- unique(dirname(data_files)) 247 | ## FIXME: shortest common path prefix 248 | ## write the data paths in external_data_store.txt 249 | writeLines(sub("^[^/]+/", "", data_path), 250 | file.path(svn_pkgs, desc$Package, "external_data_store.txt")) 251 | 252 | ## write the modified description 253 | write.dcf(desc, file.path(svn_pkgs, desc$Package, "DESCRIPTION")) 254 | 255 | invisible(c(file.path(svn_pkgs, desc$Package), 256 | file.path(svn_data_store, desc$Package))) 257 | } 258 | -------------------------------------------------------------------------------- /R/permissions.R: -------------------------------------------------------------------------------- 1 | #' Read authz permission file 2 | #' 3 | #' @param file location passed to rsync 4 | read_permissions <- function(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", quiet = TRUE) { 5 | tmp <- tempfile() 6 | on.exit(unlink(tmp)) 7 | result <- system2("rsync", args = c(file, tmp), stderr = !isTRUE(quiet), stdout = !isTRUE(quiet)) 8 | if (!identical(result, 0L)) { 9 | stop("retrieving file ", sQuote(file), call. 
= FALSE) 10 | } 11 | res <- readLines(tmp) 12 | group_locs <- grepl("^\\[", res) 13 | groups <- gsub("[][]", "", res[group_locs]) 14 | res <- split(res, cumsum(group_locs)) 15 | res <- Map(function(x, name) { 16 | authz_section(parse_authz_line(x[-1L]), name = name) 17 | }, res, groups, USE.NAMES = FALSE) 18 | names(res) <- groups 19 | structure(res, class = "authz") 20 | } 21 | 22 | #' Write authz permission file 23 | #' 24 | #' @param x object to write 25 | #' @inheritParams read_permissions 26 | write_permissions <- function(x, file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", ...) { 27 | tmp <- tempfile() 28 | on.exit(unlink(tmp)) 29 | writeLines(format(x), con = tmp) 30 | system2("rsync", args = c(tmp, file)) 31 | } 32 | 33 | run_command_on_file <- function(command) { 34 | function(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", args = NULL) { 35 | re <- rex::rex(start, capture(name = "server", graphs), ":", capture(name = "path", anything)) 36 | match <- rex::re_matches(file, re) 37 | remote_file <- isTRUE(!is.na(match[[1]])) 38 | 39 | if (remote_file) { 40 | quoted_args <- shQuote(paste(shQuote(c(args, match$path)), collapse = " ")) 41 | system2("ssh", args = c(match$server, command, quoted_args)) 42 | } else { 43 | command_split <- strsplit(command, " ")[[1]] 44 | system2(command_split[1], args = paste(shQuote(c(command_split[-1], args, file)), collapse = " ")) 45 | } 46 | } 47 | } 48 | 49 | #' Run commands on a file, possibly remote. 50 | #' 51 | #' @param args Additional arguments passed to the command. 52 | #' @param inheritParams read_permissions 53 | #' @details if the file is a remote location (server:path) the command is run 54 | #' remotely. 55 | #' @name run_commands 56 | NULL 57 | 58 | #' @describeIn run_commands Check out a RCS tracked file 59 | rcs_check_out <- run_command_on_file("co -l") 60 | 61 | #' @param message commit message for the check in 62 | #' @describeIn run_commands Check in a RCS tracked file 63 | rcs_check_in <- function(file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz", message, args = NULL) { 64 | (run_command_on_file("ci -u"))(file = file, c(paste0("-m", message), args)) 65 | } 66 | 67 | format.authz <- function(x, ...) { 68 | unlist(lapply(x, format.authz_section, ...), use.names = FALSE) 69 | } 70 | 71 | format.authz_section <- function(x, ...) { 72 | named <- if (is.null(names(x))) { 73 | rep(FALSE, length(x)) 74 | } else { 75 | !is.na(names(x)) 76 | } 77 | res <- character(length(x)) 78 | res[named] <- paste0(names(x)[named], " = ", 79 | vapply(x[named], paste0, character(1), collapse = ", ")) 80 | res[!named] <- x[!named] 81 | c(paste0("[", attr(x, "name"), "]"), 82 | res) 83 | } 84 | 85 | print.authz_section <- print.authz <- print.authz_lines <- function(x, ...) { 86 | cat(unlist(format(x, ...), use.names = FALSE), sep = "\n") 87 | } 88 | 89 | #' Edit the software permissions 90 | #' 91 | #' @param data a authz data file 92 | #' @param version The release version number 93 | #' @param x The edits to perform 94 | #' @param ... Additional arguments passed to methods 95 | edit_software_permissions <- function(x, ...) { 96 | UseMethod("edit_software_permissions") 97 | } 98 | 99 | #' @describeIn edit_software_permissions data.frame input, expects columns \sQuote{package} and \sQuote{user} 100 | edit_software_permissions.data.frame <- function(x, data = read_permissions(), 101 | version = 3.2, ...) 
{ 102 | assert(all(c("package", "user") %in% colnames(x)), 103 | "'x' must have two columns named 'package' and 'user'") 104 | 105 | edit_software_permissions(split(x$user, x$package)) 106 | } 107 | 108 | edit_permissions <- function(data, group, locations) { 109 | eval(bquote(function(x, data = .(data), version = "3.2") { 110 | assert(is_named(x), "Input must be a named list") 111 | 112 | x[] <- lapply(x, as.character) 113 | 114 | usernames <- unlist(x, use.names = FALSE) 115 | 116 | # Add any missing users to the .(group) group 117 | readers <- data$groups[[.(group)]] 118 | missing_users <- !usernames %in% readers 119 | data$groups[[.(group)]] <- append(readers, usernames[missing_users]) 120 | 121 | new <- !names(x) %in% names(data$groups) 122 | 123 | # For existing groups, assign the new users 124 | data$groups[names(x)[!new]] <- x[!new] 125 | 126 | end_of_groups <- tail(which(!nzchar(data$groups)), n = 1L) - 1L 127 | if (any(new)) { 128 | data$groups <- authz_section(append(data$groups, x[new], end_of_groups), 129 | name = "groups") 130 | 131 | new_packages <- names(x)[new] 132 | 133 | for (pkg in new_packages) { 134 | 135 | # Unfortunately you cannot use append for this as it calls c(), 136 | # which drops attributes :(. So we have to do the appending manually 137 | len <- length(data) 138 | obj <- list("rw", "") 139 | names(obj) <- c(paste0("@", pkg), NA) 140 | 141 | .(locations) 142 | } 143 | } 144 | data 145 | })) 146 | } 147 | 148 | #' @describeIn edit_software_permissions list input, expects a named list of packages and users 149 | edit_software_permissions.list <- edit_permissions(quote(read_permissions()), "bioconductor-readers", 150 | quote({ 151 | trunk_loc <- paste0("/trunk/madman/Rpacks/", pkg) 152 | data[[len + 1L]] <- authz_section(obj, name = trunk_loc) 153 | names(data)[[len + 1L]] <- trunk_loc 154 | 155 | release_loc <- paste0("/branches/RELEASE_", sub("[.]", "_", version), "/madman/Rpacks/", pkg) 156 | data[[len + 2L]] <- authz_section(obj, name = release_loc) 157 | names(data)[[len + 2L]] <- release_loc 158 | })) 159 | 160 | #' Edit the data experiment permissions 161 | #' 162 | #' @param data a authz data file 163 | #' @param version The release version number 164 | #' @param x The edits to perform 165 | #' @param ... Additional arguments passed to methods 166 | edit_data_experiment_permissions <- function(x, ...) { 167 | UseMethod("edit_data_experiment_permissions") 168 | } 169 | 170 | #' @describeIn edit_data_experiment_permissions data.frame input, expects columns \sQuote{package} and \sQuote{user} 171 | edit_data_experiment_permissions.data.frame <- function(x, data = read_permissions("hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz"), version = 3.2, ...) { 172 | assert(all(c("package", "user") %in% colnames(x)), 173 | "'x' must have two columns named 'package' and 'user'") 174 | 175 | edit_data_experiment_permissions(split(x$user, x$package)) 176 | } 177 | 178 | # This function is largely duplicated from edit_software_permissions, perhaps 179 | # it is worth trying to use a common helper... 
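## Illustrative usage, not part of the package (the package and user ids below
## are hypothetical): both generics accept either a named list mapping package
## to users, or a data.frame with 'package' and 'user' columns, e.g.
##
##   perms <- read_permissions()
##   perms <- edit_software_permissions(list(MyPkg = c("j.doe", "a.smith")),
##                                      data = perms)
##   write_permissions(perms)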
180 | 181 | #' @describeIn edit_data_experiment_permissions list input, expects a named list of packages and users 182 | edit_data_experiment_permissions.list <- edit_permissions(quote(read_permissions("hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz")), 183 | "bioc-data-readers", 184 | quote({ 185 | trunk_loc <- paste0("/trunk/experiment/pkgs/", pkg) 186 | data[[len + 1L]] <- authz_section(obj, name = trunk_loc) 187 | names(data)[[len + 1L]] <- trunk_loc 188 | 189 | trunk_loc <- paste0("/trunk/experiment/data_store/", pkg) 190 | data[[len + 2L]] <- authz_section(obj, name = trunk_loc) 191 | names(data)[[len + 2L]] <- trunk_loc 192 | 193 | release_loc <- paste0("/branches/RELEASE_", sub("[.]", "_", version), "/experiment/pkgs/", pkg) 194 | data[[len + 3L]] <- authz_section(obj, name = release_loc) 195 | names(data)[[len + 3L]] <- release_loc 196 | 197 | release_loc <- paste0("/branches/RELEASE_", sub("[.]", "_", version), "/experiment/data_store/", pkg) 198 | data[[len + 4L]] <- authz_section(obj, name = release_loc) 199 | names(data)[[len + 4L]] <- release_loc 200 | })) 201 | 202 | #' Generate a standard commit message for permission edits 203 | #' 204 | #' @param x the edits to make, if a data.frame will be coerced to a named list. 205 | standard_commit_message <- function(x) { 206 | if (is.data.frame(x)) { 207 | assert(all(c("package", "user") %in% colnames(x)), 208 | "'x' must have two columns named 'package' and 'user'") 209 | 210 | x <- split(x$user, x$package) 211 | } else { 212 | assert(is.list(x) && is_named(x), "Input must be a named list") 213 | } 214 | 215 | paste0(names(x), " = ", lapply(x, paste, collapse = ", "), collapse = "; ") 216 | } 217 | 218 | parse_authz_line <- function(x, ...) { 219 | itr <- 1 220 | res <- vector("list", length(x)) 221 | assignments <- grepl("[[:graph:]]+[[:space:]]*=[[:space:]][[:graph:]]", x) 222 | for (splt in strsplit(x[assignments], "[[:space:]]*=[[:space:]]*|[[:space:]]*,[[:space:]]*")) { 223 | line_num <- which(assignments)[itr] 224 | if (length(splt) <= 1L) { 225 | stop("incorrect parse in line: ", sQuote(x[line_num]), call. = FALSE) 226 | } 227 | res[[line_num]] <- splt[-1L] 228 | names(res)[[line_num]] <- splt[1L] 229 | itr <- itr + 1L 230 | } 231 | res[!assignments] <- x[!assignments] 232 | res 233 | } 234 | 235 | authz_section <- function(x, name) { 236 | missing_nms <- !nzchar(names(x)) 237 | if (any(missing_nms)) { 238 | names(x)[missing_nms] <- NA 239 | } 240 | attr(x, "name") <- name 241 | class(x) <- "authz_section" 242 | x 243 | } 244 | 245 | #' Helper function to Add Software Permissions 246 | #' 247 | #' @param x Permissions to add, can be a named \code{list} or \code{data.frame}. 248 | #' @param message Commit message to use 249 | #' @param file File containing the permissions to edit 250 | add_software_permisions <- function(x, message = standard_commit_message(x), 251 | file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioconductor.authz") { 252 | 253 | # check out the permissions file 254 | rcs_check_out(file = file) 255 | 256 | # add new permissions 257 | perms <- read_permissions(file = file) 258 | new_perms <- edit_software_permissions(x, data = perms) 259 | write_permissions(new_perms, file = file) 260 | 261 | # check in the modified file 262 | rcs_check_in(file = file, message = message) 263 | } 264 | 265 | #' Helper function to Add Data Experiment Permissions 266 | #' 267 | #' @param x Permissions to add, can be a named \code{list} or \code{data.frame}. 
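#' @examples
#' \dontrun{
#' ## Illustrative only (hypothetical package and user id): checks out the
#' ## remote authz file, adds the permissions, writes it back, and checks it in.
#' add_data_experiment_permisions(list(MyDataPkg = "j.doe"))
#' }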
268 | #' @param message Commit message to use 269 | #' @param file File containing the permissions to edit 270 | add_data_experiment_permisions <- function(x, message = standard_commit_message(x), 271 | file = "hedgehog:/extra/svndata/gentleman/svn_authz/bioc-data.authz") { 272 | 273 | # check out the permissions file 274 | rcs_check_out(file = file) 275 | 276 | # add new permissions 277 | perms <- read_permissions(file = file) 278 | perms <- edit_data_experiment_permissions(x, data = perms) 279 | write_permissions(perms, file = file) 280 | 281 | # check in the modified file 282 | rcs_check_in(file = file, message = message) 283 | } 284 | -------------------------------------------------------------------------------- /tests/testthat/test-permissions.R: -------------------------------------------------------------------------------- 1 | options(useFancyQuotes = FALSE) 2 | 3 | context("read_permissions") 4 | test_that("it fails if file does not exist", { 5 | expect_error(read_permissions("unknown"), "retrieving file 'unknown'") 6 | }) 7 | 8 | test_that("it is read correctly", { 9 | res <- read_permissions("bioconductor.authz.orig") 10 | expect_equal(length(res), 15L) 11 | 12 | expect_equal(names(res)[1:3], 13 | c("groups", "/", "/trunk/bioconductor.org")) 14 | 15 | # check group is read correctly 16 | expect_equal(names(res$groups)[1:4], c(NA, "bioconductor-readers", NA, "bioconductor-write0")) 17 | 18 | expect_equal(tail(n = 8, names(res$groups)), 19 | c("normalize450K", "profileScoreDist", "transcriptR", "kimod", "lpsymphony", "splineTCDiffExpr", NA, NA)) 20 | 21 | expect_equal(tail(n = 10, names(res)), 22 | c("/trunk/madman/Rpacks/profileScoreDist", "/branches/RELEASE_3_2/madman/Rpacks/profileScoreDist", 23 | "/trunk/madman/Rpacks/transcriptR", "/branches/RELEASE_3_2/madman/Rpacks/transcriptR", 24 | "/trunk/madman/Rpacks/kimod", "/branches/RELEASE_3_2/madman/Rpacks/kimod", 25 | "/trunk/madman/Rpacks/lpsymphony", "/branches/RELEASE_3_2/madman/Rpacks/lpsymphony", 26 | "/trunk/madman/Rpacks/splineTCDiffExpr", "/branches/RELEASE_3_2/madman/Rpacks/splineTCDiffExpr")) 27 | last <- tail(n = 1, res)[[1]] 28 | expect_equal(last, authz_section(x = list("@splineTCDiffExpr" = "rw", ""), 29 | name = "/branches/RELEASE_3_2/madman/Rpacks/splineTCDiffExpr")) 30 | }) 31 | 32 | test_that("Reading and writing without changes produces the same file", { 33 | res <- read_permissions("bioconductor.authz.orig") 34 | tmp <- tempfile() 35 | on.exit(unlink(tmp)) 36 | 37 | write_permissions(res, tmp) 38 | 39 | orig_lines <- readLines("bioconductor.authz.orig") 40 | 41 | new_lines <- readLines(tmp) 42 | 43 | expect_equal(orig_lines, new_lines) 44 | }) 45 | 46 | context("edit_software_permissions") 47 | test_that("Assigning new biocoductor-users works", { 48 | res <- read_permissions("bioconductor.authz.orig") 49 | 50 | new_perms <- edit_software_permissions(list(`bioconductor-readers` = "test.user"), data = res) 51 | 52 | # New user added 53 | expect_equal(new_perms$groups$`bioconductor-readers`, "test.user") 54 | 55 | # Rest of data is equal 56 | expect_equal(new_perms$groups[-2], res$groups[-2]) 57 | expect_equal(new_perms[-1], res[-1]) 58 | }) 59 | 60 | test_that("Setting a single user to existing package assignments works", { 61 | res <- read_permissions("bioconductor.authz.orig") 62 | 63 | new_perms <- edit_software_permissions(list(`lpsymphony` = "j.heiss"), data = res) 64 | 65 | # New user set 66 | expect_equal(new_perms$groups$`lpsymphony`, "j.heiss") 67 | 68 | # Rest of data is equal 69 | lpsymphony_group 
<- which(names(new_perms$groups) == "lpsymphony") 70 | expect_equal(new_perms$groups[-lpsymphony_group], res$groups[-lpsymphony_group]) 71 | expect_equal(new_perms[-1], res[-1]) 72 | }) 73 | 74 | test_that("Setting multiple users to existing package assignments works", { 75 | res <- read_permissions("bioconductor.authz.orig") 76 | 77 | new_perms <- edit_software_permissions(list(`lpsymphony` = c("j.heiss", "a.karapetyan")), data = res) 78 | 79 | # New user set 80 | expect_equal(new_perms$groups$`lpsymphony`, c("j.heiss", "a.karapetyan")) 81 | 82 | # Rest of data is equal 83 | lpsymphony_group <- which(names(new_perms$groups) == "lpsymphony") 84 | expect_equal(new_perms$groups[-lpsymphony_group], res$groups[-lpsymphony_group]) 85 | expect_equal(new_perms[-1], res[-1]) 86 | }) 87 | 88 | test_that("Setting multiple users to multiple packages package assignments works", { 89 | res <- read_permissions("bioconductor.authz.orig") 90 | 91 | new_perms <- edit_software_permissions(list(`lpsymphony` = c("j.heiss", "a.karapetyan"), 92 | `transcriptR` = c("v.kim", "j.sun2")), data = res) 93 | 94 | # New user set 95 | expect_equal(new_perms$groups$`lpsymphony`, c("j.heiss", "a.karapetyan")) 96 | expect_equal(new_perms$groups$`transcriptR`, c("v.kim", "j.sun2")) 97 | 98 | # Rest of data is equal 99 | modified_groups <- which(names(new_perms$groups) %in% c("lpsymphony", "transcriptR")) 100 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 101 | expect_equal(new_perms[-1], res[-1]) 102 | }) 103 | 104 | test_that("Users who don't exist in bioconductor-readers are added", { 105 | res <- read_permissions("bioconductor.authz.orig") 106 | 107 | new_perms <- edit_software_permissions(list(`lpsymphony` = "new.user"), data = res) 108 | 109 | # Added to bioconductor-readers 110 | expect_equal(tail(n = 1, new_perms$groups$`bioconductor-readers`), "new.user") 111 | 112 | # New user set for package 113 | expect_equal(new_perms$groups$`lpsymphony`, "new.user") 114 | 115 | # Rest of data is equal 116 | modified_groups <- which(names(new_perms$groups) %in% c("lpsymphony", "bioconductor-readers")) 117 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 118 | expect_equal(new_perms[-1], res[-1]) 119 | }) 120 | 121 | test_that("Multiple users who don't exist in bioconductor-readers are added", { 122 | res <- read_permissions("bioconductor.authz.orig") 123 | 124 | new_perms <- edit_software_permissions(list(`lpsymphony` = c("new.user", "new.user2")), data = res) 125 | 126 | # Added to bioconductor-readers 127 | expect_equal(tail(n = 2, new_perms$groups$`bioconductor-readers`), c("new.user", "new.user2")) 128 | 129 | # New user set 130 | expect_equal(new_perms$groups$`lpsymphony`, c("new.user", "new.user2")) 131 | 132 | # Rest of data is equal 133 | modified_groups <- which(names(new_perms$groups) %in% c("lpsymphony", "bioconductor-readers")) 134 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 135 | expect_equal(new_perms[-1], res[-1]) 136 | }) 137 | 138 | test_that("Setting multiple new users to multiple packages package assignments works", { 139 | res <- read_permissions("bioconductor.authz.orig") 140 | 141 | new_perms <- edit_software_permissions(list(`lpsymphony` = c("new.user1", "new.user2"), 142 | `transcriptR` = c("new.user3", "new.user4")), data = res) 143 | 144 | # Added to bioconductor-readers 145 | expect_equal(tail(n = 4, new_perms$groups$`bioconductor-readers`), 146 | c("new.user1", "new.user2", "new.user3", "new.user4")) 
147 | 148 | # New user set 149 | expect_equal(new_perms$groups$`lpsymphony`, c("new.user1", "new.user2")) 150 | expect_equal(new_perms$groups$`transcriptR`, c("new.user3", "new.user4")) 151 | 152 | # Rest of data is equal 153 | modified_groups <- which(names(new_perms$groups) %in% c("lpsymphony", "transcriptR", "bioconductor-readers")) 154 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 155 | expect_equal(new_perms[-1], res[-1]) 156 | }) 157 | 158 | test_that("Adding new packages works", { 159 | res <- read_permissions("bioconductor.authz.orig") 160 | 161 | new_perms <- edit_software_permissions(list(`new.pkg` = "v.kim"), data = res) 162 | 163 | # New package added 164 | expect_true("new.pkg" %in% names(new_perms$groups)) 165 | expect_equal(new_perms$groups$`new.pkg`, "v.kim") 166 | 167 | new_paths <- tail(n = 2, new_perms) 168 | expect_equal(names(new_paths), 169 | c("/trunk/madman/Rpacks/new.pkg", "/branches/RELEASE_3_2/madman/Rpacks/new.pkg")) 170 | 171 | expect_equal(new_paths, 172 | list("/trunk/madman/Rpacks/new.pkg" = 173 | authz_section(list("@new.pkg" = "rw", ""), name = "/trunk/madman/Rpacks/new.pkg"), 174 | "/branches/RELEASE_3_2/madman/Rpacks/new.pkg" = 175 | authz_section(list("@new.pkg" = "rw", ""), name = "/branches/RELEASE_3_2/madman/Rpacks/new.pkg"))) 176 | 177 | # Rest of data is equal 178 | modified_groups <- which(names(new_perms$groups) %in% c("new.pkg")) 179 | expect_equal(new_perms$groups[-modified_groups], c(res$groups[])) 180 | }) 181 | 182 | context("data_experiment_permissions") 183 | test_that("Assigning new bioc-data-users works", { 184 | res <- read_permissions("bioc-data.authz.orig") 185 | 186 | new_perms <- edit_data_experiment_permissions.list(list(`bioc-data-readers` = "test.user"), data = res) 187 | 188 | # New user added 189 | expect_equal(new_perms$groups$`bioc-data-readers`, "test.user") 190 | 191 | # Rest of data is equal 192 | expect_equal(new_perms$groups[-1], res$groups[-1]) 193 | expect_equal(new_perms[-1], res[-1]) 194 | }) 195 | 196 | test_that("Setting a single user to existing package assignments works", { 197 | res <- read_permissions("bioc-data.authz.orig") 198 | 199 | new_perms <- edit_data_experiment_permissions(list(`lumiBarnes` = "j.cairns"), data = res) 200 | 201 | # New user set 202 | expect_equal(new_perms$groups$`lumiBarnes`, "j.cairns") 203 | 204 | # Rest of data is equal 205 | modified_groups <- which(names(new_perms$groups) == "lumiBarnes") 206 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 207 | expect_equal(new_perms[-1], res[-1]) 208 | }) 209 | 210 | test_that("Setting multiple users to existing package assignments works", { 211 | res <- read_permissions("bioc-data.authz.orig") 212 | 213 | new_perms <- edit_data_experiment_permissions(list(`lumiBarnes` = c("j.cairns", "g.bhatti")), data = res) 214 | 215 | # New user set 216 | expect_equal(new_perms$groups$`lumiBarnes`, c("j.cairns", "g.bhatti")) 217 | 218 | # Rest of data is equal 219 | modified_groups <- which(names(new_perms$groups) == "lumiBarnes") 220 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 221 | expect_equal(new_perms[-1], res[-1]) 222 | }) 223 | 224 | test_that("Setting multiple users to multiple existing package assignments works", { 225 | res <- read_permissions("bioc-data.authz.orig") 226 | 227 | new_perms <- edit_data_experiment_permissions(list(`lumiBarnes` = c("j.cairns", "g.bhatti"), 228 | `CCl4` = c("a.kauffmann", "y.taguchi")), data = res) 229 | 230 | # New 
user set 231 | expect_equal(new_perms$groups$`lumiBarnes`, c("j.cairns", "g.bhatti")) 232 | expect_equal(new_perms$groups$`CCl4`, c("a.kauffmann", "y.taguchi")) 233 | 234 | # Rest of data is equal 235 | modified_groups <- which(names(new_perms$groups) %in% c("lumiBarnes", "CCl4")) 236 | expect_equal(new_perms$groups[-modified_groups], res$groups[-modified_groups]) 237 | expect_equal(new_perms[-1], res[-1]) 238 | }) 239 | test_that("Adding new packages works", { 240 | res <- read_permissions("bioc-data.authz.orig") 241 | 242 | new_perms <- edit_data_experiment_permissions(list(`new.pkg` = "m.smith"), data = res) 243 | 244 | # New package added 245 | expect_true("new.pkg" %in% names(new_perms$groups)) 246 | expect_equal(new_perms$groups$`new.pkg`, "m.smith") 247 | 248 | new_paths <- tail(n = 4, new_perms) 249 | expect_equal(names(new_paths), 250 | c("/trunk/experiment/pkgs/new.pkg", "/trunk/experiment/data_store/new.pkg", 251 | "/branches/RELEASE_3_2/experiment/pkgs/new.pkg", "/branches/RELEASE_3_2/experiment/data_store/new.pkg")) 252 | 253 | expect_equal(new_paths, 254 | list( 255 | "/trunk/experiment/pkgs/new.pkg" = 256 | authz_section(list("@new.pkg" = "rw", ""), name = "/trunk/experiment/pkgs/new.pkg"), 257 | "/trunk/experiment/data_store/new.pkg" = 258 | authz_section(list("@new.pkg" = "rw", ""), name = "/trunk/experiment/data_store/new.pkg"), 259 | "/branches/RELEASE_3_2/experiment/pkgs/new.pkg" = 260 | authz_section(list("@new.pkg" = "rw", ""), name = "/branches/RELEASE_3_2/experiment/pkgs/new.pkg"), 261 | "/branches/RELEASE_3_2/experiment/data_store/new.pkg" = 262 | authz_section(list("@new.pkg" = "rw", ""), name = "/branches/RELEASE_3_2/experiment/data_store/new.pkg"))) 263 | 264 | # Rest of data is equal 265 | modified_groups <- which(names(new_perms$groups) %in% c("new.pkg")) 266 | expect_equal(new_perms$groups[-modified_groups], c(res$groups[])) 267 | }) 268 | -------------------------------------------------------------------------------- /man/utilFunctions.Rd: -------------------------------------------------------------------------------- 1 | \name{clean} 2 | \alias{clean} 3 | \alias{cleanDataPkg} 4 | \alias{makeBins} 5 | \alias{emailExistingUser} 6 | \alias{emailNewUser} 7 | \alias{requestNewSvnAccountFromScicomp} 8 | \alias{svnUserMatches} 9 | \alias{existingSvnUsers} 10 | \alias{getPackageTotals} 11 | \alias{plotPackageTotals} 12 | \alias{getPackageDeltas} 13 | \alias{installDeps} 14 | \alias{generatePermissionEdits} 15 | \alias{rebuildIssueTarball} 16 | \alias{removeDeadTrackerIssue} 17 | \alias{filterIssues} 18 | \alias{coneOfShame} 19 | \alias{readyToAdd} 20 | \alias{creditworthy} 21 | \alias{getPackageRange} 22 | \alias{getEmailAddressesToWarnPkgMaintainers} 23 | \alias{compareReleases} 24 | \alias{getDescriptions} 25 | \alias{preacceptedToAccepted} 26 | \alias{failmail} 27 | 28 | \title{Convenience functions for managing behind the scenes operations 29 | involved in adding packages to Bioconductor Repositories.} 30 | 31 | \description{ 32 | There are several convenience functions in this package that are 33 | useful for different things. I will enumerate them below 34 | 35 | clean: used for removing cruft that is added to tarball by R CMD 36 | build. This is normally run before adding a new tarball to the svn 37 | repository. 38 | 39 | makeBins: used for making tarballs that don't contain source code into 40 | mac and windows binaries. 41 | 42 | emailExistingUser: used to send an instructional email to a user who 43 | already has an existing svn account. 
44 | 45 | emailNewUser: used to print out a customized instructional email for a 46 | user who has a new svn account. Since there is currently no way to 47 | get the credentials, these will have to be pasted in, but at least the 48 | rest of the message will be put together. 49 | 50 | emailNewSvnAccount: used to send an email to scicomp (with a custom guess 51 | for the proposed username) so that he can make a new svn account. If 52 | a user like this already exists, then you can refuse to send the email 53 | and it will instead print a message out so that you can just change 54 | one character manually and then send it. Again, this could be 55 | improved if we had access to a local copy of the svn credentials 56 | file. 57 | 58 | existingSvnUsers: used to list all existing svn accounts that look 59 | like the user you would expect to generate based on a tarball. It 60 | searches the maintainer field of a tarball, generates a user name and 61 | then looks at all the svn users to see if any are similar to that. 62 | 63 | getPackageTotals: looks at the manifests and gives package counts for 64 | each. This may need to be updated to reflect new manifests... 65 | 66 | getPackageDeltas: looks at the manifests and gives the package deltas 67 | for each manifest transition. So you can see how many packages were 68 | added with each release. 69 | 70 | plotPackageTotals: makes a simple plot of package growth over time. 71 | 72 | installDeps: Given a tarball or package directory, 73 | this will install all its dependencies 74 | using biocLite(). This works even for tarballs/directories that are not yet 75 | available themselves via biocLite(). 76 | 77 | generatePermissionEdits: Takes a vector of tarball paths, and prints 78 | output that can be pasted (after some scrutiny) into the permissions file. 79 | 80 | rebuildIssueTarball: A helper so that you don't have to bother Dan to 81 | rebuild a tarball when the tracker fails to do so for some reason. 82 | This function does require that you have access to habu. 83 | 84 | removeDeadTrackerIssue: Another helper that will remove an issue 85 | completely from the tracker. Use this with caution! This also 86 | requires access to habu. 87 | 88 | filterIssues: This function will get issue IDs and even tarballs that 89 | match certain date and status criteria. So you want to know what 90 | packages were accepted in March of 2014? Or maybe just which packages 91 | are classified as 'new-package' status from last month? This function 92 | should help with that. 93 | 94 | coneOfShame: This function is named after the 'cone of shame' from 95 | UP. Because sometimes you might get a false positive if the end user 96 | just writes 'thank you' and has not bounced the ball back into your 97 | court... But it's still a useful function for seeing which packages are 98 | getting neglected and by how much. The hope is that this should spare 99 | all of us the embarrassment of packages getting too far behind. 100 | 101 | readyToAdd: This is just a convenience for listing those packages 102 | that are accepted in the tracker but still not yet in the latest bioc 103 | manifest. In other words, this is to list out packages that need to be added so 104 | that we don't miss any. We need to use this because that has happened 105 | before! One issue is that this function has to rely on the package 106 | name in the title being accurate. If those values are wrong, then 107 | this will give false positives.
108 | 109 | creditworthy: gives credit to core Seattle members and finds out whose 110 | done what in the last 30 days with respect to packages 111 | 112 | getPackageRange: Every 6 months, core Seattle members are given 113 | a range and they need to ensure that all packages in that range pass without 114 | any errors and warnings. This helper function takes in the name of reviewer 115 | and gives back the number of packages assigned to them, number of packages 116 | with errors and number of packages with warnings. 117 | 118 | getEmailAddressesToWarnPkgMaintainers: just a convenience wrapper that 119 | calls getPackageRange and produces a list of emails addresses to 120 | bother with a form message. Like getPackageRange, it takes a userName 121 | argument that it then uses to restrict which authors it will bother... 122 | 123 | compareReleases: given an 'old' and 'new' release, what packages were 124 | added and what packages were removed? 125 | 126 | getDescriptions: given a list of packages (maybe returned by 127 | compareReleases), get the Description fields of each and return 128 | as a string suitable for pasting into a release announcement. 129 | 130 | preacceptedToAccepted: finds the packages on the tracker with pre-accepted 131 | status and checks if the build report for them has any warnings or errors 132 | 133 | failmail: Sends email to maintainers whose packages are broken in 134 | the build system. All you have to provide is the package name, 135 | though you are able to customize the email that's sent. 136 | } 137 | 138 | \usage{ 139 | clean(tarball, svnDir="~/proj/Rpacks/", copyToSvnDir=TRUE, 140 | svnAccountExists=FALSE) 141 | cleanDataPkg(tarball, 142 | svnDir1="~/proj/experiment/pkgs", 143 | svnDir2="~/proj/experiment/data_store", 144 | copyToSvnDir=TRUE, 145 | svnAccountExists=FALSE) 146 | makeBins(tarball) 147 | emailExistingUser(tarball, sendMail=FALSE) 148 | emailNewUser(tarball) 149 | requestNewSvnAccountFromScicomp(tarball, sendMail=FALSE) 150 | existingSvnUsers(tarballsPath=".", suffix=".tar.gz$") 151 | svnUserMatches(names) 152 | generatePermissionEdits(tarballsPath=".", suffix=".tar.gz$") 153 | 154 | rebuildIssueTarball(issueNumber,tarballUrlPath) 155 | removeDeadTrackerIssue(issueNumber) 156 | filterIssues(status=c('new-package'),datePrefix='2015',getUserFiles=FALSE) 157 | coneOfShame(daysNeglected=14, userName=NULL, daysToForget=30,lastTouchedFilter=TRUE) 158 | readyToAdd(datePrefix='2015',svnDir="~/proj/Rpacks/", 159 | svnDir1="~/proj/experiment/pkgs/", getUserFiles=FALSE) 160 | creditworthy(creditDays=30, userName) 161 | getPackageRange(userName = "sarora", biocVersion = "3.1") 162 | getEmailAddressesToWarnPkgMaintainers(userName) 163 | compareReleases(path = "~/proj/Rpacks/", oldRel="3.0", newRel="3.1") 164 | getDescriptions(path = "~/proj/Rpacks/", pkgs) 165 | preacceptedToAccepted() 166 | failmail(package, software=TRUE, 167 | from=getOption("fromEmail", "dtenenba@fredhutch.org"), 168 | sig=getOption("mail.sig", "Dan"), subject=sprintf("\%s build problem", package), 169 | preview=TRUE, bccme=TRUE) 170 | } 171 | 172 | \arguments{ 173 | \item{tarball}{The tarball (as built by R CMD build). 
In \code{installDeps},
174 | this can also be a directory.}
175 | \item{svnDir}{The dir where the svn repos is located}
176 | \item{svnDir1}{The dir where the pkgs repos is located (data pkgs)}
177 | \item{svnDir2}{The dir where the data repos is located (data pkgs)}
178 | \item{copyToSvnDir}{TRUE/FALSE do we copy the cleaned dir to svnDir?}
179 | \item{svnAccountExists}{TRUE/FALSE does the maintainer of this tarball
180 | have an svn account already?}
181 | \item{sendMail}{TRUE/FALSE do we call sendmail or do we print out a
182 | file to send later?}
183 | \item{names}{A character vector of strings that you think might be
184 | user names.}
185 | \item{tarballsPath}{The path to a group of tarballs to process}
186 | \item{suffix}{The suffix to use for recognizing tarball files
187 | (normally '.tar.gz' - so no need to specify it)}
188 | \item{issueNumber}{The roundup tracker issue number}
189 | \item{tarballUrlPath}{The URL for a tarball that is to be checked}
190 | \item{status}{Any of the following tracker-based states:
191 | 'new-package', 'preview-in-progress', 'sent-back',
192 | 'modified-package', 'review-in-progress', 'accepted', 'rejected'}
193 | \item{datePrefix}{Tracker dates are formatted like this:
194 | 'YYYY-MM-DD timestamp', so provide a string that matches as much of the
195 | date as you intend to match exactly. For example
196 | '2007-05' would match things from May 2007. It probably won't be
197 | useful to be more specific than that.}
198 | \item{getUserFiles}{TRUE/FALSE get the tarball names associated with
199 | issues that match?}
200 | \item{daysNeglected}{Number of days to wait before including in a
201 | cone of shame list}
202 | \item{userName}{username for a person whose activity you want to see}
203 | \item{daysToForget}{Number of days before something is old enough that
204 | we always want to exclude it from a cone of shame list}
205 | \item{lastTouchedFilter}{TRUE/FALSE Filter based on whether the
206 | author was the last person to touch it?}
207 | \item{creditDays}{Number of days for which you want to find out which
208 | reviewer has done what}
209 | \item{biocVersion}{Which version of Bioconductor}
210 | \item{oldRel}{old release}
211 | \item{newRel}{new release}
212 | \item{pkgs}{a character vector of package names}
213 | \item{package}{The package to email the maintainer about.}
214 | \item{software}{TRUE for software packages, FALSE for experiment data packages.}
215 | \item{from}{From address for maintainer email.}
216 | \item{sig}{String to sign maintainer email with.}
217 | \item{subject}{Subject of maintainer email.}
218 | \item{preview}{Show a preview of maintainer email before sending?}
219 | \item{bccme}{If TRUE, copy maintainer email to 'from' address via BCC.}
220 | 
221 | }
222 | 
223 | \details{
224 | In order to use the email functions you should put a line in your .Rprofile
225 | file that looks like this:
226 | 
227 | \code{options(fromEmail = 'emailAddress@someplace.com')}
228 | 
229 | The 'failmail' function works a bit differently and requires
230 | that you have something like this in your .Rprofile:
231 | 
232 | 
233 | \code{options(email.options=
234 |       list(host.name = "hostname",
235 |       port = 25, tls = TRUE, authenticate = TRUE, user.name = "username",
236 |       passwd = "password", debug=FALSE,
237 |       ssl = TRUE)
238 | )}
239 | 
240 | }
241 | 
242 | \value{
243 | This varies with the function.
244 | }
245 | 
246 | 
247 | \author{Marc Carlson}
248 | 
249 | \examples{
250 | library(BiocContributions)
251 | tarball <- system.file("testpackages", "AnnotationHub_1.3.18.tar.gz",
252 |                        package="BiocContributions")
253 | 
254 | ## interactive() here is to protect me from example spam etc.
255 | if(interactive()){
256 | ## clean up a tarball and copy it to the repos
257 | clean(tarball)
258 | 
259 | ## clean up a data tarball and copy it to the svn repositories.
260 | cleanDataPkg(tarball)
261 | 
262 | ## email someone who has an account
263 | emailExistingUser(tarball)
264 | 
265 | ## generate an email for someone who needs svn credentials
266 | emailNewUser(tarball)
267 | 
268 | ## send an email to Scicomp asking for a new account.
269 | requestNewSvnAccountFromScicomp(tarball)
270 | 
271 | 
272 | ## make Mac and Windows binaries for a package.
273 | tarball <- system.file("testpackages", "hgu95av2.db_2.10.1.tar.gz",
274 |                        package="BiocContributions")
275 | makeBins(tarball)
276 | 
277 | ## list existing svn users that are similar to tarball maintainers
278 | ## for all the tarballs in the working directory
279 | existingSvnUsers()
280 | 
281 | ## list svn users like 'carlson'
282 | svnUserMatches('carlson')
283 | 
284 | ## list how many packages are in each known release of Bioconductor
285 | getPackageTotals()
286 | 
287 | ## Install all the dependencies for a tarball you need to review
288 | ## (but that isn't yet in biocLite itself)
289 | installDeps(tarball)
290 | 
291 | ## Generate permission edits for a directory of new tarballs
292 | generatePermissionEdits()
293 | 
294 | ## rebuild tarball issue 558 (URL indicates which tarball to rebuild)
295 | rebuildIssueTarball(558,
296 |     'https://tracker.bioconductor.org/file4845/spbtest_0.99.0.tar.gz')
297 | 
298 | ## remove issue 1114 from the tracker
299 | removeDeadTrackerIssue('1114')
300 | 
301 | ## get issues that are from 2015 and that are either new or in
302 | ## preview
303 | filterIssues(status=c('new-package','preview-in-progress'),
304 |     datePrefix='2015')
305 | 
306 | ## get similar issues that are from December of 2014
307 | filterIssues(status=c('new-package','preview-in-progress'),
308 |     datePrefix='2014-12')
309 | 
310 | ## list users who may be currently behind on their package reviews
311 | ## by two weeks but not more than a month.
312 | coneOfShame()
313 | ## use coneOfShame to only see which issues mcarlson has not touched for
314 | ## more than one week
315 | coneOfShame(7, 'mcarlson')
316 | ## use coneOfShame to see all issues assigned to mcarlson, period:
317 | coneOfShame(1, 'mcarlson', lastTouchedFilter=FALSE)
318 | 
319 | ## list packages that may be ready to be added to the repos.
320 | readyToAdd('2015')
321 | 
322 | ## give credit to people for what they've worked on in the past 30 days
323 | df <- creditworthy()
324 | 
325 | diff <- compareReleases()
326 | descs <- getDescriptions(pkgs=diff$added)
327 | }
328 | 
329 | \dontrun{
330 | failmail("xps")
331 | }
332 | 
333 | }
334 | 
335 | \keyword{manip}
336 | 
337 | 
--------------------------------------------------------------------------------
/R/email.R:
--------------------------------------------------------------------------------
1 | 
2 | ########################################################
3 | ##
4 | ## TODO: deal with multiple maintainers more elegantly!
5 | ## TODO: write email function to request svn account from scicomp.
6 | ## 7 | ######################################################## 8 | 9 | 10 | 11 | ## I need to send emails to scicomp, new/older authors, as well as 12 | ## prospective authors. These should all be functions if for no other 13 | ## reason than to allow their contents to be edited by all team 14 | ## members via svn. 15 | 16 | #################################################################### 17 | ## lets start with a function to send an email when the user already 18 | ## has a svn account. 19 | 20 | ## This just gets the contents for the Maintainer field 21 | ## (There might be multiple emails returned from here) 22 | .extractEmails <- function(description){ 23 | as.person(description$Maintainer) 24 | } 25 | 26 | ## Email wrapper so that I don't have to do this more than once 27 | ## NOTE: for sendmailR (or even command line mail) to run, you must 28 | ## have set /etc/mailname. Mine was set to: gamay.fhcrc.org 29 | .sendEmailMessage <- function(email, msg, subject){ 30 | require("sendmailR") 31 | fromEmail = getOption("fromEmail") 32 | sendmail(from=fromEmail, to=email, 33 | subject=subject, msg=msg) 34 | } 35 | 36 | ## And for when we want to send multiple messages: 37 | .sendEmailMessages <- function(emails, msgs, subject){ 38 | for(i in seq_along(emails)){ 39 | .sendEmailMessage(emails[i], msgs[i], subject) 40 | } 41 | } 42 | 43 | ## General purpose multiplier for functions that take authorName, packageName and that also have a function to define the message based on that. 44 | .makeMessages <- function(authorNames, packageName, FUN, ...){ 45 | msgs <- character() 46 | for(i in seq_along(authorNames)){ 47 | msgs[i] <- FUN(authorName=authorNames[i], packageName, ...) 48 | } 49 | msgs 50 | } 51 | 52 | ########################################################################## 53 | ########################################################################## 54 | ## email for NEW users. This one will also create an email from the 55 | ## tarball, but this time we can't email them since we have to still 56 | ## put the email credentials in... 57 | 58 | template <- function(path, ...) { 59 | template <- readFile(system.file(package = "BiocContributions", "extdata", path)) 60 | res <- whisker::whisker.render(template, list(...)) 61 | class(res) <- "template" 62 | res 63 | } 64 | 65 | print.template <- function(x, ...) 
{ 66 | cat(x) 67 | invisible(x) 68 | } 69 | 70 | readFile <- function(file) { 71 | readChar(file, file.info(file)$size) 72 | } 73 | ## 1st we need our new user greeting: 74 | .makeMaintainerMsg <- 75 | function(authorName, packageName, userId = "", 76 | password = "", senderName) 77 | { 78 | template("maintainerAcceptance.txt", 79 | packageName = packageName, 80 | authorName = authorName, 81 | biocVersion = BiocInstaller:::BIOC_VERSION, 82 | userId = userId, 83 | password = password, 84 | senderName = senderName) 85 | } 86 | 87 | # TODO need a message for data packages 88 | 89 | .writeOutEmailTemplates <- function(paths, msgs){ 90 | for(i in seq_along(paths)){ 91 | con <- file(paths[i]) 92 | writeLines(text=msgs[i], con=con) 93 | close(con) 94 | } 95 | } 96 | 97 | #' Email a new user their credentials 98 | #' 99 | #' @param tarball the package tarball to email about 100 | #' @param userId The SVN user ID for the maintainer 101 | #' @param password The SVN password for the maintainer 102 | #' @param senderName The name of the email sender for use in the signature 103 | emailMaintainer <- 104 | function(tarball, userId = "user.id", password = "password", 105 | senderName = getOption("bioc_contributions_signature", 106 | "Bioconductor")) 107 | { 108 | description <- readDESCRIPTION(tarball) 109 | 110 | package <- description$Package 111 | emails <- .extractEmails(description) 112 | authorName <- paste(vapply(emails$given, "[[", "", 1), collapse=", ") 113 | 114 | msgs <- .makeMaintainerMsg(authorName=authorName, userId = userId, 115 | packageName=package, password = password, 116 | senderName = senderName) 117 | 118 | subject <- fmt( 119 | "Congratulations, {{package}} has been added to Bioconductor!", 120 | list(package = package)) 121 | 122 | gmailr::mime(Subject = subject, 123 | To = paste(unlist(unname(emails$email)), collapse=", "), 124 | From = "packages@bioconductor.org", 125 | body = msgs) 126 | } 127 | 128 | ########################################################################## 129 | ########################################################################## 130 | ## email for new svn accounts. This one takes a tarball and sends an 131 | ## email to scicomp at scicomp regarding new accounts. 132 | 133 | .makeNewSvnUserRequestMsg <- function(emailsAndUserNames){ 134 | msg <- paste("Hi scicomp, 135 | 136 | Could you please create a new svn account on hedgehog for 137 | 138 | ",emailsAndUserNames," 139 | 140 | Thanks! 141 | 142 | Marc", sep="") 143 | ## then return 144 | msg 145 | } 146 | 147 | .generateProposedUsername <- function(given, family){ 148 | tolower(paste(substr(given, 1, 1), family, sep = ".")) 149 | } 150 | 151 | ## TODO: maybe I should modify this to take a SERIES of tarballs... 152 | ## BUT 1st I need to refactor my functions that access svn logs. 153 | requestNewSvnAccountFromScicomp <- function(tarball, sendMail=FALSE){ 154 | description <- readDESCRIPTION(tarball) 155 | 156 | emails <- .extractEmails(description) 157 | usernames <- .generateProposedUsername(emails$given, emails$family) 158 | 159 | ## generate emails and UserNames 160 | emailsAndUserNames <- paste( 161 | paste(emails$email, 162 | "\n\n proposed username:", 163 | usernames, 164 | "\n"), 165 | collapse="\n\n AND \n\n") 166 | 167 | ## format msgs 168 | msg <- .makeNewSvnUserRequestMsg(emailsAndUserNames) 169 | if(sendMail){ 170 | ## send an email at this time. 
## .sendEmailMessage(email="scicomp@fhcrc.org", msg=msg,
172 | ## subject="new svn account")
173 | email = getOption("fromEmail")
174 | .sendEmailMessage(email=email, msg=msg,
175 | subject="new svn account")
176 | }else{
177 | con <- file(file.path(dirname(tarball), "_svnRequest__.txt"))
178 | writeLines(text=msg, con=con)
179 | close(con)
180 | }
181 | }
182 | 
183 | 
184 | 
185 | ## library(BiocContributions); tarball <- system.file("testpackages", "AnnotationHub_1.3.18.tar.gz", package="BiocContributions");
186 | 
187 | ## requestNewSvnAccountFromScicomp(tarball)
188 | 
189 | ## works
190 | 
191 | 
192 | ## requestNewSvnAccountFromScicomp(tarball, sendMail=FALSE)
193 | 
194 | 
195 | 
196 | 
197 | 
198 | ##############################################################################
199 | ## I need a tool for getting the latest svn perms
200 | 
201 | ## Problem: the above requires a passphrase to access the content.
202 | ## I am going to email scicomp to see if they can help me square that away.
203 | 
204 | 
205 | ## this (old) extractor is for when you only want to know if someone has
206 | ## access to bioconductor or not. ## TODO: if we ever start to use
207 | ## this we will want to also load it to the zzz.R file etc.
208 | .extractUsernamesFromAuthz <- function(){
209 | if(.Platform$OS.type != "unix"){
210 | stop("Sorry this function is only available from Unix")}
211 | ## Just get the latest file (this will require you
212 | ## to enter your passphrase)
213 | permFile = getOption("permFile")
214 | cmd <- paste0('rsync ',permFile,' .')
215 | system(cmd)
216 | 
217 | if(file.exists('bioconductor.authz')){
218 | con <- file('bioconductor.authz')
219 | res <- readLines(con)
220 | close(con)
221 | cats <- c("^bioconductor-readers =","^bioconductor-write0 =")
222 | res <- res[ grepl(cats[1], res) | grepl(cats[2], res) ]
223 | res <- unlist(strsplit(res, ","))
224 | res <- unique(sub(" ","",sub(cats[2],"",sub(cats[1],"",res))))
225 | }
226 | unlink("bioconductor.authz")
227 | res
228 | }
229 | 
230 | ## This extractor is the final word for knowing if an svn account exists at all...
231 | ## This is generally the conservative choice for most testing.
232 | .extractUsernamesFromUsers <- function(){
233 | if(.Platform$OS.type != "unix"){
234 | stop("Sorry this function is only available from Unix")}
235 | ## Just get the latest file (this will require you
236 | ## to enter your passphrase)
237 | ## usersFile = getOption("usersFile")
238 | ## cmd <- paste0('rsync ',usersFile,' .')
239 | ## system(cmd)
240 | tempDir <- get('tempDir', BiocContributions:::stash)
241 | usersFile <- file.path(tempDir, 'users')
242 | 
243 | if(file.exists(usersFile)){
244 | con <- file(usersFile)
245 | res <- readLines(con)
246 | close(con)
247 | res <- strsplit(res, ":")
248 | res <- unique(unlist(lapply(res, function(x){x[1]})))
249 | }
250 | ## unlink("users")
251 | res
252 | }
253 | 
254 | ## TODO/Bug fix: change the arrangement so that the file above is
255 | ## extracted ONCE per call of the highest level function (and then the
256 | ## file handle is passed down). This will get rid of the bug where we
257 | ## have to type in the passphrase every time that we have a new user
258 | ## name... One call per function should really be more than enough. In fact,
259 | ## better would be to call it only once when we first load the package!
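## One possible shape for that TODO (illustrative sketch only, not part of
## the package API): cache the parsed user list in the package-local `stash`
## environment the first time it is requested, so the rsync/passphrase step
## happens at most once per R session. `.extractUsernamesFromUsers()` is the
## existing parser above; the cache key name used here is made up.
.cachedSvnUsernames <- function() {
    if (!exists("svnUsernames", envir = stash, inherits = FALSE)) {
        ## first call in this session: do the expensive extraction once
        assign("svnUsernames", .extractUsernamesFromUsers(), envir = stash)
    }
    get("svnUsernames", envir = stash, inherits = FALSE)
}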
260 | 261 | ## 262 | ## TODO: make helper for extracting data from getOption("userDbFile") 263 | ## This will allow checking to see if the email in the package is the 264 | ## same as the one we have on record. 265 | ## 266 | 267 | 268 | #################################################################### 269 | ## Check if a username exists in svn 270 | ## I need this to be a public and private way of looking at whether an 271 | ## svn user exists for Bioconductor. 272 | ## So all the above emails should use this check 1) make sure that a user exists 273 | 274 | 275 | ## These return TRUE or FALSE 276 | .svnUserExists <- function(name){ 277 | names <- .extractUsernamesFromUsers() 278 | ## now grep 279 | any(grepl(name, names)) 280 | } 281 | 282 | .svnUsersExist <- function(names){ 283 | unlist(lapply(names, .svnUserExists)) 284 | } 285 | 286 | 287 | ## these returns matches (so you can think about it better) 288 | .svnUserMatcher <- function(name){ 289 | names <- .extractUsernamesFromUsers() 290 | ## now grep 291 | names[grepl(name, names)] 292 | } 293 | 294 | svnUserMatches <- function(names){ 295 | unlist(lapply(names, .svnUserMatcher)) 296 | } 297 | 298 | 299 | 300 | 301 | ## Check if a tarball is in svn yet or not. 302 | ## (for quickly assessing - a standalone function) 303 | .existingSvnUsers <- function(tarball){ 304 | description <- readDESCRIPTION(tarball) 305 | 306 | ## extract email from DESCRIPTION file 307 | emails <- .extractEmails(description) 308 | 309 | usernames <- .generateProposedUsername(emails$given, emails$family) 310 | 311 | res <- svnUserMatches(usernames) 312 | structure(list( 313 | package = description$Package, 314 | people = emails, 315 | svn = usernames, 316 | matches = res 317 | ), 318 | class = "svn_match") 319 | } 320 | 321 | existingSvnUsers <- function(path = ".", pattern = ".tar.gz$"){ 322 | res <- lapply(dir(path = path, pattern = pattern, full.names = TRUE), .existingSvnUsers) 323 | class(res) <- "svn_matches" 324 | res 325 | } 326 | 327 | print.svn_match <- function(x, ...) { 328 | message("Package: ", x$package, "\n", 329 | "Maintainer: ", x$people, "\n", 330 | "Username: ", paste(x$svn, collapse = ", "), "\n", 331 | "Matches: ", paste(x$matches, collapse = ", "), "\n") 332 | } 333 | 334 | print.svn_matches <- function(x, ...) { 335 | lapply(x, print) 336 | invisible() 337 | } 338 | 339 | as.logical.svn_match <- function(x, ...) { 340 | length(x$matches) > 0 341 | } 342 | 343 | as.logical.svn_matches <- function(x, ...) { 344 | vapply(x, as.logical, logical(1)) 345 | } 346 | 347 | ############################################## 348 | ## example 349 | ## library(BiocContributions); tarball <- system.file("testpackages", "AnnotationHub_1.3.18.tar.gz", package="BiocContributions"); 350 | 351 | ## existingSvnUsers() 352 | 353 | 354 | 355 | 356 | ## TODO: make use of the above helpers in the other email functions (but ONLY after we get better access to the .authz file) 357 | 358 | 359 | 360 | 361 | 362 | 363 | ############################################################################## 364 | ## helper for generating the stuff we put into the permissions file 365 | ## 1st: lets do the really annoying part at the end. 366 | ## then do the middle part, but don't worry about the 1st part. 
367 | 368 | ## things to bear in mind: 369 | ## This will tell you what version you are using 370 | ## biocVersion() ## BiocInstaller:::BIOC_VERSION 371 | ## You need to use this string to format the tedious part later 372 | 373 | ## You need to also make sure we are using devel in order to even try 374 | ## to use this function. (non-devel is not permitted) 375 | ## this will tell if you are using devel or not 376 | ## isDevel <- function(){packageVersion("BiocInstaller")$minor %% 2 == 1} 377 | 378 | ## Helper to retrieve userName and packageName 379 | .getPkgNameAndUser <- function(tarball){ 380 | 381 | description <- readDESCRIPTION(tarball) 382 | 383 | emails <- .extractEmails(description) 384 | 385 | usernames <- .generateProposedUsername(emails$given, emails$family) 386 | 387 | finalUserNames <- paste(usernames, collapse=", ") 388 | ## get the answer 389 | res <- svnUserMatches(usernames) 390 | finalUserNames <- paste(res, collapse=", ") 391 | ## Combine and return 392 | names(finalUserNames) <- description$Package 393 | finalUserNames 394 | } 395 | 396 | ## helper for ONLY getting tarballs (used instead of dir()) 397 | .getTars <- function(path=".",suffix=".tar.gz$"){ 398 | if(grepl(suffix,path)){ 399 | stop("You need to supply a path that contains tarballs: not an actual tarball...") 400 | } 401 | res <- dir(path) 402 | res[grepl(suffix,res)] 403 | } 404 | 405 | .printAssociations <- function(elem){ 406 | paste0(names(elem), " = " , elem, "\n") 407 | } 408 | 409 | .printTediousStuff <- function(elem){ 410 | pkg <- names(elem) 411 | # version <- biocVersion() ## re-enable this in fall 412 | version <- "3.1" ## Till just before release (b/c we want 'version before') 413 | part1 <- strsplit(as.character(version),split='[.]')[[1]][1] 414 | part2 <- strsplit(as.character(version),split='[.]')[[1]][2] 415 | # part2 <- as.character(as.integer(part2) - 1) ## no longer needed? 416 | version <- paste0(part1,"_",part2) 417 | paste0("[/trunk/madman/Rpacks/",pkg,"]\n@",pkg, 418 | " = rw\n\n", 419 | "[/branches/RELEASE_",version,"/madman/Rpacks/",pkg,"]\n@",pkg, 420 | " = rw\n\n") 421 | } 422 | 423 | ## helper to test if we are in devel 424 | .isDevel <- function(){packageVersion("BiocInstaller")$minor %% 2 == 1} 425 | 426 | ## tarballs is a character vector of tarball paths. 427 | generatePermissionEdits <- function(path = ".", pattern = "\\.tar\\.gz$"){ 428 | ## start with tarballs in whatever dir we have here... 
429 | tarballs <- dir(path = path, pattern = pattern, full.names = TRUE) 430 | ## store the above in a list object 431 | data <- lapply(tarballs, .getPkgNameAndUser) 432 | 433 | ### For all packages in that list: 434 | 435 | ## write out association part (for each - helper2) 436 | message(paste(sapply(data, .printAssociations), collapse="")) 437 | 438 | ## write out the tedious part (for each - helper3) 439 | message(paste(sapply(data, .printTediousStuff), collapse="")) 440 | 441 | } 442 | 443 | 444 | 445 | 446 | 447 | 448 | 449 | 450 | 451 | 452 | 453 | ## Output should look like: 454 | ## , y.shen, t.carroll, w.yang, f.zhang, j.schumann, a.waardenberg 455 | 456 | ## ASSIGN = y.shen 457 | ## ChIPQC = t.carrol, r.stark 458 | ## ABSSeq = w.yang 459 | ## FRGEpistasis = f.zhang 460 | ## flowCyBar = j.schumann 461 | ## CompGO = a.waardenberg 462 | ## Rariant = j.gehring 463 | 464 | ## [/trunk/madman/Rpacks/ASSIGN] 465 | ## @ASSIGN = rw 466 | 467 | ## [/branches/RELEASE_2_13/madman/Rpacks/ASSIGN] 468 | ## @ASSIGN = rw 469 | 470 | ## [/trunk/madman/Rpacks/ChIPQC] 471 | ## @ChIPQC = rw 472 | 473 | ## [/branches/RELEASE_2_13/madman/Rpacks/ChIPQC] 474 | ## @ChIPQC = rw 475 | 476 | ## [/trunk/madman/Rpacks/ABSSeq] 477 | ## @ABSSeq = rw 478 | 479 | ## [/branches/RELEASE_2_13/madman/Rpacks/ABSSeq] 480 | ## @ABSSeq = rw 481 | 482 | ## [/trunk/madman/Rpacks/FRGEpistasis] 483 | ## @FRGEpistasis = rw 484 | 485 | ## [/branches/RELEASE_2_13/madman/Rpacks/FRGEpistasis] 486 | ## @FRGEpistasis = rw 487 | 488 | ## [/trunk/madman/Rpacks/flowCyBar] 489 | ## @flowCyBar = rw 490 | 491 | ## [/branches/RELEASE_2_13/madman/Rpacks/flowCyBar] 492 | ## @flowCyBar = rw 493 | 494 | ## [/trunk/madman/Rpacks/CompGO] 495 | ## @CompGO = rw 496 | 497 | ## [/branches/RELEASE_2_13/madman/Rpacks/CompGO] 498 | ## @CompGO = rw 499 | 500 | ## [/trunk/madman/Rpacks/Rariant] 501 | ## @Rariant = rw 502 | 503 | ## [/branches/RELEASE_2_13/madman/Rpacks/Rariant] 504 | ## @Rariant = rw 505 | -------------------------------------------------------------------------------- /R/tracker.R: -------------------------------------------------------------------------------- 1 | #' Members of the devteam 2 | devteam <- c( 3 | "Martin Morgan", 4 | "Valerie Obenchain", 5 | "Herve Pages", 6 | "Dan Tenenbaum") 7 | 8 | #' and their github usernames 9 | github_usernames <- c( 10 | "mtmorgan", 11 | "vobencha", 12 | "hpages", 13 | "dtenenba" 14 | ) 15 | #' Status code to name mapping 16 | # I would prefer these were dynamic, but there doesn't seem to be a great way 17 | # to do so... 
18 | status_map <- c( 19 | "1" = "new-package", 20 | "2" = "preview-in-progress", 21 | "3" = "sent-back", 22 | "4" = "modified-package", 23 | "5" = "review-in-progress", 24 | "6" = "accepted", 25 | "7" = "rejected", 26 | "8" = "closed", 27 | "9" = "pre-accepted", 28 | "10" = "testing") 29 | 30 | #' storage object 31 | the <- new.env(parent=emptyenv()) 32 | 33 | #' Login to the issue tracker 34 | #' 35 | #' @param url tracker url 36 | #' @param user username used to login 37 | #' @param password password used to login 38 | tracker_login <- function( 39 | url = "https://tracker.bioconductor.org", 40 | user = getOption("tracker_user"), 41 | password = getOption("tracker_password")) { 42 | 43 | if (is.null(the$session)) { 44 | session <- rvest::html_session(url) 45 | 46 | login <- rvest::set_values(rvest::html_form(session)[[3]], 47 | `__login_name` = user, 48 | `__login_password` = password) 49 | 50 | the$session <- suppressMessages(rvest::submit_form(session, login)) 51 | } 52 | 53 | the$session 54 | } 55 | 56 | #' Query the issue tracker 57 | #' 58 | #' @param columns which columns to return 59 | #' @param sort A column to sort the data by 60 | #' @param filter what columns are used to filter 61 | #' @param status the status codes used to filter 62 | #' @param ... Additional query parameters 63 | #' @param session the HTTP session to use 64 | #' @examples 65 | #' tracker_search("@search_text" = "normalize450k") 66 | tracker_search <- 67 | function(columns = c("id", "activity", "title", "creator", "status"), 68 | sort = desc("activity"), 69 | filter=c("status", "assignedto"), 70 | status = c(-1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 71 | ..., 72 | session = tracker_login()) 73 | { 74 | url <- "/issue" 75 | res <- rvest::jump_to(session, 76 | url, 77 | query = list("@columns" = paste(columns, collapse = ","), 78 | "@sort" = sort, 79 | "@filter" = paste(filter, collapse = ","), 80 | "status" = paste(status, collapse = ","), 81 | "@action" = "export_csv", 82 | ... 83 | )) 84 | ## The following line now uses readr::read_csv(), which doesn't 85 | ## perform as well here as utils::read.csv(). 86 | #data <- httr::content(res$response) 87 | data <- utils::read.csv(textConnection(httr::content(res$response, "text"))) 88 | data$status <- status_map[data$status] 89 | data$activity <- roundup_datetime(data$activity) 90 | data 91 | } 92 | 93 | keyword_map <- memoise::memoise(function(url = "https://tracker.bioconductor.org/keyword?@template=item", 94 | session = tracker_login()) { 95 | res <- rvest::jump_to(session, 96 | url) 97 | keyword_links <- rvest::html_nodes(res, "table.otherinfo td a") 98 | keyword_numbers <- sub("keyword", "", rvest::html_attr(keyword_links, "href")) 99 | keyword_names <- rvest::html_text(keyword_links) 100 | names(keyword_names) <- keyword_numbers 101 | keyword_names 102 | }) 103 | 104 | #' @describeIn tracker_search retrieve unassigned packages 105 | unassigned_packages <- function(status = c(-1, 1, 2, 3, 4, 5, 9), ..., session = tracker_login()) { 106 | tracker_search(session = session, status = status, assignedto = -1) 107 | } 108 | 109 | #' @describeIn tracker_search retrieve pre-accepted packages 110 | pre_accepted_packages <- 111 | function(status = 9, ..., session = tracker_login()) 112 | { 113 | tracker_search(session = session, status = status) 114 | } 115 | 116 | #' Accept a package on the tracker 117 | #' 118 | #' @inheritParams post 119 | #' @param note The acceptance note to post to the tracker. 
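#' @param tarball Path to the accepted tarball; used to build the default
#'   acceptance note via \code{accept_note()}.
#' @param status Tracker status code to set; defaults to \code{6} ("accepted").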
120 | #' @examples 121 | #' \dontrun{ 122 | #' accept_package(1318, "transcriptR_0.99.4.tar.gz") 123 | #' } 124 | accept_package <- function(issue = issue, 125 | tarball, 126 | note = accept_note(tarball), 127 | status = 6, 128 | ..., 129 | session = NULL) { 130 | post(issue = issue, session = session, note = note, status = status, ...) 131 | } 132 | 133 | #' @describeIn tracker_search retrieve the logged in users packages 134 | my_issues <- 135 | function(user = NULL, status = c(-1, 1, 2, 3, 4, 5, 9, 10), ..., 136 | session = tracker_login()) 137 | { 138 | if (is.null(user)) { 139 | links <- rvest::html_nodes(session, "a") 140 | text <- rvest::html_text(links) 141 | match <- grepl("Your Issues", text, fixed = TRUE) 142 | if (!any(match)) { 143 | stop("No links have text 'Your Issues'", call. = FALSE) 144 | } 145 | href <- rvest::html_attr(links[[which(match)[1]]], "href") 146 | user <- rex::re_matches(href, rex::rex("assignedto=", 147 | capture(name = "id", digits)))$id 148 | } 149 | tracker_search(session = session, assignedto = user, status = status, ...) 150 | } 151 | 152 | #' Assign new packages 153 | #' 154 | #' This method uses a hash digest to assign the packages based on the package 155 | #' name. 156 | #' @param pkgs the packages to assign 157 | #' @param team team members to assign to 158 | assign_new_packages <- function(pkgs = unassigned_packages(session), 159 | team = devteam) { 160 | 161 | if (NROW(pkgs) == 0) 162 | return(list()) 163 | 164 | pkgs <- pkgs[order(pkgs$id),,drop=FALSE] 165 | 166 | substitute({ 167 | 168 | ids <- ids_ 169 | 170 | title <- title_ 171 | 172 | team <- team_ 173 | 174 | data.frame(reviewer = team[ids %% length(team) + 1], 175 | title = as.character(title)) 176 | }, 177 | list(ids_ = pkgs$id, title_ = pkgs$title, team_ = team)) 178 | } 179 | 180 | #' Retrieve all of the messages from an issue 181 | #' 182 | #' @param number the issue number to retrieve 183 | #' @inheritParams tracker_search 184 | issue <- function(number, session = tracker_login()) { 185 | response <- rvest::jump_to(session, paste0("/issue", number)) 186 | 187 | rows <- rvest::html_nodes(response, ".messages tr") 188 | if (!NROW(rows)) { 189 | return(NULL) 190 | } 191 | 192 | metadata <- rows[seq(2, length(rows), 2)] 193 | 194 | parse_metadata <- function(x) { 195 | headers <- rvest::html_nodes(x, "th") 196 | data.frame( 197 | id = rvest::html_attr(rvest::html_nodes(headers, "a"), "href"), 198 | author = gsub(x = rvest::html_text(headers[[2]]), "Author: ", ""), 199 | time = roundup_datetime( 200 | gsub(x = rvest::html_text(headers[[3]]), "Date: ", "")), 201 | stringsAsFactors = FALSE 202 | ) 203 | } 204 | res <- do.call(rbind, lapply(metadata, parse_metadata)) 205 | 206 | parse_messages <- function(x) { 207 | preformatted <- rvest::html_nodes(x, "pre") 208 | lapply(preformatted, rvest::html_text) 209 | } 210 | messages <- vapply(rows[seq(3, length(rows), 2)], 211 | function(x) { trimws(gsub("\r", "", rvest::html_text(x))) }, 212 | character(1)) 213 | 214 | res$message <- messages 215 | 216 | attachments <- parse_attachments(rvest::html_nodes(response, ".files tr td")) 217 | 218 | # merge by closest time per author 219 | close_times <- function(y, cutoff = 5) { 220 | function(x) { 221 | times <- abs(difftime(x$time, y$time, units = "secs")) 222 | times[times > cutoff] <- NA 223 | times 224 | } 225 | } 226 | res <- ddply(res, "author", 227 | function(df) { 228 | att <- attachments[attachments$author == df$author[1], , drop = FALSE] 229 | merge_closest(df, att, close_times(df)) 230 | }) 
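  ## the by-author merge above leaves suffixed .x/.y author and time columns;
  ## the next lines collapse them back into single 'author' and 'time' columns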
231 | 232 | res$author <- res$author.x 233 | res[c("author.x", "author.y")] <- list(NULL) 234 | 235 | res$time <- res$time.x 236 | res[c("time.x", "time.y")] <- list(NULL) 237 | 238 | rownames(res) <- deduplicate(res$id) 239 | 240 | res <- res[order(res$time), ] 241 | 242 | attr(res, "session") <- response 243 | 244 | class(res) <- c("issue", "data.frame") 245 | 246 | res 247 | } 248 | 249 | #' Coerce to an issue object 250 | #' 251 | #' @param x object to be coerced 252 | #' @param ... Additional arguments passed to methods 253 | as.issue <- function(x, ...) UseMethod("as.issue") 254 | 255 | as.issue.issue <- function(x, ...) x 256 | 257 | as.issue.numeric <- function(x, ...) issue(number = x, ...) 258 | 259 | as.issue.integer <- as.issue.numeric 260 | 261 | as.issue.character <- as.issue.numeric 262 | 263 | #' Post a message to an issue 264 | #' 265 | #' @param issue an issue object from \code{\link{issue}} 266 | #' @param session the session to use, if \code{NULL} the issue's session is used. 267 | #' @param note a note to post to the issue, defaults to opening your editor, 268 | #' but you can also pass a character string. 269 | #' @param file a file to attach to the issue, if \code{TRUE} choose a file using 270 | #' \code{\link{file.choose}} 271 | #' @param ... Additional arguments passed to rvest::set_values 272 | post <- function(issue, session = NULL, note = edit(), file = NULL, ...) { 273 | issue <- as.issue(issue) 274 | 275 | if (is.null(session)) { 276 | session <- session(issue) 277 | } 278 | 279 | if (isTRUE(file)) { 280 | file <- file.choose() 281 | } 282 | form <- rvest::html_form( 283 | rvest::html_nodes(session, "form[name='itemSynopsis']")[[1]]) 284 | 285 | form <- 286 | rvest::set_values(form, 287 | `@note` = note, 288 | `@file` = file, 289 | ...) 290 | 291 | rvest::submit_form(session, form) 292 | } 293 | 294 | session <- function(...) UseMethod("session") 295 | 296 | session.issue <- function(x, ...) { 297 | attr(x, "session") 298 | } 299 | 300 | #' Download attachments from an issue 301 | #' 302 | #' @inheritParams tracker_search 303 | #' @param issue Issue object, or issue number to download files from 304 | #' @param dir Location to store the files 305 | #' @param last_only If \code{TRUE} only download the last submitted tarball. 306 | #' @param pattern Regular expression for files to download. 307 | #' @param overwrite Will only overwrite existing \code{path} if TRUE. 308 | #' @param ... Additional Arguments passed to \code{\link[rvest]{jump_to}}. 309 | download <- function(issue, 310 | dir = proj_path(), 311 | last_only = TRUE, 312 | pattern = "[.]tar[.]gz$", 313 | overwrite = FALSE, 314 | ..., 315 | session = tracker_login()) { 316 | issue <- as.issue(issue) 317 | 318 | idx <- grep(pattern, issue$filename) 319 | if (!length(idx)) { 320 | stop("No downloads found for issue", issue$id, call. = FALSE) 321 | } 322 | if (last_only) { 323 | idx <- tail(idx, n = 1) 324 | } 325 | Map(function(href, filename) { 326 | if (!is.na(href)) { 327 | rvest::jump_to(session, 328 | href, 329 | httr::write_disk(path = file.path(dir, filename), 330 | overwrite = overwrite), 331 | httr::progress(), ...) 332 | }}, issue$href[idx], issue$filename[idx]) 333 | } 334 | 335 | parse_attachments <- function(x, ...) 
{ 336 | links <- lapply(rvest::html_nodes(x[seq(1, length(x), 5)], "a"), 337 | function(xx) { 338 | data.frame(href = rvest::html_attr(xx, "href"), 339 | filename = rvest::html_text(xx), 340 | stringsAsFactors = FALSE) 341 | }) 342 | 343 | meta <- lapply(x[seq(2, length(x), 5)], 344 | function(xx) { 345 | xx <- rvest::html_nodes(xx, "span") 346 | data.frame(author = rvest::html_text(xx[[1]]), 347 | time = roundup_datetime(rvest::html_text(xx[[2]])), 348 | stringsAsFactors = FALSE) 349 | }) 350 | 351 | filetype <- rvest::html_text(x[seq(3, length(x), 5)]) 352 | 353 | data.frame(do.call(rbind, links), 354 | do.call(rbind, meta), filetype, 355 | stringsAsFactors = FALSE) 356 | } 357 | 358 | print.issue <- function(x, ...) { 359 | cat(format(x, ...), sep = "\n") 360 | invisible(x) 361 | } 362 | 363 | format.issue <- function(x, ...) { 364 | paste0( 365 | crayon::bold("ID: "), x$id, "\n", 366 | crayon::bold("Author: "), x$author, "\n", 367 | crayon::bold("Time: "), x$time, "\n", 368 | ifelse(!is.na(x$filename), 369 | paste0(crayon::bold("Attachment: "), x$filename, "\n"), 370 | ""), 371 | x$message, "\n") 372 | } 373 | 374 | #' Retrieve the user list 375 | #' @inheritParams tracker_search 376 | users <- memoise::memoise(function(session = tracker_login()) { 377 | session <- rvest::jump_to(session, "https://tracker.bioconductor.org/user") 378 | tbl <- as.data.frame(rvest::html_table(rvest::html_node(session, "table.list"))) 379 | names(tbl) <- c("user_name", "name", "organisation", "email", "phone_number") 380 | 381 | tbl$id <- gsub("user", "", rvest::html_attr(rvest::html_nodes(session, "table.list tr td a"), "href")) 382 | class(tbl) <- c("tbl_df", "tbl", "data.frame") 383 | tbl 384 | }) 385 | 386 | #' Generate the package assignments email given code to run 387 | #' @param code code to run, output of \code{\link{assign_new_packages}()} 388 | #' @param date date to title the email 389 | #' @param additional arguments passed to \code{\link{assign_new_packages}()} 390 | package_assignment_email <- function(pkgs = unassigned_packages(...), 391 | code = assign_new_packages(pkgs, ...), 392 | date = Sys.Date(), 393 | ...) { 394 | code <- format(code) 395 | 396 | # replace opening and closing {} with Rmd brackets 397 | code[c(1, length(code))] <- c("```{r}", "```") 398 | 399 | # remove leading spaces 400 | code <- gsub("^[[:space:]]{4}", "", code) 401 | 402 | env <- new.env() 403 | knitr::render_markdown() 404 | code <- knitr::knit(text = code, envir = env) 405 | 406 | # HACK: markdown here breaks with codeblocks without a language, so set the 407 | # output codeblock to R 408 | code <- gsub("```\n\n```\n##", "```\n\n```r\n##", code) 409 | 410 | # match packages in code to passed in packages 411 | pkgs <- pkgs[match(env$pkgs, pkgs$title), ] 412 | 413 | code <- paste(collapse = "", c(code, "\n\n", 414 | sprintf("- [%s](https://tracker.bioconductor.org/issue%s)\n", 415 | pkgs$title, pkgs$id))) 416 | 417 | code <- gsub("\n", "
", code) 418 | 419 | message <- gmailr::mime(subject = fmt("Package Assignments For {{date}}", 420 | list(date = date)), 421 | to = "devteam-bioc ", 422 | from = sprintf("%s <%s>", getOption("bioc_contributions_signature"), 423 | getOption("bioc_contributions_email"))) 424 | message <- gmailr::html_body(message, code) 425 | 426 | message 427 | } 428 | 429 | #' @describeIn assign_package assign multiple packages with assignment code 430 | assign_packages <- function(pkgs, code = assign_new_packages(...), ...) { 431 | assignments <- eval(code, envir = baseenv()) 432 | 433 | x <- merge(pkgs, assignments) 434 | 435 | Map(assign_package, x$id, x$reviewer) 436 | } 437 | 438 | #' Assign a specific package 439 | #' 440 | #' @param number issue number 441 | #' @param assignment lookup assignee by name 442 | assign_package <- function(issue, assignee, ...) { 443 | issue <- as.issue(issue) 444 | 445 | # lookup tracker username for assignee 446 | user <- subset(users(), name == assignee)$user_name 447 | 448 | post(issue = issue, 449 | note = paste0(assignee, " has been assigned to this package"), 450 | status = "preview-in-progress", 451 | assignedto = user, 452 | ...) 453 | } 454 | 455 | accept_note <- 456 | function(tarball, type = c("software", "experiment-data"), 457 | senderName = getOption("bioc_contributions_signature", 458 | "Bioconductor")) 459 | { 460 | type <- match.arg(type) 461 | description <- readDESCRIPTION(tarball) 462 | email <- .extractEmails(description) 463 | author <- paste(vapply(email$given, "[[", "", 1), collapse=", ") 464 | 465 | switch(type, 466 | software = template( 467 | "tracker.txt", 468 | author = author, 469 | tarball = basename(tarball), 470 | package = description$Package, 471 | senderName = senderName, 472 | when = "Everyday", 473 | type = "bioc-LATEST"), 474 | `experiment-data` = template( 475 | "tracker.txt", 476 | author = author, 477 | tarball = basename(tarball), 478 | package = description$Package, 479 | senderName = senderName, 480 | when = "Wednesday and Saturday", 481 | type = "data-experiment-LATEST") 482 | ) 483 | } 484 | 485 | attachment_size <- 486 | function(issue, session = tracker_login()) 487 | { 488 | issue <- as.issue(issue) 489 | 490 | size <- function(x) { 491 | res <- httr::HEAD(paste0(session$url, x), session$config, 492 | handle = session$handle) 493 | 494 | as.numeric(httr::headers(res)$`content-length`) 495 | } 496 | vapply(na.omit(issue$href), size, numeric(1)) 497 | } 498 | --------------------------------------------------------------------------------