├── .Rbuildignore
├── .binder
├── apt.txt
├── install.R
├── postBuild
└── runtime.txt
├── .codecov.yml
├── .editorconfig
├── .gitattributes
├── .github
├── .gitignore
└── workflows
│ ├── R-CMD-check.yaml
│ ├── pkgdown.yaml
│ └── test-coverage.yaml
├── .gitignore
├── CITATION.cff
├── DESCRIPTION
├── LICENSE.md
├── NAMESPACE
├── R
├── apptainer.R
├── as_pkgrefs.R
├── cache.R
├── dockerfile.R
├── edgelist.R
├── installation.R
├── memo_misc.R
├── pkgref.R
├── resolve.R
├── sysdata.rda
├── sysreqs.R
└── use_rang.R
├── README.Rmd
├── README.md
├── _pkgdown.yml
├── _quarto.yml
├── data
└── recipes.rda
├── inst
├── CITATION
├── Makefile
├── apptainer
│ └── Makefile
├── apptainer_readme_template.txt
├── compile_r.sh
├── docker_readme_template.txt
├── footer.R
├── header.R
├── header_cmd.R
├── turing
│ ├── bibliography.bib
│ ├── code
│ │ ├── 00_preprocess.R
│ │ └── 01_visualization.R
│ ├── data_raw
│ │ └── penguins_raw.csv
│ └── paper.Rmd
├── update.R
└── update_apptainer.R
├── man
├── apptainerize.Rd
├── as_pkgrefs.Rd
├── convert_edgelist.Rd
├── create_turing.Rd
├── dockerize.Rd
├── export_rang.Rd
├── export_renv.Rd
├── figures
│ ├── rang_logo.png
│ └── rang_logo.svg
├── generate_installation_order.Rd
├── query_sysreqs.Rd
├── recipes.Rd
├── resolve.Rd
└── use_rang.Rd
├── methodshub.qmd
├── paper
├── Makefile
├── PaperFigure.R
├── apa.csl
├── basel2023_rang
│ ├── _extensions
│ │ └── gesiscss
│ │ │ └── fakegesis
│ │ │ ├── LICENSE
│ │ │ ├── _extension.yml
│ │ │ ├── fakegesis.scss
│ │ │ └── gesis.png
│ ├── base1.png
│ ├── base2.png
│ ├── bowie.jpg
│ ├── comps.png
│ ├── dplyr.RDS
│ ├── dplyr.png
│ ├── dplyr1.png
│ ├── index.html
│ ├── index.qmd
│ └── rang_logo.png
├── before-body.tex
├── clean.sh
├── diff.pdf
├── hilgard.RDS
├── latexpand
├── nathaniel
│ ├── besley.dta
│ └── fn_5.R
├── oser
│ ├── Makefile
│ ├── README.md
│ └── oser.R
├── paper.bib
├── paper.pdf
├── paper.qmd
├── paper_r0.tex
├── peng.R
├── peng.sh
├── quanteda_rstudio.png
├── r1.pdf
├── r1.qmd
├── readme.md
├── sushi.sh
└── sushi_figure1.pdf
├── rang.Rproj
├── tests
├── .renvignore
├── testdata
│ ├── Rcpp
│ │ └── DESCRIPTION
│ ├── anciente1071.RDS
│ ├── ancientsna.RDS
│ ├── askpass
│ │ ├── DESCRIPTION
│ │ ├── LICENSE
│ │ ├── MD5
│ │ ├── NAMESPACE
│ │ ├── NEWS
│ │ ├── R
│ │ │ ├── askpass.R
│ │ │ ├── onload.R
│ │ │ └── ssh.R
│ │ ├── inst
│ │ │ ├── WORDLIST
│ │ │ ├── mac-askpass
│ │ │ └── mac-simplepass
│ │ ├── man
│ │ │ ├── askpass.Rd
│ │ │ └── ssh_askpass.Rd
│ │ ├── src
│ │ │ ├── Makevars.win
│ │ │ ├── askpass.c
│ │ │ └── win32
│ │ │ │ └── win-askpass.c
│ │ └── tests
│ │ │ ├── testthat.R
│ │ │ └── testthat
│ │ │ └── test-option.R
│ ├── askpass_1.1.tar.gz
│ ├── bioc_renv.RDS
│ ├── chipseq
│ │ └── DESCRIPTION
│ ├── dt.RDS
│ ├── fakeRhtslib.tar.gz
│ ├── fakeRhtslib
│ │ └── DESCRIPTION
│ ├── fake_renv.lock
│ ├── fakexml2
│ │ └── DESCRIPTION
│ ├── fakezlibbioc
│ │ └── DESCRIPTION
│ ├── graph.RDS
│ ├── issue21.RDS
│ ├── issue21_ubuntu2004.RDS
│ ├── issue38.RDS
│ ├── large_renv_lock
│ │ └── renv.lock
│ ├── local_renv_lock
│ │ └── renv.lock
│ ├── mzesalike
│ │ └── DESCRIPTION
│ ├── rang_6.RDS
│ ├── rang_bioc.RDS
│ ├── rang_local_gh.RDS
│ ├── rang_mixture.RDS
│ ├── rang_ok.RDS
│ ├── rang_rio_old.RDS
│ ├── rang_unresolved.RDS
│ ├── rrcompendium-complete
│ │ ├── DESCRIPTION
│ │ └── README.md
│ ├── sessionInfo1.RDS
│ ├── sessionInfo2.RDS
│ ├── sessionInfo3.RDS
│ ├── sle_graph.RDS
│ ├── small_renv_lock
│ │ └── renv.lock
│ ├── sna_0.3.tar.gz
│ ├── superancientsna.RDS
│ ├── sysreqs_gmp.RDS
│ ├── test_dir
│ │ └── script.R
│ └── wrapped_line.txt
├── testthat.R
└── testthat
│ ├── test_apptainerize.R
│ ├── test_create_turing.R
│ ├── test_dockerize.R
│ ├── test_edgelist.R
│ ├── test_expost_rang.R
│ ├── test_pkgref.R
│ ├── test_resolve.R
│ ├── test_s3.R
│ ├── test_sysreqs.R
│ └── test_use_rang.R
└── vignettes
├── .gitignore
├── compendium.Rmd
└── faq.Rmd
/.Rbuildignore:
--------------------------------------------------------------------------------
1 | ^README\.Rmd$
2 | ^README\.html$
3 | ^LICENSE\.md$
4 | ^paper
5 | ^\.github$
6 | ^cran-comments\.md$
7 | ^CRAN-SUBMISSION$
8 | ^.editorconfig$
9 | ^.codecov.yml$
10 | ^.*\.Rproj$
11 | ^\.Rproj\.user$
12 | tests/.renvignore
13 | ^\.gitattributes$
14 | ^_pkgdown\.yml$
15 | ^docs$
16 | ^pkgdown$
17 | ^\.gitignore$
18 | ^CITATION\.cff$
19 | ^install\.R$
20 | ^\.binder$
21 | ^_quarto\.yml$
22 | ^\.quarto$
23 | ^methodshub
24 |
--------------------------------------------------------------------------------
/.binder/apt.txt:
--------------------------------------------------------------------------------
1 | zip
--------------------------------------------------------------------------------
/.binder/install.R:
--------------------------------------------------------------------------------
1 | install.packages("rang")
2 |
--------------------------------------------------------------------------------
/.binder/postBuild:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env -S bash -v
2 |
3 | # determine which version of Quarto to install
4 | QUARTO_VERSION=1.6.39
5 |
6 | # See whether we need to lookup a Quarto version
7 | if [ $QUARTO_VERSION = "prerelease" ]; then
8 | QUARTO_JSON="_prerelease.json"
9 | elif [ $QUARTO_VERSION = "release" ]; then
10 | QUARTO_JSON="_download.json"
11 | fi
12 |
13 | if [ $QUARTO_JSON != "" ]; then
14 |
15 | # create a python script and run it
16 | PYTHON_SCRIPT=_quarto_version.py
17 | if [ -e $PYTHON_SCRIPT ]; then
18 | rm -rf $PYTHON_SCRIPT
19 | fi
20 |
21 | cat > $PYTHON_SCRIPT <. The
18 | dependency graph can then be used to reconstruct the R computational environment
19 | with 'Rocker' .
20 | authors:
21 | - family-names: Chan
22 | given-names: Chung-hong
23 | email: chainsawtiney@gmail.com
24 | orcid: https://orcid.org/0000-0002-6232-7530
25 | - family-names: Schoch
26 | given-names: David
27 | email: david@schochastics.net
28 | orcid: https://orcid.org/0000-0003-2952-4812
29 | preferred-citation:
30 | type: article
31 | title: 'rang: Reconstructing reproducible R computational environments'
32 | authors:
33 | - family-names: Chan
34 | given-names: Chung-hong
35 | email: chainsawtiney@gmail.com
36 | orcid: https://orcid.org/0000-0002-6232-7530
37 | - family-names: Schoch
38 | given-names: David
39 | email: david@schochastics.net
40 | orcid: https://orcid.org/0000-0003-2952-4812
41 | journal: PLOS ONE
42 | url: https://github.com/gesistsa/rang
43 | year: '2023'
44 | doi: 10.1371/journal.pone.0286761
45 | repository: https://CRAN.R-project.org/package=rang
46 | repository-code: https://github.com/gesistsa/rang
47 | url: https://gesistsa.github.io/rang
48 | contact:
49 | - family-names: Chan
50 | given-names: Chung-hong
51 | email: chainsawtiney@gmail.com
52 | orcid: https://orcid.org/0000-0002-6232-7530
53 | keywords:
54 | - cran
55 | - r
56 | - reproducibility
57 | - reproducible-research
58 | - rstats
59 | references:
60 | - type: software
61 | title: knitr
62 | abstract: 'knitr: A General-Purpose Package for Dynamic Report Generation in R'
63 | notes: Suggests
64 | url: https://yihui.org/knitr/
65 | repository: https://CRAN.R-project.org/package=knitr
66 | authors:
67 | - family-names: Xie
68 | given-names: Yihui
69 | email: xie@yihui.name
70 | orcid: https://orcid.org/0000-0003-0645-5666
71 | year: '2025'
72 | doi: 10.32614/CRAN.package.knitr
73 | - type: software
74 | title: rmarkdown
75 | abstract: 'rmarkdown: Dynamic Documents for R'
76 | notes: Suggests
77 | url: https://pkgs.rstudio.com/rmarkdown/
78 | repository: https://CRAN.R-project.org/package=rmarkdown
79 | authors:
80 | - family-names: Allaire
81 | given-names: JJ
82 | email: jj@posit.co
83 | - family-names: Xie
84 | given-names: Yihui
85 | email: xie@yihui.name
86 | orcid: https://orcid.org/0000-0003-0645-5666
87 | - family-names: Dervieux
88 | given-names: Christophe
89 | email: cderv@posit.co
90 | orcid: https://orcid.org/0000-0003-4474-2498
91 | - family-names: McPherson
92 | given-names: Jonathan
93 | email: jonathan@posit.co
94 | - family-names: Luraschi
95 | given-names: Javier
96 | - family-names: Ushey
97 | given-names: Kevin
98 | email: kevin@posit.co
99 | - family-names: Atkins
100 | given-names: Aron
101 | email: aron@posit.co
102 | - family-names: Wickham
103 | given-names: Hadley
104 | email: hadley@posit.co
105 | - family-names: Cheng
106 | given-names: Joe
107 | email: joe@posit.co
108 | - family-names: Chang
109 | given-names: Winston
110 | email: winston@posit.co
111 | - family-names: Iannone
112 | given-names: Richard
113 | email: rich@posit.co
114 | orcid: https://orcid.org/0000-0003-3925-190X
115 | year: '2025'
116 | doi: 10.32614/CRAN.package.rmarkdown
117 | - type: software
118 | title: testthat
119 | abstract: 'testthat: Unit Testing for R'
120 | notes: Suggests
121 | url: https://testthat.r-lib.org
122 | repository: https://CRAN.R-project.org/package=testthat
123 | authors:
124 | - family-names: Wickham
125 | given-names: Hadley
126 | email: hadley@posit.co
127 | year: '2025'
128 | doi: 10.32614/CRAN.package.testthat
129 | version: '>= 3.0.0'
130 | - type: software
131 | title: parsedate
132 | abstract: 'parsedate: Recognize and Parse Dates in Various Formats, Including All
133 | ISO 8601 Formats'
134 | notes: Imports
135 | url: https://github.com/gaborcsardi/parsedate
136 | repository: https://CRAN.R-project.org/package=parsedate
137 | authors:
138 | - family-names: Csárdi
139 | given-names: Gábor
140 | email: csardi.gabor@gmail.com
141 | - family-names: Torvalds
142 | given-names: Linus
143 | year: '2025'
144 | doi: 10.32614/CRAN.package.parsedate
145 | - type: software
146 | title: fastmap
147 | abstract: 'fastmap: Fast Data Structures'
148 | notes: Imports
149 | url: https://r-lib.github.io/fastmap/
150 | repository: https://CRAN.R-project.org/package=fastmap
151 | authors:
152 | - family-names: Chang
153 | given-names: Winston
154 | email: winston@posit.co
155 | year: '2025'
156 | doi: 10.32614/CRAN.package.fastmap
157 | - type: software
158 | title: jsonlite
159 | abstract: 'jsonlite: A Simple and Robust JSON Parser and Generator for R'
160 | notes: Imports
161 | url: https://jeroen.r-universe.dev/jsonlite
162 | repository: https://CRAN.R-project.org/package=jsonlite
163 | authors:
164 | - family-names: Ooms
165 | given-names: Jeroen
166 | email: jeroenooms@gmail.com
167 | orcid: https://orcid.org/0000-0002-4035-0289
168 | year: '2025'
169 | doi: 10.32614/CRAN.package.jsonlite
170 | - type: software
171 | title: memoise
172 | abstract: 'memoise: ''Memoisation'' of Functions'
173 | notes: Imports
174 | url: https://memoise.r-lib.org
175 | repository: https://CRAN.R-project.org/package=memoise
176 | authors:
177 | - family-names: Wickham
178 | given-names: Hadley
179 | email: hadley@rstudio.com
180 | - family-names: Hester
181 | given-names: Jim
182 | - family-names: Chang
183 | given-names: Winston
184 | email: winston@rstudio.com
185 | - family-names: Müller
186 | given-names: Kirill
187 | email: krlmlr+r@mailbox.org
188 | - family-names: Cook
189 | given-names: Daniel
190 | email: danielecook@gmail.com
191 | year: '2025'
192 | doi: 10.32614/CRAN.package.memoise
193 | - type: software
194 | title: remotes
195 | abstract: 'remotes: R Package Installation from Remote Repositories, Including ''GitHub'''
196 | notes: Imports
197 | url: https://remotes.r-lib.org
198 | repository: https://CRAN.R-project.org/package=remotes
199 | authors:
200 | - family-names: Csárdi
201 | given-names: Gábor
202 | email: csardi.gabor@gmail.com
203 | - family-names: Hester
204 | given-names: Jim
205 | - family-names: Wickham
206 | given-names: Hadley
207 | - family-names: Chang
208 | given-names: Winston
209 | - family-names: Morgan
210 | given-names: Martin
211 | - family-names: Tenenbaum
212 | given-names: Dan
213 | year: '2025'
214 | doi: 10.32614/CRAN.package.remotes
215 | - type: software
216 | title: utils
217 | abstract: 'R: A Language and Environment for Statistical Computing'
218 | notes: Imports
219 | authors:
220 | - name: R Core Team
221 | institution:
222 | name: R Foundation for Statistical Computing
223 | address: Vienna, Austria
224 | year: '2025'
225 | - type: software
226 | title: httr
227 | abstract: 'httr: Tools for Working with URLs and HTTP'
228 | notes: Imports
229 | url: https://httr.r-lib.org/
230 | repository: https://CRAN.R-project.org/package=httr
231 | authors:
232 | - family-names: Wickham
233 | given-names: Hadley
234 | email: hadley@posit.co
235 | year: '2025'
236 | doi: 10.32614/CRAN.package.httr
237 | - type: software
238 | title: vctrs
239 | abstract: 'vctrs: Vector Helpers'
240 | notes: Imports
241 | url: https://vctrs.r-lib.org/
242 | repository: https://CRAN.R-project.org/package=vctrs
243 | authors:
244 | - family-names: Wickham
245 | given-names: Hadley
246 | email: hadley@posit.co
247 | - family-names: Henry
248 | given-names: Lionel
249 | email: lionel@posit.co
250 | - family-names: Vaughan
251 | given-names: Davis
252 | email: davis@posit.co
253 | year: '2025'
254 | doi: 10.32614/CRAN.package.vctrs
255 | - type: software
256 | title: renv
257 | abstract: 'renv: Project Environments'
258 | notes: Imports
259 | url: https://rstudio.github.io/renv/
260 | repository: https://CRAN.R-project.org/package=renv
261 | authors:
262 | - family-names: Ushey
263 | given-names: Kevin
264 | email: kevin@rstudio.com
265 | orcid: https://orcid.org/0000-0003-2880-7407
266 | - family-names: Wickham
267 | given-names: Hadley
268 | email: hadley@rstudio.com
269 | orcid: https://orcid.org/0000-0003-4757-117X
270 | year: '2025'
271 | doi: 10.32614/CRAN.package.renv
272 | - type: software
273 | title: here
274 | abstract: 'here: A Simpler Way to Find Your Files'
275 | notes: Imports
276 | url: https://here.r-lib.org/
277 | repository: https://CRAN.R-project.org/package=here
278 | authors:
279 | - family-names: Müller
280 | given-names: Kirill
281 | email: krlmlr+r@mailbox.org
282 | orcid: https://orcid.org/0000-0002-1416-3412
283 | year: '2025'
284 | doi: 10.32614/CRAN.package.here
285 | - type: software
286 | title: lifecycle
287 | abstract: 'lifecycle: Manage the Life Cycle of your Package Functions'
288 | notes: Imports
289 | url: https://lifecycle.r-lib.org/
290 | repository: https://CRAN.R-project.org/package=lifecycle
291 | authors:
292 | - family-names: Henry
293 | given-names: Lionel
294 | email: lionel@posit.co
295 | - family-names: Wickham
296 | given-names: Hadley
297 | email: hadley@posit.co
298 | orcid: https://orcid.org/0000-0003-4757-117X
299 | year: '2025'
300 | doi: 10.32614/CRAN.package.lifecycle
301 | - type: software
302 | title: 'R: A Language and Environment for Statistical Computing'
303 | notes: Depends
304 | url: https://www.R-project.org/
305 | authors:
306 | - name: R Core Team
307 | institution:
308 | name: R Foundation for Statistical Computing
309 | address: Vienna, Austria
310 | year: '2025'
311 | version: '>= 3.5.0'
312 |
313 |
--------------------------------------------------------------------------------
/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: rang
2 | Title: Reconstructing Reproducible R Computational Environments
3 | Version: 0.3.0
4 | Authors@R:
5 | c(person("Chung-hong", "Chan", , "chainsawtiney@gmail.com", role = c("aut", "cre"),
6 | comment = c(ORCID = "0000-0002-6232-7530")),
7 | person("David", "Schoch", , "david@schochastics.net", role = "aut",
8 | comment = c(ORCID = "0000-0003-2952-4812")),
9 | person("Egor", "Kotov", , "kotov.egor@gmail.com", role = "ctb",
10 | comment = c(ORCID = "0000-0001-6690-5345")))
11 | Description: Resolve the dependency graph of R packages at a specific time point based on the information from various 'R-hub' web services . The dependency graph can then be used to reconstruct the R computational environment with 'Rocker' .
12 | License: GPL (>= 3)
13 | Encoding: UTF-8
14 | Roxygen: list(markdown = TRUE)
15 | RoxygenNote: 7.2.3
16 | URL: https://gesistsa.github.io/rang, https://github.com/gesistsa/rang
17 | BugReports: https://github.com/gesistsa/rang/issues
18 | Suggests:
19 | knitr,
20 | rmarkdown,
21 | testthat (>= 3.0.0)
22 | Config/testthat/edition: 3
23 | Imports:
24 | parsedate,
25 | fastmap,
26 | jsonlite,
27 | memoise,
28 | pkgsearch,
29 | remotes,
30 | utils,
31 | httr,
32 | vctrs,
33 | renv,
34 | here,
35 | lifecycle
36 | Depends:
37 | R (>= 3.5.0)
38 | VignetteBuilder: knitr
39 | LazyData: true
40 | Config/Needs/website: gesistsa/tsatemplate
41 |
--------------------------------------------------------------------------------
/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | S3method(as_pkgrefs,character)
4 | S3method(as_pkgrefs,default)
5 | S3method(as_pkgrefs,sessionInfo)
6 | S3method(convert_edgelist,default)
7 | S3method(convert_edgelist,rang)
8 | S3method(convert_edgelist,ranglet)
9 | S3method(print,rang)
10 | S3method(print,ranglet)
11 | export(apptainerise)
12 | export(apptainerise_rang)
13 | export(apptainerize)
14 | export(apptainerize_rang)
15 | export(as_pkgrefs)
16 | export(convert_edgelist)
17 | export(create_turing)
18 | export(dockerise)
19 | export(dockerise_rang)
20 | export(dockerize)
21 | export(dockerize_rang)
22 | export(export_rang)
23 | export(export_renv)
24 | export(generate_installation_order)
25 | export(query_sysreqs)
26 | export(resolve)
27 | export(singularise)
28 | export(singularise_rang)
29 | export(singularize)
30 | export(singularize_rang)
31 | export(use_rang)
32 | importFrom(here,here)
33 | importFrom(memoise,memoise)
34 | importFrom(pkgsearch,cran_package_history)
35 | importFrom(remotes,system_requirements)
36 | importFrom(utils,download.file)
37 |
--------------------------------------------------------------------------------
/R/apptainer.R:
--------------------------------------------------------------------------------
## Generate the content of an Apptainer/Singularity definition file for
## rebuilding an R environment on an end-of-life Debian image (docker
## namespace "debian/eol"). R itself is compiled from source inside the
## container by the bundled compile_r.sh script.
##
## Returns a named list of character vectors; entries whose names end in
## "_section" hold the literal section headers ("\n%post\n" etc.) and the
## caller is expected to flatten the list into the final definition file.
##
## r_version: version of R to compile (passed to compile_r.sh)
## lib: path of an extra package library to create, or NA for the default
## sysreqs_cmd: shell command(s) installing system requirements
## cache: if TRUE, copy the cached package/R sources into the image
## debian_version: Debian release codename (default "lenny")
## post_installation_steps: extra shell lines appended to %post
## rel_dir: directory inside the container for rang.R / cache / compile_r.sh
## copy_all: if TRUE, copy the entire build context into the image root
.generate_debian_eol_apptainer_content <- function(r_version, lib, sysreqs_cmd, cache, debian_version = "lenny",
                                                   post_installation_steps = NULL,
                                                   rel_dir = "",
                                                   copy_all = FALSE) {
    rang_path <- file.path(rel_dir, "rang.R")
    cache_path <- file.path(rel_dir, "cache")
    compile_path <- file.path(rel_dir, "compile_r.sh")
    ## exported both in %environment (runtime) and %post (build time),
    ## because %environment is not sourced while %post runs
    environment_vars <- c("export TZ=UTC", paste0("export COMPILE_PATH=", compile_path), paste0("export RANG_PATH=", rang_path))
    containerfile_content <- list(
        BOOTSTRAP = "Bootstrap: docker",
        FROM = c(paste0("From: debian/eol:", debian_version)),
        ENV_section = "\n%environment\n",
        ENV = environment_vars,
        FILES_section = "\n%files\n",
        FILES = c(paste0("rang.R ", rang_path), paste0("compile_r.sh ", compile_path)),
        POST_section = "\n%post\n",
        POST = c(
            environment_vars,
            ## set timezone non-interactively before apt installs packages
            "ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && apt-get update -qq && apt-get install wget locales build-essential r-base-dev -y",
            sysreqs_cmd
        ),
        STARTSCRIPT_section = "\n%startscript\n",
        STARTSCRIPT = c("exec R \"${@}\"")
    )
    if (!is.na(lib)) {
        ## create the extra library before compiling/installing
        containerfile_content$POST <- append(containerfile_content$POST, paste0("mkdir ", lib, " && bash $COMPILE_PATH ", r_version))
    } else {
        containerfile_content$POST <- append(containerfile_content$POST, paste0("bash $COMPILE_PATH ", r_version))
    }
    if (isTRUE(cache)) {
        ## NOTE(review): the next two assignments restate the values already
        ## set above; redundant but harmless
        containerfile_content$BOOTSTRAP <- "Bootstrap: docker"
        containerfile_content$FROM <- c(paste0("From: debian/eol:", debian_version))
        containerfile_content$FILES <- append(
            containerfile_content$FILES,
            c(
                paste0("cache/rpkgs ", file.path(cache_path, "rpkgs")),
                paste0("cache/rsrc ", file.path(cache_path, "rsrc"))
            )
        )
        ## prepend so CACHE_PATH is defined before any later %post command runs
        containerfile_content$POST <- append(paste0("export CACHE_PATH=", cache_path), containerfile_content$POST)
    }
    containerfile_content$POST <- append(containerfile_content$POST, post_installation_steps)
    if (isTRUE(copy_all)) {
        ## copy_all replaces the whole %files list, including any cache entries
        containerfile_content$FILES <- c(". /")
    }
    return(containerfile_content)
}
48 |
## Generate the content of an Apptainer/Singularity definition file based on
## a versioned Rocker image (e.g. rocker/r-ver, rocker/rstudio). Unlike the
## Debian-EOL variant, R is taken from the base image and packages are
## installed by running rang.R inside %post.
## Returns a named list of character vectors (same layout as the Debian-EOL
## variant); the caller flattens it into the final definition file.
.generate_rocker_apptainer_content <- function(r_version, lib, sysreqs_cmd, cache, image,
                                               post_installation_steps = NULL,
                                               rel_dir = "",
                                               copy_all = FALSE) {
    rang_path <- file.path(rel_dir, "rang.R")
    cache_path <- file.path(rel_dir, "cache")
    environment_vars <- c(paste0("export RANG_PATH=", rang_path))
    containerfile_content <- list(
        BOOTSTRAP = "Bootstrap: docker",
        FROM = c(paste0("From: rocker/", image, ":", r_version)),
        ENV_section = "\n%environment\n",
        ## RPORT / USER / PASSWORD are only consumed by the rstudio start
        ## script below; defaults leave other images unaffected
        ENV = c(environment_vars, "export RPORT=${RPORT:-8787}",
                "export USER=$(whoami)", "export PASSWORD=${PASSWORD:-set_your_password}"),
        FILES_section = "\n%files\n",
        FILES = c(paste0("rang.R ", rang_path)),
        POST_section = "\n%post\n",
        ## env vars repeated here because %environment is not sourced in %post
        POST = c(environment_vars, sysreqs_cmd),
        STARTSCRIPT_section = "\n%startscript\n",
        STARTSCRIPT = c("exec R \"${@}\"")
    )
    if (!is.na(lib)) {
        ## create the extra library before running the installation script
        containerfile_content$POST <- append(containerfile_content$POST, paste0("mkdir ", lib, " && Rscript $RANG_PATH"))
    } else {
        containerfile_content$POST <- append(containerfile_content$POST, "Rscript $RANG_PATH")
    }
    if (isTRUE(cache)) {
        containerfile_content$FILES <- append(containerfile_content$FILES, paste0("cache ", cache_path))
        ## prepend so CACHE_PATH is defined before rang.R runs in %post
        containerfile_content$POST <- append(paste0("export CACHE_PATH=", cache_path), containerfile_content$POST)
    }
    if (image == "rstudio") {
        ## rstudio images launch rserver instead of plain R
        containerfile_content$STARTSCRIPT <- c("exec /usr/lib/rstudio-server/bin/rserver \\\
--auth-none=0 --auth-pam-helper-path=pam-helper \\\
--server-user=${USER} --www-port=${RPORT}")
    }
    containerfile_content$POST <- append(containerfile_content$POST, post_installation_steps)
    if (isTRUE(copy_all)) {
        ## copy_all replaces the whole %files list, including any cache entry
        containerfile_content$FILES <- c(". /")
    }
    return(containerfile_content)
}
89 |
--------------------------------------------------------------------------------
/R/as_pkgrefs.R:
--------------------------------------------------------------------------------
#' Convert Data Structures into Package References
#'
#' This generic function converts several standard data structures into a vector of package references, which in turn
#' can be used as the first argument of the function [resolve()]. This function guesstimates the possible sources of the
#' packages. But we strongly recommend manually reviewing the detected packages before using them for [resolve()].
#' @param x currently supported data structure(s) are: output from [sessionInfo()], a character vector of package names,
#' a path to a `renv.lock` file, a path to a `DESCRIPTION` file, or a path to a project directory
#' @param bioc_version character. When x is a character vector, version of Bioconductor to search for package names. NULL indicates not
#' to search for Bioconductor.
#' @param no_enhances logical, when parsing DESCRIPTION, whether to ignore packages in the "Enhances" field
#' @param no_suggests logical, when parsing DESCRIPTION, whether to ignore packages in the "Suggests" field
#' @param ... not used
#' @return a vector of package references
#' @export
#' @examples
#' as_pkgrefs(sessionInfo())
#' if (interactive()) {
#'     require(rang)
#'     graph <- resolve(as_pkgrefs(sessionInfo()))
#'     as_pkgrefs(c("rtoot"))
#'     as_pkgrefs(c("rtoot", "S4Vectors")) ## this gives cran::S4Vectors and is not correct.
#'     as_pkgrefs(c("rtoot", "S4Vectors"), bioc_version = "3.3") ## This gives bioc::S4Vectors
#' }
as_pkgrefs <- function(x, ...) {
    UseMethod("as_pkgrefs", x)
}
26 |
#' @rdname as_pkgrefs
#' @export
as_pkgrefs.default <- function(x, ...) {
    ## Fallback method: there is no known conversion for this kind of object.
    ## (An earlier draft delegated numerics etc. to .normalize_pkgs; that code
    ## path was deliberately disabled.)
    stop("Don't know how to convert this to package references.", call. = FALSE)
}
36 |
#' @rdname as_pkgrefs
#' @export
as_pkgrefs.character <- function(x, bioc_version = NULL, no_enhances = TRUE, no_suggests = TRUE, ...) {
    ## Dispatch on what the character input actually refers to:
    ## a renv lockfile, a project directory, a DESCRIPTION file,
    ## or (otherwise) a plain vector of package names.
    if (.is_renv_lockfile(x)) {
        return(.extract_pkgrefs_renv_lockfile(path = x))
    }
    if (.is_directory(x)) {
        return(.extract_pkgrefs_dir(x, bioc_version))
    }
    if (.is_DESCRIPTION(x)) {
        return(.extract_pkgrefs_DESCRIPTION(x, bioc_version,
                                            no_enhances = no_enhances,
                                            no_suggests = no_suggests))
    }
    .normalize_pkgs(pkgs = x, bioc_version = bioc_version)
}
52 |
#' @rdname as_pkgrefs
#' @export
as_pkgrefs.sessionInfo <- function(x, ...) {
    ## Convert every attached (non-base) package recorded in the sessionInfo
    ## object into a package reference string.
    attached <- x$otherPkgs
    vapply(attached, .extract_pkgref_packageDescription,
           FUN.VALUE = character(1), USE.NAMES = FALSE)
}
58 |
.extract_pkgrefs_renv_lockfile <- function(path) {
    ## Turn the package records of a renv lockfile into pkgref strings,
    ## grouped by the record's "Source" field.
    lockfile <- .parse_renv_lockfile(path)
    records <- lockfile[["Packages"]]
    sources <- vapply(records, `[[`, character(1), "Source", USE.NAMES = FALSE)
    ## helper: pull one field from every record with the given Source
    field_of <- function(src, field) {
        vapply(records[sources == src], `[[`, character(1), field, USE.NAMES = FALSE)
    }
    pkgs <- c()
    if ("Repository" %in% sources) {
        pkgs <- c(pkgs, paste0("cran::", field_of("Repository", "Package")))
    }
    if ("Bioconductor" %in% sources) {
        pkgs <- c(pkgs, paste0("bioc::", field_of("Bioconductor", "Package")))
    }
    if ("GitHub" %in% sources) {
        pkgs <- c(pkgs, paste0("github::",
                               field_of("GitHub", "RemoteUsername"), "/",
                               field_of("GitHub", "Package")))
    }
    if ("Local" %in% sources) {
        pkgs <- c(pkgs, paste0("local::", field_of("Local", "RemoteUrl")))
    }
    return(pkgs)
}
81 |
.extract_pkgref_packageDescription <- function(packageDescription) {
    ## Derive a single package reference ("github::user/repo", "bioc::pkg",
    ## "local::/path" or "cran::pkg") from a packageDescription object
    ## (one element of sessionInfo()$otherPkgs). GitHub metadata wins over
    ## everything else.
    handle <- packageDescription[["Package"]]
    if ("GithubRepo" %in% names(packageDescription)) {
        return(paste0("github::", packageDescription[["GithubUsername"]], "/", packageDescription[["GithubRepo"]]))
    }
    ## Bugfix: the URL field can be absent (NULL) or hold multiple strings;
    ## grepl(NULL) yields logical(0) and `if (logical(0))` is an error.
    ## isTRUE(any(...)) is robust to both cases.
    if (isTRUE(any(grepl("bioconductor", packageDescription[["URL"]])))) {
        return(paste0("bioc::", handle))
    }
    ## Bugfix: guard against a missing "file" attribute for the same reason.
    file_attr <- attr(packageDescription, "file")
    if (!is.null(file_attr) && basename(file_attr) == "DESCRIPTION") {
        ## probably loaded via devtools::load_all(); treat as a local package
        return(paste0("local::", dirname(file_attr)))
    }
    return(paste0("cran::", handle))
}
96 |
## Extract package references from the DESCRIPTION file at `path`.
## Reads the DCF fields, parses the dependency table (remotes = TRUE —
## presumably includes the Remotes field; confirm in .parse_desc), normalizes
## each dependency into a pkgref, filters Enhances/Suggests as requested, and
## finally removes cran/bioc refs that duplicate a github ref.
## Raises an error when no queryable dependency remains.
.extract_pkgrefs_DESCRIPTION <- function(path, bioc_version = NULL, no_enhances = TRUE, no_suggests = TRUE) {
    descr_df <- as.data.frame(read.dcf(path))
    pkg_dep_df <- .parse_desc(descr_df, remotes = TRUE)
    pkg_dep_df$y_pkgref <- .normalize_pkgs(pkg_dep_df$y, bioc_version = bioc_version)
    pkgrefs <- .extract_queryable_dependencies(pkg_dep_df, no_enhances = no_enhances,
                                               no_suggests = no_suggests)
    if (isTRUE(is.null(pkgrefs))) {
        stop("No queryable dependencies listed in the DESCRIPTION file.", call. = FALSE)
    }
    .remove_overlapped_pkgrefs(pkgrefs)
}
108 |
.remove_overlapped_pkgrefs <- function(pkgrefs) {
    ## When the same package name appears both as a github ref and as a
    ## cran/bioc ref, keep only the github ref.
    grouped <- .group_pkgrefs_by_source(pkgrefs)
    if (is.null(grouped$github)) {
        ## no github refs at all, so no overlap is possible
        return(pkgrefs)
    }
    for (gh_handle in grouped$github) {
        pkgname <- strsplit(gh_handle, "/")[[1]][2]
        for (shadowed in paste0(c("cran::", "bioc::"), pkgname)) {
            if (shadowed %in% pkgrefs) {
                pkgrefs <- setdiff(pkgrefs, shadowed)
            }
        }
    }
    return(pkgrefs)
}
129 |
.is_renv_lockfile <- function(path) {
    ## A renv lockfile is assumed to be a single existing file named exactly
    ## "renv.lock".
    if (length(path) != 1) {
        return(FALSE)
    }
    isTRUE(file.exists(path)) && identical(basename(path), "renv.lock")
}
143 |
.parse_renv_lockfile <- function(path) {
    ## Read the lockfile JSON as nested lists (no vector simplification, so
    ## every package record keeps its original structure).
    jsonlite::fromJSON(path, simplifyVector = FALSE)
}
149 |
.is_directory <- function(path) {
    ## TRUE only when `path` is a single existing directory.
    if (length(path) != 1) {
        return(FALSE)
    }
    isTRUE(dir.exists(path))
}
159 |
.extract_pkgrefs_dir <- function(path, bioc_version = NULL) {
    ## Scan a project directory for R package dependencies with renv, then
    ## normalize the unique package names into pkgrefs.
    deps <- suppressMessages(renv::dependencies(path, progress = FALSE))
    found_pkgs <- unique(deps$Package)
    warning("scanning directories for R packages cannot detect github packages.", call. = FALSE)
    .normalize_pkgs(pkgs = found_pkgs, bioc_version = bioc_version)
}
165 |
.is_DESCRIPTION <- function(path) {
    ## TRUE only when `path` is a single existing file named exactly
    ## "DESCRIPTION".
    if (length(path) != 1) {
        return(FALSE)
    }
    isTRUE(file.exists(path)) && identical(basename(path), "DESCRIPTION")
}
179 |
--------------------------------------------------------------------------------
/R/cache.R:
--------------------------------------------------------------------------------
1 | #' @importFrom utils download.file
2 | #' @importFrom here here
3 | NULL
4 |
.query_mirror_validity <- function(mirror, local.only = TRUE) {
    ## The canonical CRAN URL is always accepted without consulting the
    ## mirror list; everything else is checked against getCRANmirrors().
    if (identical(mirror, "https://cran.r-project.org/")) {
        return(TRUE)
    }
    known_mirrors <- utils::getCRANmirrors(local.only = local.only)$URL
    mirror %in% known_mirrors
}
12 |
.normalize_url <- function(mirror, https = TRUE) {
    ## Canonicalize a mirror URL: force the https:// scheme, ensure exactly
    ## one trailing "/", and optionally downgrade back to http://.
    url <- sub("^http://", "https://", mirror)
    if (!grepl("^https://", url)) {
        url <- paste0("https://", url)
    }
    if (!grepl("/$", url)) {
        url <- paste0(url, "/")
    }
    ## collapse any run of trailing slashes into a single one
    url <- sub("/+$", "/", url)
    if (isTRUE(https)) {
        return(url)
    }
    sub("^https://", "http://", url)
}
32 |
.check_tarball_path <- function(tarball_path, x, dir = FALSE) {
    ## Raise a uniform error when the cached artifact is missing; `dir`
    ## selects whether a file or a directory is expected.
    missing_file <- isFALSE(dir) && isFALSE(file.exists(tarball_path))
    missing_dir <- isTRUE(dir) && isFALSE(dir.exists(tarball_path))
    if (missing_file || missing_dir) {
        stop(x, " can't be cached.", call. = FALSE)
    }
    invisible()
}
41 |
## Download the source tarball of CRAN package `x` (version `version`) into
## `cache_dir`. The CRAN Archive is tried first; if that download errors
## (typically because the requested version is the current release and thus
## not yet archived), falls back to the main src/contrib directory.
## Errors via .check_tarball_path when neither attempt produced the file.
.cache_pkg_cran <- function(x, version, cache_dir, cran_mirror, verbose) {
    url <- paste(cran_mirror, "src/contrib/Archive/", x, "/", x, "_", version, ".tar.gz", sep = "")
    tarball_path <- file.path(cache_dir, paste(x, "_", version, ".tar.gz", sep = ""))
    tryCatch({
        suppressWarnings(utils::download.file(url, destfile = tarball_path, quiet = !verbose))
    }, error = function(e) {
        ## is the current latest
        url <- paste(cran_mirror, "src/contrib/", x, "_", version, ".tar.gz", sep = "")
        utils::download.file(url, destfile = tarball_path, quiet = !verbose)
    })
    .check_tarball_path(tarball_path, x)
}
54 |
## Download the source tarball of Bioconductor package `x` into `cache_dir`.
## `uid` is interpolated between the Bioconductor version and src/contrib in
## the URL — presumably the repository component (e.g. "bioc"); confirm at
## the call site. NOTE(review): unlike the CRAN variant there is no fallback
## URL; a failed download only surfaces via .check_tarball_path.
.cache_pkg_bioc <- function(x, version, cache_dir, bioc_mirror, bioc_version, verbose, uid) {
    url <- paste(bioc_mirror, bioc_version, "/", uid, "/src/contrib/", x, "_", version, ".tar.gz", sep = "")
    tarball_path <- file.path(cache_dir, paste(x, "_", version, ".tar.gz", sep = ""))
    suppressWarnings(utils::download.file(url, destfile = tarball_path, quiet = !verbose))
    .check_tarball_path(tarball_path, x)
}
61 |
## Download a snapshot of GitHub repository `handle` at commit `uid` as a
## tarball into `cache_dir`. The file is prefixed "raw_" because it is a
## repository snapshot, not a built source package (the caller builds it
## later). `source` and `version` are used only for the file name / API call.
.cache_pkg_github <- function(x, version, handle, source, uid, cache_dir, verbose) {
    sha <- uid
    tarball_path <- file.path(cache_dir, paste("raw_", x, "_", version, ".tar.gz", sep = ""))
    utils::download.file(paste("https://api.github.com/repos/", handle, "/tarball/", sha, sep = ""), destfile = tarball_path,
                         quiet = !verbose)
    .check_tarball_path(tarball_path, x)
}
69 |
.cache_pkg_local <- function(x, version, cache_dir, uid) {
    ## Cache a local package into `cache_dir`. `uid` is the local path and can
    ## be either a source tarball (.tar.gz / .tgz) or a source directory.
    local_path <- uid
    tarball_path <- file.path(cache_dir, paste0("raw_", x, "_", version, ".tar.gz"))
    if (isTRUE(grepl("\\.tar.gz$|\\.tgz$", local_path))) {
        ## it could be a valid source package, but don't trust it blindly, mark it as raw_
        ## similar to github packages
        file.copy(local_path, tarball_path)
        return(.check_tarball_path(tarball_path, x))
    }
    if (.is_directory(local_path)) {
        dir_pkg_path <- file.path(cache_dir, paste0("dir_", x, "_", version))
        file.copy(from = local_path, to = cache_dir, recursive = TRUE, overwrite = TRUE)
        file.rename(from = file.path(cache_dir, x), to = dir_pkg_path)
        return(.check_tarball_path(dir_pkg_path, x, dir = TRUE))
    }
    ## previously this fell through silently, leaving the package uncached;
    ## raise the same error .check_tarball_path would
    stop(x, " can't be cached.", call. = FALSE)
}
86 |
.cache_pkgs <- function(rang, base_dir, cran_mirror, bioc_mirror, verbose) {
    ## Download every package in the resolved installation order into
    ## <base_dir>/cache/rpkgs, dispatching on the package source.
    installation_order <- generate_installation_order(rang)
    cache_dir <- file.path(base_dir, "cache", "rpkgs")
    if (!dir.exists(cache_dir)) {
        dir.create(cache_dir, recursive = TRUE)
    }
    ## seq_len() is safe for zero rows; the previous seq(from = 1, to = 0, by = 1)
    ## errored with "wrong sign in 'by' argument" on an empty installation order
    for (i in seq_len(nrow(installation_order))) {
        x <- installation_order$x[i]
        source <- installation_order$source[i]
        version <- installation_order$version[i]
        handle <- installation_order$handle[i]
        uid <- installation_order$uid[i]
        switch(source,
               "cran" = .cache_pkg_cran(x = x, version = version, cache_dir = cache_dir,
                                        cran_mirror = cran_mirror, verbose = verbose),
               ## please note that github/local cached packages are not built
               "github" = .cache_pkg_github(x = x, version = version, handle = handle,
                                            source = source, uid = uid,
                                            cache_dir = cache_dir, verbose = verbose),
               "bioc" = .cache_pkg_bioc(x = x, version = version, cache_dir = cache_dir,
                                        bioc_mirror = bioc_mirror, bioc_version = rang$bioc_version,
                                        verbose = verbose, uid = uid),
               "local" = .cache_pkg_local(x = x, version = version, cache_dir = cache_dir,
                                          uid = uid))
    }
    invisible(base_dir)
}
121 |
.cache_rsrc <- function(r_version, base_dir, verbose, cran_mirror) {
    ## Download the R source tarball for `r_version` into <base_dir>/cache/rsrc
    ## and return the path to the downloaded file.
    cache_dir <- file.path(base_dir, "cache", "rsrc")
    if (!dir.exists(cache_dir)) {
        dir.create(cache_dir, recursive = TRUE)
    }
    major_version <- as.character(package_version(r_version)$major)
    ## R 1.x sources were distributed as .tgz on CRAN; later versions as .tar.gz
    if (major_version == "1") {
        file_extension <- ".tgz"
    } else {
        file_extension <- ".tar.gz"
    }
    download_dir <- paste0("R-", major_version)
    tar_file <- paste0("R-", r_version, file_extension)
    url <- paste0(cran_mirror, "src/base/", download_dir, "/", tar_file)
    tar_path <- file.path(cache_dir, tar_file)
    ## utils:: qualified for consistency with the other .cache_* helpers
    utils::download.file(url = url, destfile = tar_path, quiet = !verbose)
    if (!file.exists(tar_path)) {
        stop("Fail to cache R source.")
    }
    return(tar_path)
}
143 |
144 |
.cache_debian <- function(debian_version, base_dir, verbose) {
    ## Download the Debian EOL rootfs image for `debian_version` into
    ## <base_dir>/cache/debian and return the path to the rootfs tarball.
    cache_dir <- file.path(base_dir, "cache", "debian")
    if (!dir.exists(cache_dir)) {
        dir.create(cache_dir, recursive = TRUE)
    }
    ## `debian_urls` is an internal lookup table keyed by version name
    debian_image_url <- debian_urls[debian_version]
    ## fail early with a clear message instead of download.file erroring on NA
    if (is.na(debian_image_url)) {
        stop("No cached image URL for Debian version: ", debian_version, call. = FALSE)
    }
    rootfs_path <- file.path(cache_dir, "rootfs.tar.xz")
    ## utils:: qualified for consistency with the other .cache_* helpers
    utils::download.file(debian_image_url, destfile = rootfs_path, quiet = !verbose)
    if (!file.exists(rootfs_path)) {
        stop("Fail to cache Debian disk image.")
    }
    return(rootfs_path)
}
158 |
--------------------------------------------------------------------------------
/R/dockerfile.R:
--------------------------------------------------------------------------------
## for normalizing post_installation_steps
.normalize_docker_steps <- function(steps) {
    ## Turn arbitrary shell snippets into valid Dockerfile instructions:
    ## lines that already start with a Dockerfile instruction (or a comment)
    ## pass through; everything else is prefixed with "RUN ".
    normalize_one <- function(step) {
        docker_regex <- "^#|^ADD |^COPY |^ENV |^EXPOSE |^FROM |^LABEL |^STOPSIGNAL |^USER |^VOLUME |^WORKDIR |^ONBUILD |^RUN |^CMD |^ENTRYPOINT |^ARG |^HEALTHCHECK |^SHELL "
        step_lines <- strsplit(step, "\n")[[1]]
        is_instruction <- grepl(docker_regex, step_lines)
        step_lines <- ifelse(is_instruction, step_lines, paste0("RUN ", step_lines))
        paste(step_lines, collapse = "\n")
    }
    vapply(steps, normalize_one, character(1), USE.NAMES = FALSE)
}
12 |
## Build the content of a Dockerfile for EOL Debian base images (debian/eol).
## Returns a list of character vectors grouped by Dockerfile instruction
## (FROM, ENV, COPY, RUN, CMD); the caller serializes it into a Dockerfile.
## R is compiled from source inside the container via compile_r.sh.
.generate_debian_eol_dockerfile_content <- function(r_version, lib, sysreqs_cmd, cache, debian_version = "lenny",
                                                    post_installation_steps = NULL,
                                                    rel_dir = "",
                                                    copy_all = FALSE) {
    rang_path <- file.path(rel_dir, "rang.R")
    cache_path <- file.path(rel_dir, "cache")
    compile_path <- file.path(rel_dir, "compile_r.sh")
    containerfile_content <- list(
        FROM = c(paste0("FROM debian/eol:", debian_version)),
        ENV = c("ENV TZ UTC",
                "RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && apt-get update -qq && apt-get install wget locales build-essential r-base-dev -y", paste0("ENV RANG_PATH ", rang_path), paste0("ENV COMPILE_PATH ", compile_path)),
        COPY = c(paste0("COPY rang.R ", rang_path), paste0("COPY compile_r.sh ", compile_path)),
        RUN = c(paste("RUN", sysreqs_cmd)),
        CMD = c("CMD [\"R\"]"))
    ## create the target library first when one was requested (`lib` not NA)
    if (!is.na(lib)) {
        containerfile_content$RUN <- append(containerfile_content$RUN, paste0("RUN mkdir ", lib, " && bash $COMPILE_PATH ", r_version))
    } else {
        containerfile_content$RUN <- append(containerfile_content$RUN, paste0("RUN bash $COMPILE_PATH ", r_version))
    }
    if (isTRUE(cache)) {
        containerfile_content$COPY <- append(containerfile_content$COPY,
                                             c(paste0("COPY cache/rpkgs ", file.path(cache_path, "rpkgs")),
                                               paste0("COPY cache/rsrc ", file.path(cache_path, "rsrc"))))
        ## with a cache, the image boots from the cached rootfs tarball
        ## instead of pulling debian/eol from a registry
        containerfile_content$FROM <- c("FROM scratch", paste0("ADD ", file.path(rel_dir, "cache/debian/rootfs.tar.xz"), " /"))
        containerfile_content$ENV <- append(containerfile_content$ENV, paste0("ENV CACHE_PATH ", cache_path))
    }
    containerfile_content$RUN <- append(containerfile_content$RUN, .normalize_docker_steps(post_installation_steps))
    ## NOTE(review): copy_all REPLACES the COPY group entirely; rang.R and
    ## compile_r.sh are then presumably brought in by "COPY . /" — confirm
    if (isTRUE(copy_all)) {
        containerfile_content$COPY <- c("COPY . /")
    }
    return(containerfile_content)
}
45 |
.generate_rocker_dockerfile_content <- function(r_version, lib, sysreqs_cmd, cache, image,
                                                post_installation_steps = NULL,
                                                rel_dir = "",
                                                copy_all = FALSE) {
    ## Assemble Dockerfile content, grouped by instruction, for rocker base
    ## images (rocker/<image>:<r_version>). Returns a list of character vectors.
    rang_path <- file.path(rel_dir, "rang.R")
    cache_path <- file.path(rel_dir, "cache")
    content <- list(
        FROM = paste0("FROM rocker/", image, ":", r_version),
        ENV = paste0("ENV RANG_PATH ", rang_path),
        COPY = paste0("COPY rang.R ", rang_path),
        RUN = paste("RUN", sysreqs_cmd),
        CMD = "CMD [\"R\"]")
    ## create the target library first when one was requested (`lib` not NA)
    install_step <- if (is.na(lib)) {
        "RUN Rscript $RANG_PATH"
    } else {
        paste0("RUN mkdir ", lib, " && Rscript $RANG_PATH")
    }
    content$RUN <- c(content$RUN, install_step)
    if (isTRUE(cache)) {
        content$COPY <- c(content$COPY, paste0("COPY cache ", cache_path))
        content$ENV <- c(content$ENV, paste0("ENV CACHE_PATH ", cache_path))
    }
    if (image == "rstudio") {
        ## rstudio images expose the IDE on 8787 and boot via /init
        content$CMD <- c("EXPOSE 8787", "CMD [\"/init\"]")
    }
    content$RUN <- c(content$RUN, .normalize_docker_steps(post_installation_steps))
    if (isTRUE(copy_all)) {
        content$COPY <- "COPY . /"
    }
    return(content)
}
76 |
## Build Dockerfile content for evercran (ghcr.io/r-hub/evercran) base images.
## Returns a list of character vectors grouped by Dockerfile instruction
## (FROM, ENV, COPY, RUN, CMD); the caller serializes it into a Dockerfile.
.generate_evercran_dockerfile_content <- function(r_version, lib, sysreqs_cmd, cache,
                                                  post_installation_steps = NULL,
                                                  rel_dir = "",
                                                  copy_all = FALSE) {
    rang_path <- file.path(rel_dir, "rang.R")
    cache_path <- file.path(rel_dir, "cache")
    containerfile_content <- list(
        ## evercran only works with semver
        FROM = c(paste0("FROM ghcr.io/r-hub/evercran/", r_version)),
        ENV = c(paste0("ENV RANG_PATH ", rang_path)),
        COPY = c(paste0("COPY rang.R ", rang_path)),
        RUN = c(paste("RUN", sysreqs_cmd)),
        CMD = c("CMD [\"R\"]"))
    ## NOTE(review): paste() inserts a separator space after "RUN ", so the
    ## generated line reads "RUN  ..." (double space); harmless to Docker but
    ## worth confirming before normalizing
    run_cmd <- "RUN "
    if (!is.na(lib)) {
        run_cmd <- paste(run_cmd, "mkdir", lib, "&&")
    }
    ## Rscript is used for installation except on very old R, where the
    ## script is redirected into R instead
    if (.is_r_version_older_than(r_version, "2.5.0")) {
        run_cmd <- paste(run_cmd, "R --no-save < $RANG_PATH")
    } else {
        run_cmd <- paste(run_cmd, "Rscript $RANG_PATH")
    }
    containerfile_content$RUN <- append(containerfile_content$RUN, run_cmd)
    if (isTRUE(cache)) {
        containerfile_content$COPY <- append(containerfile_content$COPY, paste0("COPY cache ", cache_path))
        containerfile_content$ENV <- append(containerfile_content$ENV, paste0("ENV CACHE_PATH ", cache_path))
    }
    containerfile_content$RUN <- append(containerfile_content$RUN, .normalize_docker_steps(post_installation_steps))
    ## copy_all REPLACES (does not extend) the COPY instructions
    if (isTRUE(copy_all)) {
        containerfile_content$COPY <- c("COPY . /")
    }
    return(containerfile_content)
}
110 |
--------------------------------------------------------------------------------
/R/edgelist.R:
--------------------------------------------------------------------------------
#' Convert Data Structures to rang edgelist
#'
#' This generic function converts several data structures provided by rang into an edgelist of package dependencies.
#' @param x supported data structures are `rang` and `ranglet` S3 objects
#' @param ... not used
#' @return a data frame of directed edges of dependencies
#' @details the resulting data frame can be converted to an igraph object for plotting and analysis via the function [igraph::graph_from_data_frame()]
#' @export
#' @examples
#' \donttest{
#' if (interactive()) {
#' graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
#' snapshot_date = "2020-01-16")
#'
#' # dependency edgelist of a single package
#' convert_edgelist(graph$ranglets[[1]])
#'
#' # full dependency edgelist
#' convert_edgelist(graph)
#' }
#' }
convert_edgelist <- function(x, ...) {
    UseMethod("convert_edgelist", x)
}
25 |
26 | #' @rdname convert_edgelist
27 | #' @export
convert_edgelist.default <- function(x, ...) {
    ## No conversion is defined for this class: fail with a clear error.
    msg <- paste("don't know how to convert an object of type", class(x), "to a rang edgelist")
    stop(msg, call. = FALSE)
}
31 |
32 | #' @rdname convert_edgelist
33 | #' @export
convert_edgelist.ranglet <- function(x, ...) {
    ## Edges from the root package to its direct queryable dependencies.
    root_edges <- data.frame(from = x$pkgref,
                             to = .extract_queryable_dependencies(x$original, x$no_enhances, x$no_suggests))
    ## One edge set per non-terminal dependency; terminal nodes add nothing.
    dep_edges <- lapply(x$deps, function(dep) {
        if (.is_terminal_node(dep, x$no_enhances)) {
            return(NULL)
        }
        data.frame(from = unique(dep$x_pkgref),
                   to = .extract_queryable_dependencies(dep, x$no_enhances, x$no_suggests))
    })
    do.call("rbind", c(list(root_edges), dep_edges))
}
44 |
45 | #' @rdname convert_edgelist
46 | #' @export
convert_edgelist.rang <- function(x, ...) {
    ## With no resolved ranglets, return an empty edgelist with the
    ## expected columns.
    if (length(x$ranglets) == 0) {
        return(data.frame(from = character(0), to = character(0)))
    }
    el <- do.call("rbind", lapply(x$ranglets, convert_edgelist))
    rownames(el) <- NULL
    el
}
--------------------------------------------------------------------------------
/R/memo_misc.R:
--------------------------------------------------------------------------------
1 | #' @importFrom memoise memoise
2 | #' @importFrom pkgsearch cran_package_history
3 | #' @importFrom remotes system_requirements
4 | NULL
5 |
6 | ## one hr
7 |
.cran_package_history <- function(package, max_retries = 5) {
    ## Query the CRAN package history, retrying (with a 2s pause) on the
    ## intermittent "parse error: premature EOF" responses from the API.
    retry_marker <- structure(list(), class = "rang_retry_marker")
    for (attempt in seq_len(max_retries)) {
        result <- tryCatch(
            pkgsearch::cran_package_history(package),
            error = function(e) {
                ## only the known transient error triggers a retry;
                ## anything else is rethrown immediately
                if (!grepl("parse error: premature EOF", e$message)) {
                    stop(e)
                }
                Sys.sleep(2)
                retry_marker
            })
        if (!inherits(result, "rang_retry_marker")) {
            return(result)
        }
    }
    stop("Can't query this package: ", package, call. = FALSE)
}
25 |
26 | .memo_search <- memoise::memoise(.cran_package_history, cache = cachem::cache_mem(max_age = 60 * 60))
27 |
.rver <- function() {
    ## Fetch the full R release history from the r-hub rversions API.
    raw_json <- readLines("https://api.r-hub.io/rversions/r-versions")
    suppressWarnings(jsonlite::fromJSON(raw_json, simplifyVector = TRUE))
}
31 |
32 | .memo_rver <- memoise::memoise(.rver, cache = cachem::cache_mem(max_age = 120 * 60))
33 |
## Scrape Bioconductor release dates from the project's config.yaml.
## Returns a data.frame with columns `version` and `date`.
.biocver <- function() {
    url <- "https://bioconductor.org/config.yaml"
    tag <- "release_dates"
    txt <- readLines(url)
    ## line indices of top-level (non-indented) yaml keys
    grps <- grep("^[^[:blank:]]", txt)
    ## position of the release_dates key among the top-level keys
    ## NOTE(review): assumes `tag` matches exactly one line of the yaml — confirm
    start <- match(grep(tag, txt), grps)
    ## last line of the release_dates block: just before the next top-level
    ## key, or the end of the file if release_dates is the last block
    end <- ifelse(length(grps) < start + 1, length(txt), grps[start + 1] - 1)
    map <- txt[seq(grps[start] + 1, end)]
    ## strip trailing "#" comments and surrounding quotes, then whitespace
    map <- trimws(gsub("\"", "", sub(" #.*", "", map)))
    ## each remaining line has the form "<version>: <date>"
    pattern <- "(.*): (.*)"
    bioc_ver <- sub(pattern, "\\1", map)
    bioc_date <- parsedate::parse_date(sub(pattern, "\\2", map))
    data.frame(version = bioc_ver, date=bioc_date)
}
48 |
49 | .memo_biocver <- memoise::memoise(.biocver, cache = cachem::cache_mem(max_age = 120 * 60))
50 |
## Fetch package metadata (the VIEWS files) for a given Bioconductor version
## across all four repositories. Repositories without a readable VIEWS file
## (e.g. absent in old versions) are skipped silently (best effort).
.bioc_package_history <- function(bioc_version) {
    if (bioc_version != "release" && utils::compareVersion(bioc_version, "2.0") == -1) {
        stop("Bioconductor versions < 2.0 are not supported.", call. = FALSE)
    }
    suffixes <- c("bioc", "data/annotation", "data/experiment", "workflow")
    output <- data.frame()
    for (suffix in suffixes) {
        view_url <- paste0("http://bioconductor.org/packages/", bioc_version, "/", suffix, "/VIEWS")
        con <- url(view_url)
        ## `finally` guarantees the connection is closed exactly once, whether
        ## or not reading fails (previously close() was called separately in
        ## the success path and the error handler)
        tryCatch({
            raw_metadata <- suppressWarnings(read.dcf(con))
            metadata <- as.data.frame(raw_metadata)
            metadata$suffix <- suffix
            output <- vctrs::vec_rbind(output, metadata)
        }, error = function(e) {
            ## deliberate best-effort: ignore unreadable repositories
            NULL
        }, finally = close(con))
    }
    output
}
72 |
73 | .memo_search_bioc <- memoise::memoise(.bioc_package_history, cache = cachem::cache_mem(max_age = 60 * 60))
74 |
.vcat <- function(verbose = TRUE, ...) {
    ## Emit a message (followed by an extra blank line) only when verbose.
    if (!isTRUE(verbose)) {
        return(invisible())
    }
    message(..., "\n")
    invisible()
}
81 |
82 | ## data generation
83 | ## ---
84 | ## recipes <- list()
85 | ## recipes["texlive"] <- "## install texlive\napt-get install -y pandoc pandoc-citeproc texlive"
86 | ## recipes["texlivefull"] <- "## install texlive-full\napt-get install -y pandoc pandoc-citeproc texlive-full"
87 | ## recipes["quarto"] <- "## install quarto (latest)\napt-get install -y curl git && curl -LO https://quarto.org/download/latest/quarto-linux-amd64.deb && dpkg -i quarto-linux-amd64.deb && quarto install tool tinytex && rm quarto-linux-amd64.deb"
88 | ## recipes["clean"] <- "## Clean up caches\nrm -rf /var/lib/apt/lists/* && if [ -d \"$CACHE_PATH\" ]; then rm -rf $CACHE_PATH; fi"
89 | ## recipes["make"] <- "## install GNU make\napt-get -y install make"
90 | ## usethis::use_data(recipes, overwrite = TRUE)
91 |
92 | #' Recipes for Building Container Images
93 | #'
94 | #' A list containing several useful recipes for container building. Useful for the `post_installation_steps` argument of [dockerize()]. Available recipes are:
95 | #' * `texlive`: install pandoc and LaTeX, useful for rendering RMarkdown
96 | #' * `texlivefull`: Similar to the above, but install the full distribution of TeX Live (~ 3GB)
97 | #' * `quarto`: install quarto and tinytex
98 | #' * `clean`: clean up the container image by removing cache
99 | #' * `make`: install GNU make
100 | #' @examples
101 | #' \donttest{
102 | #' if (interactive()) {
103 | #' graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
104 | #' snapshot_date = "2020-01-16")
105 | #' ## install texlive
106 | #' dockerize(graph, ".", post_installation_steps = recipes[['texlive']])
107 | #' }
108 | #' }
109 | "recipes"
110 |
111 | ## internal data generation
112 | ## ---
113 | ### Supported OS Versions
114 | ## supported_os <- c("trusty" = "ubuntu-14.04", "xenial" = "ubuntu-16.04", "bionic" = "ubuntu-18.04", "focal" = "ubuntu-20.04", "centos-6", "centos-7", "centos-8", "redhat-6", "redhat-7", "redhat-8")
115 | ## ### R version history
116 | ## cached_rver <- .rver()
117 | ## attr(cached_rver, "newest_date") <- anytime::anytime(tail(cached_rver, n = 1)$date, tz = "UTC", asUTC = TRUE)
118 | ## ### Bioconductor version history
119 | ## cached_biocver <- .biocver()
120 | ## attr(cached_biocver, "newest_date") <- max(cached_biocver$date)
121 | ## debian_version <- c("lenny", "squeeze", "wheezy", "jessie", "stretch")
122 | ## .get_debian_urls <- function(debian_version, output_dir, verbose) {
123 | ## sha <- .gh(paste0("/repos/debuerreotype/docker-debian-eol-artifacts/branches/dist-",
124 | ## debian_version))$commit$sha
125 | ## .gh(paste0("/repos/debuerreotype/docker-debian-eol-artifacts/contents/",
126 | ## debian_version, "/amd64/rootfs.tar.xz"), ref = sha)$download_url
127 | ## }
128 | ## debian_urls <- sapply(debian_version, .get_debian_urls)
129 | ## usethis::use_data(supported_os, cached_rver, cached_biocver, debian_urls, internal = TRUE, overwrite = TRUE)
130 |
131 | ## test data upgrade
132 | ## ---
133 | ## devtools::load_all()
134 | ## all_rds_files <- list.files(here::here("tests/testdata"), ".RDS", full.names = TRUE)
135 | ## for (rds in all_rds_files) {
136 | ## x <- readRDS(rds)
137 | ## y <- eval(x$call)
138 | ## saveRDS(y, rds)
139 | ## }
140 |
--------------------------------------------------------------------------------
/R/pkgref.R:
--------------------------------------------------------------------------------
1 | ## we follow:
2 | ## https://r-lib.github.io/pkgdepends/reference/pkg_refs.html
3 |
4 | ## syntax of a ref: source::handle
5 | ## source can be: "cran", "github" (as of now)
6 | ## a `handle` indicates how the `package` is sourced from the `source`:
7 | ## if source == "cran", handle <- package name as per DESCRIPTION, e.g. "rtoot"
8 | ## if source == "github", handle <- username/reponame, e.g. "schochastics/rtoot"
9 |
10 | ## Similar to pak::pak()
11 | ## `pkgs` parameter of resolve() can either be shorthands (e.g. "rtoot", "schochastics/rtoot")
12 | ## or pkgrefs (e.g. "cran::rtoot", "github::schochastics/rtoot")
13 |
14 | ## For `dep_df`
15 | ## compulsory columns
16 | ## `x`, `x_version`, `x_pubdate` are the information as per DESCRIPTION
17 | ## `x_pkgref` is x in pkgref, for internal storage, we don't use @ to indicate version / uid
18 |
19 | ## optional columns
## 1. if there are dependencies: `y`, `y_raw_version`, `type`, `y_pkgref`: as per DESCRIPTION; `y_raw_version` is not useful
21 |
22 | ## 2. for "github" (and possible "local" in the future)
23 | ## `x_uid` and `y_uid` are extra unique identifier for pinning down the package, if `?_version` isn't sufficient for this purpose
24 | ## if `source` == "github", `?_uid` <- "sha"
25 |
26 | ## `installation_order` should be an ordered data.frame
27 | ## not using snake case in column names for backward compatibility in containers, and not needed
28 | ## columns: x, version, source, handle, uid
29 |
.clean_suffixes <- function(pkgref) {
    ## Strip unsupported trailing specifiers (@version, ?flags, # pins).
    ## sub() suffices: the pattern is greedy to end-of-string, so there is
    ## at most one match per element.
    sub("[@#\\?].+", "", pkgref)
}
34 |
## Split a pkgref ("source::handle") and return the handle (default) or the source.
.parse_pkgref <- function(pkgref, return_handle = TRUE) {
    if (isFALSE(.is_pkgref(pkgref))) {
        ## bug fix: the message previously read e.g. "fooist not ..." because
        ## the literal lacked a leading space after the pkgref
        stop(pkgref, " is not a valid `pkgref`", call. = FALSE)
    }
    ## remove all @, ?, or # suffixes, we don't support them
    pkgref <- .clean_suffixes(pkgref)
    parts <- strsplit(pkgref, ":+")[[1]]
    if (isTRUE(return_handle)) {
        return(parts[2])
    }
    parts[1]
}
49 |
.is_local <- function(pkg) {
    ## A local ref must start with ".", "~", or "/" (per the pkgref standard).
    leading_local_marker <- "^[\\.~/]"
    grepl(leading_local_marker, pkg)
}
54 |
.is_github <- function(pkg) {
    ## local refs take precedence over github handle detection
    if (isTRUE(.is_local(pkg))) {
        return(FALSE)
    }
    ## full github URLs are always github refs
    if (grepl("github\\.com", pkg)) {
        return(TRUE)
    }
    ## otherwise a "user/repo" shorthand: exactly two path components,
    ## with no leading (path-like) or trailing slash
    parts <- strsplit(pkg, split = "/")[[1]]
    grepl("/", pkg) &&
        !grepl("^[\\.~]?/", pkg) &&
        !grepl("/$", pkg) &&
        length(parts) == 2
}
67 |
.is_bioc <- function(pkg, bioc_version) {
    ## Without a target Bioconductor version nothing is classified as bioc.
    if (is.null(bioc_version)) {
        return(FALSE)
    }
    known_pkgs <- .memo_search_bioc(bioc_version)
    pkg %in% known_pkgs$Package
}
75 |
76 | ## TBI: .is_valid_pkgref
77 | ## pkgref is only valid if: exactly one "::", source %in% c("cran", "github"), if "github", .is_github is TRUE
## TBI: .is_valid_pkgref
## pkgref is only valid if: exactly one "::", source %in% c("cran", "github"), if "github", .is_github is TRUE
.is_pkgref <- function(pkg) {
    ## Valid pkgrefs carry a known source prefix and split into exactly
    ## two parts around the colon run.
    has_known_source <- grepl("^github::|^cran::|^local::|^bioc::", pkg)
    has_known_source && length(strsplit(pkg, ":+")[[1]]) == 2
}
81 |
.extract_github_handle <- function(url) {
    ## Reduce any github URL or remote string to a "user/repo" handle.
    url <- sub("^github::", "", url)
    if (isTRUE(grepl("@github\\.com", url))) {
        ## ssh-style remote string, e.g. git@github.com:user/repo.git
        repo_part <- strsplit(url, ":")[[1]][2]
        return(sub("\\.git$", "", repo_part))
    }
    ## https-style URL: take the path after github.com
    after_host <- strsplit(url, "github\\.com")[[1]]
    path <- sub("^/", "", after_host[length(after_host)])
    components <- strsplit(path, "/")[[1]]
    paste(components[1], components[2], sep = "/")
}
94 |
## Normalize a shorthand or pkgref into canonical "source::handle" form.
## Detection precedence (order matters): explicit pkgref > github > local >
## bioc > cran (fallback for anything else).
.normalize_pkg <- function(pkg,bioc_version=NULL) {
    if (pkg == "" || is.na(pkg)) {
        stop("Invalid `pkg`.", call. = FALSE)
    }
    ## full github URLs are reduced to a "user/repo" handle first
    if (isTRUE(grepl("github\\.com", pkg))) {
        pkg <- .extract_github_handle(pkg)
    }
    ## already a pkgref: only strip unsupported suffixes
    if (isTRUE(.is_pkgref(pkg))) {
        return(.clean_suffixes(pkg))
    }
    if (isTRUE(.is_github(pkg))) {
        return(paste0("github::", .clean_suffixes(pkg)))
    }
    if (isTRUE(.is_local(pkg))) {
        return(paste0("local::", .clean_suffixes(pkg)))
    }
    if (isTRUE(.is_bioc(pkg, bioc_version))) {
        return(paste0("bioc::", .clean_suffixes(pkg)))
    }
    ## everything else is assumed to be a CRAN package
    paste0("cran::", .clean_suffixes(pkg))
}
116 |
117 | ## vectorize
## vectorize
.normalize_pkgs <- function(pkgs, bioc_version = NULL) {
    ## Apply .normalize_pkg element-wise, dropping names.
    vapply(X = pkgs, FUN = .normalize_pkg, FUN.VALUE = character(1),
           bioc_version = bioc_version, USE.NAMES = FALSE)
}
121 |
--------------------------------------------------------------------------------
/R/sysdata.rda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/R/sysdata.rda
--------------------------------------------------------------------------------
/R/use_rang.R:
--------------------------------------------------------------------------------
1 | #' Setup rang for a directory
2 | #'
3 | #' This `usethis`-style function adds the infrastructure in a directory (presumably with R scripts
4 | #' and data) for (re)constructing the computational environment.
5 | #' Specifically, this function inserts `inst/rang` into the directory, which contains
6 | #' all components for the reconstruction. Optionally, `Makefile` and `.here` are also inserted
7 | #' to ease the development of analytic code.
8 | #' By default, (re)running this function does not overwrite any file. One can change this by setting
9 | #' `force` to TRUE.
10 | #' @param path character, path to the project root
11 | #' @param add_makefile logical, whether to insert a barebone `Makefile` in the project root.
12 | #' @param add_here logical, whether to insert a hidden `.here` file in the project root
13 | #' @param verbose logical, whether to print out messages
14 | #' @param force logical, whether to overwrite files (`inst/rang/update.R`, `Makefile`, `.here`) if they
15 | #' exist.
16 | #' @param apptainer logical, whether to use apptainer. `FALSE` indicates using Docker
17 | #' @return path, invisibly
18 | #' @details The infrastructure being added to your path consists of:
19 | #' * `inst/rang` directory in the project root
20 | #' * `update.R` file inside the directory
21 | #' * `.here` in the project root (if `add_here` is TRUE)
22 | #' * `Makefile` in the project root (if `add_makefile` is TRUE)
23 | #' You might need to edit `update.R` manually. The default is to scan the whole project for
24 | #' used R packages and assume they are either on CRAN or Bioconductor. If you have used other R packages,
25 | #' you might need to edit this manually.
26 | #' @export
use_rang <- function(path = ".", add_makefile = TRUE, add_here = TRUE,
                     verbose = TRUE, force = FALSE, apptainer = FALSE) {
    if (isFALSE(dir.exists(path))) {
        stop("'path' does not exist")
    }
    ## all rang infrastructure lives in inst/rang inside the project
    base_dir <- file.path(path, "inst/rang")
    if (isFALSE(dir.exists(base_dir))) {
        dir.create(base_dir, recursive = TRUE)
        .vcat(verbose, "`inst/rang` created.")
    } else {
        .vcat(verbose, "`inst/rang` exists.")
    }
    ## apptainer and docker workflows ship different update scripts / Makefiles
    if (apptainer) {
        source_update_file <- "update_apptainer.R"
        make_file <- "apptainer/Makefile"
    } else {
        source_update_file <- "update.R"
        make_file <- "Makefile"
    }
    ## the template is always installed as `update.R`, whatever its source name;
    ## existing files are only overwritten when `force` is TRUE
    if (isFALSE(file.exists(file.path(base_dir, "update.R"))) || isTRUE(force)) {
        file.copy(system.file(source_update_file, package = "rang"), file.path(base_dir, "update.R"), overwrite = TRUE)
    }
    if (isTRUE(add_makefile) && (isFALSE(file.exists(file.path(path, "Makefile"))) || isTRUE(force))) {
        file.copy(system.file(make_file, package = "rang"), file.path(path, "Makefile"), overwrite = TRUE)
        .vcat(verbose, "`Makefile` added.")
    }
    if (isTRUE(add_here) && (isFALSE(file.exists(file.path(path, ".here"))) || isTRUE(force))) {
        file.create(file.path(path, ".here"))
        .vcat(verbose, "`.here` added.")
    }
    .vcat(verbose, "The infrastructure for running `rang` in this project is now ready.")
    .vcat(verbose, "You might want to edit this file: inst/rang/update.R")
    .vcat(verbose, paste0("After that, run: setwd(\"", path,"\"); source(\"inst/rang/update.R\")"))
    ## the make hint only applies when a Makefile was requested
    .vcat(verbose && add_makefile, "Or run in your shell: make update")
    return(invisible(path))
}
63 |
64 | #' Create executable research compendium according to the Turing Way
65 | #'
66 | #' This `usethis`-style function creates an executable research compendium according to the Turing Way.
67 | #' @param path character, path to the project root
68 | #' @param add_rang logical, whether to run [use_rang()] to `path`
69 | #' @inheritParams use_rang
70 | #' @return path, invisibly
71 | #' @seealso [use_rang()]
72 | #' @details
73 | #' According to the Turing Way, an executable research compendium should have the following properties
74 | #' 1. Files should be organized in a conventional folder structure;
75 | #' 2. Data, methods, and output should be clearly separated;
76 | #' 3. The computational environment should be specified.
77 | #'
78 | #' We use the structure suggested by the Turing Way:
79 | #' * `data_raw`: a directory to hold the raw data
80 | #' * `data_clean`: a directory to hold the processed data
81 | #' * `code`: a directory to hold computer code
82 | #' * `CITATION`: a file holding citation information
83 | #' * `paper.Rmd`: a manuscript
84 |
#' This function provides a clearly separated organizational structure. Components can be changed. For example, the manuscript can be in another format (e.g. quarto, sweave) or even optional. With `add_rang`, the computational environment can be recorded and reconstructed later.
86 | #'
87 | #' @references
88 | #' [The Turing Way: Research Compendia](https://the-turing-way.netlify.app/reproducible-research/compendia.html)
89 | #' Gorman, KB, Williams TD. and Fraser WR (2014). Ecological Sexual Dimorphism and Environmental Variability within a Community of Antarctic Penguins (Genus Pygoscelis). PLoS ONE 9(3):e90081. \doi{10.1371/journal.pone.0090081}
90 | #' @export
create_turing <- function(path, add_rang = TRUE, add_makefile = TRUE, add_here = TRUE, verbose = TRUE, force = FALSE, apptainer = FALSE) {
    ## Refuse to write into an existing directory.
    if (isTRUE(dir.exists(path))) {
        stop("`path` exists.")
    }
    dir.create(path)
    ## copy the bundled compendium skeleton shipped with the package
    template_files <- list.files(system.file("turing", package = "rang"), full.names = TRUE)
    file.copy(from = template_files, to = path, recursive = TRUE)
    writeLines(c("Please cite this research compendium as:", "", " "), file.path(path, "CITATION"))
    dir.create(file.path(path, "figures"))
    dir.create(file.path(path, "data_clean"))
    ## optionally add the rang reconstruction infrastructure on top
    if (isTRUE(add_rang)) {
        use_rang(path, add_makefile = add_makefile, add_here = add_here, verbose = verbose, force = force, apptainer = apptainer)
    }
    invisible(path)
}
106 |
--------------------------------------------------------------------------------
/_pkgdown.yml:
--------------------------------------------------------------------------------
1 | url: https://gesistsa.github.io/rang/
2 | template:
3 | package: tsatemplate
4 |
--------------------------------------------------------------------------------
/_quarto.yml:
--------------------------------------------------------------------------------
1 | project:
2 | title: rang
3 | type: default
4 | render:
5 | - methodshub.qmd
6 |
--------------------------------------------------------------------------------
/data/recipes.rda:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/data/recipes.rda
--------------------------------------------------------------------------------
/inst/CITATION:
--------------------------------------------------------------------------------
1 | citHeader("To cite rang in publications use:")
2 |
3 |
4 | bibentry(bibtype = "article",
5 | title = "rang: Reconstructing reproducible R computational environments",
6 | journal = "PLOS ONE",
7 | author = c(person("Chung-hong", "Chan"), person("David", "Schoch")),
8 | url = "https://github.com/gesistsa/rang",
9 | year = 2023,
10 | doi = "10.1371/journal.pone.0286761")
11 |
--------------------------------------------------------------------------------
/inst/Makefile:
--------------------------------------------------------------------------------
1 | ## Autogenerated by rang, you might want to change the handle
2 | handle=yourproject
3 | .PHONY: update build launch bash daemon stop export rebuild
4 |
5 | update:
6 | Rscript inst/rang/update.R
7 | build: Dockerfile
8 | docker build -t ${handle}img .
9 | launch:
10 | -docker run --rm --name "${handle}container" -ti ${handle}img
11 | bash:
12 | -docker run --rm --name "${handle}container" --entrypoint bash -ti ${handle}img
13 | daemon:
14 | -docker run -d --rm --name "${handle}container" -ti ${handle}img
15 | stop:
16 | -docker stop ${handle}container
17 | export:
18 | docker save ${handle}img | gzip > ${handle}img.tar.gz
19 | rebuild: ${handle}img.tar.gz
20 | docker load < ${handle}img.tar.gz
21 |
--------------------------------------------------------------------------------
/inst/apptainer/Makefile:
--------------------------------------------------------------------------------
1 | ## Autogenerated by rang, you might want to change the handle
2 | handle=yourproject
3 | .PHONY: update build launch bash daemon stop export
4 |
5 | update:
6 | Rscript inst/rang/update.R
7 | build: container.def
8 | apptainer build "${handle}img.sif" container.def
9 | launch:
10 | -apptainer run "${handle}img.sif" R
11 | bash:
12 | -apptainer shell "${handle}img.sif"
13 | daemon:
14 | -apptainer instance start "${handle}img.sif" "${handle}container"
15 | stop:
16 | -apptainer instance stop "${handle}container"
17 |
--------------------------------------------------------------------------------
/inst/apptainer_readme_template.txt:
--------------------------------------------------------------------------------
1 | This README offers some guidance on how to work with the included Apptainer/Singularity container
2 | which was created with the R package rang on __DATE__
3 |
4 | # Installing Apptainer/Singularity
5 |
Apptainer and Singularity are almost the same thing.
Apptainer is the new name for Singularity (for more info see
https://apptainer.org/news/community-announcement-20211130/ ),
but there is still also a Singularity Community Edition ( https://sylabs.io/singularity/ ). So far they work in the same way and the containers are identical.
10 |
11 | The Apptainer installation depends on the OS you are running
12 | Follow the steps outlined in the official docs:
13 | - for Linux: https://apptainer.org/docs/admin/main/installation.html#installation-on-linux
14 | - for macOS: https://apptainer.org/docs/admin/main/installation.html#mac
15 | - for Windows: https://apptainer.org/docs/admin/main/installation.html#windows
16 |
17 | On Ubuntu, the easiest way to install Apptainer would be to:
18 | sudo apt update
19 | sudo apt install -y software-properties-common
20 | sudo add-apt-repository -y ppa:apptainer/ppa
21 | sudo apt update
22 | sudo apt install -y apptainer
23 |
24 | For SingularityCE:
25 | - Linux: https://docs.sylabs.io/guides/latest/admin-guide/installation.html#installation-on-linux
26 | - macOS: https://docs.sylabs.io/guides/latest/admin-guide/installation.html#mac
27 | - Windows: https://docs.sylabs.io/guides/latest/admin-guide/installation.html#windows
28 |
29 |
If you have access to HPC, there is a good chance that Singularity or Apptainer is already installed there by your administrator. On your HPC node you can check this:
31 |
32 | module avail singularity
33 | module avail apptainer
34 |
# Run the Apptainer/Singularity container
36 | Open a command prompt window (In Windows this could be the built-in command prompt, PowerShell, or Windows Terminal)
37 | navigate to output_dir in the command prompt: `cd __OUTPUT__`
38 |
Note that all Singularity commands for building containers
might require sudo rights on Linux distros. Since 1.1.0 Apptainer is rootless
(for details see https://apptainer.org/news/apptainer-1-1-0-20220927), so you
can build and run containers with Apptainer without root. With Singularity
you can only run containers without root access.
44 |
The execution depends on what image you chose when creating the container (image used here: __IMAGE__)
46 |
47 |
48 | ## image!="rstudio"
49 |
50 | apptainer build container.sif container.def
51 | apptainer run container.sif R
52 |
53 | or
54 |
55 | sudo singularity build container.sif container.def
56 | singularity run container.sif R
57 |
An R command prompt should open. To confirm that you have successfully gone back in time, use the `sessionInfo()` command. To stop the container, just quit R.
59 |
60 | ## image = "rstudio"
61 |
62 | Build a container:
63 |
64 | apptainer build container.sif container.def
65 |
66 | or
67 |
68 | sudo singularity build container.sif container.def
69 |
70 | All other commands to work with containers do not require root and `apptainer` can be replaced with `singularity`.
71 |
72 | To run RStudio IDE in Apptainer/Singularity container, some writeable folders and a config file have to be created locally:
73 |
74 | mkdir -p run var-lib-rstudio-server .rstudio
75 | printf 'provider=sqlite\ndirectory=/var/lib/rstudio-server\n' > database.conf
76 |
77 | After that, you can run the container (do not run as `root` user, otherwise you will not be able to login to RStudio IDE).
78 |
79 | Start instance (on default RSTUDIO port 8787):
80 |
81 | apptainer instance start \
82 | --bind run:/run,var-lib-rstudio-server:/var/lib/rstudio-server,database.conf:/etc/rstudio/database.conf,.rstudio:/home/rstudio/.rstudio/ \
83 | container.sif \
84 | rangtest
85 |
86 | Now open a browser and go to localhost:8787.
87 | The default username is your local username, default password is 'set_your_password' (if you are using container generated by rang).
88 |
89 |
90 | List running instances:
91 |
92 | apptainer instance list
93 |
94 | Stop instance:
95 |
96 | apptainer instance stop rangtest
97 |
98 | Start instance with custom port (e.g. 8080) and password:
99 |
100 | apptainer instance start \
    --env RPORT=8080 \
102 | --env PASSWORD='set_your_password' \
103 | --bind run:/run,var-lib-rstudio-server:/var/lib/rstudio-server,database.conf:/etc/rstudio/database.conf,.rstudio:/home/rstudio/.rstudio/ \
104 | container.sif \
105 | rangtest
106 |
107 | Run container with custom rserver command line:
108 |
109 | apptainer exec \
110 | --env PASSWORD='set_your_password' \
111 | --bind run:/run,var-lib-rstudio-server:/var/lib/rstudio-server,database.conf:/etc/rstudio/database.conf,.rstudio:/home/rstudio/.rstudio/ \
112 | container.sif \
113 | /usr/lib/rstudio-server/bin/rserver \
114 | --auth-none=0 --auth-pam-helper-path=pam-helper \
115 | --server-user=$(whoami) --www-port=8787
116 |
117 | If you run the container using `apptainer exec` command, you will have to kill the `rserver` process manually or Ctrl+C from the running container.
118 |
119 |
120 | ## Need more information about rang?
121 | Check the vignette included in the package
122 |
123 | ## Need more information about Apptainer/Singularity?
124 |
125 | - Consult the Apptainer documentation: https://apptainer.org/docs
126 | - Consult the Singularity documentation: https://sylabs.io/docs/
127 | - Read Rocker Project guide on running Rocker in Singularity
128 | and example SLURM script for running in HPC environment: https://rocker-project.org/use/singularity.html
129 |
130 | ## Issues?
131 |
132 | If you are unable to run this Apptainer/Singularity container please file an issue at https://github.com/chainsawriot/rang/issues
133 | containing the following information:
134 |
135 | - The `resolve()` command you executed in R
136 | - The `apptainerize()` command you executed in R
137 | - The error message shown in your command prompt
138 |
--------------------------------------------------------------------------------
/inst/compile_r.sh:
--------------------------------------------------------------------------------
## Download the R source tarball for the version given as $1, compile and
## install it, then run the rang installation script.
## Environment: CACHE_PATH (optional tarball cache), RANG_PATH (script to run).

## Abort immediately if any step (download, configure, make) fails
set -e

## R 1.x tarballs use the .tgz extension; later series use .tar.gz
case "$1" in
    1*)
        DOWNLOAD_DIR="R-1"
        TARFILE="R-$1.tgz"
        ;;
    2*)
        DOWNLOAD_DIR="R-2"
        TARFILE="R-$1.tar.gz"
        ;;
    3*)
        DOWNLOAD_DIR="R-3"
        TARFILE="R-$1.tar.gz"
        ;;
    *)
        DOWNLOAD_DIR="R-4"
        TARFILE="R-$1.tar.gz"
        ;;
esac

## Prefer the cached copy; otherwise fetch from CRAN.
## NOTE(review): http (not https) is kept, presumably because very old base
## images lack usable TLS support -- confirm before switching to https.
if [ ! -f "/$CACHE_PATH/rsrc/$TARFILE" ]; then
    wget "http://cran.r-project.org/src/base/$DOWNLOAD_DIR/$TARFILE"
else
    cp "/$CACHE_PATH/rsrc/$TARFILE" /
fi

tar -zxf "$TARFILE"

cd "R-$1"
./configure --without-x
make
make install

cd /

Rscript "$RANG_PATH"
31 |
--------------------------------------------------------------------------------
/inst/docker_readme_template.txt:
--------------------------------------------------------------------------------
1 | This README offers some guidance on how to work with the included docker container
2 | which was created with the R package rang on __DATE__
3 |
4 | # Installing docker
5 |
6 | The installation depends on the OS you are running
7 | Follow the steps outlined in the official docs: https://docs.docker.com/engine/install/
8 | For Windows, you can also check out https://learn.microsoft.com/en-us/virtualization/windowscontainers/quick-start/set-up-environment
9 |
10 | # Run the docker container
11 | Open a command prompt window (In Windows this could be the built-in command prompt, PowerShell, or Windows Terminal)
12 | navigate to output_dir in the command prompt: `cd __OUTPUT__`
13 |
14 | Note that all docker commands below might need sudo rights on Linux distros
15 | If you want to run docker without sudo, follow the steps outlined here: https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user
16 |
17 | The execution depends on what image you chose when dockerizing (image used here: __IMAGE__)
18 |
19 | ## image!="rstudio"
20 | docker build -t rang .
21 | docker run --rm --name "rangtest" -ti rang
22 |
You can change the --name parameter to something more suitable for your container
An R command prompt should open. To confirm that you have successfully gone back in time,
use the `sessionInfo()` command
26 |
27 | ## image = "rstudio"
28 | docker build -t rang .
29 | docker run -p 8787:8787 -e PASSWORD=abc123 --rm --name "rangtest" -ti rang
30 |
You can change the --name parameter to something more suitable for your container
when the building is finished, open a browser and go to localhost:8787
The default username is rstudio, password as specified above (in this case abc123).
To confirm that you have successfully gone back in time,
use the `sessionInfo()` command
36 |
37 | ## Need more information about rang?
38 | Check the vignette included in the package
39 |
40 | ## Need more information about docker?
41 |
42 | - Consult the docker documentation: https://docs.docker.com/get-started/
43 | - Consult this docker tutorial: https://docker-curriculum.com/
- Using Windows? Check out Microsoft's documentation: https://learn.microsoft.com/en-us/virtualization/windowscontainers/
45 |
46 | ## Issues?
47 |
48 | If you are unable to run this docker container please file an issue at https://github.com/chainsawriot/rang/issues
49 | containing the following information:
50 |
51 | - The `resolve()` command you executed in R
52 | - The `dockerize()` command you executed in R
53 | - The error message shown in your command prompt
54 |
--------------------------------------------------------------------------------
/inst/footer.R:
--------------------------------------------------------------------------------
1 |
## Install every package listed in `installation.order`, one row at a time, in
## the resolved (dependency-safe) order. `seq_len()` yields an empty sequence
## when the table has zero rows, so no explicit guard is required.
for (i in seq_len(nrow(installation.order))) {
    .install.from.source(x = installation.order$x[i],
                         version = installation.order$version[i],
                         handle = installation.order$handle[i],
                         source = installation.order$source[i],
                         uid = installation.order$uid[i],
                         lib = lib, path = path, verbose = verbose,
                         cran.mirror = cran.mirror, bioc.mirror = bioc.mirror,
                         current.r.version = current.r.version)
}
15 |
--------------------------------------------------------------------------------
/inst/header.R:
--------------------------------------------------------------------------------
## generated by rang, do not edit by hand

## e.g. "4.3.1"; used below to select version-appropriate installation flags
current.r.version <- paste(R.Version()[c("major","minor")], collapse = ".", sep = "")

## Packages are downloaded/built under CACHE_PATH/rpkgs when a cache directory
## is provided via the environment; otherwise a session temp directory is used.
if (Sys.getenv("CACHE_PATH") != "") {
    path <- file.path(Sys.getenv("CACHE_PATH"), "rpkgs")
} else {
    path <- tempdir()
}
10 |
## Install a previously built source tarball via install.packages().
## `verbose`/`quiet` are only supported from R 3.0 onwards, so they are only
## passed on those versions; `lib` is only passed when not NA (NA means: use
## the default library path).
.install.packages <- function(tarball.path, lib, verbose, current.r.version) {
    args <- list(pkg = tarball.path, repos = NULL)
    if (!is.na(lib)) {
        args$lib <- lib
    }
    if (utils::compareVersion(current.r.version, "3.0") != -1) {
        args$verbose <- verbose
        args$quiet <- !verbose
    }
    do.call("install.packages", args)
}
26 |
## Download the source tarball for package `x` at `version` to `tarball.path`.
## GitHub packages are delegated to .download.package.from.github(); CRAN
## packages are first tried from the Archive, then (on failure) from the main
## contrib directory in case `version` is the current release. Errors for
## unknown sources are silently swallowed by the tryCatch, as before.
.download.package <- function(tarball.path, x, version, handle, source, uid, verbose, cran.mirror, bioc.mirror, current.r.version) {
    if (source == "github") {
        return(.download.package.from.github(tarball.path, x, version, handle, source, uid, current.r.version))
    }
    tarball.file <- paste(x, "_", version, ".tar.gz", sep = "")
    if (source == "bioc") {
        url <- paste(bioc.mirror, uid, "/src/contrib/", tarball.file, sep = "")
    } else if (source == "cran") {
        url <- paste(cran.mirror, "src/contrib/Archive/", x, "/", tarball.file, sep = "")
    }
    tryCatch({
        suppressWarnings(download.file(url, destfile = tarball.path, quiet = !verbose))
    }, error = function(e) {
        if (source == "cran") {
            ## not in the Archive: `version` is the current latest release
            latest.url <- paste(cran.mirror, "src/contrib/", tarball.file, sep = "")
            download.file(latest.url, destfile = tarball.path, quiet = !verbose)
        }
    })
    invisible(tarball.path)
}
49 |
## Build a collision-unlikely scratch path inside `tmpdir`: a random stem of
## 20 ASCII letters followed by `fileext`. The file itself is not created.
.tempfile <- function(tmpdir = tempdir(), fileext = ".tar.gz") {
    random.stem <- paste(sample(c(LETTERS, letters), 20, replace = TRUE), collapse = "")
    file.path(tmpdir, paste(random.stem, fileext, sep = ""))
}
54 |
## Build an installable tarball from a "raw" (un-built) source archive, e.g.
## one fetched from the GitHub API. The archive is unpacked into a fresh
## scratch directory, its top-level directory renamed to the package name,
## and `R CMD build` is run on it. The built tarball is moved to
## `tarball.path`, which is returned.
.build.raw.tarball <- function(raw.tarball.path, x, version, tarball.path, current.r.version) {
    ## "--no-build-vignettes" replaced "--no-vignettes" in R 3.1
    vignetteflag <- if (utils::compareVersion(current.r.version, "3.1") != -1) "--no-build-vignettes" else "--no-vignettes"
    scratch.dir <- .tempfile(fileext = "")
    dir.create(scratch.dir)
    system(command = paste("tar", "-zxf ", raw.tarball.path, "-C", scratch.dir))
    extracted.dir <- list.files(path = scratch.dir, full.names = TRUE)[1]
    renamed.dir <- file.path(scratch.dir, x)
    file.rename(extracted.dir, renamed.dir)
    res <- system(command = paste("R", "CMD", "build", vignetteflag, renamed.dir))
    ## R CMD build drops <pkg>_<version>.tar.gz into the working directory
    built.tarball <- paste(x, "_", version, ".tar.gz", sep = "")
    stopifnot(file.exists(built.tarball))
    file.rename(built.tarball, tarball.path)
    return(tarball.path)
}
73 |
## Build an installable tarball from an unpacked package source directory.
## `R CMD build` writes <pkg>_<version>.tar.gz into the working directory;
## that file is moved to `tarball.path`, which is returned.
.build.dir.tarball <- function(dir.pkg.path, x, version, tarball.path, current.r.version) {
    ## "--no-build-vignettes" replaced "--no-vignettes" in R 3.1
    vignetteflag <- if (utils::compareVersion(current.r.version, "3.1") != -1) "--no-build-vignettes" else "--no-vignettes"
    built.tarball <- paste(x, "_", version, ".tar.gz", sep = "")
    res <- system(command = paste("R", "CMD", "build", vignetteflag, dir.pkg.path))
    stopifnot(file.exists(built.tarball))
    file.rename(built.tarball, tarball.path)
    return(tarball.path)
}
86 |
## Ensure an installable tarball of package `x` (at `version`) exists under
## `path` and install it into `lib` (NA = default library). Three cache
## states are handled:
##   1. nothing present              -> download from CRAN/Bioconductor/GitHub
##   2. raw_<x>_<version>.tar.gz     -> un-built source archive; build it first
##   3. dir_<x>_<version>/ directory -> unpacked source tree; build it first
## Stops with an error if the package is not installed afterwards.
.install.from.source <- function(x, version, handle, source, uid, lib,
                                 path = tempdir(), verbose, cran.mirror, bioc.mirror, current.r.version) {
    tarball.path <- file.path(path, paste(x, "_", version, ".tar.gz", sep = ""))
    raw.tarball.path <- file.path(path, paste("raw_", x, "_", version, ".tar.gz", sep = ""))
    dir.pkg.path <- file.path(path, paste("dir_", x, "_", version, sep = ""))
    ## only download when no cached artifact of any kind is present
    if (!file.exists(tarball.path) && !file.exists(raw.tarball.path) && !file.exists(dir.pkg.path)) {
        .download.package(tarball.path = tarball.path, x = x, version = version, handle = handle, source = source,
                          uid = uid, verbose = verbose, cran.mirror = cran.mirror, bioc.mirror = bioc.mirror,
                          current.r.version = current.r.version)
    }
    if (file.exists(raw.tarball.path)) {
        tarball.path <- .build.raw.tarball(raw.tarball.path, x = x, version = version, tarball.path,
                                           current.r.version = current.r.version)
        if (!file.exists(tarball.path)) {
            stop("building failed.")
        }
    }
    if (file.exists(dir.pkg.path)) {
        tarball.path <- .build.dir.tarball(dir.pkg.path, x = x, version = version, tarball.path,
                                           current.r.version = current.r.version)
        if (!file.exists(tarball.path)) {
            stop("building failed.")
        }
    }
    .install.packages(tarball.path, lib, verbose, current.r.version)
    ## check and error; note: local `installed.packages` shadows the
    ## utils::installed.packages() function name inside this scope
    if (!is.na(lib)) {
        installed.packages <- installed.packages(lib.loc = lib)
    } else {
        installed.packages <- installed.packages()
    }
    if (!x %in% dimnames(installed.packages)[[1]]) {
        stop("Fail to install ", x, "\n")
    }
    invisible()
}
123 |
124 | # installing github packages
## Fallback download of the GitHub tarball for `handle` at commit `sha` into
## `file`; stops with a clear error message when the download fails.
## NOTE(review): this fallback uses plain http:// while the primary attempt in
## .download.package.from.github uses https:// -- presumably deliberate for
## environments with broken TLS; confirm before changing.
.download.github.safe <- function(handle, sha, file) {
    tryCatch(
        download.file(paste("http://api.github.com/repos/", handle, "/tarball/", sha, sep = ""), destfile = file),
        error = function(e) {
            stop(paste("couldn't download ", handle, " from github", sep = ""), call. = FALSE)
        }
    )
}
133 |
## Download the source tree of GitHub repo `handle` at commit `uid` and build
## it into an installable tarball at `tarball.path`, which is returned.
## Tries https first; on error, retries via .download.github.safe().
## (Removed the unused locals `short.sha` and `tmp.dir` from the original.)
.download.package.from.github <- function(tarball.path, x, version, handle, source, uid, current.r.version) {
    sha <- uid
    raw.tarball.path <- .tempfile(fileext = ".tar.gz")
    tryCatch(
        download.file(paste("https://api.github.com/repos/", handle, "/tarball/", sha, sep = ""), destfile = raw.tarball.path),
        error = function(e) {
            .download.github.safe(handle, sha, raw.tarball.path)
        }
    )
    .build.raw.tarball(raw.tarball.path = raw.tarball.path, x = x, version = version, tarball.path = tarball.path, current.r.version = current.r.version)
    return(tarball.path)
}
148 |
--------------------------------------------------------------------------------
/inst/header_cmd.R:
--------------------------------------------------------------------------------
## generated by rang, do not edit by hand
## DEBUG INFO: CMD

## Placeholders: version-dependent switches and the Bioconductor mirror are
## not needed on the R CMD INSTALL code path
current.r.version <- NA
bioc.mirror <- NA
path <- file.path(Sys.getenv("CACHE_PATH"), "rpkgs") ## cache must have been enforced
7 |
## Build an installable tarball from a raw (un-built) source archive.
## The first build skips vignettes; if that produces no tarball, a plain
## `R CMD build` is retried. The built tarball is moved to `tarball.path`.
.build.raw.tarball <- function(raw.tarball.path, x, version, tarball.path, current.r.version) {
    vignetteflag <- "--no-vignettes"
    scratch.dir <- tempfile()
    dir.create(scratch.dir)
    system(command = paste("tar", "-zxf ", raw.tarball.path, "-C", scratch.dir))
    extracted.dir <- list.files(path = scratch.dir, full.names = TRUE)[1]
    renamed.dir <- file.path(scratch.dir, x)
    file.rename(extracted.dir, renamed.dir)
    built.tarball <- paste(x, "_", version, ".tar.gz", sep = "")
    res <- system(command = paste("R", "CMD", "build", vignetteflag, renamed.dir))
    if (!file.exists(built.tarball)) {
        ## retry without the vignette flag for R CMD build variants that
        ## reject it
        res <- system(command = paste("R", "CMD", "build", renamed.dir))
    }
    stopifnot(file.exists(built.tarball))
    file.rename(built.tarball, tarball.path)
    return(tarball.path)
}
25 |
## Build an installable tarball from an unpacked package source directory,
## retrying without the vignette flag if the first build yields no tarball.
.build.dir.tarball <- function(dir.pkg.path, x, version, tarball.path, current.r.version) {
    vignetteflag <- "--no-vignettes"
    built.tarball <- paste(x, "_", version, ".tar.gz", sep = "")
    res <- system(command = paste("R", "CMD", "build", vignetteflag, dir.pkg.path))
    if (!file.exists(built.tarball)) {
        ## retry without the vignette flag for R CMD build variants that
        ## reject it
        res <- system(command = paste("R", "CMD", "build", dir.pkg.path))
    }
    stopifnot(file.exists(built.tarball))
    file.rename(built.tarball, tarball.path)
    return(tarball.path)
}
37 |
## Install package `x` (at `version`) from the enforced cache under `path`
## using `R CMD INSTALL`. A pre-built tarball, a raw archive
## (raw_<x>_<version>.tar.gz) or an unpacked source directory
## (dir_<x>_<version>/) may be present; raw/dir forms are built first.
## `lib` (non-NA) is passed through as `-l <lib>`. Unlike the header.R
## variant, no download is attempted -- the cache must already be populated.
.install.from.source <- function(x, version, handle, source, uid, lib,
                                 path = tempdir(), verbose, cran.mirror, bioc.mirror, current.r.version) {
    tarball.path <- file.path(path, paste(x, "_", version, ".tar.gz", sep = ""))
    raw.tarball.path <- file.path(path, paste("raw_", x, "_", version, ".tar.gz", sep = ""))
    dir.pkg.path <- file.path(path, paste("dir_", x, "_", version, sep = ""))
    if (file.exists(raw.tarball.path)) {
        tarball.path <- .build.raw.tarball(raw.tarball.path, x = x, version = version, tarball.path,
                                           current.r.version = current.r.version)
        if (!file.exists(tarball.path)) {
            stop("building failed.")
        }
    }
    if (file.exists(dir.pkg.path)) {
        tarball.path <- .build.dir.tarball(dir.pkg.path, x = x, version = version, tarball.path,
                                           current.r.version = current.r.version)
        if (!file.exists(tarball.path)) {
            stop("building failed.")
        }
    }
    if (!is.na(lib)) {
        l <- paste("-l", lib)
    } else {
        l <- ""
    }
    res <- system(command = paste("R", "CMD", "INSTALL", tarball.path, l))
}
64 |
--------------------------------------------------------------------------------
/inst/turing/bibliography.bib:
--------------------------------------------------------------------------------
1 | @Manual{horst2020,
2 | title = {palmerpenguins: {Palmer Archipelago (Antarctica)} penguin data},
3 | author = {Allison Marie Horst and Alison Presmanes Hill and Kristen B Gorman},
4 | year = {2020},
5 | note = {R package version 0.1.0},
6 | doi = {10.5281/zenodo.3960218},
7 | url = {https://allisonhorst.github.io/palmerpenguins/},
8 | }
9 |
10 | @Article{gorman2014,
11 | author = {Gorman, Kristen B. and Williams, Tony D. and Fraser,
12 | William R.},
13 | title = {{Ecological Sexual Dimorphism and Environmental
14 | Variability within a Community of Antarctic Penguins
15 | (Genus Pygoscelis)}},
16 | journal = {PLoS ONE},
17 | year = 2014,
18 | volume = 9,
19 | number = 3,
20 | month = {Mar},
21 | pages = {e90081},
22 | issn = {1932-6203},
23 | doi = {10.1371/journal.pone.0090081},
24 | url = {http://dx.doi.org/10.1371/journal.pone.0090081},
25 | publisher = {Public Library of Science (PLoS)}
26 | }
27 |
--------------------------------------------------------------------------------
/inst/turing/code/00_preprocess.R:
--------------------------------------------------------------------------------
## This file is modified from
## https://github.com/allisonhorst/palmerpenguins/blob/main/data-raw/penguins.R
## License CC0: https://github.com/allisonhorst/palmerpenguins/blob/main/LICENSE.md

library(janitor)
library(here)
library(dplyr)
library(readr)
library(stringr)
library(lubridate)

penguins_raw_df <- read_csv(here("data_raw", "penguins_raw.csv"))

## Tidy the raw column names, derive the analysis columns, and keep only the
## variables used downstream (same transformations as the original pipeline,
## written as sequential assignments).
penguins_df <- clean_names(penguins_raw_df)
penguins_df <- mutate(penguins_df,
                      species_short = word(species, 1),
                      sex = tolower(sex),
                      year = as.integer(lubridate::year(date_egg)))
## character columns (including the ones derived above) become factors
penguins_df <- mutate(penguins_df, across(where(is.character), as.factor))
penguins_df <- mutate(penguins_df,
                      flipper_length_mm = as.integer(flipper_length_mm),
                      body_mass_g = as.integer(body_mass_g))
penguins_df <- rename(penguins_df,
                      bill_length_mm = culmen_length_mm,
                      bill_depth_mm = culmen_depth_mm)
penguins_df <- select(penguins_df,
                      species_short,
                      island,
                      bill_length_mm,
                      bill_depth_mm,
                      flipper_length_mm,
                      body_mass_g,
                      sex,
                      year)
penguins_df <- rename(penguins_df, species = species_short)
penguins_df <- as.data.frame(penguins_df)

write_csv(penguins_df, here("data_clean", "penguins.csv"))
--------------------------------------------------------------------------------
/inst/turing/code/01_visualization.R:
--------------------------------------------------------------------------------
## This file is modified from
## https://github.com/allisonhorst/palmerpenguins/blob/main/README.Rmd
## License CC0: https://github.com/allisonhorst/palmerpenguins/blob/main/LICENSE.md

library(ggplot2)
library(readr)
library(here)

penguins <- read_csv(here("data_clean", "penguins.csv"))

## Scatterplot of flipper length against bill length, coloured/shaped by
## species, with one linear trend line per species.
species_colours <- c("darkorange", "purple", "cyan4")
flipper_bill <- ggplot(data = penguins,
                       aes(x = flipper_length_mm, y = bill_length_mm)) +
    geom_point(aes(color = species, shape = species),
               size = 3,
               alpha = 0.8) +
    geom_smooth(method = "lm", se = FALSE, aes(color = species)) +
    theme_minimal() +
    scale_color_manual(values = species_colours) +
    labs(title = "Flipper and bill length",
         subtitle = "Dimensions for Adelie, Chinstrap and Gentoo Penguins at Palmer Station LTER",
         x = "Flipper length (mm)",
         y = "Bill length (mm)",
         color = "Penguin species",
         shape = "Penguin species") +
    theme(legend.position = c(0.85, 0.15),
          plot.title.position = "plot",
          plot.caption = element_text(hjust = 0, face = "italic"),
          plot.caption.position = "plot")

ggsave(here("figures", "flipper_bil.png"), flipper_bill)
33 |
--------------------------------------------------------------------------------
/inst/turing/paper.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Executable Compendium (The Turing Way)"
3 | author: Garry Gilmore, Elena Emerson, Sebastian Stewart, Isabel Ingram, Sophie Sasaki
4 | output: pdf_document
5 | bibliography: "bibliography.bib"
6 | ---
7 |
8 | ```{r setup, include = FALSE}
9 | library(here)
10 | library(knitr)
11 | ```
12 |
13 | # Ipsum
14 |
15 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus tempus quis justo vel elementum. Ut arcu tortor, pharetra ac mattis vel, scelerisque in metus. Morbi iaculis ante eros, sit amet iaculis dolor suscipit non. Duis lectus urna, laoreet ut nisi quis, ultricies condimentum ipsum. Nullam aliquam id ipsum eget maximus. Pellentesque vulputate felis augue, eget interdum mauris iaculis et. Mauris maximus eros dolor, ut fermentum augue suscipit faucibus.
16 |
17 | Vivamus nulla turpis, condimentum eget urna in, fringilla auctor sapien. Pellentesque sodales lacus ac consequat sodales. In hac habitasse platea dictumst. Praesent ut elit est. Nullam quis quam dignissim, vehicula massa et, dignissim nisl. Ut commodo interdum dapibus. Integer ac sem aliquam, sollicitudin erat mollis, blandit arcu. Vestibulum finibus nisi id tincidunt consequat. Vestibulum varius massa ac dui aliquet condimentum. Duis vel mollis ipsum. Curabitur aliquet diam sit amet dignissim luctus. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Phasellus massa risus, molestie quis rhoncus quis, varius sit amet elit.
18 |
19 | Sed ultrices neque eget magna sagittis, et consectetur nibh finibus. Aliquam sit amet convallis massa. Sed accumsan nunc et diam lacinia feugiat. Vivamus dapibus augue vestibulum felis vestibulum ornare. Curabitur sodales leo at orci gravida, a varius ex consectetur. Maecenas in tellus sed odio varius volutpat. In vestibulum tellus vitae sapien convallis, at facilisis nibh posuere. Sed dictum leo scelerisque nisl dignissim, a tristique risus rutrum.
20 |
21 | Suspendisse scelerisque facilisis augue, a aliquet augue tempor vel. Quisque ac vulputate sem. Nulla facilisi. Donec sagittis ante vitae enim sollicitudin dictum. Aenean in luctus nisi, ac mollis erat. Curabitur pellentesque risus consequat ante volutpat, non auctor nisi molestie. Fusce sit amet lacinia arcu, non euismod metus. Suspendisse fermentum sit amet felis sed ultrices. Suspendisse potenti. Quisque gravida purus mi, nec laoreet massa sodales in. Quisque tincidunt eu nibh vitae convallis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos.
22 |
23 | Mauris tristique mauris et imperdiet imperdiet. Proin aliquet metus nec ligula egestas sagittis. Nulla metus lorem, cursus ut ex a, aliquam condimentum erat. Cras eget augue in est euismod varius. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Vivamus eget justo metus. Integer volutpat interdum felis blandit laoreet. Quisque et sagittis est, quis semper risus.
24 |
25 | # Analysis
26 |
27 | This is a visualization of the `palmerpenguins` dataset [@horst2020; @gorman2014].
28 |
29 | ```{r flipper, echo = FALSE, fig.cap = 'Flipper and bill length'}
30 | knitr::include_graphics(here("figures", "flipper_bil.png"))
31 | ```
32 |
33 | # Ipsum
34 |
35 | Lorem ipsum dolor sit amet, consectetur adipiscing elit. Vivamus tempus quis justo vel elementum. Ut arcu tortor, pharetra ac mattis vel, scelerisque in metus. Morbi iaculis ante eros, sit amet iaculis dolor suscipit non. Duis lectus urna, laoreet ut nisi quis, ultricies condimentum ipsum. Nullam aliquam id ipsum eget maximus. Pellentesque vulputate felis augue, eget interdum mauris iaculis et. Mauris maximus eros dolor, ut fermentum augue suscipit faucibus.
36 |
37 | Vivamus nulla turpis, condimentum eget urna in, fringilla auctor sapien. Pellentesque sodales lacus ac consequat sodales. In hac habitasse platea dictumst. Praesent ut elit est. Nullam quis quam dignissim, vehicula massa et, dignissim nisl. Ut commodo interdum dapibus. Integer ac sem aliquam, sollicitudin erat mollis, blandit arcu. Vestibulum finibus nisi id tincidunt consequat. Vestibulum varius massa ac dui aliquet condimentum. Duis vel mollis ipsum. Curabitur aliquet diam sit amet dignissim luctus. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Phasellus massa risus, molestie quis rhoncus quis, varius sit amet elit.
38 |
39 | Sed ultrices neque eget magna sagittis, et consectetur nibh finibus. Aliquam sit amet convallis massa. Sed accumsan nunc et diam lacinia feugiat. Vivamus dapibus augue vestibulum felis vestibulum ornare. Curabitur sodales leo at orci gravida, a varius ex consectetur. Maecenas in tellus sed odio varius volutpat. In vestibulum tellus vitae sapien convallis, at facilisis nibh posuere. Sed dictum leo scelerisque nisl dignissim, a tristique risus rutrum.
40 |
41 | Suspendisse scelerisque facilisis augue, a aliquet augue tempor vel. Quisque ac vulputate sem. Nulla facilisi. Donec sagittis ante vitae enim sollicitudin dictum. Aenean in luctus nisi, ac mollis erat. Curabitur pellentesque risus consequat ante volutpat, non auctor nisi molestie. Fusce sit amet lacinia arcu, non euismod metus. Suspendisse fermentum sit amet felis sed ultrices. Suspendisse potenti. Quisque gravida purus mi, nec laoreet massa sodales in. Quisque tincidunt eu nibh vitae convallis. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos.
42 |
43 | Mauris tristique mauris et imperdiet imperdiet. Proin aliquet metus nec ligula egestas sagittis. Nulla metus lorem, cursus ut ex a, aliquam condimentum erat. Cras eget augue in est euismod varius. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Vivamus eget justo metus. Integer volutpat interdum felis blandit laoreet. Quisque et sagittis est, quis semper risus.
44 |
45 | # References
46 |
--------------------------------------------------------------------------------
/inst/update.R:
--------------------------------------------------------------------------------
## Re-resolve this project's dependency graph and regenerate its Dockerfile.
library(rang)
library(here)

cran_mirror <- "https://cloud.r-project.org/"

## Please note that the project scanning result should be checked manually.
## 1. Github packages must be added manually
##    as_pkgrefs(here::here())
## 2. You might also want to change the `snapshot_date` to a fixed date, when
##    the project is finalized.

## Scan the project directory and resolve the dependency graph
rang <- resolve(here::here(),
                snapshot_date = NA,
                verbose = TRUE)

## You might want to edit `post_installation_steps` or `cache`
dockerize(rang, output_dir = here::here(), verbose = TRUE, cache = TRUE,
          post_installation_steps = c(recipes[["make"]], recipes[["texlive"]], recipes[["clean"]]),
          insert_readme = FALSE,
          copy_all = TRUE,
          cran_mirror = cran_mirror)
22 |
--------------------------------------------------------------------------------
/inst/update_apptainer.R:
--------------------------------------------------------------------------------
## Re-resolve this project's dependency graph and regenerate its
## Apptainer/Singularity definition file.
library(rang)
library(here)

cran_mirror <- "https://cloud.r-project.org/"

## Please note that the project scanning result should be checked manually.
## 1. Github packages must be added manually
##    as_pkgrefs(here::here())
## 2. You might also want to change the `snapshot_date` to a fixed date, when
##    the project is finalized.

## Scan the project directory and resolve the dependency graph
rang <- resolve(here::here(),
                snapshot_date = NA,
                verbose = TRUE)

## You might want to edit `post_installation_steps` or `cache`
apptainerize(rang, output_dir = here::here(), verbose = TRUE, cache = TRUE,
             post_installation_steps = c(recipes[["make"]], recipes[["texlive"]], recipes[["clean"]]),
             insert_readme = FALSE,
             copy_all = TRUE,
             cran_mirror = cran_mirror)
22 |
--------------------------------------------------------------------------------
/man/apptainerize.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/installation.R
3 | \name{apptainerize}
4 | \alias{apptainerize}
5 | \alias{apptainerize_rang}
6 | \alias{apptainerise}
7 | \alias{apptainerise_rang}
8 | \alias{singularize}
9 | \alias{singularize_rang}
10 | \alias{singularise}
11 | \alias{singularise_rang}
12 | \title{Create an Apptainer/Singularity Definition File of The Resolved Result}
13 | \usage{
14 | apptainerize(
15 | rang,
16 | output_dir,
17 | materials_dir = NULL,
18 | post_installation_steps = NULL,
19 | image = c("r-ver", "rstudio", "tidyverse", "verse", "geospatial"),
20 | rang_as_comment = TRUE,
21 | cache = FALSE,
22 | verbose = TRUE,
23 | lib = NA,
24 | cran_mirror = "https://cran.r-project.org/",
25 | check_cran_mirror = TRUE,
26 | bioc_mirror = "https://bioconductor.org/packages/",
27 | no_rocker = FALSE,
28 | debian_version = c("lenny", "squeeze", "wheezy", "jessie", "stretch"),
29 | skip_r17 = TRUE,
30 | insert_readme = TRUE,
31 | copy_all = FALSE,
32 | method = c("auto", "evercran", "rocker", "debian")
33 | )
34 |
35 | apptainerize_rang(...)
36 |
37 | apptainerise(...)
38 |
39 | apptainerise_rang(...)
40 |
41 | singularize(...)
42 |
43 | singularize_rang(...)
44 |
45 | singularise(...)
46 |
47 | singularise_rang(...)
48 | }
49 | \arguments{
50 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
51 |
52 | \item{output_dir}{character, where to put the Apptainer/Singularity definition file and associated content}
53 |
54 | \item{materials_dir}{character, path to the directory containing additional resources (e.g. analysis scripts) to be copied into \code{output_dir} and in turn into the Apptainer/Singularity container}
55 |
56 | \item{post_installation_steps}{character, additional steps to be added at the end of the \verb{\%post} section of the Apptainer/Singularity definition file, see an example below}
57 |
58 | \item{image}{character, which versioned Rocker image to use. Can only be "r-ver", "rstudio", "tidyverse", "verse", "geospatial".
59 | This applies only to R version >= 3.1}
60 |
61 | \item{rang_as_comment}{logical, whether to write resolved result and the steps to reproduce
62 | the file to \code{path} as comment}
63 |
64 | \item{cache}{logical, whether to cache the packages now. Please note that the system requirements are not cached. For query with non-CRAN packages, this option is strongly recommended. For query with local packages, this must be TRUE regardless of R version. For R version < 3.1, this must also be TRUE if there are any non-CRAN packages.}
65 |
66 | \item{verbose}{logical, pass to \code{\link[=install.packages]{install.packages()}}, the negated value is also passed as \code{quiet} to both \code{\link[=install.packages]{install.packages()}}
67 | and \code{\link[=download.file]{download.file()}}.}
68 |
69 | \item{lib}{character, pass to \code{\link[=install.packages]{install.packages()}}. By default, it is NA (to install the packages to the default location)}
70 |
71 | \item{cran_mirror}{character, which CRAN mirror to use}
72 |
73 | \item{check_cran_mirror}{logical, whether to check the CRAN mirror}
74 |
75 | \item{bioc_mirror}{character, which Bioconductor mirror to use}
76 |
77 | \item{no_rocker}{logical, whether to skip using Rocker images even when an appropriate version is available. Please keep this as \code{FALSE} unless you know what you are doing}
78 |
79 | \item{debian_version}{when Rocker images are not used, which EOL version of Debian to use. Can only be "lenny", "squeeze", "wheezy", "jessie", "stretch". Please keep this as the default "lenny" unless you know what you are doing}
80 |
81 | \item{skip_r17}{logical, whether to skip R 1.7.x. Currently, it is not possible to compile R 1.7.x (R 1.7.0 and R 1.7.1) with the method provided by \code{rang}. It affects \code{snapshot_date} from 2003-04-16 to 2003-10-07. When \code{skip_r17} is TRUE and \code{snapshot_date} is within the aforementioned range, R 1.8.0 is used instead}
82 |
83 | \item{insert_readme}{logical, whether to insert a README file}
84 |
85 | \item{copy_all}{logical, whether to copy everything in the current directory into the container. If \code{inst/rang} is detected in \code{output_dir}, this is coerced to TRUE.}
86 |
87 | \item{method}{character, can only be "auto", "evercran", "rocker", or "debian". Select which base image is used. "auto" (the default) selects the best option based on the R version. "evercran" is experimental.}
88 |
89 | \item{...}{arguments to be passed to \code{apptainerize}}
90 | }
91 | \value{
92 | \code{output_dir}, invisibly
93 | }
94 | \description{
95 | This function exports the result from \code{\link[=resolve]{resolve()}} to an Apptainer/Singularity definition file. For R version >= 3.1.0, the file is based on the versioned Rocker Docker image.
96 | For R version < 3.1.0, the Apptainer/Singularity definition is based on Debian and it compiles R from source.
97 | }
98 | \details{
99 | The idea behind this is to determine the installation order of R packages locally. Then, the installation script can be deployed to another
100 | fresh R session to install R packages. \code{\link[=dockerize]{dockerize()}} and \code{\link[=apptainerize]{apptainerize()}} are more reasonable ways because a fresh R session with all system requirements
101 | is provided.
102 | }
103 | \examples{
104 | \donttest{
105 | if (interactive()) {
106 | graph <- resolve(
107 | pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
108 | snapshot_date = "2020-01-16"
109 | )
110 | apptainerize(graph, ".")
111 | ## An example of using post_installation_steps to install quarto
112 | install_quarto <- c("apt-get install -y curl git && \\\\
113 | curl -LO https://quarto.org/download/latest/quarto-linux-amd64.deb && \\\\
114 | dpkg -i quarto-linux-amd64.deb && \\\\
115 | quarto install tool tinytex")
116 | apptainerize(graph, ".", post_installation_steps = install_quarto)
117 | }
118 | }
119 | }
120 | \references{
121 | \href{https://apptainer.org/}{Apptainer / Singularity}
122 |
123 | Kurtzer, G. M., Sochat, V., & Bauer, M. W. (2017) Singularity: Scientific containers for mobility of compute. PLOS ONE, 12(5):e0177459. \doi{10.1371/journal.pone.0177459}
124 |
125 | \href{https://rocker-project.org}{The Rocker Project}
126 |
127 | Ripley, B. (2005) \href{https://cran.r-project.org/doc/Rnews/Rnews_2005-1.pdf}{Packages and their Management in R 2.1.0.} R News, 5(1):8--11.
128 | }
129 | \seealso{
130 | \code{\link[=resolve]{resolve()}}, \code{\link[=export_rang]{export_rang()}}, \code{\link[=use_rang]{use_rang()}}
131 | }
132 |
--------------------------------------------------------------------------------
/man/as_pkgrefs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/as_pkgrefs.R
3 | \name{as_pkgrefs}
4 | \alias{as_pkgrefs}
5 | \alias{as_pkgrefs.default}
6 | \alias{as_pkgrefs.character}
7 | \alias{as_pkgrefs.sessionInfo}
8 | \title{Convert Data Structures into Package References}
9 | \usage{
10 | as_pkgrefs(x, ...)
11 |
12 | \method{as_pkgrefs}{default}(x, ...)
13 |
14 | \method{as_pkgrefs}{character}(x, bioc_version = NULL, no_enhances = TRUE, no_suggests = TRUE, ...)
15 |
16 | \method{as_pkgrefs}{sessionInfo}(x, ...)
17 | }
18 | \arguments{
19 | \item{x, }{currently supported data structure(s) are: output from \code{\link[=sessionInfo]{sessionInfo()}}, a character vector of package names}
20 |
21 | \item{..., }{not used}
22 |
23 | \item{bioc_version}{character. When x is a character vector, version of Bioconductor to search for package names. NULL indicates not to
24 | search for Bioconductor packages.}
25 |
26 | \item{no_enhances}{logical, when parsing DESCRIPTION, whether to ignore packages in the "Enhances" field}
27 |
28 | \item{no_suggests}{logical, when parsing DESCRIPTION, whether to ignore packages in the "Suggests" field}
29 | }
30 | \value{
31 | a vector of package references
32 | }
33 | \description{
34 | This generic function converts several standard data structures into a vector of package references, which in turn
35 | can be used as the first argument of the function \code{\link[=resolve]{resolve()}}. This function guesstimates the possible sources of the
36 | packages. But we strongly recommend manually reviewing the detected packages before using them for \code{\link[=resolve]{resolve()}}.
37 | }
38 | \examples{
39 | as_pkgrefs(sessionInfo())
40 | if (interactive()) {
41 | require(rang)
42 | graph <- resolve(as_pkgrefs(sessionInfo()))
43 | as_pkgrefs(c("rtoot"))
44 | as_pkgrefs(c("rtoot", "S4Vectors")) ## this gives cran::S4Vectors and is not correct.
45 | as_pkgrefs(c("rtoot", "S4Vectors"), bioc_version = "3.3") ## This gives bioc::S4Vectors
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/man/convert_edgelist.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/edgelist.R
3 | \name{convert_edgelist}
4 | \alias{convert_edgelist}
5 | \alias{convert_edgelist.default}
6 | \alias{convert_edgelist.ranglet}
7 | \alias{convert_edgelist.rang}
8 | \title{Convert Data Structures to rang edgelist}
9 | \usage{
10 | convert_edgelist(x, ...)
11 |
12 | \method{convert_edgelist}{default}(x, ...)
13 |
14 | \method{convert_edgelist}{ranglet}(x, ...)
15 |
16 | \method{convert_edgelist}{rang}(x, ...)
17 | }
18 | \arguments{
19 | \item{x, }{supported data structures are \code{rang} and \code{ranglet} S3 objects}
20 |
21 | \item{..., }{not used}
22 | }
23 | \value{
24 | a data frame of directed edges of dependencies
25 | }
26 | \description{
27 | This generic function converts several data structures provided by rang into an edgelist of package dependencies.
28 | }
29 | \details{
30 | the resulting data frame can be converted to an igraph object for plotting and analysis via the function \code{\link[igraph:graph_from_data_frame]{igraph::graph_from_data_frame()}}
31 | }
32 | \examples{
33 | \donttest{
34 | if (interactive()) {
35 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
36 | snapshot_date = "2020-01-16")
37 |
38 | # dependency edgelist of a single package
39 | convert_edgelist(graph$ranglets[[1]])
40 |
41 | # full dependency edgelist
42 | convert_edgelist(graph)
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/man/create_turing.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/use_rang.R
3 | \name{create_turing}
4 | \alias{create_turing}
5 | \title{Create executable research compendium according to the Turing Way}
6 | \usage{
7 | create_turing(
8 | path,
9 | add_rang = TRUE,
10 | add_makefile = TRUE,
11 | add_here = TRUE,
12 | verbose = TRUE,
13 | force = FALSE,
14 | apptainer = FALSE
15 | )
16 | }
17 | \arguments{
18 | \item{path}{character, path to the project root}
19 |
20 | \item{add_rang}{logical, whether to run \code{\link[=use_rang]{use_rang()}} to \code{path}}
21 |
22 | \item{add_makefile}{logical, whether to insert a barebone \code{Makefile} in the project root.}
23 |
24 | \item{add_here}{logical, whether to insert a hidden \code{.here} file in the project root}
25 |
26 | \item{verbose}{logical, whether to print out messages}
27 |
28 | \item{force}{logical, whether to overwrite files (\code{inst/rang/update.R}, \code{Makefile}, \code{.here}) if they
29 | exist.}
30 |
31 | \item{apptainer}{logical, whether to use apptainer. \code{FALSE} indicates using Docker}
32 | }
33 | \value{
34 | path, invisibly
35 | }
36 | \description{
37 | This \code{usethis}-style function creates an executable research compendium according to the Turing Way.
38 | }
39 | \details{
40 | According to the Turing Way, an executable research compendium should have the following properties
41 | \enumerate{
42 | \item Files should be organized in a conventional folder structure;
43 | \item Data, methods, and output should be clearly separated;
44 | \item The computational environment should be specified.
45 | }
46 |
47 | We use the structure suggested by the Turing Way:
48 | \itemize{
49 | \item \code{data_raw}: a directory to hold the raw data
50 | \item \code{data_clean}: a directory to hold the processed data
51 | \item \code{code}: a directory to hold computer code
52 | \item \code{CITATION}: a file holding citation information
53 | \item \code{paper.Rmd}: a manuscript
54 | This function provides a clearly separated organizational structure. Components can be changed. For example, the manuscript can be in another format (e.g. quarto, sweave) or even optional. With \code{add_rang}, the computational environment can be recorded and reconstructed later.
55 | }
56 | }
57 | \references{
58 | \href{https://the-turing-way.netlify.app/reproducible-research/compendia.html}{The Turing Way: Research Compendia}
59 | Gorman, KB, Williams TD. and Fraser WR (2014). Ecological Sexual Dimorphism and Environmental Variability within a Community of Antarctic Penguins (Genus Pygoscelis). PLoS ONE 9(3):e90081. \doi{10.1371/journal.pone.0090081}
60 | }
61 | \seealso{
62 | \code{\link[=use_rang]{use_rang()}}
63 | }
64 |
--------------------------------------------------------------------------------
/man/dockerize.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/installation.R
3 | \name{dockerize}
4 | \alias{dockerize}
5 | \alias{dockerize_rang}
6 | \alias{dockerise}
7 | \alias{dockerise_rang}
8 | \title{Dockerize The Resolved Result}
9 | \usage{
10 | dockerize(
11 | rang,
12 | output_dir,
13 | materials_dir = NULL,
14 | post_installation_steps = NULL,
15 | image = c("r-ver", "rstudio", "tidyverse", "verse", "geospatial"),
16 | rang_as_comment = TRUE,
17 | cache = FALSE,
18 | verbose = TRUE,
19 | lib = NA,
20 | cran_mirror = "https://cran.r-project.org/",
21 | check_cran_mirror = TRUE,
22 | bioc_mirror = "https://bioconductor.org/packages/",
23 | no_rocker = FALSE,
24 | debian_version = c("lenny", "squeeze", "wheezy", "jessie", "stretch"),
25 | skip_r17 = TRUE,
26 | insert_readme = TRUE,
27 | copy_all = FALSE,
28 | method = c("auto", "evercran", "rocker", "debian")
29 | )
30 |
31 | dockerize_rang(...)
32 |
33 | dockerise(...)
34 |
35 | dockerise_rang(...)
36 | }
37 | \arguments{
38 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
39 |
40 | \item{output_dir}{character, where to put the Docker file and associated content}
41 |
42 | \item{materials_dir}{character, path to the directory containing additional resources (e.g. analysis scripts) to be copied into \code{output_dir} and in turn into the Docker container}
43 |
44 | \item{post_installation_steps}{character, additional steps to be added before the \code{CMD} part of the Dockerfile, see an example below}
45 |
46 | \item{image}{character, which versioned Rocker image to use. Can only be "r-ver", "rstudio", "tidyverse", "verse", "geospatial".
47 | This applies only to R version >= 3.1}
48 |
49 | \item{rang_as_comment}{logical, whether to write resolved result and the steps to reproduce
50 | the file to \code{path} as comment}
51 |
52 | \item{cache}{logical, whether to cache the packages now. Please note that the system requirements are not cached. For query with non-CRAN packages, this option is strongly recommended. For query with local packages, this must be TRUE regardless of R version. For R version < 3.1, this must also be TRUE if there are any non-CRAN packages.}
53 |
54 | \item{verbose}{logical, pass to \code{\link[=install.packages]{install.packages()}}, the negated value is also passed as \code{quiet} to both \code{\link[=install.packages]{install.packages()}}
55 | and \code{\link[=download.file]{download.file()}}.}
56 |
57 | \item{lib}{character, pass to \code{\link[=install.packages]{install.packages()}}. By default, it is NA (to install the packages to the default location)}
58 |
59 | \item{cran_mirror}{character, which CRAN mirror to use}
60 |
61 | \item{check_cran_mirror}{logical, whether to check the CRAN mirror}
62 |
63 | \item{bioc_mirror}{character, which Bioconductor mirror to use}
64 |
65 | \item{no_rocker}{logical, whether to skip using Rocker images even when an appropriate version is available. Please keep this as \code{FALSE} unless you know what you are doing}
66 |
67 | \item{debian_version}{when Rocker images are not used, which EOL version of Debian to use. Can only be "lenny", "squeeze", "wheezy", "jessie", "stretch". Please keep this as the default "lenny" unless you know what you are doing}
68 |
69 | \item{skip_r17}{logical, whether to skip R 1.7.x. Currently, it is not possible to compile R 1.7.x (R 1.7.0 and R 1.7.1) with the method provided by \code{rang}. It affects \code{snapshot_date} from 2003-04-16 to 2003-10-07. When \code{skip_r17} is TRUE and \code{snapshot_date} is within the aforementioned range, R 1.8.0 is used instead}
70 |
71 | \item{insert_readme}{logical, whether to insert a README file}
72 |
73 | \item{copy_all}{logical, whether to copy everything in the current directory into the container. If \code{inst/rang} is detected in \code{output_dir}, this is coerced to TRUE.}
74 |
75 | \item{method}{character, can only be "auto", "evercran", "rocker", or "debian". Select which base image is used. "auto" (the default) selects the best option based on the R version. "evercran" is experimental.}
76 |
77 | \item{...}{arguments to be passed to \code{dockerize}}
78 | }
79 | \value{
80 | \code{output_dir}, invisibly
81 | }
82 | \description{
83 | This function exports the result from \code{\link[=resolve]{resolve()}} to a Docker file. For R version >= 3.1.0, the Dockerfile is based on the versioned Rocker image.
84 | For R version < 3.1.0, the Dockerfile is based on Debian and it compiles R from source.
85 | }
86 | \details{
87 | The idea behind this is to determine the installation order of R packages locally. Then, the installation script can be deployed to another
88 | fresh R session to install R packages. \code{\link[=dockerize]{dockerize()}} and \code{\link[=apptainerize]{apptainerize()}} are more reasonable ways because a fresh R session with all system requirements
89 | is provided.
90 | }
91 | \examples{
92 | \donttest{
93 | if (interactive()) {
94 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
95 | snapshot_date = "2020-01-16")
96 | dockerize(graph, ".")
97 | ## An example of using post_installation_steps to install quarto
98 | install_quarto <- c("RUN apt-get install -y curl git && \\\\
99 | curl -LO https://quarto.org/download/latest/quarto-linux-amd64.deb && \\\\
100 | dpkg -i quarto-linux-amd64.deb && \\\\
101 | quarto install tool tinytex")
102 | dockerize(graph, ".", post_installation_steps = install_quarto)
103 | }
104 | }
105 | }
106 | \references{
107 | \href{https://rocker-project.org}{The Rocker Project}
108 | Ripley, B. (2005) \href{https://cran.r-project.org/doc/Rnews/Rnews_2005-1.pdf}{Packages and their Management in R 2.1.0.} R News, 5(1):8--11.
109 | }
110 | \seealso{
111 | \code{\link[=resolve]{resolve()}}, \code{\link[=export_rang]{export_rang()}}, \code{\link[=use_rang]{use_rang()}}
112 | }
113 |
--------------------------------------------------------------------------------
/man/export_rang.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/installation.R
3 | \name{export_rang}
4 | \alias{export_rang}
5 | \title{Export The Resolved Result As Installation Script}
6 | \usage{
7 | export_rang(
8 | rang,
9 | path,
10 | rang_as_comment = TRUE,
11 | verbose = TRUE,
12 | lib = NA,
13 | cran_mirror = "https://cran.r-project.org/",
14 | check_cran_mirror = TRUE,
15 | bioc_mirror = "https://bioconductor.org/packages/"
16 | )
17 | }
18 | \arguments{
19 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
20 |
21 | \item{path}{character, path of the exported installation script}
22 |
23 | \item{rang_as_comment}{logical, whether to write resolved result and the steps to reproduce
24 | the file to \code{path} as comment}
25 |
26 | \item{verbose}{logical, pass to \code{\link[=install.packages]{install.packages()}}, the negated value is also passed as \code{quiet} to both \code{\link[=install.packages]{install.packages()}}
27 | and \code{\link[=download.file]{download.file()}}.}
28 |
29 | \item{lib}{character, pass to \code{\link[=install.packages]{install.packages()}}. By default, it is NA (to install the packages to the default location)}
30 |
31 | \item{cran_mirror}{character, which CRAN mirror to use}
32 |
33 | \item{check_cran_mirror}{logical, whether to check the CRAN mirror}
34 |
35 | \item{bioc_mirror}{character, which Bioconductor mirror to use}
36 | }
37 | \value{
38 | \code{path}, invisibly
39 | }
40 | \description{
41 | This function exports the results from \code{\link[=resolve]{resolve()}} to an installation script that can be run in a fresh R environment.
42 | }
43 | \details{
44 | The idea behind this is to determine the installation order of R packages locally. Then, the installation script can be deployed to another
45 | fresh R session to install R packages. \code{\link[=dockerize]{dockerize()}} and \code{\link[=apptainerize]{apptainerize()}} are more reasonable ways because a fresh R session with all system requirements
46 | is provided.
47 | }
48 | \examples{
49 | \donttest{
50 | if (interactive()) {
51 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
52 | snapshot_date = "2020-01-16")
53 | export_rang(graph, "rang.R")
54 | }
55 | }
56 | }
57 | \references{
58 | Ripley, B. (2005) \href{https://cran.r-project.org/doc/Rnews/Rnews_2005-1.pdf}{Packages and their Management in R 2.1.0.} R News, 5(1):8--11.
59 | }
60 | \seealso{
61 | \code{\link[=generate_installation_order]{generate_installation_order()}}
62 | }
63 |
--------------------------------------------------------------------------------
/man/export_renv.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/installation.R
3 | \name{export_renv}
4 | \alias{export_renv}
5 | \title{Export The Resolved Result As a renv Lockfile}
6 | \usage{
7 | export_renv(rang, path = ".")
8 | }
9 | \arguments{
10 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
11 |
12 | \item{path}{character, path of the exported renv lockfile}
13 | }
14 | \value{
15 | \code{path}, invisibly
16 | }
17 | \description{
18 | This function exports the results from \code{\link[=resolve]{resolve()}} to a renv lockfile that can be used as an alternative to a docker container.
19 | }
20 | \details{
21 | A renv lockfile is easier to handle than a docker container, but it cannot always reliably reproduce the exact computational environment, especially for very old code.
22 | }
23 | \examples{
24 | \donttest{
25 | if (interactive()) {
26 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
27 | snapshot_date = "2020-01-16")
28 | export_renv(graph, ".")
29 | }
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/man/figures/rang_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/man/figures/rang_logo.png
--------------------------------------------------------------------------------
/man/generate_installation_order.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/installation.R
3 | \name{generate_installation_order}
4 | \alias{generate_installation_order}
5 | \title{Create a Data Frame of The Resolved Result
6 | This function exports the results from \code{\link[=resolve]{resolve()}} to a data frame, in which each row represents one installation step. The order of rows is the installation order. By installing packages in the specified order, one can install all the resolved packages without conflicts.}
7 | \usage{
8 | generate_installation_order(rang)
9 | }
10 | \arguments{
11 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
12 | }
13 | \value{
14 | A data frame ordered by installation order.
15 | }
16 | \description{
17 | Create a Data Frame of The Resolved Result
18 | This function exports the results from \code{\link[=resolve]{resolve()}} to a data frame, in which each row represents one installation step. The order of rows is the installation order. By installing packages in the specified order, one can install all the resolved packages without conflicts.
19 | }
20 | \examples{
21 | \donttest{
22 | if (interactive()) {
23 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
24 | snapshot_date = "2020-01-16")
25 | generate_installation_order(graph)
26 | }
27 | }
28 | }
29 | \references{
30 | Ripley, B. (2005) \href{https://cran.r-project.org/doc/Rnews/Rnews_2005-1.pdf}{Packages and their Management in R 2.1.0.} R News, 5(1):8--11.
31 | }
32 |
--------------------------------------------------------------------------------
/man/query_sysreqs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/sysreqs.R
3 | \name{query_sysreqs}
4 | \alias{query_sysreqs}
5 | \title{Query for System Requirements}
6 | \usage{
7 | query_sysreqs(rang, os = "ubuntu-20.04")
8 | }
9 | \arguments{
10 | \item{rang}{output from \code{\link[=resolve]{resolve()}}}
11 |
12 | \item{os}{character, which OS to query for system requirements}
13 | }
14 | \value{
15 | a \code{rang} S3 object with the following items
16 | \item{call}{original function call}
17 | \item{ranglets}{List of dependency graphs of all packages in \code{pkgs}}
18 | \item{snapshot_date}{\code{snapshot_date}}
19 | \item{no_enhances}{\code{no_enhances}}
20 | \item{no_suggests}{\code{no_suggests}}
21 | \item{unresolved_pkgsrefs}{Packages that can't be resolved}
22 | \item{sysreqs}{System requirements as Linux commands}
23 | \item{r_version}{The latest R version as of \code{snapshot_date}}
24 | \item{os}{\code{os}}
25 | }
26 | \description{
27 | This function takes an S3 object returned from \code{\link[=resolve]{resolve()}} and (re)queries the System Requirements.
28 | }
29 | \examples{
30 | \donttest{
31 | if (interactive()) {
32 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
33 | snapshot_date = "2020-01-16", query_sysreqs = FALSE)
34 | graph$sysreqs
35 | graph2 <- query_sysreqs(graph, os = "ubuntu-20.04")
36 | graph2$sysreqs
37 | }
38 | }
39 | }
40 | \seealso{
41 | \code{\link[=resolve]{resolve()}}
42 | }
43 |
--------------------------------------------------------------------------------
/man/recipes.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/memo_misc.R
3 | \docType{data}
4 | \name{recipes}
5 | \alias{recipes}
6 | \title{Recipes for Building Container Images}
7 | \format{
8 | An object of class \code{list} of length 5.
9 | }
10 | \usage{
11 | recipes
12 | }
13 | \description{
14 | A list containing several useful recipes for container building. Useful for the \code{post_installation_steps} argument of \code{\link[=dockerize]{dockerize()}}. Available recipes are:
15 | \itemize{
16 | \item \code{texlive}: install pandoc and LaTeX, useful for rendering RMarkdown
17 | \item \code{texlivefull}: Similar to the above, but install the full distribution of TeX Live (~ 3GB)
18 | \item \code{quarto}: install quarto and tinytex
19 | \item \code{clean}: clean up the container image by removing cache
20 | \item \code{make}: install GNU make
21 | }
22 | }
23 | \examples{
24 | \donttest{
25 | if (interactive()) {
26 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
27 | snapshot_date = "2020-01-16")
28 | ## install texlive
29 | dockerize(graph, ".", post_installation_steps = recipes[['texlive']])
30 | }
31 | }
32 | }
33 | \keyword{datasets}
34 |
--------------------------------------------------------------------------------
/man/resolve.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/resolve.R
3 | \name{resolve}
4 | \alias{resolve}
5 | \title{Resolve Dependencies Of R Packages}
6 | \usage{
7 | resolve(
8 | pkgs = ".",
9 | snapshot_date,
10 | no_enhances = TRUE,
11 | no_suggests = TRUE,
12 | query_sysreqs = TRUE,
13 | os = "ubuntu-20.04",
14 | verbose = FALSE
15 | )
16 | }
17 | \arguments{
18 | \item{pkgs}{\code{pkgs} can be 1) a character vector of R packages to resolve, 2) a path to a \href{https://rstudio.github.io/renv/articles/lockfile.html}{\code{renv} lockfile}, or 3) a data structure that \code{\link[=as_pkgrefs]{as_pkgrefs()}} can convert to a character vector of package references. For 1) \code{pkgs} can be either in shorthands, e.g. "rtoot", "ropensci/readODS", or in package references, e.g. "cran::rtoot", "github::ropensci/readODS". Please refer to the \href{https://r-lib.github.io/pkgdepends/reference/pkg_refs.html}{Package References documentation} of \code{pak} for details. Currently, this package supports only cran and github packages. For 2) \code{\link[=as_pkgrefs]{as_pkgrefs()}} support the output of \code{\link[=sessionInfo]{sessionInfo()}}, a renv lockfile or a single directory. If it is a single directory, all R scripts are scanned for R packages used using \code{\link[renv:dependencies]{renv::dependencies()}}. Currently, the default is to scan the R scripts in the current working directory. Please also note that this scanning only assumes there are CRAN and Bioconductor packages. We strongly recommend checking whether this is really the case (see example below).}
19 |
20 | \item{snapshot_date}{Snapshot date, if not specified, assumed to be a month ago}
21 |
22 | \item{no_enhances}{logical, whether to ignore packages in the "Enhances" field}
23 |
24 | \item{no_suggests}{logical, whether to ignore packages in the "Suggests" field}
25 |
26 | \item{query_sysreqs}{logical, whether to query for System Requirements. Important: Archived CRAN can't be queried for system requirements. Those
27 | packages are assumed to have no system requirement.}
28 |
29 | \item{os}{character, which OS to query for system requirements}
30 |
31 | \item{verbose}{logical, whether to display messages}
32 | }
33 | \value{
34 | a \code{rang} S3 object with the following items
35 | \item{call}{original function call}
36 | \item{ranglets}{List of dependency graphs of all packages in \code{pkgs}}
37 | \item{snapshot_date}{\code{snapshot_date}}
38 | \item{no_enhances}{\code{no_enhances}}
39 | \item{no_suggests}{\code{no_suggests}}
40 | \item{unresolved_pkgsrefs}{Packages that can't be resolved}
41 | \item{sysreqs}{System requirements as Linux commands}
42 | \item{r_version}{The latest R version as of \code{snapshot_date}}
43 | \item{os}{\code{os}}
44 | }
45 | \description{
46 | This function recursively queries dependencies of R packages at a specific snapshot time. The dependency graph can then be used to recreate the computational environment. The data on dependencies are provided by R-hub.
47 | }
48 | \examples{
49 | \donttest{
50 | if (interactive()) {
51 | graph <- resolve(pkgs = c("openNLP", "LDAvis", "topicmodels", "quanteda"),
52 | snapshot_date = "2020-01-16")
53 | graph
54 | ## to resolve github packages
55 | gh_graph <- resolve(pkgs = c("https://github.com/schochastics/rtoot"),
56 | snapshot_date = "2022-11-28")
57 | gh_graph
58 | ## scanning
59 | graph <- resolve(snapshot_date = "2022-11-28")
60 | ## But we recommend this:
61 | pkgs <- as_pkgrefs(".")
62 | pkgs ## check the accuracy
63 | graph <- resolve(pkgs, snapshot_date = "2022-11-28")
64 | }
65 | }
66 | }
67 | \references{
68 | \href{https://r-lib.github.io/pkgdepends/reference/pkg_refs.html}{Package References}
69 | }
70 | \seealso{
71 | \code{\link[=dockerize]{dockerize()}}
72 | }
73 |
--------------------------------------------------------------------------------
/man/use_rang.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/use_rang.R
3 | \name{use_rang}
4 | \alias{use_rang}
5 | \title{Setup rang for a directory}
6 | \usage{
7 | use_rang(
8 | path = ".",
9 | add_makefile = TRUE,
10 | add_here = TRUE,
11 | verbose = TRUE,
12 | force = FALSE,
13 | apptainer = FALSE
14 | )
15 | }
16 | \arguments{
17 | \item{path}{character, path to the project root}
18 |
19 | \item{add_makefile}{logical, whether to insert a barebone \code{Makefile} in the project root.}
20 |
21 | \item{add_here}{logical, whether to insert a hidden \code{.here} file in the project root}
22 |
23 | \item{verbose}{logical, whether to print out messages}
24 |
25 | \item{force}{logical, whether to overwrite files (\code{inst/rang/update.R}, \code{Makefile}, \code{.here}) if they
26 | exist.}
27 |
28 | \item{apptainer}{logical, whether to use apptainer. \code{FALSE} indicates using Docker}
29 | }
30 | \value{
31 | path, invisibly
32 | }
33 | \description{
34 | This \code{usethis}-style function adds the infrastructure in a directory (presumably with R scripts
35 | and data) for (re)constructing the computational environment.
36 | Specifically, this function inserts \code{inst/rang} into the directory, which contains
37 | all components for the reconstruction. Optionally, \code{Makefile} and \code{.here} are also inserted
38 | to ease the development of analytic code.
39 | By default, (re)running this function does not overwrite any file. One can change this by setting
40 | \code{force} to TRUE.
41 | }
42 | \details{
43 | The infrastructure being added to your path consists of:
44 | \itemize{
45 | \item \code{inst/rang} directory in the project root
46 | \item \code{update.R} file inside the directory
47 | \item \code{.here} in the project root (if \code{add_here} is TRUE)
48 | \item \code{Makefile} in the project root (if \code{add_makefile} is TRUE)
49 | You might need to edit \code{update.R} manually. The default is to scan the whole project for
50 | used R packages and assume they are either on CRAN or Bioconductor. If you have used other R packages,
51 | you might need to edit this manually.
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/methodshub.qmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: rang - Reconstructing Reproducible R Computational Environments
3 | format:
4 | html:
5 | embed-resources: true
6 | gfm: default
7 | ---
8 |
9 | ## Description
10 |
11 |
12 |
Resolve the dependency graph of R packages at a specific time point based on the information from various 'R-hub' web services. The dependency graph can then be used to reconstruct the R computational environment with 'Rocker'.
14 |
15 | ## Keywords
16 |
17 |
18 |
19 | * Computational Environment
20 | * Computational Reproducibility
21 | * Open Science
22 |
23 | ## Science Usecase(s)
24 |
25 |
26 |
27 |
28 |
29 |
30 | This package is designed to retrospectively construct a constant computational environment for running shared R scripts, in which the computational environment is **not** specified. Additional functions are provided for creating executable [research compendia](https://research-compendium.science/).
31 |
32 | ## Repository structure
33 |
34 | This repository follows [the standard structure of an R package](https://cran.r-project.org/doc/FAQ/R-exts.html#Package-structure).
35 |
36 | ## Environment Setup
37 |
38 | With R installed:
39 |
40 | ```r
41 | install.packages("rang")
42 | ```
43 |
44 | Installation of [Docker](https://www.docker.com/) or [Singularity](https://sylabs.io/singularity/) is strongly recommended.
45 |
46 | ## Input Data
47 |
48 | The main function `resolve()` accepts various input data. One example is a path to a directory of R scripts.
49 |
50 | ## Output Data
51 |
52 | The main function `resolve()` gives an S3 object of dependency graph. Please refer to @sec-touse.
53 |
54 | ## How to Use {#sec-touse}
55 |
56 | Suppose you would like to run this code snippet in [this 2018 paper](https://joss.theoj.org/papers/10.21105/joss.00774) of the R package `quanteda` (an R package for text analysis).
57 |
58 | ```r
59 | library("quanteda")
60 | # construct the feature co-occurrence matrix
61 | examplefcm <-
62 | tokens(data_corpus_irishbudget2010, remove_punct = TRUE) %>%
63 | tokens_tolower() %>%
64 | tokens_remove(stopwords("english"), padding = FALSE) %>%
65 | fcm(context = "window", window = 5, tri = FALSE)
66 | # choose 30 most frequency features
67 | topfeats <- names(topfeatures(examplefcm, 30))
68 | # select the top 30 features only, plot the network
69 | set.seed(100)
70 | textplot_network(fcm_select(examplefcm, topfeats), min_freq = 0.8)
71 | ```
72 |
73 | This code cannot be executed with a recent version of `quanteda`. As the above code was written in 2018, one can get the dependency graph of `quanteda` in 2018:
74 |
75 | ```{r}
76 | library(rang)
77 | graph <- resolve(pkgs = "quanteda",
78 | snapshot_date = "2018-10-06",
79 | os = "ubuntu-18.04")
80 | graph
81 | ```
82 |
This dependency graph can be used to create a dockerized computational environment (in the form of a `Dockerfile`) for running the abovementioned code. Suppose one would like to generate the `Dockerfile` in the directory "quanteda_docker".
84 |
85 | ```r
86 | dockerize(graph, "quanteda_docker", method = "evercran")
87 | ```
88 |
89 | A Docker container can then be built and launched, e.g. from the shell:
90 |
91 | ```sh
92 | cd quanteda_docker
93 | docker build -t rang .
94 | docker run --rm --name "rangtest" -ti rang
95 | ```
96 |
97 | The launched container is based on R 3.5.1 and `quanteda` 1.3.4 and is able to run the abovementioned code snippet.
98 |
99 | Please refer to either the [publication of this package](https://doi.org/10.1371/journal.pone.0286761) or the [official website](https://gesistsa.github.io/rang/) for further information.
100 |
101 | ## Contact Details
102 |
103 | Maintainer: Chung-hong Chan
104 |
105 | Issue Tracker: [https://github.com/gesistsa/rang/issues](https://github.com/gesistsa/rang/issues)
106 |
107 | ## Publication
108 |
Chan, C. H., & Schoch, D. (2023). rang: Reconstructing reproducible R computational environments. PLoS ONE, 18(6): e0286761. https://doi.org/10.1371/journal.pone.0286761
110 |
111 |
112 |
113 |
114 |
115 |
116 |
--------------------------------------------------------------------------------
/paper/Makefile:
--------------------------------------------------------------------------------
# Makefile for building the paper, its bibliography, and a latexdiff
# against the initial submission (paper_r0.tex).

# Render the paper with quarto; clean.sh first restores the project's
# patched PLOS "before-body" partial (see clean.sh).
render:
	bash clean.sh ## clean up the plos theme
	quarto render paper.qmd
# Extract the cited entries from a personal master bibliography into
# paper.bib (bibcon is an external, author-local tool).
bib:
	bibcon -b ~/dev/dotfiles/bib.bib -o paper.bib paper.qmd
# Build diff.pdf marking up changes between the initial submission
# (paper_r0.tex) and the current paper.tex: compile both, inline their
# bibliographies with latexpand, then run latexdiff on the flattened
# files. Recipe lines prefixed with "-" ignore errors from that command.
diff: render
	cp paper_r0.tex temp1.tex
	-latex temp1
	-bibtex temp1
	pdflatex temp1
	pdflatex temp1
	perl latexpand --expand-bbl temp1.bbl temp1.tex > flat1.tex
	cp paper.tex temp2.tex
	-latex temp2
	-bibtex temp2
	pdflatex temp2
	pdflatex temp2
	perl latexpand --expand-bbl temp2.bbl temp2.tex > flat2.tex
	cp flat2.tex paper_flat.tex
	latexdiff flat1.tex flat2.tex > diff.tex
	-pdflatex diff.tex
	-pdflatex diff.tex
	rm temp*
	rm flat*
	rm diff.aux
	rm diff.log
	rm diff.tex
28 |
--------------------------------------------------------------------------------
/paper/basel2023_rang/_extensions/gesiscss/fakegesis/_extension.yml:
--------------------------------------------------------------------------------
1 | title: Fake GESIS Presentation Template
2 | author: Chung-hong Chan
3 | version: 0.0.1
4 | quarto-required: ">=1.2.231"
5 | contributes:
6 | formats:
7 | revealjs:
8 | slide-number: c
9 | highlight-style: github
10 | theme: fakegesis.scss
11 | footer: "[www.gesis.org](http://www.gesis.org)"
12 | logo: gesis.png
13 |
--------------------------------------------------------------------------------
/paper/basel2023_rang/_extensions/gesiscss/fakegesis/fakegesis.scss:
--------------------------------------------------------------------------------
/*-- scss:defaults --*/

// fonts
$font-family-sans-serif: 'Helvetica' !default;

// colors
$body-bg: #ffffff !default;
$body-color: #000000 !default;
$link-color: #58748f !default;

$code-block-bg: #ffffdc !default;
$code-block-border-color: $body-color !default;

$code-bg: #c6d1dc !default;
$code-color: $link-color !default;
$code-block-bg-alpha: -.1 !default;

$presentation-heading-color: $link-color !default;

/*-- scss:rules --*/

// circular, framed portrait image
.picture {
  border-style: solid;
  border-width: 3pt;
  border-color: $body-color;
  border-radius: 50%;
  padding: 5pt;
  height: 50%;
  width: auto;
}

#title-slide {
  text-align: left;

  .institute,
  .quarto-title-affiliation {
    // font-style: italic;
    font-size: 60%;
  }

  .author,
  .quarto-title-author-name {
    color: $body-color;
  }

  .quarto-title-authors {
    display: flex;
    justify-content: left;

    .quarto-title-author {
      padding-left: 0em;
      padding-right: 0em;
      width: 100%;
    }
  }

  // horizontal rule drawn above the author name(s)
  p.author::before,
  div.quarto-title-author::before {
    content: "";
    display: block;
    border: none;
    background-color: $body-color;
    color: $body-color;
    height: 3px;
    margin-bottom: 1em;
  }

  p, a {
    color: $link-color;
  }
}

.footer {
  bottom: 10px !important;
  // color: #58748f !important;
}

// progress bar moved to the top edge of the slide
.reveal .progress {
  top: 0 !important;
  height: 2% !important;
}

// logo pinned to the top-left corner instead of reveal.js's default
.slide-logo {
  display: block !important;
  position: fixed !important;
  top: 2% !important;
  left: 1.5% !important;
  max-height: 10% !important;
  height: 3.5% !important;
  width: auto !important;
  color: $body-color !important;
}

.slide-menu-button {
  left: 10px !important;
  bottom: 10px !important;
}

.slide-number,
.reveal.has-logo .slide-number {
  bottom: 10px !important;
  right: 10px !important;
  top: unset !important;
  color: $body-color !important;
}

.cell-output {
  border: 1px solid $body-color;
  border-radius: 4px;
}

.center-x {
  text-align: center;
  background-color: $code-bg;
  // color: #58748f ;
}

// center an element both horizontally and vertically within its
// positioned ancestor
.center-xy {
  margin: 0;
  position: absolute;
  top: 50%;
  left: 50%;
  // FIX: transform functions are space-separated; the previous
  // comma-separated form ("translateY(-50%), translateX(-50%)") is
  // invalid CSS, so browsers dropped the declaration and the element
  // was never re-centered.
  -ms-transform: translateY(-50%) translateX(-50%);
  transform: translateY(-50%) translateX(-50%);
}
126 |
--------------------------------------------------------------------------------
/paper/basel2023_rang/_extensions/gesiscss/fakegesis/gesis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/_extensions/gesiscss/fakegesis/gesis.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/base1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/base1.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/base2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/base2.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/bowie.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/bowie.jpg
--------------------------------------------------------------------------------
/paper/basel2023_rang/comps.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/comps.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/dplyr.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/dplyr.RDS
--------------------------------------------------------------------------------
/paper/basel2023_rang/dplyr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/dplyr.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/dplyr1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/dplyr1.png
--------------------------------------------------------------------------------
/paper/basel2023_rang/rang_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/basel2023_rang/rang_logo.png
--------------------------------------------------------------------------------
/paper/before-body.tex:
--------------------------------------------------------------------------------
1 | $-- PLOS insert title and author within the main body
2 | $-- Use this template to insert the necessary pieces before any content provided in .qmd file
3 | \vspace*{0.2in}
4 |
5 | % Title must be 250 characters or less.
6 | \begin{flushleft}
7 | {\Large
8 | \textbf\newline{$title$} % Please use "sentence case" for title and headings (capitalize only the first word in a title (or heading), the first word in a subtitle (or subheading), and any proper nouns).
9 | }
10 | \newline
11 | \\
12 | % Insert author names, affiliations and corresponding author email (do not include titles, positions, or degrees).
13 | $by-author:_authors.tex()[, ]$
14 | \\
15 | \bigskip
16 | $by-affiliation:_affiliations.tex()[\\ ]$
17 | \bigskip
18 |
19 |
20 | % Use the asterisk to denote corresponding authorship and provide email address in note below.
21 | $for(by-author)$
22 | $if(by-author.attributes.corresponding)$
23 | * $by-author.email$
24 | $endif$
25 | $endfor$
26 |
27 | \end{flushleft}
28 |
29 | $if(abstract)$
30 | \section*{Abstract}
31 | $abstract$
32 | $endif$
33 |
34 | $if(author-summary)$
35 | \section*{Author summary}
36 | $author-summary$
37 | $endif$
38 |
39 | \linenumbers
40 |
--------------------------------------------------------------------------------
/paper/clean.sh:
--------------------------------------------------------------------------------
# Restore the project's patched PLOS "before-body" partial: if the quarto
# PLOS extension has (re)installed its stock template — detected via the
# \Yinyang marker that only the stock template contains — overwrite it
# with our local before-body.tex.
if grep -q "\\Yinyang" _extensions/quarto-journals/plos/partials/before-body.tex ;
then
cp before-body.tex _extensions/quarto-journals/plos/partials/before-body.tex
fi
5 |
6 |
--------------------------------------------------------------------------------
/paper/diff.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/diff.pdf
--------------------------------------------------------------------------------
/paper/hilgard.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/hilgard.RDS
--------------------------------------------------------------------------------
/paper/nathaniel/besley.dta:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/nathaniel/besley.dta
--------------------------------------------------------------------------------
/paper/nathaniel/fn_5.R:
--------------------------------------------------------------------------------
# Replication script: re-estimates the Besley logit fixed-effects models
# with glm() and bife(), demonstrating the NotAllZero subsample; all
# console output is captured in fn_5.txt.
#Before starting we route all output to a file, fn_5.txt
sink("fn_5.txt")
#First we load library foreign and library bife
#Note the library command is hardcoded to the standard file for libraries
#If your libraries are different, or they fail to load, look at the commented out lib.loc subcommand
#These are the only R packages needed that are not in baseR
library("foreign")
#lib.loc="/Library/Frameworks/R.framework/Versions/3.5/Resources/library")
library("bife")
#lib.loc="/Library/Frameworks/R.framework/Versions/3.5/Resources/library")
# now we read in the same Stata data as used in Table_1, saved as an early version Stata dta file, besley.dta which is put into data_b
#For timing
start.time <- Sys.time()
data_b<-read.dta("besley.dta")
glm_naive<-glm(graduate~Democracy+logGDPcapita+factor(country),data=data_b,family=binomial(link=logit))
summary.glm(glm_naive)
#now we run bife to both get correct LogitFE results and to get the data set corresponding to NotAllZero groups) - we run "vanilla" bife without cluster robust standard errors
bife_all<-bife(graduate~Democracy+logGDPcapita|country,data=data_b,bias_corr="no")
# NOTE(review): fixed="TRUE" passes a character string, not the logical
# TRUE — confirm against the bife summary method that this is interpreted
# as intended before changing it (kept as-is for replication fidelity).
summary(bife_all,fixed="TRUE")
#Now we create the NotAllZero data set for glm to show this works
y_notall0<-bife_all$model_info$y
x_notall0<-bife_all$model_info$X
country_notall0<-bife_all$model_info$id
glm_notall0 <- glm(y_notall0~x_notall0+factor(country_notall0),family=binomial(link=logit))
summary.glm(glm_notall0)
end.time <- Sys.time()
time.taken <- end.time - start.time
# NOTE(review): time.taken is a difftime; cat() prints its numeric value
# without units, so the unit depends on the elapsed-time magnitude
cat("Execution time:", time.taken)
# also closes the sink() opened at the top, flushing fn_5.txt
closeAllConnections()
30 |
--------------------------------------------------------------------------------
/paper/oser/Makefile:
--------------------------------------------------------------------------------
# Executable-compendium driver for Oser et al. (2022); see README.md for
# what each target does.

# name of the rendered output file inside the container
output_file=reproduced.html
# R command executed inside the container to knit the original Rmd
r_cmd = "rmarkdown::render('materials/README.Rmd', \
output_file = '${output_file}')"
# short prefix used to derive image/container/file names
handle=oser
local_file=${handle}_README.html

.PHONY: all resolve build render export rebuild

all: resolve build render
	echo "finished"
# resolve the dependency graph and generate the Dockerfile (oser.R)
resolve:
	Rscript ${handle}.R
# build the Docker image from the generated context directory
build: ${handle}docker
	docker build -t ${handle}img ${handle}docker
# render the Rmd inside a throwaway container and copy the HTML back out
render:
	docker run -d --rm --name "${handle}container" -ti ${handle}img
	docker exec ${handle}container Rscript -e ${r_cmd}
	docker cp ${handle}container:/materials/${output_file} ${local_file}
	docker stop ${handle}container
# archive the image for long-term preservation
export:
	docker save ${handle}img | gzip > ${handle}img.tar.gz
# restore the image from the archived tarball
rebuild: ${handle}img.tar.gz
	docker load < ${handle}img.tar.gz
24 |
--------------------------------------------------------------------------------
/paper/oser/README.md:
--------------------------------------------------------------------------------
1 | # Executable compendium of Oser et al. (2022)
2 |
3 | This is enhanced from the [original data and code](http://doi.org/10.17605/OSF.IO/AF5DR) shared by Oser et al. (2022) for their article "How Political Efficacy Relates to Online and Offline Political Participation: A Multilevel Meta-analysis" in *Political Communication* ([doi](https://doi.org/10.1080/10584609.2022.2086329)).
4 |
5 | In order to run this executable compendium, the following components are needed.
6 |
7 | * Docker (Please install it with [rootless mode](https://docs.docker.com/engine/security/rootless/))
8 | * Make (on a standard Ubuntu machine: `sudo apt install -y make`)
9 | * R (please follow [this guide](https://cran.r-project.org/bin/linux/ubuntu/) to install the latest version on a standard Ubuntu machine)
10 | * the R package `rang` (`install.packages('rang')`)
11 |
All the instructions to execute the analysis in batch are available in the provided `Makefile`.
13 |
14 | * `make resolve`: scan the code provided by Oser et al. for all R packages used, resolve their dependency structure, generate `Dockerfile`, and cache all R packages.
15 | * `make build`: build the Docker image
16 | * `make render`: render the RMarkdown file provided by Oser et al. inside a container and obtain the rendered HTML file back
17 | * `make export`: export the Docker image
18 | * `make rebuild`: rebuild the exported Docker image
19 | * `make all`: running `make resolve`, `make build`, and `make render`
20 |
21 | The complete version of the executable compendium is available here: https://doi.org/10.5281/zenodo.7708417
22 |
23 | # Disclaimer
24 |
This executable compendium was created by the authors of `rang`. However, the data and code are shared by Oser et al. The authors of `rang` claim no ownership of the data and code shared by Oser et al.
26 |
--------------------------------------------------------------------------------
/paper/oser/oser.R:
--------------------------------------------------------------------------------
# Resolve the dependency graph of the Oser et al. (2022) replication
# materials and generate a dockerized computational environment
# (see README.md in this directory).
require(rang)

## all packages used in the project
cran_pkgs <- as_pkgrefs("meta-analysis") ## dmetar is an undeclared github package: MathiasHarrer/dmetar

## rewrite the mis-detected CRAN reference to its actual GitHub source
cran_pkgs[cran_pkgs == "cran::dmetar"] <- "MathiasHarrer/dmetar"
## resolve all dependencies as of the snapshot date 2021-08-11
x <- resolve(cran_pkgs, "2021-08-11", verbose = TRUE)
##print(x, all_pkgs = TRUE)
## generate the Dockerfile in "oserdocker", bundling the materials
## directory and caching all package sources
dockerize(x, "oserdocker", materials_dir = "meta-analysis", cache = TRUE)
10 |
--------------------------------------------------------------------------------
/paper/paper.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/paper.pdf
--------------------------------------------------------------------------------
/paper/peng.R:
--------------------------------------------------------------------------------
## Reproduction of Peng's ptproc example; the commented-out lines record
## how the 2004-era environment itself was reconstructed with rang.
##require(rang)

##graph <- resolve("./ptproc_1.5-1.tar.gz", "2004-07-01")
##dockerize(graph, "~/dev/misc/ptproc", cache = TRUE)

require(ptproc)

## simulate 100 random points in the unit cube as point-process data
set.seed(1000)
x <- cbind(runif(100), runif(100), runif(100))
## Conditional intensity of a homogeneous Poisson process, following the
## ptproc conditional-intensity interface (pts and data are accepted but
## unused here). params[1] is the constant rate. When TT is NULL the
## intensity (the rate) is returned once per evaluation point; otherwise
## the function returns the intensity integrated over the study region,
## i.e. the rate times the volume of the hyper-rectangle whose per-axis
## bounds are the columns of TT.
hPois.cond.int <- function(params, eval.pts, pts = NULL, data = NULL, TT = NULL) {
    rate <- params[1]
    if (is.null(TT)) {
        return(rep(rate, nrow(eval.pts)))
    }
    domain.volume <- prod(apply(TT, 2, diff))
    rate * domain.volume
}
## Fit a homogeneous Poisson model (starting rate 50) over the unit cube
## and maximise the likelihood with BFGS; trace = 2 prints optimiser
## progress to the console.
ppm <- ptproc(pts = x, cond.int = hPois.cond.int, params = 50, ranges = cbind(c(0,1), c(0,1), c(0,1)))
fit <- ptproc.fit(ppm, optim.control = list(trace = 2), method = "BFGS")
summary(fit)
22 |
--------------------------------------------------------------------------------
/paper/peng.sh:
--------------------------------------------------------------------------------
1 | Rscript -e "require(rang); dockerize(resolve('ptproc', '2004-07-01'), 'pengdocker', cache = TRUE)"
2 | docker build -t pengimg ./pengdocker
3 | docker run -d --rm --name "pengcontainer" -ti pengimg
4 | docker cp peng.R pengcontainer:/peng.R
5 | docker exec pengcontainer R CMD BATCH peng.R
6 | docker exec pengcontainer cat peng.Rout
7 | docker cp pengcontainer:/peng.Rout peng.Rout
8 | docker stop pengcontainer
9 |
--------------------------------------------------------------------------------
/paper/quanteda_rstudio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/quanteda_rstudio.png
--------------------------------------------------------------------------------
/paper/r1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/r1.pdf
--------------------------------------------------------------------------------
/paper/r1.qmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Re: PONE-D-23-07706"
3 | format:
4 | pdf: default
5 | ---
6 |
7 | Dear Professor Fernandez-Lozano, dear reviewers,
8 |
9 | Thank you for your insightful feedback on our paper "rang: Reconstructing reproducible R computational environments" (PONE-D-23-07706). Following the reviewers’ suggestions, we have revised the manuscript. We respond below through a point-by-point outline.
10 |
11 | We hope that the revised manuscript will now be suited for publication in PLOS ONE. We are looking forward to hearing back from you.
12 |
13 | Sincerely,
14 |
15 | The Authors
16 |
17 | * * *
18 |
19 | # Reviewer #1: General comments
20 |
21 | **R1.1: This manuscript describes the ‘rang’ package for R, focusing on six examples of using rang to reconstruct the computational environments of old scripts. This is important and interesting, for reasons well described in the introduction—basically, computational reproducibility is a hard problem, and while not a panacea this package provides useful functionality to help this. The ms is fairly well written, and the core examples are very well done in their variety and step-by-step explanations.**
22 |
23 | A1.1 We would like to thank R1 for their appreciation of our package `rang`.
24 |
25 | **R1.2: There are some minor problems (see below), and one major one I thought: if the purpose of this ms is to describe the rang package, i.e. to be its primary peer-reviewed documentation, it needs to list all package functions and what they do (e.g. in a table; see #12 below).**
26 |
27 | A1.2 We agreed with R1 that a list of all package functions is needed. It has been added accordingly. (See table 1)
28 |
29 | **R1.3: In summary, this is an interesting and well-done ms that usefully lays out how to use rang to reconstruct computational environments in a wide variety of cases. It needs minor to moderate revisions for clarity in many places, but is fundamentally a strong piece worthy of publication.**
30 |
31 | A1.3 We would like to thank R1 for their appreciation of our paper.
32 |
33 | **R1.4 Specific comments 1. Abstract: could probably say “usually missing”**
34 |
35 | A1.4 We adopted the suggestion by R1.
36 |
37 | **R1.5 2. Abstract: change “spanning from” to “spanning” (grammar)**
38 |
39 | A1.5 We adopted the suggestion by R1.
40 |
41 | **R1.6 3. Line 13: cite R correctly – see citation()**
42 |
43 | A1.6 We cited R accordingly.
44 |
45 | **R1.7 4. L. 24: this is a bit odd, as slurm is a workload scheduler, not a computing environment**
46 |
47 | A1.7 R1 correctly pointed out the problem. We removed the mention of slurm.
48 |
49 | **R1.8 5. L. 64: “unambiguously specify” instead of “pin down”? Seems clearer**
50 |
A1.8 We adopted R1's suggestion to say "(unambiguously) specify" instead of "pin down". And we agree that it is clearer.
52 |
53 | **R1.9 6. L. 75: what limitations, exactly? Be specific**
54 |
55 | A1.9 We spelled out the limitations of MRAN.
56 |
57 | **R1.10 7. L. 159: a little unclear. “which is used by the scanning function”?**
58 |
59 | A1.10 We stated clearly that `as_pkgrefs()` is a wrapper.
60 |
61 | **R1.11 8. L. 186: “covert editing”? Really? Clarify, expand, or remove**
62 |
A1.11 As far as we know, the editing was done without the permission of the original submitter. However, without going into too much detail about similar editing by the same staffer, we decided to drop the adjective "covert".
64 |
65 | **R1.12 9. L. 189: “Neither worked.”**
66 |
67 | A1.12 We adopted the suggestion by the reviewer.
68 |
69 | **R1.13 10. L. 245: what does “suggested by the Turing way” mean?**
70 |
71 | A1.13 The Turing Way (https://the-turing-way.netlify.app/reproducible-research/compendia.html) is a handbook published by the Alan Turing Institute in the UK. We provided a citation to the handbook to make it clearer.
72 |
73 | **R1.14 11. L. 248: use “library(rang)” (cf. code on p. 3) not “require(rang)”**
74 |
75 | A1.14 We adopted the suggestion by the reviewer.
76 |
77 | **R1.15 12. L. 291: there are features not mentioned? Like what? A table listing all the package functions would be a useful addition**
78 |
79 | A1.15 See A1.2
80 |
81 | **R1.16 13. The major rang caveats, as listed in the package vignette, should be included in this article**
82 |
83 | A1.16 We adopted the suggestion by the reviewer to include all caveats.
84 |
85 | # Reviewer #2:
86 |
87 | **R2.1 The article is well written and the package works as described. They have tested rang using a wide range of examples and the steps are well documented and clear. The problem they are addressing is a very important one and I commend their effort in creating the package and producing the article.**
88 |
89 | A2.1 We would like to thank R2 for their appreciation of our package `rang` and the article.
90 |
91 | **R2.2 My only comment is related to the discussion of limitations. Many packages access databases to obtain data. For example, the STRINGdb package has a function to obtain protein interaction data, and the API changes over time. As such, previous versions of STRINGdb cannot access the db. For packages like biomaRt, the method to access the data may be more stable, however the actual data itself can change, for example Gene Ontology data. I suggest the authors make this limitation clear and/or suggest ways to solve, or in some way minimise it.**
92 |
93 | A2.2 We agree with R2 that this is a major reproducibility issue. In the revised version of the paper, we added this as a limitation. `rang` is not a solution to these external dependencies and we made several suggestions.
94 |
95 | **R2.3 I would also recommend a thorough language edit - the article is very clearly written and easy to follow but there are some small errors. For example, in the following sentence, there is some mixing of tenses, and the use of brackets seems unsual: However, having this directory preserved insures against the situations that some R packages used in the project were no longer available or any of the information providers used by rang for resolving the dependency relationships were not available. (Or in the rare circumstance of rang is no longer available.)**
96 |
97 | A2.3 We revised the language of the paper. The sentence mentioned was due to the wrong usage of past subjunctive mood ("were"). We fixed it to use present indicative mood instead.
98 |
--------------------------------------------------------------------------------
/paper/readme.md:
--------------------------------------------------------------------------------
1 | ## Readme
2 |
3 | You need to clone the following repo here.
4 |
5 | ```sh
6 | git clone https://github.com/Joe-Hilgard/vvg-2d4d.git
7 | ```
8 |
9 | As well as get the data and code shared by Oser et al.
10 |
11 | ```r
12 | require(osfr)
13 | osf_retrieve_file("https://osf.io/y7cg5") %>% osf_download()
14 | unzip("meta-analysis replication files.zip", exdir = "oser")
15 | ```
16 |
17 | And install the quarto extension
18 |
19 | ```sh
20 | ##quarto install extension mikemahoney218/quarto-arxiv
21 | quarto add quarto-journals/plos
22 | make render
23 | ```
24 |
25 | # The executable compendium
26 |
27 | `oser/` is the executable compendium based on `rang`. Please refer to the README for more information. In order to reproduce the whole analysis, just:
28 |
29 | ```sh
30 | cd oser
31 | make
32 | ```
33 |
--------------------------------------------------------------------------------
/paper/sushi.sh:
--------------------------------------------------------------------------------
1 | Rscript -e "require(rang); dockerize(resolve('Sushi', '2014-06-05'),
2 | 'sushidocker', no_rocker = TRUE, cache = TRUE)"
3 | docker build -t sushiimg ./sushidocker
4 | docker run -d --rm --name "sushicontainer" -ti sushiimg
5 | docker cp PaperFigure.R sushicontainer:/PaperFigure.R
6 | docker exec sushicontainer mkdir vignettes
7 | docker exec sushicontainer R CMD BATCH PaperFigure.R
8 | docker cp sushicontainer:/vignettes/Figure_1.pdf sushi_figure1.pdf
9 | docker stop sushicontainer
10 |
--------------------------------------------------------------------------------
/paper/sushi_figure1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/paper/sushi_figure1.pdf
--------------------------------------------------------------------------------
/rang.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: No
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: No
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 4
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
15 | AutoAppendNewline: Yes
16 |
17 | BuildType: Package
18 | PackageUseDevtools: Yes
19 | PackageInstallArgs: --no-multiarch --with-keep.source
20 |
21 | UseNativePipeOperator: Yes
22 |
23 | QuitChildProcessesOnExit: Yes
24 |
--------------------------------------------------------------------------------
/tests/.renvignore:
--------------------------------------------------------------------------------
1 | testdata
2 |
--------------------------------------------------------------------------------
/tests/testdata/Rcpp/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: Rcpp
2 | Title: Seamless R and C++ Integration
3 | Version: 1.0.10.4
4 | Date: 2023-03-26
5 | Author: Dirk Eddelbuettel, Romain Francois, JJ Allaire, Kevin Ushey, Qiang Kou,
6 | Nathan Russell, Inaki Ucar, Douglas Bates and John Chambers
7 | Maintainer: Dirk Eddelbuettel <edd@debian.org>
8 | Description: The 'Rcpp' package provides R functions as well as C++ classes which
9 | offer a seamless integration of R and C++. Many R data types and objects can be
10 | mapped back and forth to C++ equivalents which facilitates both writing of new
11 | code as well as easier integration of third-party libraries. Documentation
12 | about 'Rcpp' is provided by several vignettes included in this package, via the
13 | 'Rcpp Gallery' site at <https://gallery.rcpp.org>, the paper by Eddelbuettel and
14 | Francois (2011, <doi:10.18637/jss.v040.i08>), the book by Eddelbuettel (2013,
15 | <doi:10.1007/978-1-4614-6868-4>) and the paper by Eddelbuettel and Balamuta (2018,
16 | <doi:10.1080/00031305.2017.1375990>); see 'citation("Rcpp")' for details.
17 | Imports: methods, utils
18 | Suggests: tinytest, inline, rbenchmark, pkgKitten (>= 0.1.2)
19 | URL: https://www.rcpp.org, https://dirk.eddelbuettel.com/code/rcpp.html, https://github.com/RcppCore/Rcpp
20 | License: GPL (>= 2)
21 | BugReports: https://github.com/RcppCore/Rcpp/issues
22 | MailingList: rcpp-devel@lists.r-forge.r-project.org
23 | RoxygenNote: 6.1.1
24 |
--------------------------------------------------------------------------------
/tests/testdata/anciente1071.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/anciente1071.RDS
--------------------------------------------------------------------------------
/tests/testdata/ancientsna.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/ancientsna.RDS
--------------------------------------------------------------------------------
/tests/testdata/askpass/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: askpass
2 | Type: Package
3 | Title: Safe Password Entry for R, Git, and SSH
4 | Version: 1.1
5 | Authors@R: person("Jeroen", "Ooms", role = c("aut", "cre"),
6 | email = "jeroen@berkeley.edu", comment = c(ORCID = "0000-0002-4035-0289"))
7 | Description: Cross-platform utilities for prompting the user for credentials or a
8 | passphrase, for example to authenticate with a server or read a protected key.
9 | Includes native programs for MacOS and Windows, hence no 'tcltk' is required.
10 | Password entry can be invoked in two different ways: directly from R via the
11 | askpass() function, or indirectly as password-entry back-end for 'ssh-agent'
12 | or 'git-credential' via the SSH_ASKPASS and GIT_ASKPASS environment variables.
13 | Thereby the user can be prompted for credentials or a passphrase if needed
14 | when R calls out to git or ssh.
15 | License: MIT + file LICENSE
16 | URL: https://github.com/jeroen/askpass#readme
17 | BugReports: https://github.com/jeroen/askpass/issues
18 | Encoding: UTF-8
19 | LazyData: true
20 | Imports: sys (>= 2.1)
21 | RoxygenNote: 6.1.1
22 | Suggests: testthat
23 | Language: en-US
24 | NeedsCompilation: yes
25 | Packaged: 2019-01-13 12:08:17 UTC; jeroen
26 | Author: Jeroen Ooms [aut, cre] (<https://orcid.org/0000-0002-4035-0289>)
27 | Maintainer: Jeroen Ooms <jeroen@berkeley.edu>
28 | Repository: CRAN
29 | Date/Publication: 2019-01-13 12:50:03 UTC
30 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/LICENSE:
--------------------------------------------------------------------------------
1 | YEAR: 2018
2 | COPYRIGHT HOLDER: Jeroen Ooms
3 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/MD5:
--------------------------------------------------------------------------------
1 | eb6f4d0b9cc61dfa08b3804779591d5f *DESCRIPTION
2 | 98c71b5eae0ac8dabb055bd8883b8398 *LICENSE
3 | ad7ca63fdfe698661be582601697c491 *NAMESPACE
4 | 53a778139c44afbdf7a33ef6f5df8705 *NEWS
5 | 35412ba699bd625370487aa6a3e462c4 *R/askpass.R
6 | 97259f0bad2d259cc87ad38a01dbf02e *R/onload.R
7 | a1624267f9c82ed814f980de8c5fbc66 *R/ssh.R
8 | 1f9f6a06b0543cf62931f42ad291db6b *inst/WORDLIST
9 | a34602417af1b0c9ad06fc93df828c71 *inst/mac-askpass
10 | e0651808479eb9c747ffbd785441912b *inst/mac-simplepass
11 | 0eb1bcf4a9936ace5ae19d962a2a56a7 *man/askpass.Rd
12 | 20d01a60d6cef576ee14420f207d90d1 *man/ssh_askpass.Rd
13 | dce2a57c7c4f360319f3beaec3245444 *src/Makevars.win
14 | ff322a40812325235977b54323b67c91 *src/askpass.c
15 | b0e65f1a2fd9237f0cb01f9e2e6f64a4 *src/win32/win-askpass.c
16 | 203e4bb2a5fd4caccb9a07d14bf48a90 *tests/testthat.R
17 | 631ab61b3bc4600779d0eee8aaf6cb32 *tests/testthat/test-option.R
18 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/NAMESPACE:
--------------------------------------------------------------------------------
1 | # Generated by roxygen2: do not edit by hand
2 |
3 | export(askpass)
4 | export(ssh_askpass)
5 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/NEWS:
--------------------------------------------------------------------------------
1 | 1.1
2 | - Fix build on R 3.3 and older
3 | - Fix compiler warning on Solaris
4 |
5 | 1.0
6 | - Initial release
7 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/R/askpass.R:
--------------------------------------------------------------------------------
1 | #' Password Prompt Utility
2 | #'
3 | #' Prompt the user for a password to authenticate or read a protected key.
4 | #' By default, this function automatically uses the most appropriate method
5 | #' based on the user platform and front-end. Users or IDEs can override this
6 | #' and set a custom password entry function via the `askpass` option.
7 | #'
8 | #' @export
9 | #' @param prompt the string printed when prompting the user for input.
10 | #' @examples \donttest{
11 | #' # Prompt user for passwd
12 | #' pw <- askpass("Please enter your password")
13 | #' }
14 | askpass <- function(prompt = "Please enter your password: "){
15 | FUN <- getOption("askpass", ask_password_default)
16 | FUN(prompt)
17 | }
18 |
19 | ask_password_default <- function(prompt){
20 | if(!interactive())
21 | return(NULL)
22 | if(is_windows()){
23 | askpass_windows(prompt)
24 | } else if(is_macos() && !isatty(stdin())){
25 | askpass_mac(prompt)
26 | } else {
27 | readline_silent(prompt)
28 | }
29 | }
30 |
31 | askpass_path <- function(simple = TRUE){
32 | if(is_windows()){
33 | arch <- .Machine$sizeof.pointer * 8;
34 | system.file(sprintf('win-askpass%d.exe', arch),
35 | package = 'askpass', mustWork = TRUE)
36 | } else if(is_macos()){
37 | prog <- ifelse(isTRUE(simple), 'mac-simplepass', 'mac-askpass')
38 | system.file(prog, package = 'askpass', mustWork = TRUE)
39 | }
40 | }
41 |
42 | askpass_windows <- function(prompt, user = names(prompt)){
43 | tryCatch({
44 | res <- sys::exec_internal(askpass_path(), c(prompt, user), timeout = 120)
45 | out_without_eol(res$stdout)
46 | }, error = function(e){
47 | message(e$message)
48 | })
49 | }
50 |
51 | askpass_mac <- function(prompt){
52 | tryCatch({
53 | res <- sys::exec_internal(askpass_path(), prompt, timeout = 120)
54 | out_without_eol(res$stdout)
55 | }, error = function(e){
56 | message(e$message)
57 | })
58 | }
59 |
60 | readline_silent <- function(prompt, icon = "\U0001f511 "){
61 | if(is_unix() && isatty(stdin())){
62 | if(system('stty -echo') == 0){
63 | on.exit(system('stty echo'))
64 | }
65 | }
66 | cat(prompt, "\n")
67 | out <- base::readline(icon)
68 | cat(" OK\n")
69 | out
70 | }
71 |
72 | is_windows <- function(){
73 | .Platform$OS.type == 'windows'
74 | }
75 |
76 | is_unix <- function(){
77 | .Platform$OS.type == "unix"
78 | }
79 |
80 | is_macos <- function(){
81 | identical(tolower(Sys.info()[['sysname']]), "darwin")
82 | }
83 |
84 | out_without_eol <- function(x){
85 | sub("\r?\n$", "", rawToChar(x))
86 | }
87 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/R/onload.R:
--------------------------------------------------------------------------------
1 | .onLoad <- function(libname, pkgname){
2 | setup_askpass_vars()
3 | }
4 |
5 | setup_askpass_vars <- function(){
6 | if(var_exists('RSTUDIO')){
7 | fix_rstudio_path()
8 | } else {
9 | # This is mostly for RGui and R.app (tty could mean MacOS server)
10 | if(is_windows() || (is_macos() && !isatty(stdin()))){
11 | askpass_bin = ssh_askpass()
12 | if(!var_exists('GIT_ASKPASS')){
13 | Sys.setenv("GIT_ASKPASS" = askpass_bin)
14 | }
15 | if(!var_exists('SSH_ASKPASS')){
16 | Sys.setenv("SSH_ASKPASS" = askpass_bin)
17 | }
18 | }
19 | }
20 | }
21 |
22 | # Try to put 'rpostback-askpass' on the path in RStudio if needed
23 | # See: https://github.com/rstudio/rstudio/issues/3805
24 | fix_rstudio_path <- function(){
25 | rs_path <- Sys.getenv('RS_RPOSTBACK_PATH')
26 | git_askpass <- Sys.getenv('GIT_ASKPASS')
27 | if(nchar(rs_path) && !cmd_exists(git_askpass)){
28 | PATH <- Sys.getenv("PATH")
29 | if(!grepl(normalizePath(rs_path, mustWork = FALSE), PATH, fixed = TRUE)){
30 | rs_path <- unique(c(rs_path, sub("rpostback", 'postback', rs_path)))
31 | Sys.setenv(PATH = paste(c(PATH, normalizePath(rs_path, mustWork = FALSE)),
32 | collapse = .Platform$path.sep))
33 | }
34 | }
35 | }
36 |
37 | var_exists <- function(var){
38 | nchar(Sys.getenv(var)) > 0
39 | }
40 |
41 | cmd_exists <- function(cmd){
42 | nchar(Sys.which(cmd)) > 0
43 | }
44 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/R/ssh.R:
--------------------------------------------------------------------------------
1 | #' ASKPASS CMD TOOL
2 | #'
3 | #' This returns the path to the native askpass executable which can be used
4 | #' by git-credential or ssh-agent. Most users don't have worry about this.
5 | #'
6 | #' On Windows and MacOS the package automatically sets the `SSH_ASKPASS` and
7 | #' `GIT_ASKPASS` variables on load (if not already set). If these are set
8 | #' you should be able to run e.g. `sys::exec_wait("ssh-add")` and you should
9 | #' be prompted for a passphrase if your key is protected.
10 | #'
11 | #' @export
12 | ssh_askpass <- function(){
13 | askpass_path(simple = FALSE)
14 | }
15 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/inst/WORDLIST:
--------------------------------------------------------------------------------
1 | CMD
2 | IDEs
3 | MacOS
4 | tcltk
5 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/inst/mac-askpass:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env osascript
2 | # This only works on MacOS!
3 | # Adapted from: https://github.com/theseal/ssh-askpass
4 |
5 | on run argv
6 | set args to argv as text
7 | set frontmost_application to name of (info for (path to frontmost application))
8 | tell application frontmost_application
9 | if args ends with ": " or args ends with ":" then
10 | if args contains "pass" or args contains "pin" then
11 | display dialog args with icon note default button "OK" default answer "" with hidden answer
12 | else
13 | display dialog args with icon note default button "OK" default answer ""
14 | end if
15 | return result's text returned
16 | else
17 | display dialog args with icon note default button "Cancel"
18 | return
19 | end if
20 | end tell
21 | end run
22 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/inst/mac-simplepass:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env osascript
2 | # This only works on MacOS!
3 |
4 | on run argv
5 | set args to argv as text
6 | set frontmost_application to name of (info for (path to frontmost application))
7 | tell application frontmost_application
8 | display dialog args with icon note default button "OK" default answer "" with hidden answer
9 | return result's text returned
10 | end tell
11 | end run
12 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/man/askpass.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/askpass.R
3 | \name{askpass}
4 | \alias{askpass}
5 | \title{Password Prompt Utility}
6 | \usage{
7 | askpass(prompt = "Please enter your password: ")
8 | }
9 | \arguments{
10 | \item{prompt}{the string printed when prompting the user for input.}
11 | }
12 | \description{
13 | Prompt the user for a password to authenticate or read a protected key.
14 | By default, this function automatically uses the most appropriate method
15 | based on the user platform and front-end. Users or IDEs can override this
16 | and set a custom password entry function via the \code{askpass} option.
17 | }
18 | \examples{
19 | \donttest{
20 | # Prompt user for passwd
21 | pw <- askpass("Please enter your password")
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/man/ssh_askpass.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/ssh.R
3 | \name{ssh_askpass}
4 | \alias{ssh_askpass}
5 | \title{ASKPASS CMD TOOL}
6 | \usage{
7 | ssh_askpass()
8 | }
9 | \description{
10 | This returns the path to the native askpass executable which can be used
11 | by git-credential or ssh-agent. Most users don't have worry about this.
12 | }
13 | \details{
14 | On Windows and MacOS the package automatically sets the \code{SSH_ASKPASS} and
15 | \code{GIT_ASKPASS} variables on load (if not already set). If these are set
16 | you should be able to run e.g. \code{sys::exec_wait("ssh-add")} and you should
17 | be prompted for a passphrase if your key is protected.
18 | }
19 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/src/Makevars.win:
--------------------------------------------------------------------------------
1 | ASKPASS=../inst/win-askpass$(WIN).exe
2 | ASKPASSOBJ=win32/win-askpass.o
3 |
4 | all: clean $(ASKPASS)
5 |
6 | clean:
7 | rm -f $(ASKPASS) $(ASKPASSOBJ)
8 |
9 | $(ASKPASS): $(ASKPASSOBJ)
10 | $(CC) $(CFLAGS) -o $(ASKPASS) $(ASKPASSOBJ) -lcredui
11 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/src/askpass.c:
--------------------------------------------------------------------------------
1 | #include <stdlib.h>
2 | #include <unistd.h>
3 | #include <Rinternals.h>
4 | #include <R_ext/Rdynload.h>
5 |
6 | /* We don't use this anymore */
7 |
8 | SEXP pw_entry_dialog(SEXP prompt){
9 | #ifndef _WIN32
10 | const char *text = CHAR(STRING_ELT(prompt, 0));
11 | const char *pass = getpass(text);
12 | if(pass != NULL)
13 | return Rf_mkString(pass);
14 | #endif
15 | return R_NilValue;
16 | }
17 |
18 | static const R_CallMethodDef CallEntries[] = {
19 | {"pw_entry_dialog", (DL_FUNC) &pw_entry_dialog, 1},
20 | {NULL, NULL, 0}
21 | };
22 |
23 | void R_init_askpass(DllInfo *dll){
24 | R_registerRoutines(dll, NULL, CallEntries, NULL, NULL);
25 | R_useDynamicSymbols(dll, FALSE);
26 | R_forceSymbols(dll, TRUE);
27 | }
28 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/src/win32/win-askpass.c:
--------------------------------------------------------------------------------
1 | #include <windows.h>
2 | #include <wincred.h>
3 | #include <stdio.h>
4 |
5 | static const char *formatError(DWORD res){
6 | static char buf[1000], *p;
7 | FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
8 | NULL, res,
9 | MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
10 | buf, 1000, NULL);
11 | p = buf+strlen(buf) -1;
12 | if(*p == '\n') *p = '\0';
13 | p = buf+strlen(buf) -1;
14 | if(*p == '\r') *p = '\0';
15 | p = buf+strlen(buf) -1;
16 | if(*p == '.') *p = '\0';
17 | return buf;
18 | }
19 |
20 | int main( int argc, const char* argv[] ){
21 | const char *prompt = argc > 1 ? argv[1] : "Please enter password";
22 | const char *user = argc > 2 ? argv[2] : "NA";
23 | CREDUI_INFO cui;
24 | TCHAR pszPwd[CREDUI_MAX_PASSWORD_LENGTH+1];
25 | BOOL fSave;
26 | DWORD dwErr;
27 |
28 | cui.cbSize = sizeof(CREDUI_INFO);
29 | cui.hwndParent = GetActiveWindow();
30 | cui.pszMessageText = TEXT(prompt);
31 | cui.pszCaptionText = TEXT("Password Entry");
32 | cui.hbmBanner = NULL;
33 | fSave = FALSE;
34 | SecureZeroMemory(pszPwd, sizeof(pszPwd));
35 | dwErr = CredUIPromptForCredentials(
36 | &cui, // CREDUI_INFO structure
37 | TEXT("TheServer"), // Target for credentials
38 | NULL, // Reserved
39 | 0, // Reason
40 | (char*) user, // User name
41 | 0, // Max number of char for user name
42 | pszPwd, // Password
43 | CREDUI_MAX_PASSWORD_LENGTH+1, // Max number of char for password
44 | &fSave, // State of save check box
45 | CREDUI_FLAGS_GENERIC_CREDENTIALS | // flags
46 | CREDUI_FLAGS_KEEP_USERNAME |
47 | CREDUI_FLAGS_PASSWORD_ONLY_OK |
48 | CREDUI_FLAGS_ALWAYS_SHOW_UI |
49 | CREDUI_FLAGS_DO_NOT_PERSIST);
50 |
51 | if(!dwErr) {
52 | fprintf( stdout, "%s\n", pszPwd);
53 |
54 | return 0;
55 | } else {
56 | fprintf( stderr, "%s\n", formatError(GetLastError()));
57 | return 1;
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/tests/testthat.R:
--------------------------------------------------------------------------------
1 | library(testthat)
2 | library(askpass)
3 |
4 | test_check("askpass")
5 |
--------------------------------------------------------------------------------
/tests/testdata/askpass/tests/testthat/test-option.R:
--------------------------------------------------------------------------------
1 | context("test-option")
2 |
3 | test_that("program exists", {
4 | if(is_windows() || is_macos()){
5 | expect_true(file.exists(ssh_askpass()))
6 | }
7 | })
8 |
9 | test_that("option askpass is respected", {
10 | options(askpass = function(...){
11 | 'supersecret'
12 | })
13 | expect_equal(askpass(), 'supersecret')
14 | })
15 |
--------------------------------------------------------------------------------
/tests/testdata/askpass_1.1.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/askpass_1.1.tar.gz
--------------------------------------------------------------------------------
/tests/testdata/bioc_renv.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/bioc_renv.RDS
--------------------------------------------------------------------------------
/tests/testdata/chipseq/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: chipseq
2 | Title: chipseq: A package for analyzing chipseq data
3 | Version: 1.49.0
4 | Author: Deepayan Sarkar, Robert Gentleman, Michael Lawrence, Zizhen Yao
5 | Description: Tools for helping process short read data for chipseq
6 | experiments
7 | Depends: R (>= 2.10), methods, BiocGenerics (>= 0.1.0), S4Vectors (>= 0.17.25),
8 | IRanges (>= 2.13.12), GenomicRanges (>= 1.31.8), ShortRead
9 | Imports: methods, stats, lattice, BiocGenerics, IRanges, GenomicRanges,
10 | ShortRead
11 | Suggests: BSgenome, GenomicFeatures, TxDb.Mmusculus.UCSC.mm9.knownGene
12 | Maintainer: Bioconductor Package Maintainer <maintainer@bioconductor.org>
13 |
14 | License: Artistic-2.0
15 | LazyLoad: yes
16 | biocViews: ChIPSeq, Sequencing, Coverage, QualityControl, DataImport
17 |
--------------------------------------------------------------------------------
/tests/testdata/dt.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/dt.RDS
--------------------------------------------------------------------------------
/tests/testdata/fakeRhtslib.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/fakeRhtslib.tar.gz
--------------------------------------------------------------------------------
/tests/testdata/fakeRhtslib/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: Rhtslib
2 | Title: HTSlib high-throughput sequencing library as an R package
3 | Description: This package provides version 1.15.1 of the 'HTSlib' C
4 | library for high-throughput sequence analysis. The package is
5 | primarily useful to developers of other R packages who wish to
6 | make use of HTSlib. Motivation and instructions for use of this
7 | package are in the vignette, vignette(package="Rhtslib", "Rhtslib").
8 | biocViews: DataImport, Sequencing
9 | URL: https://bioconductor.org/packages/Rhtslib, http://www.htslib.org/
10 | BugReports: https://github.com/Bioconductor/Rhtslib/issues
11 | Version: 2.0.0
12 | License: LGPL (>= 2)
13 | Copyright: Unless otherwise noted in the file, all files outside
14 | src/htslib-1.15.1 or inst/include copyright Bioconductor; for
15 | files inside src/htslib-1.15.1 or inst/include, see file
16 | src/htslib-1.15.1/LICENSE.
17 | Encoding: UTF-8
18 | Authors@R:
19 | c(person("Nathaniel", "Hayden", email="nhayden@fredhutch.org",
20 | role=c("led", "aut")),
21 | person("Martin", "Morgan", email="martin.morgan@roswellpark.org",
22 | role="aut"),
23 | person("Hervé", "Pagès",
24 | email="hpages.on.github@gmail.com", role=c("aut", "cre")))
25 | Imports: zlibbioc
26 | LinkingTo: zlibbioc
27 | Suggests: knitr, rmarkdown, BiocStyle
28 | SystemRequirements: libbz2 & liblzma & libcurl (with header files), GNU
29 | make
30 | StagedInstall: no
31 | VignetteBuilder: knitr
32 | git_url: https://git.bioconductor.org/packages/Rhtslib
33 | git_branch: RELEASE_3_16
34 | git_last_commit: 1757333
35 | git_last_commit_date: 2022-11-01
36 | Date/Publication: 2022-11-01
37 | NeedsCompilation: yes
38 | Packaged: 2022-11-01 22:45:17 UTC; biocbuild
39 | Author: Nathaniel Hayden [led, aut],
40 | Martin Morgan [aut],
41 | Hervé Pagès [aut, cre]
42 | Maintainer: Hervé Pagès <hpages.on.github@gmail.com>
43 |
--------------------------------------------------------------------------------
/tests/testdata/fake_renv.lock:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/fake_renv.lock
--------------------------------------------------------------------------------
/tests/testdata/fakexml2/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: xml2
2 | Title: Parse XML
3 | Version: 1.3.3.9000
4 | Authors@R: c(
5 | person("Hadley", "Wickham", , "hadley@rstudio.com", role = c("aut", "cre")),
6 | person("Jim", "Hester", role = "aut"),
7 | person("Jeroen", "Ooms", role = "aut"),
8 | person("RStudio", role = c("cph", "fnd")),
9 | person("R Foundation", role = "ctb",
10 | comment = "Copy of R-project homepage cached as example")
11 | )
12 | Description: Work with XML files using a simple, consistent interface.
13 | Built on top of the 'libxml2' C library.
14 | License: MIT + file LICENSE
15 | URL: https://xml2.r-lib.org/, https://github.com/r-lib/xml2
16 | BugReports: https://github.com/r-lib/xml2/issues
17 | Depends:
18 | R (>= 3.1.0)
19 | Imports:
20 | methods
21 | Suggests:
22 | covr,
23 | curl,
24 | httr,
25 | knitr,
26 | magrittr,
27 | mockery,
28 | rmarkdown,
29 | testthat (>= 2.1.0)
30 | VignetteBuilder:
31 | knitr
32 | Config/Needs/website: tidyverse/tidytemplate
33 | Encoding: UTF-8
34 | Roxygen: list(markdown = TRUE)
35 | RoxygenNote: 7.1.2
36 | SystemRequirements: libxml2: libxml2-dev (deb), libxml2-devel (rpm)
37 | Collate:
38 | 'S4.R'
39 | 'as_list.R'
40 | 'xml_parse.R'
41 | 'as_xml_document.R'
42 | 'classes.R'
43 | 'init.R'
44 | 'paths.R'
45 | 'utils.R'
46 | 'xml_attr.R'
47 | 'xml_children.R'
48 | 'xml_find.R'
49 | 'xml_modify.R'
50 | 'xml_name.R'
51 | 'xml_namespaces.R'
52 | 'xml_path.R'
53 | 'xml_schema.R'
54 | 'xml_serialize.R'
55 | 'xml_structure.R'
56 | 'xml_text.R'
57 | 'xml_type.R'
58 | 'xml_url.R'
59 | 'xml_write.R'
60 | 'zzz.R'
61 |
--------------------------------------------------------------------------------
/tests/testdata/fakezlibbioc/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: zlibbioc
2 | Type: Package
3 | Title: An R packaged zlib-1.2.5
4 | Version: 1.41.0
5 | Author: Martin Morgan
6 | Maintainer: Bioconductor Package Maintainer <maintainer@bioconductor.org>
7 | Description: This package uses the source code of zlib-1.2.5 to create
8 | libraries for systems that do not have these available via other
9 | means (most Linux and Mac users should have system-level access to
10 | zlib, and no direct need for this package). See the vignette for
11 | instructions on use.
12 | biocViews: Infrastructure
13 | URL: https://bioconductor.org/packages/zlibbioc
14 | BugReports: https://github.com/Bioconductor/zlibbioc/issues
15 | License: Artistic-2.0 + file LICENSE
16 | LazyLoad: yes
17 |
--------------------------------------------------------------------------------
/tests/testdata/graph.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/graph.RDS
--------------------------------------------------------------------------------
/tests/testdata/issue21.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/issue21.RDS
--------------------------------------------------------------------------------
/tests/testdata/issue21_ubuntu2004.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/issue21_ubuntu2004.RDS
--------------------------------------------------------------------------------
/tests/testdata/issue38.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/issue38.RDS
--------------------------------------------------------------------------------
/tests/testdata/mzesalike/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: mzesalike
2 | Title: Xaringan Template With MZES Theme
3 | Version: 0.0.3
4 | Authors@R:
5 | person(given = "Chung-hong",
6 | family = "Chan",
7 | role = c("aut", "cre"),
8 | email = "chainsawtiney@gmail.com",
9 | comment = c(ORCID = "0000-0002-6232-7530"))
10 | Description: Create professional looking HTML5 slides with MZES theme.
11 | License: GPL-3
12 | Encoding: UTF-8
13 | LazyData: true
14 | Imports:
15 | xaringan,
16 | xaringanExtra (>= 0.0.14),
17 | leaflet,
18 | fontawesome
19 | Remotes:
20 | yihui/xaringan,
21 | chainsawriot/xaringanExtra,
22 | rstudio/fontawesome
23 | RoxygenNote: 7.1.0
24 |
--------------------------------------------------------------------------------
/tests/testdata/rang_6.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_6.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_bioc.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_bioc.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_local_gh.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_local_gh.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_mixture.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_mixture.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_ok.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_ok.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_rio_old.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_rio_old.RDS
--------------------------------------------------------------------------------
/tests/testdata/rang_unresolved.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/rang_unresolved.RDS
--------------------------------------------------------------------------------
/tests/testdata/rrcompendium-complete/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: rrcompendium
2 | Title: Partial Reproduction of Boettiger Ecology Letters 2018;21:1255–1267 with rrtools
3 | Version: 0.0.0.9000
4 | Authors@R:
5 | person(given = "Anna",
6 | family = "Krystalli",
7 | role = c("aut", "cre"),
8 | email = "annakrystalli@googlemail.com")
9 | Description: This repository contains the research compendium of the partial
10 | reproduction of Boettiger Ecology Letters 2018;21:1255–1267. The compendium
11 | contains all data, code, and text associated with this sub-section of the
12 | analysis.
13 | License: MIT + file LICENSE
14 | ByteCompile: true
15 | Encoding: UTF-8
16 | LazyData: true
17 | URL: https://github.com/annakrystalli/rrcompendium
18 | BugReports: https://github.com/annakrystalli/rrcompendium/issues
19 | Imports:
20 | bookdown,
21 | dplyr,
22 | readr,
23 | ggplot2 (>= 3.0.0),
24 | ggthemes (>= 3.5.0),
25 | here (>= 0.1),
26 | knitr (>= 1.20),
27 | rticles (>= 0.6)
28 | RoxygenNote: 6.1.0
29 | Suggests:
30 | testthat
31 |
--------------------------------------------------------------------------------
/tests/testdata/rrcompendium-complete/README.md:
--------------------------------------------------------------------------------
1 | This is obtained from
2 |
3 | https://github.com/annakrystalli/rrcompendium-complete
4 |
5 | A complete research compendium prepared with rrtools.
6 |
7 |
--------------------------------------------------------------------------------
/tests/testdata/sessionInfo1.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sessionInfo1.RDS
--------------------------------------------------------------------------------
/tests/testdata/sessionInfo2.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sessionInfo2.RDS
--------------------------------------------------------------------------------
/tests/testdata/sessionInfo3.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sessionInfo3.RDS
--------------------------------------------------------------------------------
/tests/testdata/sle_graph.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sle_graph.RDS
--------------------------------------------------------------------------------
/tests/testdata/small_renv_lock/renv.lock:
--------------------------------------------------------------------------------
1 | {
2 | "R": {
3 | "Version": "4.2.2",
4 | "Repositories": [
5 | {
6 | "Name": "CRAN",
7 | "URL": "https://cloud.r-project.org"
8 | }
9 | ]
10 | },
11 | "Packages": {
12 | "BiocGenerics": {
13 | "Package": "BiocGenerics",
14 | "Version": "0.44.0",
15 | "Source": "Bioconductor",
16 | "git_url": "https://git.bioconductor.org/packages/BiocGenerics",
17 | "git_branch": "RELEASE_3_16",
18 | "git_last_commit": "d7cd9c1",
19 | "git_last_commit_date": "2022-11-01",
20 | "Hash": "0de19224c2cd94f48fbc0d0bc663ce3b",
21 | "Requirements": []
22 | },
23 | "levelnet": {
24 | "Package": "levelnet",
25 | "Version": "0.5.0",
26 | "Source": "GitHub",
27 | "RemoteType": "github",
28 | "RemoteHost": "api.github.com",
29 | "RemoteRepo": "levelnet",
30 | "RemoteUsername": "schochastics",
31 | "RemoteRef": "HEAD",
32 | "RemoteSha": "775cf5e91b83cb73fe35e378ed1d7facb1d741eb",
33 | "Hash": "29eed562ec1c9bb7e31ce87e321b9252",
34 | "Requirements": [
35 | "Matrix",
36 | "Rcpp",
37 | "igraph"
38 | ]
39 | },
40 | "rtoot": {
41 | "Package": "rtoot",
42 | "Version": "0.3.0",
43 | "Source": "Repository",
44 | "Repository": "CRAN",
45 | "Hash": "06eb72de42a3f8fcb252badc58f92b2b",
46 | "Requirements": [
47 | "clipr",
48 | "curl",
49 | "dplyr",
50 | "httr",
51 | "jsonlite",
52 | "tibble"
53 | ]
54 | }
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/tests/testdata/sna_0.3.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sna_0.3.tar.gz
--------------------------------------------------------------------------------
/tests/testdata/superancientsna.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/superancientsna.RDS
--------------------------------------------------------------------------------
/tests/testdata/sysreqs_gmp.RDS:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gesistsa/rang/25fbdc70f0fc24f0b35611cf836650c582bf7d02/tests/testdata/sysreqs_gmp.RDS
--------------------------------------------------------------------------------
/tests/testdata/test_dir/script.R:
--------------------------------------------------------------------------------
1 | library(BiocGenerics)
2 | library(rtoot)
3 |
--------------------------------------------------------------------------------
/tests/testdata/wrapped_line.txt:
--------------------------------------------------------------------------------
1 | RUN apt-get update -qq \
2 | && apt-get install -y libpcre3-dev zlib1g-dev pkg-config libcurl4-openssl-dev \
3 | && apt-get install -y libcurl4-openssl-dev libicu-dev libssl-dev make zlib1g-dev
4 | RUN apt-get install -y curl git \
5 | && curl -LO https://quarto.org/download/latest/quarto-linux-amd64.deb \
6 | && dpkg -i quarto-linux-amd64.deb \
7 | && quarto install tool tinytex
8 |
--------------------------------------------------------------------------------
/tests/testthat.R:
--------------------------------------------------------------------------------
1 | # This file is part of the standard setup for testthat.
2 | # It is recommended that you do not modify it.
3 | #
4 | # Where should you do additional test configuration?
5 | # Learn more about the roles of various files in:
6 | # * https://r-pkgs.org/tests.html
7 | # * https://testthat.r-lib.org/reference/test_package.html#special-files
8 |
9 | library(testthat)
10 | library(rang)
11 |
12 | test_check("rang")
13 |
--------------------------------------------------------------------------------
/tests/testthat/test_create_turing.R:
--------------------------------------------------------------------------------
1 | .generate_temp_dir <- function() {
2 | file.path(tempdir(), paste(sample(c(LETTERS, letters), 20, replace = TRUE), collapse = ""))
3 | }
4 |
5 | test_that("create_turing defensive", {
6 | existing_dir <- .generate_temp_dir()
7 | dir.create(existing_dir)
8 | expect_error(create_turing(existing_dir))
9 | })
10 |
11 | test_that("create_turing all cases", {
12 | temp_dir <- .generate_temp_dir()
13 | create_turing(temp_dir, verbose = FALSE) ## add_rang = TRUE
14 | expect_true(dir.exists(temp_dir))
15 | expect_true(file.exists(file.path(temp_dir, "CITATION")))
16 | expect_true(dir.exists(file.path(temp_dir, "data_raw")))
17 | expect_true(dir.exists(file.path(temp_dir, "data_clean")))
18 | expect_true(dir.exists(file.path(temp_dir, "figures")))
19 | expect_true(file.exists(file.path(temp_dir, "Makefile")))
20 | expect_true(dir.exists(file.path(temp_dir, "inst/rang")))
21 | temp_dir <- .generate_temp_dir()
22 | create_turing(temp_dir, verbose = FALSE, add_makefile = FALSE) ## add_rang = TRUE
23 | expect_false(file.exists(file.path(temp_dir, "Makefile")))
24 | expect_true(dir.exists(temp_dir))
25 | expect_true(dir.exists(file.path(temp_dir, "data_raw")))
26 | expect_true(dir.exists(file.path(temp_dir, "data_clean")))
27 | expect_true(dir.exists(file.path(temp_dir, "figures")))
28 | expect_true(dir.exists(file.path(temp_dir, "inst/rang")))
29 | temp_dir <- .generate_temp_dir()
30 | create_turing(temp_dir, add_rang = FALSE)
31 | expect_true(dir.exists(temp_dir))
32 | expect_true(dir.exists(file.path(temp_dir, "data_raw")))
33 | expect_true(dir.exists(file.path(temp_dir, "data_clean")))
34 | expect_true(dir.exists(file.path(temp_dir, "figures")))
35 | expect_false(dir.exists(file.path(temp_dir, "inst/rang")))
36 | })
37 |
--------------------------------------------------------------------------------
/tests/testthat/test_edgelist.R:
--------------------------------------------------------------------------------
1 | test_that("convert ranglet to edgelist", {
2 | graph <- readRDS("../testdata/graph.RDS")
3 | el <- convert_edgelist(graph$ranglets[[1]])
4 | expect_s3_class(el,"data.frame")
5 | expect_equal(nrow(el),4L)
6 | })
7 |
8 | test_that("convert rang to edgelist", {
9 | graph <- readRDS("../testdata/graph.RDS")
10 | el <- convert_edgelist(graph)
11 | expect_s3_class(el,"data.frame")
12 | expect_equal(nrow(el),121L)
13 | })
14 |
15 | test_that("convert edgelist empty rang", {
16 | graph <- readRDS("../testdata/rang_ok.RDS")
17 | graph$ranglets <- list()
18 | el <- convert_edgelist(graph)
19 | expect_s3_class(el,"data.frame")
20 | expect_equal(nrow(el),0L)
21 |
22 | })
23 |
24 |
25 | test_that("convert edgelist error", {
26 | expect_error(convert_edgelist("abc"))
27 | expect_error(convert_edgelist(42))
28 | expect_error(convert_edgelist(cbind(1:5,6:10)))
29 | })
30 |
31 |
--------------------------------------------------------------------------------
/tests/testthat/test_expost_rang.R:
--------------------------------------------------------------------------------
1 | test_that("rang_as_comment or #13", {
2 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
3 | temp_r <- tempfile(fileext = ".R")
4 | export_rang(rang_ok, path = temp_r) ## default rang_as_comment = TRUE
5 | x <- readLines(temp_r)
6 | expect_true(any(grepl("^## ## To reconstruct this file", x)))
7 | export_rang(rang_ok, path = temp_r, rang_as_comment = FALSE) ## default rang_as_comment = TRUE
8 | x <- readLines(temp_r)
9 | expect_false(any(grepl("^## ## To reconstruct this file", x)))
10 | })
11 |
12 | test_that("verbose #16", {
13 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
14 | temp_r <- tempfile(fileext = ".R")
15 | export_rang(rang_ok, path = temp_r, rang_as_comment = FALSE) ## verbose = TRUE
16 | x <- readLines(temp_r)
17 | expect_true(any(grepl("^verbose <- TRUE", x)))
18 | export_rang(rang_ok, path = temp_r, rang_as_comment = FALSE, verbose = FALSE)
19 | x <- readLines(temp_r)
20 | expect_true(any(grepl("^verbose <- FALSE", x)))
21 | })
22 |
23 | test_that("lib #16", {
24 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
25 | temp_r <- tempfile(fileext = ".R")
26 | export_rang(rang_ok, path = temp_r, rang_as_comment = FALSE) ## lib = NA
27 | x <- readLines(temp_r)
28 | expect_true(any(grepl("^lib <- NA", x)))
29 | export_rang(rang_ok, path = temp_r, rang_as_comment = FALSE, verbose = FALSE, lib = "abc")
30 | x <- readLines(temp_r)
31 | expect_true(any(grepl("^lib <- \"abc\"", x)))
32 | })
33 |
34 | test_that(".normalize_url", {
35 | x <- .normalize_url("http://cran.r-project.org/")
36 | expect_equal(x, "https://cran.r-project.org/")
37 | x <- .normalize_url("cran.r-project.org/")
38 | expect_equal(x, "https://cran.r-project.org/")
39 | x <- .normalize_url("cran.r-project.org")
40 | expect_equal(x, "https://cran.r-project.org/")
41 | x <- .normalize_url("cran.r-project.org//")
42 | expect_equal(x, "https://cran.r-project.org/")
43 | })
44 |
45 | test_that(".normalize_url https issue #20", {
46 | x <- .normalize_url("http://cran.r-project.org/") # https = TRUE
47 | expect_equal(x, "https://cran.r-project.org/")
48 | x <- .normalize_url("http://cran.r-project.org/", https = FALSE)
49 | expect_equal(x, "http://cran.r-project.org/")
50 | x <- .normalize_url("cran.r-project.org", https = FALSE)
51 | expect_equal(x, "http://cran.r-project.org/")
52 | x <- .normalize_url("cran.r-project.org//", https = FALSE)
53 | expect_equal(x, "http://cran.r-project.org/")
54 | })
55 |
56 | test_that(".query_mirror_validity", {
57 | expect_true(.query_mirror_validity("https://cran.r-project.org/"))
58 | expect_true(.query_mirror_validity("https://cloud.r-project.org/"))
59 | expect_false(.query_mirror_validity("https://www.chainsawriot.com/"))
60 | })
61 |
62 | test_that("integration of mirror selection to `export_rang` #18", {
63 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
64 | temp_r <- tempfile(fileext = ".R")
65 | export_rang(rang_ok, path = temp_r) ## cran_mirror = "https://cran.r-project.org/"
66 | x <- readLines(temp_r)
67 | expect_true(any(grepl("^cran.mirror <- \"https://cran\\.r\\-project\\.org/\"", x)))
68 | export_rang(rang_ok, path = temp_r, cran_mirror = "cran.r-project.org")
69 | x <- readLines(temp_r)
70 | expect_true(any(grepl("^cran.mirror <- \"https://cran\\.r\\-project\\.org/\"", x)))
71 | export_rang(rang_ok, path = temp_r, cran_mirror = "https://cloud.r-project.org/")
72 | x <- readLines(temp_r)
73 | expect_true(any(grepl("^cran.mirror <- \"https://cloud\\.r\\-project\\.org/\"", x)))
74 | expect_error(export_rang(rang_ok, path = temp_r, cran_mirror = "https://www.chainsawriot.com/"))
75 | expect_error(export_rang(rang_ok, path = temp_r, cran_mirror = "https://www.chainsawriot.com/", check_cran_mirror = FALSE), NA)
76 | x <- readLines(temp_r)
77 | expect_true(any(grepl("^cran.mirror <- \"https://www\\.chainsawriot\\.com/\"", x)))
78 | })
79 |
80 | test_that("integration of https to `export_rang` #20", {
81 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
82 | expect_equal(rang_ok$r_version, "4.2.2")
83 | temp_r <- tempfile(fileext = ".R")
84 | export_rang(rang_ok, path = temp_r) ## cran_mirror = "https://cran.r-project.org/"
85 | x <- readLines(temp_r)
86 | expect_true(any(grepl("^cran.mirror <- \"https://cran\\.r\\-project\\.org/\"", x)))
87 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
88 | rang_ok$r_version <- "3.3.0"
89 | temp_r <- tempfile(fileext = ".R")
90 | export_rang(rang_ok, path = temp_r) ## cran_mirror = "https://cran.r-project.org/"
91 | x <- readLines(temp_r)
92 | expect_true(any(grepl("^cran.mirror <- \"https://cran\\.r\\-project\\.org/\"", x)))
93 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
94 | rang_ok$r_version <- "3.2.0"
95 | temp_r <- tempfile(fileext = ".R")
96 | export_rang(rang_ok, path = temp_r) ## cran_mirror = "https://cran.r-project.org/"
97 | x <- readLines(temp_r)
98 | expect_false(any(grepl("^cran.mirror <- \"https://cran\\.r\\-project\\.org/\"", x)))
99 | expect_true(any(grepl("^cran.mirror <- \"http://cran\\.r\\-project\\.org/\"", x)))
100 | })
101 |
102 | test_that("Docker R < 1.3.1", {
103 | rang_rio <- readRDS("../testdata/rang_rio_old.RDS")
104 | rang_rio$r_version <- "1.3.1" ## exactly 1.3.1, no error
105 | temp_r <- tempfile(fileext = ".R")
106 | expect_error(export_rang(rang_rio, path = temp_r), NA)
107 | rang_rio <- readRDS("../testdata/rang_rio_old.RDS")
108 | rang_rio$r_version <- "1.0.0"
109 | expect_error(export_rang(rang_rio, path = temp_r))
110 | })
111 |
112 | test_that("issue #38", {
113 | issue38 <- readRDS("../testdata/issue38.RDS")
114 | expect_error(export_rang(issue38, tempfile(fileext = ".R")), NA)
115 | })
116 |
117 | test_that("Bioconductor <2.0",{
118 | expect_error(.bioc_package_history(bioc_version = "1.9"))
119 | })
120 |
121 | test_that("Bioconductor new release",{
122 | expect_equal(.query_biocver("2023-01-01")$version,"3.16")
123 | })
124 |
125 | test_that("empty rang export, #75", {
126 | graph <- readRDS("../testdata/rang_ok.RDS")
127 | graph$ranglets <- list()
128 | expect_warning(x <- export_rang(graph, path = tempfile()))
129 | expect_equal(x, NULL)
130 | })
131 |
132 | test_that("prevent infinite loop, #81", {
133 | graph <- readRDS("../testdata/rang_ok.RDS")
134 | graph$ranglets[[1]]$deps[[2]] <- NULL
135 | expect_error(generate_installation_order(graph), "cran::LDAvis")
136 | graph <- readRDS("../testdata/rang_ok.RDS")
137 | graph$ranglets[[1]]$original$y <- "S4Vectors"
138 | graph$ranglets[[1]]$original$y_pkgref <- "bioc::S4Vectors"
139 | expect_error(generate_installation_order(graph), "cran::LDAvis")
140 | })
141 |
142 | test_that("renv export cran", {
143 | temp_dir <- tempdir()
144 | graph <- readRDS("../testdata/rang_ok.RDS")
145 | export_renv(graph, path = temp_dir)
146 | x <- readLines(file.path(temp_dir,"renv.lock"))
147 | expect_true(any(grepl("LDAvis",x)))
148 | expect_true(any(grepl("proxy",x)))
149 | expect_true(any(grepl("RJSONIO",x)))
150 | })
151 |
152 | test_that("renv export bioc", {
153 | temp_dir <- tempdir()
154 | graph <- readRDS("../testdata/rang_bioc.RDS")
155 | export_renv(graph, path = temp_dir)
156 | x <- readLines(file.path(temp_dir,"renv.lock"))
157 | expect_true(any(grepl("Bioconductor",x)))
158 | })
159 |
160 | test_that("renv export local and GH", {
161 | temp_dir <- tempdir()
162 | graph <- readRDS("../testdata/rang_local_gh.RDS")
163 | export_renv(graph, path = temp_dir)
164 | x <- readLines(file.path(temp_dir,"renv.lock"))
165 | expect_true(any(grepl("local",x)))
166 | expect_true(any(grepl("GitHub",x)))
167 | })
168 |
169 | test_that("empty renv export", {
170 | temp_dir <- tempdir()
171 | graph <- readRDS("../testdata/rang_ok.RDS")
172 | graph$ranglets <- list()
173 | expect_warning(x <- export_rang(graph, path = temp_dir))
174 | expect_equal(x, NULL)
175 | })
176 |
177 | test_that("renv export unknown source", {
178 | temp_dir <- tempdir()
179 | graph <- readRDS("../testdata/rang_ok.RDS")
180 | graph$ranglets[[1]]$original$x_pkgref <- "errr::or"
181 | expect_error(export_rang(graph, temp_dir))
182 | })
183 |
184 | test_that("Super ancient special packages", {
185 | graph <- readRDS("../testdata/superancientsna.RDS")
186 | expect_error(generate_installation_order(graph), NA)
187 | })
188 |
189 | test_that("base as a dependency, issue 144", {
190 | graph <- readRDS("../testdata/dt.RDS")
191 | expect_error(generate_installation_order(graph), NA)
192 | })
193 |
--------------------------------------------------------------------------------
/tests/testthat/test_s3.R:
--------------------------------------------------------------------------------
1 | test_that("printing with and without resolved packages", {
2 | rang_mixture <- readRDS("../testdata/rang_mixture.RDS")
3 | rang_ok <- readRDS("../testdata/rang_ok.RDS")
4 | rang_unresolved <- readRDS("../testdata/rang_unresolved.RDS")
5 | rang_output <- capture_output(print(rang_ok$ranglets[[1]]))
6 | expect_equal(rang_output, "The latest version of `LDAvis` [cran] at 2023-01-01 was 0.3.2, which has 2 unique dependencies (2 with no dependencies.)")
7 | rang_ok_output <- capture_output(print(rang_ok))
8 | expect_true(grepl("^resolved: 1 package\\(s\\). Unresolved package\\(s\\): 0", rang_ok_output))
9 | rang_mixture_output <- capture_output(print(rang_mixture))
10 | expect_true(grepl("^resolved: 1 package\\(s\\). Unresolved package\\(s\\): 1", rang_mixture_output))
11 | rang_unresolved_output <- capture_output(print(rang_unresolved))
12 | expect_true(grepl("^resolved: 0 package\\(s\\). Unresolved package\\(s\\): 1", rang_unresolved_output))
13 | expect_false(grepl("The latest version", rang_unresolved_output))
14 | })
15 |
16 | test_that("all_pkgs", {
17 | rang_mixture <- readRDS("../testdata/rang_mixture.RDS")
18 | output1 <- capture_output(print(rang_mixture, all_pkgs = FALSE))
19 | output2 <- capture_output(print(rang_mixture, all_pkgs = TRUE))
20 | expect_true(output1 == output2)
21 | rang_6 <- readRDS("../testdata/rang_6.RDS")
22 | output1 <- capture_output(print(rang_6, all_pkgs = FALSE))
23 | output2 <- capture_output(print(rang_6, all_pkgs = TRUE))
24 | expect_false(output1 == output2)
25 | expect_true(grepl("First 5 packages", output1))
26 | expect_false(grepl("First 5 packages", output2))
27 | })
28 |
--------------------------------------------------------------------------------
/tests/testthat/test_sysreqs.R:
--------------------------------------------------------------------------------
1 | ## all are online tests
2 |
3 | test_that(".query_singleline_sysreqs", {
4 | skip_if_offline()
5 | skip_on_cran()
6 | res <- .query_singleline_sysreqs("")
7 | expect_equal(res, character(0))
8 | res <- .query_singleline_sysreqs("tensorflow")
9 | expect_equal(res, character(0))
10 | res <- .query_singleline_sysreqs("GNU Scientific Library version >= 1.8, C++11", "ubuntu-20.04") ## cheat
11 | expect_equal(res, "apt-get install -y libgsl0-dev")
12 | res <- .query_singleline_sysreqs("Tcl/Tk", "ubuntu-20.04") ## uncheckable
13 | expect_equal(res, "apt-get install -y tcl8.6 tk8.6") ## cheat
14 | })
15 |
16 | test_that(".query_sysreqs_github", {
17 | skip_if_offline()
18 | skip_on_cran()
19 | ## This doesn't query for system requirements of deep dependencies anymore
20 | res <- .query_sysreqs_github("cran/topicmodels", os = "ubuntu-20.04")
21 | expect_true(all(grepl("^apt-get", res)))
22 | expect_true(length(res) == 1)
23 | res <- .query_sysreqs_github("cran/topicmodels", "centos-8")
24 | expect_true(all(grepl("^dnf", res)))
25 | res <- .query_sysreqs_github("Bioconductor/Rhtslib", "ubuntu-20.04")
26 | res2 <- .query_sysreqs_bioc("Rhtslib", "ubuntu-20.04")
27 | expect_equal(res, res2)
28 | })
29 |
30 | test_that(".query_sysreqs_bioc with uncheckable info", {
31 | skip_if_offline()
32 | skip_on_cran()
33 | x <- .query_sysreqs_bioc("Rhtslib", "ubuntu-20.04")
34 | expect_true("apt-get install -y libbz2-dev" %in% x) ## uncheckable
35 | expect_true("apt-get install -y liblzma-dev" %in% x)
36 | expect_true("apt-get install -y make" %in% x) ## checkable
37 | expect_false("apt-get install -y" %in% x) ## the null response from C++
38 | x <- .query_sysreqs_bioc("Rhtslib", "centos-7")
39 | expect_true("dnf install -y libbz2-devel" %in% x)
40 | expect_true("dnf install -y xz-devel" %in% x)
41 | expect_true("dnf install -y make" %in% x)
42 | expect_false("dnf install -y" %in% x) ## the null response from C++
43 | x <- .query_singleline_sysreqs("libxml2", "ubuntu-20.04")
44 | expect_equal(x, "apt-get install -y libxml2-dev")
45 | x <- .query_singleline_sysreqs("C++", "ubuntu-20.04")
46 | expect_equal(x, character(0))
47 | x <- readRDS("../testdata/sysreqs_gmp.RDS")
48 | ## buildtime / runtime requirements
49 | expect_equal(.extract_sys_package(x[[1]], arch = "DEB"),
50 | "apt-get install -y libgmp-dev")
51 | })
52 |
53 | test_that(".query_sysreqs_local", {
54 | skip_if_offline()
55 | skip_on_cran()
56 | expect_error(sysreqs <- .query_sysreqs_local(c("../testdata/fakexml2", "../testdata/askpass_1.1.tar.gz", "../testdata/fakeRhtslib.tar.gz"), "ubuntu-20.04"), NA)
57 | expect_true("apt-get install -y libxml2-dev" %in% sysreqs)
58 | expect_true("apt-get install -y libbz2-dev" %in% sysreqs)
59 | ## dispatch in .query_sysreqs_smart
60 | expect_error(sysreqs2 <- .query_sysreqs_smart(c("local::../testdata/fakexml2", "local::../testdata/askpass_1.1.tar.gz", "local::../testdata/fakeRhtslib.tar.gz"), "ubuntu-20.04"), NA)
61 | expect_equal(sysreqs, sysreqs2)
62 | })
63 |
--------------------------------------------------------------------------------
/tests/testthat/test_use_rang.R:
--------------------------------------------------------------------------------
1 | .generate_temp_dir <- function() {
2 | file.path(tempdir(), paste(sample(c(LETTERS, letters), 20, replace = TRUE), collapse = ""))
3 | }
4 |
5 | expect_unequal <- function(x, y) {
6 | expect_false(isTRUE(all.equal(x, y)))
7 | }
8 |
9 | test_that("use_rang defensive", {
10 | expect_error(use_rang("this directory does not exist"))
11 | })
12 |
13 | test_that("use_rang normal", {
14 | tempdir <- .generate_temp_dir()
15 | dir.create(tempdir)
16 | msg <- capture_messages(use_rang(tempdir))
17 | expect_true(any(grepl("infrastructure", msg)))
18 | tempdir <- .generate_temp_dir()
19 | dir.create(tempdir)
20 | expect_silent(use_rang(tempdir, verbose = FALSE))
21 | expect_true(dir.exists(file.path(tempdir, "inst/rang")))
22 | expect_true(file.exists(file.path(tempdir, "inst/rang/update.R")))
23 | expect_true(file.exists(file.path(tempdir, ".here")))
24 | expect_true(file.exists(file.path(tempdir, "Makefile")))
25 | })
26 |
27 | test_that("options", {
28 | tempdir <- .generate_temp_dir()
29 | dir.create(tempdir)
30 | use_rang(tempdir, add_makefile = FALSE, verbose = FALSE)
31 | expect_true(dir.exists(file.path(tempdir, "inst/rang")))
32 | expect_true(file.exists(file.path(tempdir, "inst/rang/update.R")))
33 | expect_true(file.exists(file.path(tempdir, ".here")))
34 | expect_false(file.exists(file.path(tempdir, "Makefile")))
35 | tempdir <- .generate_temp_dir()
36 | dir.create(tempdir)
37 | use_rang(tempdir, add_here = FALSE, verbose = FALSE)
38 | expect_true(dir.exists(file.path(tempdir, "inst/rang")))
39 | expect_true(file.exists(file.path(tempdir, "inst/rang/update.R")))
40 | expect_false(file.exists(file.path(tempdir, ".here")))
41 | expect_true(file.exists(file.path(tempdir, "Makefile")))
42 | })
43 |
44 | test_that("Existing components update.R", {
45 | tempdir <- .generate_temp_dir()
46 | dir.create(tempdir)
47 | dir.create(file.path(tempdir, "inst/rang"), recursive = TRUE)
48 | dummy <- c("831721", "GESIS")
49 | writeLines(dummy, file.path(tempdir, "inst/rang/update.R"))
50 | use_rang(tempdir, verbose = FALSE)
51 | content <- readLines(file.path(tempdir, "inst/rang/update.R"))
52 | expect_equal(content, dummy)
53 | ## force
54 | tempdir <- .generate_temp_dir()
55 | dir.create(tempdir)
56 | dir.create(file.path(tempdir, "inst/rang"), recursive = TRUE)
57 | writeLines(dummy, file.path(tempdir, "inst/rang/update.R"))
58 | use_rang(tempdir, verbose = FALSE, force = TRUE)
59 | content <- readLines(file.path(tempdir, "inst/rang/update.R"))
60 | expect_unequal(content, dummy) ## got overwritten
61 | })
62 |
63 | test_that("Existing components Makefile", {
64 | tempdir <- .generate_temp_dir()
65 | dir.create(tempdir)
66 | dummy <- c("831721", "GESIS")
67 | writeLines(dummy, file.path(tempdir, "Makefile"))
68 | use_rang(tempdir, verbose = FALSE)
69 | content <- readLines(file.path(tempdir, "Makefile"))
70 | expect_equal(content, dummy)
71 | ## force
72 | tempdir <- .generate_temp_dir()
73 | dir.create(tempdir)
74 | writeLines(dummy, file.path(tempdir, "Makefile"))
75 | use_rang(tempdir, verbose = FALSE, force = TRUE)
76 | content <- readLines(file.path(tempdir, "Makefile"))
77 | expect_unequal(content, dummy) ## got overwritten
78 | })
79 |
80 | test_that("Existing components .here", {
81 | tempdir <- .generate_temp_dir()
82 | dir.create(tempdir)
83 | dummy <- c("831721", "GESIS")
84 | writeLines(dummy, file.path(tempdir, ".here"))
85 | use_rang(tempdir, verbose = FALSE)
86 | content <- readLines(file.path(tempdir, ".here"))
87 | expect_equal(content, dummy)
88 | ## force
89 | tempdir <- .generate_temp_dir()
90 | dir.create(tempdir)
91 | writeLines(dummy, file.path(tempdir, ".here"))
92 | use_rang(tempdir, verbose = FALSE, force = TRUE)
93 | content <- readLines(file.path(tempdir, ".here"))
94 | expect_unequal(content, dummy) ## got overwritten
95 | })
96 |
97 | test_that("Apptainer", {
98 | tempdir <- .generate_temp_dir()
99 | dir.create(tempdir)
100 | msg <- capture_messages(use_rang(tempdir, apptainer = TRUE))
101 | expect_true(any(grepl("infrastructure", msg)))
102 | tempdir <- .generate_temp_dir()
103 | dir.create(tempdir)
104 | expect_silent(use_rang(tempdir, verbose = FALSE, apptainer = TRUE))
105 | expect_true(dir.exists(file.path(tempdir, "inst/rang")))
106 | expect_true(file.exists(file.path(tempdir, "inst/rang/update.R")))
107 | expect_true(file.exists(file.path(tempdir, ".here")))
108 | expect_true(file.exists(file.path(tempdir, "Makefile")))
109 | expect_true(any(grepl("apptainer", readLines(file.path(tempdir, "inst/rang/update.R")))))
110 | expect_true(any(grepl("apptainer", readLines(file.path(tempdir, "Makefile")))))
111 | })
112 |
--------------------------------------------------------------------------------
/vignettes/.gitignore:
--------------------------------------------------------------------------------
1 | *.html
2 | *.R
3 |
--------------------------------------------------------------------------------
/vignettes/compendium.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create research compendia"
3 | output: rmarkdown::html_vignette
4 | vignette: >
5 | %\VignetteIndexEntry{Create research compendia}
6 | %\VignetteEngine{knitr::rmarkdown}
7 | %\VignetteEncoding{UTF-8}
8 | ---
9 |
10 | ```{r, include = FALSE}
11 | knitr::opts_chunk$set(
12 | collapse = TRUE,
13 | comment = "#>"
14 | )
15 | ```
16 |
17 | There have been several implementations of research compendia in the R ecosystem already: `rrtools`, `rcompendium`, `template`, `manuscriptPackage`, and `ProjectTemplate`. The idea of `use_rang()` is not to create a new format. Instead, `use_rang()` can be used to **enhance** your current research compendium.
18 |
19 | If you would like to create a new research compendium, you can either use any of the aforementioned formats, or use `create_turing()` to create a structure suggested by The Turing Way. However, the idea is that `use_rang()`, a `usethis`-style function, is a general function that can work well with any structure. Just like `rang` in general, `use_rang()` is designed with interoperability in mind.
20 |
21 | # Case 1: Create a Turing-style research compendium
22 |
23 | `create_turing()` can be used to create a general research compendium structure. The function generates an example structure like this:
24 |
25 | ```txt
26 | .
27 | ├── bibliography.bib
28 | ├── CITATION
29 | ├── code
30 | │ ├── 00_preprocess.R
31 | │ └── 01_visualization.R
32 | ├── data_clean
33 | ├── data_raw
34 | │ └── penguins_raw.csv
35 | ├── figures
36 | ├── .here
37 | ├── inst
38 | │ └── rang
39 | │ └── update.R
40 | ├── Makefile
41 | └── paper.Rmd
42 | ```
43 |
44 | More information about this can be found in [the Turing Way](https://the-turing-way.netlify.app/reproducible-research/compendia.html). But in general:
45 |
46 | 1. Raw data should be in the directory `data_raw`
47 | 2. Scripts should be in the directory `code`, preferably named in the execution order.
3. The scripts should generate intermediate data files in `data_clean`, figures in `figures`.
4. After the code execution, a file for the manuscript is written with literate programming techniques. In this case, `paper.Rmd`.
50 |
51 | The special part is `inst/rang/update.R`. Running this script does the following things:
52 |
53 | 1. It scans the current directory for all R packages used.
54 | 2. It creates the infrastructure for building the Docker image to run the code.
55 | 3. It caches all R packages.
56 |
57 | As written in the file, you should edit this script to cater for your own needs. You might also need to run this multiple times during the project lifecycle. You can also use the `Makefile` included to pull off some of the tasks. For example, you can run `make update` to run `inst/rang/update.R`. We highly recommend using GNU Make.
58 |
59 | The first step is to run `inst/rang/update.R`. You can either run it by `Rscript inst/rang/update.R` or `make update`. It will determine the snapshot date, scan the current directory for R dependencies, determine the dependency graph, generate `Dockerfile`, and cache R packages.
60 |
61 | After running it, you should have `Dockerfile` at the root level. In `inst/rang`, you should have `rang.R` and `cache`. Now, you can build the Docker image. We recommend using GNU Make and type `make build` (or `docker build -t yourprojectimg .`). And launch the Docker container (`make launch` or `docker run --rm --name "yourprojectcontainer" -ti yourprojectimg`). Another idea is to launch a Bash shell (`make bash` or `docker run --rm --name "yourprojectcontainer" --entrypoint bash -ti yourprojectimg`). Let's assume you take this approach.
62 |
63 | Inside the container, you will get all your files. And that container should have all the dependencies installed and you can run all the scripts right away. Let's say
64 |
65 | ```bash
66 | Rscript code/00_preprocess.R
67 | Rscript code/01_visualization.R
68 | Rscript -e "rmarkdown::render('paper.Rmd')"
69 | ```
70 |
71 | You can copy any artefact generated inside the container from **another shell instance**.
72 |
73 | ```bash
74 | docker cp yourprojectcontainer:/paper.pdf ./
75 | ```
76 |
77 | ## Other ideas
78 |
79 | 1. Add a readme: `usethis::use_readme()`
80 | 2. Add a license: `usethis::use_mit_license()`
81 | 3. Add the steps to run the code in the `Makefile`, but you'll need to rerun `make build`.
82 | 4. Export the Docker image: `make export` and restore it `make restore`
83 |
84 | # Case 2: Enhance an existing research compendium
85 |
86 | Oser et al. shared their data as a zip file on [OSF](https://osf.io/y7cg5). You can obtain a copy using `osfr`.
87 |
88 | ```bash
89 | Rscript -e "osfr::osf_download(osfr::osf_retrieve_file('https://osf.io/y7cg5'))"
90 | unzip meta-analysis\ replication\ files.zip
91 | cd meta-analysis
92 | ```
93 |
94 | Suppose you want to use Apptainer to reproduce this research. At the root level of this compendium, run:
95 |
96 | ```bash
97 | Rscript -e "rang::use_rang(apptainer = TRUE)"
98 | ```
99 |
100 | This compendium is slightly more tricky because we know that there is one undeclared GitHub package. You need to edit `inst/rang/update.R` yourself. In this case, you also want to fix the `snapshot_date`. Also, you know that "texlive" is not needed.
101 |
102 | ```r
103 | pkgs <- as_pkgrefs(here::here())
104 | pkgs[pkgs == "cran::dmetar"] <- "MathiasHarrer/dmetar"
105 |
106 | rang <- resolve(pkgs,
107 | snapshot_date = "2021-08-11",
108 | verbose = TRUE)
109 |
110 | apptainerize(rang, output_dir = here::here(), verbose = TRUE, cache = TRUE,
111 | post_installation_steps = c(recipes[["make"]], recipes[["clean"]]),
112 | insert_readme = FALSE,
113 | copy_all = TRUE,
114 | cran_mirror = cran_mirror)
115 |
116 | ```
117 |
118 | You can also edit `Makefile` to give the project a handle. Maybe "oser" is a good handle.
119 |
120 | ```Makefile
121 | handle=oser
122 | .PHONY: update build launch bash daemon stop export
123 | ```
124 |
125 | Similar to above, we first run `make build` to build the Apptainer image. As the handle is "oser", it generates an Apptainer image called "oserimg.sif".
126 |
127 | Similar to above, you can now launch a bash shell and render the RMarkdown file.
128 |
129 | ```bash
130 | make bash
131 | Rscript -e "rmarkdown::render('README.Rmd', output_file = 'output.html')"
132 | exit
133 | ```
134 |
135 | When you exit, you have "output.html" on your host machine. You don't need to transfer the file from the container. Please note that this feature is handy but can also have a [negative impact on reproducibility](https://the-turing-way.netlify.app/reproducible-research/renv/renv-containers#words-of-warning).
136 |
137 | # What to share?
138 |
139 | It is important to know that there are at least two levels of reproducibility: 1) Whether your computational environment can be reproducibly reconstructed, and 2) Whether your analysis is reproducible. The discussion of reproducibility usually conflates the two. We want to focus on the 2nd goal.
140 |
141 | If your goal is to ensure other researchers can have a compatible computational environment that can (re)run your code, The Turing Way [recommends](https://the-turing-way.netlify.app/reproducible-research/renv/renv-containers#long-term-storage-of-container-images) that one should share the research compendium and the container images, not just the recipes, e.g. `Dockerfile` or `container.def`. There are many moving parts during the reconstruction, e.g. whether the source Docker image is available and usable. As long as Docker or Apptainer supports the same image format (or allows upgrading the current format), sharing the images is the most future-proof method.
142 |
--------------------------------------------------------------------------------