├── .Rprofile
├── .github
└── workflows
│ └── deploy-book.yml
├── .gitignore
├── R
├── docs-examples
│ ├── ggseg.R
│ └── scatterplot-matrix.R
├── run-all.R
└── synthetic-data
│ ├── get_colnames_smri.R
│ └── simulate_data_pds.R
├── README.md
├── data
├── abcd_smri_colnames.rds
├── abcd_smri_colnames.txt
└── pds_summary.rds
├── docs
├── _config.yml
├── _toc.yml
├── bug-reference.md
├── contributing.md
├── figs
│ ├── py_nilearn_leftlateral.png
│ ├── py_nilearn_rightlateral.png
│ ├── py_seaborn_pairs.png
│ ├── r_ggally_pairs.png
│ └── r_ggseg_brain.png
├── intro.md
├── logo.png
├── nikmabcd-logo.png
├── old_tweets.md
├── recent_tweets.md
├── references.bib
├── requirements.txt
├── resources
│ ├── abcd-specific.md
│ ├── computing.md
│ ├── science.md
│ └── statistics.md
├── starting.md
├── user-manual.md
└── visualization.md
├── environment.yml
├── now-i-know-my-abcd.Rproj
├── python
└── docs-examples
│ ├── nilearn-plotting.py
│ └── scatterplot-matrix.py
├── renv.lock
└── renv
└── activate.R
/.Rprofile:
--------------------------------------------------------------------------------
# Activate the renv project library on R startup so that library() calls
# resolve to the project-local, lockfile-pinned package versions rather
# than the user/system library.
source("renv/activate.R")
--------------------------------------------------------------------------------
/.github/workflows/deploy-book.yml:
--------------------------------------------------------------------------------
1 | name: deploy-book
2 |
3 | # Only run this when the master branch changes
4 | on:
5 | push:
6 | branches:
7 | - main
8 | # If your git repository has the Jupyter Book within some-subfolder next to
9 | # unrelated files, you can make this run only if a file within that specific
10 | # folder has been modified.
11 | #
12 | paths:
13 | - docs/**
14 | - .github/workflows/deploy-book.yml
15 |
16 | jobs:
# This job installs dependencies, builds the book, and pushes it to `gh-pages`
18 | deploy-book:
19 | runs-on: ubuntu-latest
20 |
21 | steps:
22 | - uses: actions/checkout@v2
23 |
24 | # Install py dependencies
25 | - name: Set up Python 3.10
26 | uses: actions/setup-python@v2
27 | with:
28 | python-version: 3.10.5
29 |
- name: Install Python dependencies with pip (not conda)
31 | run: |
32 | python -m pip install --upgrade pip
33 | pip install jupyter-book sphinx-inline-tabs nilearn seaborn
34 |
35 | # Run the code tutorial scripts
36 | # must cd into the folder bc the paths in the scripts
37 | # are relative from the script path
38 | # and python is sensitive to the dir from which
39 | # the script is called
40 | - name: Run externalized py tutorial scripts
41 | run: |
42 | cd python/docs-examples
43 | for f in *.py; do python "$f"; done
44 | cd ../..
45 |
46 | # Build the book
47 | - name: Build the book
48 | run: |
49 | jupyter-book build docs/
50 |
51 | # Push the book's HTML to github-pages
52 | - name: GitHub Pages action
53 | uses: peaceiris/actions-gh-pages@v3.8.0
54 | with:
55 | github_token: ${{ secrets.GITHUB_TOKEN }}
56 | publish_dir: docs/_build/html
57 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/*
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 | renv/*
131 | # but not renv/activate.R
132 | !renv/activate.R
133 |
134 | # Spyder project settings
135 | .spyderproject
136 | .spyproject
137 |
138 | # Rope project settings
139 | .ropeproject
140 |
141 | # mkdocs documentation
142 | /site
143 |
144 | # mypy
145 | .mypy_cache/
146 | .dmypy.json
147 | dmypy.json
148 |
149 | # Pyre type checker
150 | .pyre/
151 |
152 | # pytype static type analyzer
153 | .pytype/
154 |
155 | # Cython debug symbols
156 | cython_debug/
157 |
158 | # PyCharm
159 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
160 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
161 | # and can be added to the global gitignore or merged into this file. For a more nuclear
162 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
163 | #.idea/
164 |
# macOS system files
166 | *.DS_Store
167 |
168 | # R system files
169 | .Rproj.user
170 | .Rhistory
171 |
# Local data that should not be synced
173 | ignore/*
174 |
--------------------------------------------------------------------------------
/R/docs-examples/ggseg.R:
--------------------------------------------------------------------------------
library(tidyverse)
library(ggseg)
library(ggsegGordon)

set.seed(17)

# Attach a fake statistic to every labeled parcel of the Gordon atlas.
# Parcels with a missing annot (the midline callosal panels) keep NA so
# no statistic is simulated or displayed for them.
gordon_with_stats <- as_tibble(gordon) %>%
  mutate(sim_stat = if_else(!is.na(annot), runif(n(), -1, 1), NA_real_))

# Plot the simulated statistic on the brain, faceted hemisphere-by-side.
r_ggseg_brain <- ggplot(gordon_with_stats) +
  geom_brain(atlas = gordon,
             mapping = aes(fill = sim_stat),
             position = position_brain(hemi ~ side)) +
  scale_fill_gradient2(low = "#68BCD6",
                       high = "#CE84AD") +
  labs(fill = "simulated\nstatistic")

r_ggseg_brain

# Export the figure for the docs site.
ggsave(here::here("docs", "figs", "r_ggseg_brain.png"),
       plot = r_ggseg_brain,
       device = "png",
       width = 5,
       height = 3,
       units = "in")
--------------------------------------------------------------------------------
/R/docs-examples/scatterplot-matrix.R:
--------------------------------------------------------------------------------
library(tidyverse)
library(GGally)

set.seed(17)

# First five ABCD sMRI column names, used as fake measure names below.
abcd_smri_colnames <- read_rds(here::here("data", "abcd_smri_colnames.rds"))[1:5]

# Simulate 1000 participants x 5 measures of standard-normal noise,
# then reshape to one column per measure.
data <- crossing(src_subject_id = 1:1000,
                 var = abcd_smri_colnames) %>%
  mutate(value = rnorm(n())) %>%
  pivot_wider(id_cols = src_subject_id,
              names_from = var,
              values_from = value)

# Scatterplot matrix over the five simulated measures (id column dropped);
# small, translucent points keep the dense lower panels readable.
r_ggally_pairs <- GGally::ggpairs(
  select(data, -src_subject_id),
  lower = list(continuous = wrap("points", size = 0.5, alpha = 0.1))
)

r_ggally_pairs

# Export the figure for the docs site.
ggsave(here::here("docs", "figs", "r_ggally_pairs.png"),
       plot = r_ggally_pairs,
       device = "png",
       width = 5,
       height = 5,
       units = "in")
28 |
--------------------------------------------------------------------------------
/R/run-all.R:
--------------------------------------------------------------------------------
# Source every R script under R/ in one pass (synthetic data builders,
# then docs examples).
#
# Two fixes over the original:
#   * pattern = "\\.[Rr]$" so only R scripts are sourced (the original
#     unfiltered list.files() would source any stray non-R file too)
#   * this runner excludes itself — the original list included run-all.R,
#     so source()-ing it re-entered this loop and recursed forever
runner_path <- here::here("R", "run-all.R")

paths <- list.files(here::here("R"),
                    pattern = "\\.[Rr]$",
                    recursive = TRUE,
                    full.names = TRUE)
paths <- setdiff(paths, runner_path)

for (path in paths) {
  # Each script is self-contained; source for its side effects
  # (writing data/ objects and docs/figs/ images).
  source(path)
}
--------------------------------------------------------------------------------
/R/synthetic-data/get_colnames_smri.R:
--------------------------------------------------------------------------------
library(tidyverse)

# Build the list of ABCD structural-MRI column names used by the docs examples.
#
# The string below is a pasted mapping of FreeSurfer-style variable names to
# ABCD tabulated-data column names ("FS_name = abcd_name,").  Lines beginning
# with "#" *inside the string* mark measures kept for reference but excluded
# from the output; they are filtered out after splitting, not by the R parser.
abcd_smri_colnames <- "FS_InterCranial_Vol = smri_vol_scs_intracranialv, #technically inter vs intra
#FS_BrainSeg_Vol
#FS_BrainSeg_Vol_No_Vent
#FS_BrainSeg_Vol_No_Vent_Surf
FS_LCort_GM_Vol = smri_vol_cdk_totallh,
FS_RCort_GM_Vol = smri_vol_cdk_totalrh,
FS_TotCort_GM_Vol = smri_vol_cdk_total, #total whole brain cortical volume
FS_SubCort_GM_Vol = smri_vol_scs_subcorticalgv,
#FS_Total_GM_Vol #maybe just calculate?
FS_SupraTentorial_Vol = smri_vol_scs_suprateialv, #assuming this includes all ventricles
#FS_SupraTentorial_Vol_No_Vent #smri_vol_scs_allventricles is all ventricles, subtract?
#FS_SupraTentorial_No_Vent_Voxel_Count
#FS_Mask_Vol
#FS_BrainSegVol_eTIV_Ratio
#FS_MaskVol_eTIV_Ratio
FS_L_LatVent_Vol = smri_vol_scs_ltventriclelh,
FS_L_InfLatVent_Vol = smri_vol_scs_inflatventlh,
FS_L_Cerebellum_WM_Vol = smri_vol_scs_crbwmatterlh,
FS_L_Cerebellum_Cort_Vol = smri_vol_scs_crbcortexlh,
FS_L_ThalamusProper_Vol = smri_vol_scs_tplh,
FS_L_Caudate_Vol = smri_vol_scs_caudatelh,
FS_L_Putamen_Vol = smri_vol_scs_putamenlh,
FS_L_Pallidum_Vol = smri_vol_scs_pallidumlh,
FS_3rdVent_Vol = smri_vol_scs_3rdventricle,
FS_4thVent_Vol = smri_vol_scs_4thventricle,
FS_BrainStem_Vol = smri_vol_scs_bstem,
FS_L_Hippo_Vol = smri_vol_scs_hpuslh,
FS_L_Amygdala_Vol = smri_vol_scs_amygdalalh,
FS_CSF_Vol = smri_vol_scs_csf,
FS_L_AccumbensArea_Vol = smri_vol_scs_aal,
FS_L_VentDC_Vol = smri_vol_scs_vedclh,
#FS_L_Vessel_Vol
#FS_L_ChoroidPlexus_Vol
FS_R_LatVent_Vol = smri_vol_scs_ltventriclerh,
FS_R_InfLatVent_Vol = smri_vol_scs_inflatventrh,
FS_R_Cerebellum_WM_Vol = smri_vol_scs_crbwmatterrh,
FS_R_Cerebellum_Cort_Vol = smri_vol_scs_crbcortexrh,
FS_R_ThalamusProper_Vol = smri_vol_scs_tprh,
FS_R_Caudate_Vol = smri_vol_scs_caudaterh,
FS_R_Putamen_Vol = smri_vol_scs_putamenrh,
FS_R_Pallidum_Vol = smri_vol_scs_pallidumrh,
FS_R_Hippo_Vol = smri_vol_scs_hpusrh,
FS_R_Amygdala_Vol = smri_vol_scs_amygdalarh,
FS_R_AccumbensArea_Vol = smri_vol_scs_aar,
FS_R_VentDC_Vol = smri_vol_scs_vedcrh,
#FS_R_Vessel_Vol
#FS_R_ChoroidPlexus_Vol
#FS_OpticChiasm_Vol
FS_CC_Posterior_Vol = smri_vol_scs_ccps,
FS_CC_MidPosterior_Vol = smri_vol_scs_ccmidps,
FS_CC_Central_Vol = smri_vol_scs_ccct,
FS_CC_MidAnterior_Vol = smri_vol_scs_ccmidat,
FS_CC_Anterior_Vol = smri_vol_scs_ccat,
FS_L_Bankssts_Area = smri_area_cdk_banksstslh,
FS_L_Caudalanteriorcingulate_Area = smri_area_cdk_cdacatelh,
FS_L_Caudalmiddlefrontal_Area = smri_area_cdk_cdmdfrlh,
FS_L_Cuneus_Area = smri_area_cdk_cuneuslh,
FS_L_Entorhinal_Area = smri_area_cdk_ehinallh,
FS_L_Fusiform_Area = smri_area_cdk_fusiformlh,
FS_L_Inferiorparietal_Area = smri_area_cdk_ifpllh,
FS_L_Inferiortemporal_Area = smri_area_cdk_iftmlh,
FS_L_Isthmuscingulate_Area = smri_area_cdk_ihcatelh,
FS_L_Lateraloccipital_Area = smri_area_cdk_locclh,
FS_L_Lateralorbitofrontal_Area = smri_area_cdk_lobfrlh,
FS_L_Lingual_Area = smri_area_cdk_linguallh,
FS_L_Medialorbitofrontal_Area = smri_area_cdk_mobfrlh,
FS_L_Middletemporal_Area = smri_area_cdk_mdtmlh,
FS_L_Parahippocampal_Area = smri_area_cdk_parahpallh,
FS_L_Paracentral_Area = smri_area_cdk_paracnlh,
FS_L_Parsopercularis_Area = smri_area_cdk_parsopclh,
FS_L_Parsorbitalis_Area = smri_area_cdk_parsobislh,
FS_L_Parstriangularis_Area = smri_area_cdk_parstgrislh,
FS_L_Pericalcarine_Area = smri_area_cdk_pericclh,
FS_L_Postcentral_Area = smri_area_cdk_postcnlh,
FS_L_Posteriorcingulate_Area = smri_area_cdk_ptcatelh,
FS_L_Precentral_Area = smri_area_cdk_precnlh,
FS_L_Precuneus_Area = smri_area_cdk_pclh,
FS_L_Rostralanteriorcingulate_Area = smri_area_cdk_rracatelh,
FS_L_Rostralmiddlefrontal_Area = smri_area_cdk_rrmdfrlh,
FS_L_Superiorfrontal_Area = smri_area_cdk_sufrlh,
FS_L_Superiorparietal_Area = smri_area_cdk_supllh,
FS_L_Superiortemporal_Area = smri_area_cdk_sutmlh,
FS_L_Supramarginal_Area = smri_area_cdk_smlh,
FS_L_Frontalpole_Area = smri_area_cdk_frpolelh,
FS_L_Temporalpole_Area = smri_area_cdk_tmpolelh,
FS_L_Transversetemporal_Area = smri_area_cdk_trvtmlh,
FS_L_Insula_Area = smri_area_cdk_insulalh,
FS_R_Bankssts_Area = smri_area_cdk_banksstsrh,
FS_R_Caudalanteriorcingulate_Area = smri_area_cdk_cdacaterh,
FS_R_Caudalmiddlefrontal_Area = smri_area_cdk_cdmdfrrh,
FS_R_Cuneus_Area = smri_area_cdk_cuneusrh,
FS_R_Entorhinal_Area = smri_area_cdk_ehinalrh,
FS_R_Fusiform_Area = smri_area_cdk_fusiformrh,
FS_R_Inferiorparietal_Area = smri_area_cdk_ifplrh,
FS_R_Inferiortemporal_Area = smri_area_cdk_iftmrh,
FS_R_Isthmuscingulate_Area = smri_area_cdk_ihcaterh,
FS_R_Lateraloccipital_Area = smri_area_cdk_loccrh,
FS_R_Lateralorbitofrontal_Area = smri_area_cdk_lobfrrh,
FS_R_Lingual_Area = smri_area_cdk_lingualrh,
FS_R_Medialorbitofrontal_Area = smri_area_cdk_mobfrrh,
FS_R_Middletemporal_Area = smri_area_cdk_mdtmrh,
FS_R_Parahippocampal_Area = smri_area_cdk_parahpalrh,
FS_R_Paracentral_Area = smri_area_cdk_paracnrh,
FS_R_Parsopercularis_Area = smri_area_cdk_parsopcrh,
FS_R_Parsorbitalis_Area = smri_area_cdk_parsobisrh,
FS_R_Parstriangularis_Area = smri_area_cdk_parstgrisrh,
FS_R_Pericalcarine_Area = smri_area_cdk_periccrh,
FS_R_Postcentral_Area = smri_area_cdk_postcnrh,
FS_R_Posteriorcingulate_Area = smri_area_cdk_ptcaterh,
FS_R_Precentral_Area = smri_area_cdk_precnrh,
FS_R_Precuneus_Area = smri_area_cdk_pcrh,
FS_R_Rostralanteriorcingulate_Area = smri_area_cdk_rracaterh,
FS_R_Rostralmiddlefrontal_Area = smri_area_cdk_rrmdfrrh,
FS_R_Superiorfrontal_Area = smri_area_cdk_sufrrh,
FS_R_Superiorparietal_Area = smri_area_cdk_suplrh,
FS_R_Superiortemporal_Area = smri_area_cdk_sutmrh,
FS_R_Supramarginal_Area = smri_area_cdk_smrh,
FS_R_Frontalpole_Area = smri_area_cdk_frpolerh,
FS_R_Temporalpole_Area = smri_area_cdk_tmpolerh,
FS_R_Transversetemporal_Area = smri_area_cdk_trvtmrh,
FS_R_Insula_Area = smri_area_cdk_insularh,
#abcd technically doesn't say gray matter volume for the following varibles
FS_L_Bankssts_GrayVol = smri_vol_cdk_banksstslh,
FS_L_Caudalanteriorcingulate_GrayVol = smri_vol_cdk_cdacatelh,
FS_L_Caudalmiddlefrontal_GrayVol = smri_vol_cdk_cdmdfrlh,
FS_L_Cuneus_GrayVol = smri_vol_cdk_cuneuslh,
FS_L_Entorhinal_GrayVol = smri_vol_cdk_ehinallh,
FS_L_Fusiform_GrayVol = smri_vol_cdk_fusiformlh,
FS_L_Inferiorparietal_GrayVol = smri_vol_cdk_ifpllh,
FS_L_Inferiortemporal_GrayVol = smri_vol_cdk_iftmlh,
FS_L_Isthmuscingulate_GrayVol = smri_vol_cdk_ihcatelh,
FS_L_Lateraloccipital_GrayVol = smri_vol_cdk_locclh,
FS_L_Lateralorbitofrontal_GrayVol = smri_vol_cdk_lobfrlh,
FS_L_Lingual_GrayVol = smri_vol_cdk_linguallh,
FS_L_Medialorbitofrontal_GrayVol = smri_vol_cdk_mobfrlh,
FS_L_Middletemporal_GrayVol = smri_vol_cdk_mdtmlh,
FS_L_Parahippocampal_GrayVol = smri_vol_cdk_parahpallh,
FS_L_Paracentral_GrayVol = smri_vol_cdk_paracnlh,
FS_L_Parsopercularis_GrayVol = smri_vol_cdk_parsopclh,
FS_L_Parsorbitalis_GrayVol = smri_vol_cdk_parsobislh,
FS_L_Parstriangularis_GrayVol = smri_vol_cdk_parstgrislh,
FS_L_Pericalcarine_GrayVol = smri_vol_cdk_pericclh,
FS_L_Postcentral_GrayVol = smri_vol_cdk_postcnlh,
FS_L_Posteriorcingulate_GrayVol = smri_vol_cdk_ptcatelh,
FS_L_Precentral_GrayVol = smri_vol_cdk_precnlh,
FS_L_Precuneus_GrayVol = smri_vol_cdk_pclh,
FS_L_Rostralanteriorcingulate_GrayVol = smri_vol_cdk_rracatelh,
FS_L_Rostralmiddlefrontal_GrayVol = smri_vol_cdk_rrmdfrlh,
FS_L_Superiorfrontal_GrayVol = smri_vol_cdk_sufrlh,
FS_L_Superiorparietal_GrayVol = smri_vol_cdk_supllh,
FS_L_Superiortemporal_GrayVol = smri_vol_cdk_sutmlh,
FS_L_Supramarginal_GrayVol = smri_vol_cdk_smlh,
FS_L_Frontalpole_GrayVol = smri_vol_cdk_frpolelh,
FS_L_Temporalpole_GrayVol = smri_vol_cdk_tmpolelh,
FS_L_Transversetemporal_GrayVol = smri_vol_cdk_trvtmlh,
FS_L_Insula_GrayVol = smri_vol_cdk_insulalh,
FS_R_Bankssts_GrayVol = smri_vol_cdk_banksstsrh,
FS_R_Caudalanteriorcingulate_GrayVol = smri_vol_cdk_cdacaterh,
FS_R_Caudalmiddlefrontal_GrayVol = smri_vol_cdk_cdmdfrrh,
FS_R_Cuneus_GrayVol = smri_vol_cdk_cuneusrh,
FS_R_Entorhinal_GrayVol = smri_vol_cdk_ehinalrh,
FS_R_Fusiform_GrayVol = smri_vol_cdk_fusiformrh,
FS_R_Inferiorparietal_GrayVol = smri_vol_cdk_ifplrh,
FS_R_Inferiortemporal_GrayVol = smri_vol_cdk_iftmrh,
FS_R_Isthmuscingulate_GrayVol = smri_vol_cdk_ihcaterh,
FS_R_Lateraloccipital_GrayVol = smri_vol_cdk_loccrh,
FS_R_Lateralorbitofrontal_GrayVol = smri_vol_cdk_lobfrrh,
FS_R_Lingual_GrayVol = smri_vol_cdk_lingualrh,
FS_R_Medialorbitofrontal_GrayVol = smri_vol_cdk_mobfrrh,
FS_R_Middletemporal_GrayVol = smri_vol_cdk_mdtmrh,
FS_R_Parahippocampal_GrayVol = smri_vol_cdk_parahpalrh,
FS_R_Paracentral_GrayVol = smri_vol_cdk_paracnrh,
FS_R_Parsopercularis_GrayVol = smri_vol_cdk_parsopcrh,
FS_R_Parsorbitalis_GrayVol = smri_vol_cdk_parsobisrh,
FS_R_Parstriangularis_GrayVol = smri_vol_cdk_parstgrisrh,
FS_R_Pericalcarine_GrayVol = smri_vol_cdk_periccrh,
FS_R_Postcentral_GrayVol = smri_vol_cdk_postcnrh,
FS_R_Posteriorcingulate_GrayVol = smri_vol_cdk_ptcaterh,
FS_R_Precentral_GrayVol = smri_vol_cdk_precnrh,
FS_R_Precuneus_GrayVol = smri_vol_cdk_pcrh,
FS_R_Rostralanteriorcingulate_GrayVol = smri_vol_cdk_rracaterh,
FS_R_Rostralmiddlefrontal_GrayVol = smri_vol_cdk_rrmdfrrh,
FS_R_Superiorfrontal_GrayVol = smri_vol_cdk_sufrrh,
FS_R_Superiorparietal_GrayVol = smri_vol_cdk_suplrh,
FS_R_Superiortemporal_GrayVol = smri_vol_cdk_sutmrh,
FS_R_Supramarginal_GrayVol = smri_vol_cdk_smrh,
FS_R_Frontalpole_GrayVol = smri_vol_cdk_frpolerh,
FS_R_Temporalpole_GrayVol = smri_vol_cdk_tmpolerh,
FS_R_Transversetemporal_GrayVol = smri_vol_cdk_trvtmrh,
FS_R_Insula_GrayVol = smri_vol_cdk_insularh" %>%
  # one element per pasted line
  str_split("\n") %>%
  unlist() %>%
  # drop the lines that are "commented out" inside the pasted string
  .[!startsWith(., "#")] %>%
  # keep only the ABCD column name: the text after "=" and before the
  # first "," (which strips the trailing in-string annotations too)
  map_chr(~str_split(., "=") %>%
            unlist() %>%
            pluck(2) %>%
            str_split(., ",") %>%
            unlist() %>%
            pluck(1) %>%
            str_trim(side = "left"))

# Persist both as an R object (read by the R docs examples) and as plain
# text (read by the Python docs examples).
write_rds(abcd_smri_colnames, file = here::here("data", "abcd_smri_colnames.rds"))
write_lines(abcd_smri_colnames, file = here::here("data", "abcd_smri_colnames.txt"))
--------------------------------------------------------------------------------
/R/synthetic-data/simulate_data_pds.R:
--------------------------------------------------------------------------------
## setup ----

# library() (unlike require()) errors immediately if tidyverse is missing,
# rather than returning FALSE and letting the script fail later with a
# confusing "could not find function" error
library(tidyverse)

## read in summary of abcd puberty data ----

# pds_summary.rds stores a printed summary() table of the real ABCD pubertal
# development scale data; reshape it to one column per original variable,
# one row per summary line
abcd_pds_summary_real <- read_rds(here::here("data", "pds_summary.rds")) %>%
  as_tibble(.name_repair = "universal") %>%
  select(colname = ...2, value = n) %>%
  # trim the whitespace padding left over from the printed summary table;
  # a lambda is used because passing extra args through across()'s ... to
  # the function is deprecated in dplyr >= 1.1
  mutate(across(everything(), \(x) str_trim(x, side = "both"))) %>%
  group_by(colname) %>%
  # temporary within-variable row index so pivot_wider can align rows
  mutate(id_temp = row_number()) %>%
  ungroup() %>%
  pivot_wider(names_from = colname, values_from = value) %>%
  select(-id_temp)

## parse summary stats for numeric cols ----

# numeric variables are recognizable because their first summary line
# starts with "Min."
abcd_pds_summary_num <- abcd_pds_summary_real %>%
  select(where(\(x) startsWith(x[1], "Min."))) %>%
  pivot_longer(cols = everything(), names_to = "colname", values_to = "value") %>%
  # each line looks like "Mean   : 2.31"
  separate(value, into = c("stat", "value"), sep = ":") %>%
  mutate(stat = str_trim(stat, side = "right"),
         # NA values will get coerced to NA bc they're strings, but that's fine here
         value = as.numeric(value)) %>%
  filter(!is.na(stat)) %>%
  # rename summary()'s display labels to machine-friendly stat names
  mutate(stat = dplyr::recode(stat,
                              Min. = "min",
                              `1st Qu.` = "q25",
                              Median = "median",
                              Mean = "mean",
                              `3rd Qu.` = "q75",
                              Max. = "max",
                              `NA's` = "n_na")) %>%
  pivot_wider(id_cols = colname, names_from = stat, values_from = value) %>%
  # variables with no missing values never get an "NA's" summary row,
  # so fill their NA count with 0
  mutate(n_na = coalesce(n_na, 0))

## parse summary stats for character cols ----

# everything that is not numeric (no leading "Min." line) is treated as a
# categorical variable summarized as "level: count" lines
abcd_pds_summary_char <- abcd_pds_summary_real %>%
  select(where(\(x) !startsWith(x[1], "Min."))) %>%
  pivot_longer(cols = everything(), names_to = "colname", values_to = "value") %>%
  filter(!is.na(value)) %>%
  separate(value, into = c("level", "value"), sep = ":") %>%
  mutate(value = str_trim(value, side = "right"),
         value = as.numeric(value))
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Now I Know My ABCD
2 |
3 | Welcome to the GitHub repository home of Now I Know My ABCD, a community-managed unofficial help resource for the ABCD Study! **Click the "Discussions" tab at the top of the page to access the help forum.**
4 |
5 | ## Issues (suggest and request updates)
6 |
7 | If you would like to suggest an update or addition to the Now I Know My ABCD documentation site, without writing the code yourself, first navigate to the website page you'd like to suggest a change to. Then, open a GitHub issue using the "open issue" link in the GitHub drop-down menu at the top of the page. Include a comment with your issue describing your suggested change, and a site moderator will review your issue and update the website.
8 |
9 | If you have more general ABCD questions, please visit the Discussion Board! Opening a GitHub issue is best suited to targeted content update requests and suggestions for individual pages.
10 |
11 | ## Forks and pull requests (code it yourself)
12 |
13 | The first step to contributing to the Now I Know My ABCD docs is to [fork](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks) the GitHub repository containing the source code. In your local fork, you can write new website content or edit existing content at your own pace without affecting the main site.
14 |
15 | Once you're ready, open a [pull request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests). A site moderator will review your pull request and work with you to make sure any tutorial code is reproducible. Once the updates are finalized, they will merge your updates into the main site!
16 |
17 | ## Adding pages
18 |
19 | If you would like to propose a new tutorial page for addition to the Now I Know My ABCD docs, these instructions will help you get started. They are written for contributors who already feel comfortable with Python, Jupyter, and Markdown.
20 |
21 | ### Creating content
22 |
23 | All new pages go into the `docs` subfolder. Text-only pages can be written in Markdown, while pages with code can be written as Jupyter notebooks. (Eventually, code pages can also be written in R Markdown, but I haven't figured out how to do that yet.)
24 |
25 | ### Adding to the table of contents
26 |
The table of contents is declared in `_toc.yml` in the `docs` folder. Add a page to the table of contents by adding a new line with `- file:` and then your page _filename, not the page title_ with no file ending. JupyterBook will automatically render the page's internal title (the level-1 header at the beginning of the page) as the chapter name in the table of contents.
28 |
29 | Order matters! Add the new line where you want the resulting chapter to end up.
30 |
31 | ## Rendering the book site locally
32 |
The website is currently equipped with a GitHub Actions workflow that renders the web-hosted version of the site _every time a change is pushed on the `main` branch._ You do not need to have `jupyter-book` installed on your local machine in order to get the live website to update! 🎉
34 |
35 | However, you _do_ need to have it installed if you want to preview website edits before pushing. This repo comes with a conda `environment.yml` file that you can (theoretically) use to install all relevant packages. In Monica's experience, `environment.yml` files can be picky and sometimes overly OS-specific, so please contact Monica if you have issues recreating the Python dependencies.
36 |
37 | When you have JupyterBook set up, the following terminal command will render the book locally when run from the project root folder:
38 |
39 | ```bash
40 | jupyter-book build docs/
41 | ```
42 |
43 | The local book homepage should then be accessible at `docs/_build/html/index.html` in your browser window.
44 |
--------------------------------------------------------------------------------
/data/abcd_smri_colnames.rds:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/data/abcd_smri_colnames.rds
--------------------------------------------------------------------------------
/data/abcd_smri_colnames.txt:
--------------------------------------------------------------------------------
1 | smri_vol_scs_intracranialv
2 | smri_vol_cdk_totallh
3 | smri_vol_cdk_totalrh
4 | smri_vol_cdk_total
5 | smri_vol_scs_subcorticalgv
6 | smri_vol_scs_suprateialv
7 | smri_vol_scs_ltventriclelh
8 | smri_vol_scs_inflatventlh
9 | smri_vol_scs_crbwmatterlh
10 | smri_vol_scs_crbcortexlh
11 | smri_vol_scs_tplh
12 | smri_vol_scs_caudatelh
13 | smri_vol_scs_putamenlh
14 | smri_vol_scs_pallidumlh
15 | smri_vol_scs_3rdventricle
16 | smri_vol_scs_4thventricle
17 | smri_vol_scs_bstem
18 | smri_vol_scs_hpuslh
19 | smri_vol_scs_amygdalalh
20 | smri_vol_scs_csf
21 | smri_vol_scs_aal
22 | smri_vol_scs_vedclh
23 | smri_vol_scs_ltventriclerh
24 | smri_vol_scs_inflatventrh
25 | smri_vol_scs_crbwmatterrh
26 | smri_vol_scs_crbcortexrh
27 | smri_vol_scs_tprh
28 | smri_vol_scs_caudaterh
29 | smri_vol_scs_putamenrh
30 | smri_vol_scs_pallidumrh
31 | smri_vol_scs_hpusrh
32 | smri_vol_scs_amygdalarh
33 | smri_vol_scs_aar
34 | smri_vol_scs_vedcrh
35 | smri_vol_scs_ccps
36 | smri_vol_scs_ccmidps
37 | smri_vol_scs_ccct
38 | smri_vol_scs_ccmidat
39 | smri_vol_scs_ccat
40 | smri_area_cdk_banksstslh
41 | smri_area_cdk_cdacatelh
42 | smri_area_cdk_cdmdfrlh
43 | smri_area_cdk_cuneuslh
44 | smri_area_cdk_ehinallh
45 | smri_area_cdk_fusiformlh
46 | smri_area_cdk_ifpllh
47 | smri_area_cdk_iftmlh
48 | smri_area_cdk_ihcatelh
49 | smri_area_cdk_locclh
50 | smri_area_cdk_lobfrlh
51 | smri_area_cdk_linguallh
52 | smri_area_cdk_mobfrlh
53 | smri_area_cdk_mdtmlh
54 | smri_area_cdk_parahpallh
55 | smri_area_cdk_paracnlh
56 | smri_area_cdk_parsopclh
57 | smri_area_cdk_parsobislh
58 | smri_area_cdk_parstgrislh
59 | smri_area_cdk_pericclh
60 | smri_area_cdk_postcnlh
61 | smri_area_cdk_ptcatelh
62 | smri_area_cdk_precnlh
63 | smri_area_cdk_pclh
64 | smri_area_cdk_rracatelh
65 | smri_area_cdk_rrmdfrlh
66 | smri_area_cdk_sufrlh
67 | smri_area_cdk_supllh
68 | smri_area_cdk_sutmlh
69 | smri_area_cdk_smlh
70 | smri_area_cdk_frpolelh
71 | smri_area_cdk_tmpolelh
72 | smri_area_cdk_trvtmlh
73 | smri_area_cdk_insulalh
74 | smri_area_cdk_banksstsrh
75 | smri_area_cdk_cdacaterh
76 | smri_area_cdk_cdmdfrrh
77 | smri_area_cdk_cuneusrh
78 | smri_area_cdk_ehinalrh
79 | smri_area_cdk_fusiformrh
80 | smri_area_cdk_ifplrh
81 | smri_area_cdk_iftmrh
82 | smri_area_cdk_ihcaterh
83 | smri_area_cdk_loccrh
84 | smri_area_cdk_lobfrrh
85 | smri_area_cdk_lingualrh
86 | smri_area_cdk_mobfrrh
87 | smri_area_cdk_mdtmrh
88 | smri_area_cdk_parahpalrh
89 | smri_area_cdk_paracnrh
90 | smri_area_cdk_parsopcrh
91 | smri_area_cdk_parsobisrh
92 | smri_area_cdk_parstgrisrh
93 | smri_area_cdk_periccrh
94 | smri_area_cdk_postcnrh
95 | smri_area_cdk_ptcaterh
96 | smri_area_cdk_precnrh
97 | smri_area_cdk_pcrh
98 | smri_area_cdk_rracaterh
99 | smri_area_cdk_rrmdfrrh
100 | smri_area_cdk_sufrrh
101 | smri_area_cdk_suplrh
102 | smri_area_cdk_sutmrh
103 | smri_area_cdk_smrh
104 | smri_area_cdk_frpolerh
105 | smri_area_cdk_tmpolerh
106 | smri_area_cdk_trvtmrh
107 | smri_area_cdk_insularh
108 | smri_vol_cdk_banksstslh
109 | smri_vol_cdk_cdacatelh
110 | smri_vol_cdk_cdmdfrlh
111 | smri_vol_cdk_cuneuslh
112 | smri_vol_cdk_ehinallh
113 | smri_vol_cdk_fusiformlh
114 | smri_vol_cdk_ifpllh
115 | smri_vol_cdk_iftmlh
116 | smri_vol_cdk_ihcatelh
117 | smri_vol_cdk_locclh
118 | smri_vol_cdk_lobfrlh
119 | smri_vol_cdk_linguallh
120 | smri_vol_cdk_mobfrlh
121 | smri_vol_cdk_mdtmlh
122 | smri_vol_cdk_parahpallh
123 | smri_vol_cdk_paracnlh
124 | smri_vol_cdk_parsopclh
125 | smri_vol_cdk_parsobislh
126 | smri_vol_cdk_parstgrislh
127 | smri_vol_cdk_pericclh
128 | smri_vol_cdk_postcnlh
129 | smri_vol_cdk_ptcatelh
130 | smri_vol_cdk_precnlh
131 | smri_vol_cdk_pclh
132 | smri_vol_cdk_rracatelh
133 | smri_vol_cdk_rrmdfrlh
134 | smri_vol_cdk_sufrlh
135 | smri_vol_cdk_supllh
136 | smri_vol_cdk_sutmlh
137 | smri_vol_cdk_smlh
138 | smri_vol_cdk_frpolelh
139 | smri_vol_cdk_tmpolelh
140 | smri_vol_cdk_trvtmlh
141 | smri_vol_cdk_insulalh
142 | smri_vol_cdk_banksstsrh
143 | smri_vol_cdk_cdacaterh
144 | smri_vol_cdk_cdmdfrrh
145 | smri_vol_cdk_cuneusrh
146 | smri_vol_cdk_ehinalrh
147 | smri_vol_cdk_fusiformrh
148 | smri_vol_cdk_ifplrh
149 | smri_vol_cdk_iftmrh
150 | smri_vol_cdk_ihcaterh
151 | smri_vol_cdk_loccrh
152 | smri_vol_cdk_lobfrrh
153 | smri_vol_cdk_lingualrh
154 | smri_vol_cdk_mobfrrh
155 | smri_vol_cdk_mdtmrh
156 | smri_vol_cdk_parahpalrh
157 | smri_vol_cdk_paracnrh
158 | smri_vol_cdk_parsopcrh
159 | smri_vol_cdk_parsobisrh
160 | smri_vol_cdk_parstgrisrh
161 | smri_vol_cdk_periccrh
162 | smri_vol_cdk_postcnrh
163 | smri_vol_cdk_ptcaterh
164 | smri_vol_cdk_precnrh
165 | smri_vol_cdk_pcrh
166 | smri_vol_cdk_rracaterh
167 | smri_vol_cdk_rrmdfrrh
168 | smri_vol_cdk_sufrrh
169 | smri_vol_cdk_suplrh
170 | smri_vol_cdk_sutmrh
171 | smri_vol_cdk_smrh
172 | smri_vol_cdk_frpolerh
173 | smri_vol_cdk_tmpolerh
174 | smri_vol_cdk_trvtmrh
175 | smri_vol_cdk_insularh
176 |
--------------------------------------------------------------------------------
/data/pds_summary.rds:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/data/pds_summary.rds
--------------------------------------------------------------------------------
/docs/_config.yml:
--------------------------------------------------------------------------------
1 | # Book settings
2 | # Learn more at https://jupyterbook.org/customize/config.html
3 |
4 | title: Now I Know My ABCD
5 | author: Sana Ali, Clare McCann, Monica Thieu, Lucy Whitmore
6 | logo: nikmabcd-logo.png
7 |
8 | # Force re-execution of notebooks on each build.
9 | # See https://jupyterbook.org/content/execute.html
10 | execute:
11 | execute_notebooks: force
12 |
13 | # Extra fancy Sphinx extensions for nicer book features
14 | # These need to be installed as python packages!
15 | sphinx:
16 | extra_extensions:
17 | - sphinx_inline_tabs
18 |
19 | # Define the name of the latex output file for PDF builds
20 | latex:
21 | latex_documents:
22 | targetname: book.tex
23 |
24 | # Add a bibtex file so that we can create citations
25 | bibtex_bibfiles:
26 | - references.bib
27 |
28 | # Information about where the book exists on the web
29 | repository:
30 | url: https://github.com/now-i-know-my-abcd/docs # Online location of your book
31 | path_to_book: docs # Optional path to your book, relative to the repository root
32 | branch: main # Which branch of the repository should be used when creating links (optional)
33 |
34 | # Add GitHub buttons to your book
35 | # See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository
36 | html:
37 | use_issues_button: true
38 | use_repository_button: true
39 | extra_footer: |
40 |
41 |
42 |
47 |
48 |
49 |
--------------------------------------------------------------------------------
/docs/_toc.yml:
--------------------------------------------------------------------------------
1 | # Table of contents
2 | # Learn more at https://jupyterbook.org/customize/toc.html
3 |
4 | format: jb-book
5 | root: intro
6 | parts:
7 | - caption: General info
8 | chapters:
9 | - file: user-manual
10 | - file: starting
11 | - url: https://github.com/now-i-know-my-abcd/docs/discussions
12 | title: ABCD Discussion Board
13 | - file: contributing
14 | - caption: Resources
15 | chapters:
16 | - file: resources/abcd-specific
17 | - file: resources/computing
18 | - file: resources/statistics
19 | - file: resources/science
20 | - caption: Methods tutorials
21 | chapters:
22 | - file: visualization
23 | - file: bug-reference
24 | - caption: ABCD on Twitter
25 | chapters:
26 | - file: recent_tweets
27 | - file: old_tweets
28 |
29 |
--------------------------------------------------------------------------------
/docs/bug-reference.md:
--------------------------------------------------------------------------------
1 | # Big book of bugs
2 |
3 | This is a placeholder chapter for a screened and sorted dictionary of common bugs ABCD users might run into, how to deal with them, and source info.
4 |
--------------------------------------------------------------------------------
/docs/contributing.md:
--------------------------------------------------------------------------------
1 | # How to contribute
2 |
3 | This reference book is maintained on GitHub, so you can contribute via issues and pull requests.
4 |
5 | % Note to website editors: the README isn't included in the directories that jupyter-book automatically checks for updates when re-rendering the website.
6 | % If you update the README, this page won't automatically pull the content and re-render the contributing page.
7 | % You need to make a dummy edit (add "TEST" or something on a line) on this page to force jupyter-book to re-render. Then remove the edit on this page and re-render again so that the README changes remain but the dummy edit is gone.
8 |
9 | ```{include} ../README.md
10 | ```
11 |
--------------------------------------------------------------------------------
/docs/figs/py_nilearn_leftlateral.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/figs/py_nilearn_leftlateral.png
--------------------------------------------------------------------------------
/docs/figs/py_nilearn_rightlateral.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/figs/py_nilearn_rightlateral.png
--------------------------------------------------------------------------------
/docs/figs/py_seaborn_pairs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/figs/py_seaborn_pairs.png
--------------------------------------------------------------------------------
/docs/figs/r_ggally_pairs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/figs/r_ggally_pairs.png
--------------------------------------------------------------------------------
/docs/figs/r_ggseg_brain.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/figs/r_ggseg_brain.png
--------------------------------------------------------------------------------
/docs/intro.md:
--------------------------------------------------------------------------------
1 | # Now I Know My ABCD
2 |
3 | The purpose of this website is to centralize helpful resources, bug fixes, tutorial vignettes, and other information for users of the [ABCD dataset.](https://abcdstudy.org/scientists/data-sharing/)
4 |
5 | This website was created as part of a 2022 [Neurohackademy](https://neurohackademy.org/) hackathon project.
6 |
7 | This website was originally written in English. If you wish to translate any page on this site, please use the Google Translate widget embedded at the bottom of the page.
8 |
9 | ```{tableofcontents}
10 | ```
11 |
--------------------------------------------------------------------------------
/docs/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/logo.png
--------------------------------------------------------------------------------
/docs/nikmabcd-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/now-i-know-my-abcd/docs/362cebbe23bb440a74eecbdf7b5a6d56bfe11618/docs/nikmabcd-logo.png
--------------------------------------------------------------------------------
/docs/old_tweets.md:
--------------------------------------------------------------------------------
1 | # Relevant ABCD Tweets Since 2015-01-01
2 |
3 | All tweets included in the document here are results of a twitter pull including the following keywords and hashtags:
4 | * "ABCD study"
5 | * "Adolescent Brain Cognitive Development study"
6 | * "Adolescent Brain Cognitive Development (ABCD) study"
7 | * "ABCD sample"
8 | * "ABCD Study Site"
9 | * "#ABCD study"
10 | * "#abcdstudy"
11 | * "ABCD-BIDS Community Collection"
12 |
13 | If you have suggestions for other keywords or hashtags to include in the twitter pull, please reach out to clarefmccann@ucla.edu.
14 |
15 | The document is sorted by the following categories:
16 | * Announcements
17 | * Questions
18 | * Issues
19 | * Resources
20 | * Shout-outs
21 | * Opportunities
22 | * Papers
23 | * Posters
24 | * Presentations
25 |
26 | To view relevant tweets from the past week, please visit the following [link](https://now-i-know-my-abcd.github.io/docs/recent_tweets.html).
27 |
28 |
29 |
33 |
34 |
--------------------------------------------------------------------------------
/docs/recent_tweets.md:
--------------------------------------------------------------------------------
1 | # Recent ABCD Relevant Tweets
2 |
3 | See what ABCD researchers have been up to in the recent weeks!
4 |
5 | Scroll through the feeds below to find recent papers, opportunities, announcements, and troubleshooting re: ABCD!
6 |
7 | ## "#ABCDStudy"
8 |
9 |
10 |
--------------------------------------------------------------------------------
/docs/references.bib:
--------------------------------------------------------------------------------
1 | ---
2 | ---
3 |
4 | @inproceedings{holdgraf_evidence_2014,
5 | address = {Brisbane, Australia, Australia},
6 | title = {Evidence for {Predictive} {Coding} in {Human} {Auditory} {Cortex}},
7 | booktitle = {International {Conference} on {Cognitive} {Neuroscience}},
8 | publisher = {Frontiers in Neuroscience},
9 | author = {Holdgraf, Christopher Ramsay and de Heer, Wendy and Pasley, Brian N. and Knight, Robert T.},
10 | year = {2014}
11 | }
12 |
13 | @article{holdgraf_rapid_2016,
14 | title = {Rapid tuning shifts in human auditory cortex enhance speech intelligibility},
15 | volume = {7},
16 | issn = {2041-1723},
17 | url = {http://www.nature.com/doifinder/10.1038/ncomms13654},
18 | doi = {10.1038/ncomms13654},
19 | number = {May},
20 | journal = {Nature Communications},
21 | author = {Holdgraf, Christopher Ramsay and de Heer, Wendy and Pasley, Brian N. and Rieger, Jochem W. and Crone, Nathan and Lin, Jack J. and Knight, Robert T. and Theunissen, Frédéric E.},
22 | year = {2016},
23 | pages = {13654},
24 | file = {Holdgraf et al. - 2016 - Rapid tuning shifts in human auditory cortex enhance speech intelligibility.pdf:C\:\\Users\\chold\\Zotero\\storage\\MDQP3JWE\\Holdgraf et al. - 2016 - Rapid tuning shifts in human auditory cortex enhance speech intelligibility.pdf:application/pdf}
25 | }
26 |
27 | @inproceedings{holdgraf_portable_2017,
28 | title = {Portable learning environments for hands-on computational instruction using container-and cloud-based technology to teach data science},
29 | volume = {Part F1287},
30 | isbn = {978-1-4503-5272-7},
31 | doi = {10.1145/3093338.3093370},
32 | abstract = {© 2017 ACM. There is an increasing interest in learning outside of the traditional classroom setting. This is especially true for topics covering computational tools and data science, as both are challenging to incorporate in the standard curriculum. These atypical learning environments offer new opportunities for teaching, particularly when it comes to combining conceptual knowledge with hands-on experience/expertise with methods and skills. Advances in cloud computing and containerized environments provide an attractive opportunity to improve the efficiency and ease with which students can learn. This manuscript details recent advances towards using commonly-available cloud computing services and advanced cyberinfrastructure support for improving the learning experience in bootcamp-style events. We cover the benefits (and challenges) of using a server hosted remotely instead of relying on student laptops, discuss the technology that was used in order to make this possible, and give suggestions for how others could implement and improve upon this model for pedagogy and reproducibility.},
33 | booktitle = {{ACM} {International} {Conference} {Proceeding} {Series}},
34 | author = {Holdgraf, Christopher Ramsay and Culich, A. and Rokem, A. and Deniz, F. and Alegro, M. and Ushizima, D.},
35 | year = {2017},
36 | keywords = {Teaching, Bootcamps, Cloud computing, Data science, Docker, Pedagogy}
37 | }
38 |
39 | @article{holdgraf_encoding_2017,
40 | title = {Encoding and decoding models in cognitive electrophysiology},
41 | volume = {11},
42 | issn = {16625137},
43 | doi = {10.3389/fnsys.2017.00061},
44 | abstract = {© 2017 Holdgraf, Rieger, Micheli, Martin, Knight and Theunissen. Cognitive neuroscience has seen rapid growth in the size and complexity of data recorded from the human brain as well as in the computational tools available to analyze this data. This data explosion has resulted in an increased use of multivariate, model-based methods for asking neuroscience questions, allowing scientists to investigate multiple hypotheses with a single dataset, to use complex, time-varying stimuli, and to study the human brain under more naturalistic conditions. These tools come in the form of “Encoding” models, in which stimulus features are used to model brain activity, and “Decoding” models, in which neural features are used to generate a stimulus output. Here we review the current state of encoding and decoding models in cognitive electrophysiology and provide a practical guide toward conducting experiments and analyses in this emerging field. Our examples focus on using linear models in the study of human language and audition. We show how to calculate auditory receptive fields from natural sounds as well as how to decode neural recordings to predict speech. The paper aims to be a useful tutorial to these approaches, and a practical introduction to using machine learning and applied statistics to build models of neural activity. The data analytic approaches we discuss may also be applied to other sensory modalities, motor systems, and cognitive systems, and we cover some examples in these areas. In addition, a collection of Jupyter notebooks is publicly available as a complement to the material covered in this paper, providing code examples and tutorials for predictive modeling in python. The aim is to provide a practical understanding of predictive modeling of human brain data and to propose best-practices in conducting these analyses.},
45 | journal = {Frontiers in Systems Neuroscience},
46 | author = {Holdgraf, Christopher Ramsay and Rieger, J.W. and Micheli, C. and Martin, S. and Knight, R.T. and Theunissen, F.E.},
47 | year = {2017},
48 | keywords = {Decoding models, Encoding models, Electrocorticography (ECoG), Electrophysiology/evoked potentials, Machine learning applied to neuroscience, Natural stimuli, Predictive modeling, Tutorials}
49 | }
50 |
51 | @book{ruby,
52 | title = {The Ruby Programming Language},
53 | author = {Flanagan, David and Matsumoto, Yukihiro},
54 | year = {2008},
55 | publisher = {O'Reilly Media}
56 | }
57 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | jupyter-book
2 | matplotlib
3 | numpy
4 |
--------------------------------------------------------------------------------
/docs/resources/abcd-specific.md:
--------------------------------------------------------------------------------
1 | ## ABCD-specific resources
2 |
3 | ### ABCD publications
4 |
5 | - Official list of ABCD-related publications: https://abcdstudy.org/publications/
6 |
7 | ### ABCD dataset and data resources
8 |
9 | - Instructions for accessing ABCD data from the cloud: https://nda.nih.gov/training/modules/cloud.html
10 | - NDA Tools https://github.com/NDAR/nda-tools
11 | - NDA S3 Downloader: https://github.com/ABCD-STUDY/nda-abcd-s3-downloader
12 | - Collection 3165 - DCAN Labs ABCD-BIDS https://collection3165.readthedocs.io/en/stable/
13 | - Responsible Use of Open-Access Developmental Data: The Adolescent Brain Cognitive Development (ABCD) Study: https://journals.sagepub.com/doi/abs/10.1177/09567976211003564?journalCode=pssa
14 |
15 | ### ABCD Workshops
16 |
17 | [2021 - Modeling Developmental Change Homepage](https://abcdworkshop.github.io/)
18 |
19 | [2021 - Modeling Developmental Change - Resources, Slides, Recorded Talks](https://abcdworkshop.github.io/resources/)
20 |
21 | Following the 2021 workshop, several attendees came together and published this manuscript which is a practical guide for researchers using ABCD dataset and other large longitudinal datasets: [PracticalGuide.pdf](https://github.com/now-i-know-my-abcd/docs/files/9310731/PracticalGuide.pdf)
22 |
23 | [2019 - Brain Development and Mental Health Homepage](https://abcdworkshop.github.io/past-workshops/2019/)
24 |
25 | [2019 - Brain Development and Mental Health Resources, Slides, Recorded Talks](https://abcdworkshop.github.io/past-workshops/2019/resources/)
26 |
27 | Description: Slides, tutorials, and recorded talks from the 2019 and 2021 ABCD Workshops. Resources on reproducible science, working with ABCD, modeling, puberty, and adolescent mental health! Recorded talks, slides, and code tutorials.
28 |
29 | The ABCD Workshops were supported by NIH R25MH125545.
30 |
31 | ### ABCD ReproNim
32 |
33 | [ABCD ReproNim Workshop Materials](https://www.abcd-repronim.org/materials.html)
34 |
35 | [ABCD ReproNim Neurostars Forum](https://neurostars.org/c/abcd-repronim/232)
36 |
37 | Description: Lectures, resources, and data exercises from the ABCD-ReproNim workshops. The ABCD-ReproNim Neurostars page contains some Q&A about working with the data.
38 |
--------------------------------------------------------------------------------
/docs/resources/computing.md:
--------------------------------------------------------------------------------
1 | ## Scientific computing & methods
2 |
3 | *h/t to Victoria Guazzelli Williamson for curating the list of resources below, gathered during the MDC ABCD Workshop.*
4 |
5 | ### Preregistration
6 |
7 | - Instructions on creating blinded preregistrations: https://help.osf.io/hc/en-us/articles/360042097853-Create-a-View-only-Link-for-a-Registration
8 | - Examples
9 | - ABCD example from Monash University: https://osf.io/cjnh4
10 | - Search for ABCD at OSF.io
11 |
12 | ### Reproducibility
13 |
14 | - ReproducibiliTea Group! https://reproducibilitea.org/
15 | - ReproducibiliTea podcast https://soundcloud.com/reproducibilitea
16 | - Great YouTube channel which discusses really good topics on reproducible and open science practices https://www.youtube.com/c/RIOTScienceClub/videos
17 | - Reasons to work reproducibly: https://www.youtube.com/watch?v=yVT07Sukv9Q
18 |
19 | ### Data Science Tools
20 |
21 | - R for data science: https://r4ds.had.co.nz/
22 | - A book on EVERYTHING you need to know about using RStudio with Git: https://happygitwithr.com/
23 | - Accessible tutorials for learning R: https://ourcodingclub.github.io/tutorials
24 | - ABCD workshop tutorials: https://abcdworkshop.github.io/bookdown/introduction.html
25 | - fMRIprep overview: https://www.nature.com/articles/s41592-018-0235-4
26 | - Running fMRIprep on a server with no internet: https://fmriprep.org/en/stable/faq.html#i-m-running-fmriprep-via-singularity-containers-how-can-i-troubleshoot-problems
27 | - Datalad for version-controlling large datasets: https://www.datalad.org/
28 |
29 | ### Miscellaneous
30 |
31 | - Weston et al., https://pubmed.ncbi.nlm.nih.gov/32190814/
32 | - The case for formal methodology in scientific reform https://royalsocietypublishing.org/doi/10.1098/rsos.200805
33 |
--------------------------------------------------------------------------------
/docs/resources/science.md:
--------------------------------------------------------------------------------
1 | ## Domain-specific resources
2 |
3 | *h/t to Victoria Guazzelli Williamson for curating the list of resources below, gathered during the 2021 ABCD Workshop.*
4 |
5 | ### Adolescence
6 |
7 | **Mental Health & Psychopathology**
8 |
9 | - A focus on adolescence to reduce neurological, mental health and substance-use disability: https://www.nature.com/articles/nature16030
10 | - https://www.thelancet.com/journals/lancet/article/PIIS0140-6736(13)61611-6/fulltext
11 | - https://www.sciencedirect.com/science/article/pii/S014976340700053X
12 | - The ABCD study: understanding the development of risk for mental and physical health outcomes: https://www.nature.com/articles/s41386-020-0736-6
13 |
14 | **Puberty**
15 |
16 | - Multi-method assessment of pubertal timing and associations with internalizing psychopathology in early adolescent girls https://psyarxiv.com/p5vfb
17 | - A Researcher’s Guide to the Measurement and Modeling of Puberty in the ABCD Study® at Baseline https://www.frontiersin.org/articles/10.3389/fendo.2021.608575/full?&utm_source=Email_to_authors_&utm_medium=Email&utm_content=T1_11.5e1_author&utm_campaign=Email_publication&field=&journalName=Frontiers_in_Endocrinology&id=608575
18 |
19 | **Brain**
20 |
21 | - Baseline brain function in the preadolescents of the ABCD Study https://www.nature.com/articles/s41593-021-00867-9
22 | - Opportunities for increased reproducibility and replicability of developmental neuroimaging: https://www.sciencedirect.com/science/article/pii/S1878929320301511
23 |
24 | **Development/Longitudinal modeling**
25 |
26 | - Longitudinal modeling in developmental neuroimaging research: Common
27 | challenges, and solutions from developmental psychology: https://www.sciencedirect.com/science/article/pii/S1878929317300300
28 | - Improving practices and inferences in developmental cognitive neuroscience https://www.sciencedirect.com/science/article/pii/S1878929320300554
29 |
30 | ### Considerations for Racism, Discrimination and Race/Ethnicity
31 |
32 | - Hidden in plain sight -- reconsidering the use of race correction in clinical algorithms: https://www.nejm.org/doi/full/10.1056/NEJMms2004740
33 | - APA Guidelines for Race/Ethnicity in Psychology: https://www.apa.org/about/policy/guidelines-race-ethnicity.pdf
34 | - The National Institute on Minority Health and Health Disparities Research Framework: https://ajph.aphapublications.org/doi/10.2105/AJPH.2018.304883
35 | - Racism and mental health: Examining the link between racism and depression from a social cognitive perspective. https://psycnet.apa.org/record/2015-40803-006
36 | - Social Determinants of Health: Future Directions for Health Disparities Research https://ajph.aphapublications.org/doi/full/10.2105/AJPH.2019.304964?utm_source=TrendMD&utm_medium=cpc&utm_campaign=Am_J_Public_Health_TrendMD_0
37 | - Racism: Science & Tools for the Public Health Professional https://ajph.aphapublications.org/doi/book/10.2105/9780875533049
38 | - The Cost of Racism for People of Color: Contextualizing Experiences of Discrimination https://www.apa.org/pubs/books/4317394
39 |
40 | ### Neuroimaging
41 |
42 | - What Is the Test-Retest Reliability of Common Task-Functional MRI Measures? New Empirical Evidence and a Meta-Analysis 10.1177/0956797620916786
43 | - fMRI can be highly reliable, but it depends on what you measure 10.31234/osf.io/9eaxk
44 | - Towards Reproducible Brain-Wide Association Studies https://doi.org/10.1101/2020.08.21.257758
45 |
46 | ### Big versus small studies
47 |
48 | - Leveraging big data to map neurodevelopmental trajectories in pediatric anxiety https://pubmed.ncbi.nlm.nih.gov/34147988/
49 | - Large teams develop and small teams disrupt science and technology https://pubmed.ncbi.nlm.nih.gov/30760923/
50 |
51 | ### Economic/Policy/Medical Data by US State
52 |
53 | - Twitter thread on resources for compiling economic/policy/medical data: https://twitter.com/elizmart133/status/1413163060005466114
54 |
--------------------------------------------------------------------------------
/docs/resources/statistics.md:
--------------------------------------------------------------------------------
1 | ## Statistics
2 |
3 | *h/t to Victoria Guazzelli Williamson for curating the list of resources below, gathered during the 2021 ABCD Workshop.*
4 |
5 | ### Statistical control
6 |
7 | - Statistical Control Requires Causal Justification https://osf.io/38mxq/
8 |
9 | ### Meaningful effects
10 |
11 | - Workshop: https://apps1.seiservices.com/meaningfuleffects/
12 | - https://apps1.seiservices.com/meaningfuleffects/09022020_MeaningfulEffects_Summary.pdf
13 | - Meaningful Associations in the Adolescent Brain Cognitive Development Study: https://www.biorxiv.org/content/10.1101/2020.09.01.276451v2
14 | - Funder & Ozer 2019 https://journals.sagepub.com/doi/full/10.1177/2515245919847202
15 |
16 | ### Validity
17 |
18 | - Borsboom, Mellenbergh, Van Heerden - 2004 - The concept of validity https://doi.org/10.1037/0033-295X.111.4.1061
19 | - Construct Validation in Social and Personality Research: Current Practice and Recommendations https://doi.org/10.1177/1948550617693063
20 |
21 | ### Latent variables
22 |
23 | - Worse Than Measurement Error: Consequences of Inappropriate Latent Variable Measurement Models 10.1037/met0000220
24 | - Latent Change Score models https://www.sciencedirect.com/science/article/pii/S187892931730021X
25 |
26 | ### Stratifying/splitting samples
27 |
28 | - Data splitting tutorial https://topepo.github.io/caret/data-splitting.html
29 | - Stratified sampling tutorial: https://towardsdatascience.com/stratified-sampling-and-how-to-perform-it-in-r-8b753efde1ef
30 |
31 | ### Model selection
32 |
33 | - Overview paper (suggested by Sara Weston) on cross-lagged-panel-models (different types): https://psycnet.apa.org/fulltext/2020-54836-001.pdf
34 |
35 | - **ri-CLPM**
36 | - Papers to understand:
37 | - Hamaker, E. L., Kuiper, R. M., & Grasman, R. P. P. P. (2015). A critique of the cross-lagged panel model. Psychological Methods, 20(1), 102–116. https://doi.org/10.1037/a0038889
38 | - Mulder, J. D., & Hamaker, E. L. (2020). Three extensions of the random intercept cross-lagged panel model. Structural Equation Modeling. https://doi.org/10.1080/10705511.2020.1784738
39 | - Code (R/MPlus) to try it out:
40 | - https://jeroendmulder.github.io/RI-CLPM/
41 | - https://osf.io/cxc6f/
42 | - https://johnflournoy.science/2017/10/20/riclpm-lavaan-demo/, https://johnflournoy.science/riclpmr/
43 |
44 | ### Miscellaneous
45 |
46 | - Tong - 2019 - Statistical Inference Enables Bad Science Statistical Thinking Enables Good Science: 10.1080/00031305.2018.1518264
47 | - The Quantitude Podcast https://quantitudethepodcast.org/listen/
48 |
--------------------------------------------------------------------------------
/docs/starting.md:
--------------------------------------------------------------------------------
1 | # Getting Started with ABCD
2 |
3 | To work with ABCD data, you’ll need a few things: access to the National Data Archive (NDA), and an approved Data Use Certification (DUC). As a recent update, renewing DUCs will now require creating and registering an NDA study.
4 |
5 | ## Accessing the NDA & getting an approved DUC:
6 | The ABCD Repro-Nim Workshop has a [guide](https://docs.google.com/document/d/18hsT2x15bypuXFcfMQb9Ck_YEB7VvY2j4w5hwbV78A4/edit) to requesting NDA user accounts and obtaining a DUC.
7 |
8 | For the official NIH instructions for accessing the National Data Archive (NDA), see [NIH Instructions for NDA access](https://nda.nih.gov/about/creating-an-nda-account)
9 |
10 | For an overview of the DUC and responsible data use guidelines, see: [2021 MDC ABCD Workshop](https://abcdworkshop.github.io/slides/ResponsibleABCD.pdf).
11 |
12 | For the full text of the Data Use Certification (DUC), see [here](https://s3.amazonaws.com/nda.nih.gov/Documents/NDA+Data+Access+Request+DUC+FINAL.pdf)
13 |
14 | For information on creating NDA studies:
15 |
16 | - [Tutorial](https://nda.nih.gov/training/module?trainingModuleId=training.study&slideId=slide.study.intro)
17 |
18 | - [Steps](https://nda.nih.gov/get/manuscript-preparation.html)
19 |
20 | For information on how to download ABCD data, see: [How to Download ABCD Data.pdf](https://github.com/now-i-know-my-abcd/docs/files/9284755/How.to.Download.ABCD.Data.pdf)
21 |
22 |
--------------------------------------------------------------------------------
/docs/user-manual.md:
--------------------------------------------------------------------------------
1 | # User Manual
2 |
3 | Welcome to *Now I Know My ABCD*! This collaborative team project was initiated during Neurohackademy 2022, a summer school in data science and neuroimaging held at the University of Washington eSciences Institute. Our team consists of some wonderful humans. As the *creators* of this website, we will be maintaining the content as best we can. To reach out, please see contact information below:
4 |
5 | - Sana Ali (2nd-Year PhD student at UCSD); s5ali@ucsd.edu
6 | - Clare McCann (1st-Year PhD student at UCLA); clarefmccann@ucla.edu
7 | - Monica Thieu (Postdoctoral Fellow, Emory University); mthieu@emory.edu
8 | - Lucy Whitmore (2nd-Year PhD student, University of Oregon); lwhitmor@uoregon.edu
9 |
10 | Our hope is that we have provided ABCD researchers with a resource hub filled with frequently asked questions, code troubleshooting, archived Slack or related message board threads from past workshops, and associated tweets. Our group is committed to open science, careful work practices and a safe professional working space. A little more on this:
11 |
12 | ### Open science
13 | Open science is a movement in the scientific community promoting the dissemination of accessible research to society at all levels. Open science is committed to *transparency*, and we hope that we have contributed to this movement through collaborative networks.
14 |
15 | ### Careful work practices
16 | We should all strive to be vigilant about our data and analyses. Mistakes absolutely happen, but we should do as much as we can to prevent them and detect them. We hope to provide you with a resource to circumvent most issues; however, please note that we cannot guarantee that we have the answers to all questions related to ABCD.
17 |
18 | ### Safe professional working space
19 | This goes without saying but please be mindful that this is a safe, professional working space. We ask that you be considerate when navigating our website as well as respectful to others when contributing insights to the discussion board.
20 |
21 | **Important note**: This resource is meant to be accessible for researchers with or without a Data Use Certification (DUC), however, if you are interested in obtaining a DUC to work with the ABCD dataset, please refer to https://nda.nih.gov/abcd/.
22 |
23 | Lastly, we are *thrilled* to have you here and hope this network continues to grow!
24 |
--------------------------------------------------------------------------------
/docs/visualization.md:
--------------------------------------------------------------------------------
1 | ---
2 | jupytext:
3 | formats: md:myst
4 | text_representation:
5 | extension: .md
6 | format_name: myst
7 | kernelspec:
8 | display_name: Python 3
9 | language: python
10 | name: python3
11 | ---
12 |
13 | # Visualization tips
14 |
15 | ## Many correlations
16 |
17 | ### Exploratory visualization
18 |
19 | You may be exploring correlations between brain statistics, between behavioral/clinical correlates, or across brain and behavior. With each brain statistic being calculated across many different ROIs, and with many behavioral correlates, you can generate scatterplots in _small multiples_ in order to see the range of correlations as efficiently as possible.
20 |
21 | ````{tab} R
22 | [`ggpairs()`](https://ggobi.github.io/ggally/articles/ggpairs.html) from the `GGally` package will produce a flexible scatterplot matrix for exploratory visual data analysis. The plot is a `ggplot2` object under the hood, so you can modify `theme()` elements. However, `GGally` plot matrix objects don't behave exactly like single ggplots, so the syntax for modifying other plot aesthetics is slightly different.
23 |
24 | ```{code-block} r
25 | library(dplyr)
26 | library(GGally)
27 |
28 | data %>%
29 | # ggpairs() can drop columns from the scatterplot matrix, but not by name
30 | select(-src_subject_id) %>%
31 | ggpairs(lower = list(continuous = wrap("points", size = 0.5, alpha = 0.1)))
32 | ```
33 | ```{figure} figs/r_ggally_pairs.png
34 | ```
35 | ````
36 | ````{tab} Python
37 | [`pairplot()`](https://seaborn.pydata.org/generated/seaborn.pairplot.html) from the `seaborn` package will produce a flexible scatterplot matrix for exploratory visual data analysis. The plot is a `matplotlib` object under the hood, and can be manipulated as such.
38 | ```{code-block} python
39 | import pandas as pd
40 | import seaborn as sns
41 |
42 | sns.pairplot(data)
43 | ```
44 | ```{figure} figs/py_seaborn_pairs.png
45 | ```
46 | ````
47 |
48 | ### Visualization on the brain
49 |
50 | If you have already selected your behavioral/clinical correlate of interest, you may wish to show how it correlates with brain statistics across different ROIs. You can plot the brain-behavior correlation values as a statmap across the brain's surface, if you have mappings from your ROIs to surface visualizations.
51 |
52 | The examples below are shown (unless otherwise indicated) for plotting ROI-wise statistics using the [Gordon et al. (2016) resting-state functional connectivity parcellation](https://sites.wustl.edu/petersenschlaggarlab/parcels-19cwpgu/).
53 |
54 | ````{tab} R
55 | The [`ggseg`](https://ggseg.github.io/ggseg/index.html) R package uses `ggplot2` under the hood to plot pre-rendered 2D shape-file brain atlases and color individual regions by statistics of interest. The base package only comes with a couple default FreeSurfer atlases, but the [`ggsegGordon`](https://github.com/ggseg/ggsegGordon) helper package contains a compatible map of the Gordon parcellation.
56 |
57 | Note that because `ggseg <= 1.6.5` relies on static parcellation maps that were only rendered from lateral and medial views, you cannot easily render quick 2D views of, say, the ventral surface of the cortex.
58 |
59 | `ggseg` is optimized for ROI-based visualization, not vertex- or voxel-wise. It stores its atlases as tibbles/dataframes with one row per region. You can `join` a dataframe of ROI-wise statistics onto the atlas dataframe, and then plot the augmented atlas dataframe with your statistic of interest mapped to `aes(fill = my_statistic)`. You can bind on whatever ROI-wise statistics you have, as long as the parcel names in your data line up with the parcel names in the `ggsegGordon` atlas dataframe. If certain ROIs appear to be missing data when you plot, you may need to do some light string manipulation to harmonize any misaligned parcel names.
60 |
61 | ```{code-block} r
62 | library(dplyr)
63 | library(ggplot2)
64 | library(ggseg)
65 | library(ggsegGordon)
66 |
67 | gordon %>%
68 | as_tibble() %>%
69 | left_join(roi_data, by = "annot") %>%
70 | ggplot() +
71 | geom_brain(atlas = gordon,
72 | mapping = aes(fill = statistic),
73 | position = position_brain(hemi ~ side)) +
74 | scale_fill_gradient2(low = "#68BCD6",
75 | high = "#CE84AD") +
76 | labs(fill = "simulated\nstatistic")
77 | ```
78 | ```{figure} figs/r_ggseg_brain.png
79 | ```
80 | ````
81 | ````{tab} Python
82 | The `nilearn.plotting` library uses `plotly` and `matplotlib` under the hood to plot snapshots of 3D cortical surface visualizations. `nilearn.datasets` comes with a number of atlases; however, the Gordon atlas has not yet been implemented natively [^footnote1]. You can either import the atlas yourself as a `numpy` array of ROI codes (one per each surface vertex) or use an alternative atlas that has already been implemented. The example below plots simulated ROI-wise statistics for each parcel in the [Destrieux surface atlas](https://nilearn.github.io/stable/modules/generated/nilearn.datasets.fetch_atlas_surf_destrieux.html?highlight=destrieux#nilearn.datasets.fetch_atlas_surf_destrieux).
83 |
84 | Because `nilearn.plotting` uses `plotly` under the hood to flexibly snapshot the underlying brain model each time a plot is called, it supports all six canonical hemisphere views: medial, lateral, dorsal, ventral, anterior, and posterior. However, it can only render one hemisphere/view at a time.
85 |
86 | `nilearn.plotting` surface plots are optimized for vertex-wise visualization. `nilearn` stores atlases as 1D arrays with one row per vertex. Each row's value is that ROI's integer code, and all indices with the same code value thus belong to the same ROI. You can plot any array of stat values as long as the array has the same dimensions as the atlas. If you have a list or `pandas` dataframe column of one stat value for each atlas ROI label, you will need to iterate through the _atlas array of vertices,_ fetching the relevant stat value for each vertex.
87 |
88 | Once you have that, you can plot stat values on the surface! Use [`plot_surf_stat_map()`](https://nilearn.github.io/stable/modules/generated/nilearn.plotting.plot_surf_stat_map.html#nilearn.plotting.plot_surf_stat_map), which will vary color with your statistic of interest if it's fed into the `stat_map` argument. While it sounds like what you want, don't use `plot_surf_roi()` for this because that function is designed for plotting arbitrary categorical colors to ROI definition atlases.
89 |
90 | ```{code-block} python
91 | from nilearn import datasets, plotting, surface
92 | import numpy as np
93 | import pandas as pd
94 | import random
95 | from matplotlib import pyplot as plt
96 | from seaborn import diverging_palette
97 |
98 | # Retrieve destrieux parcellation in fsaverage5 space from nilearn
99 | destrieux_atlas = datasets.fetch_atlas_surf_destrieux()
100 |
101 | # The parcellation is already loaded into memory
102 | parcellation = destrieux_atlas['map_left']
103 |
104 | # Retrieve fsaverage5 surface dataset for the plotting background. It contains
105 | # the surface template as pial and inflated version and a sulcal depth maps
106 | # which is used for shading
107 | fsaverage = datasets.fetch_surf_fsaverage()
108 |
109 | # While it's not necessary to reformat atlas vertices as a dataframe, it's nice
110 | destrieux_df_left = pd.DataFrame({'roi_val': destrieux_atlas.map_left})
111 |
112 | destrieux_df_left['roi_label'] = [destrieux_atlas.labels[i] for i in destrieux_df_left['roi_val']]
113 |
114 | # Simulated statistics for each atlas ROI
115 | sim_stats = [random.uniform(-100, 100) for i in range(len(destrieux_atlas.labels))]
116 |
117 | destrieux_df_left['sim_stat'] = [sim_stats[i] for i in destrieux_df_left['roi_val']]
118 |
119 | plotting.plot_surf_stat_map(fsaverage['infl_left'],
120 | stat_map=np.array(destrieux_df_left['sim_stat'], dtype = np.int32),
121 | hemi='left', view='lateral',
122 | # Use seaborn's helper function for diverging gradient colormap
123 | cmap = diverging_palette(194, 327, s = 50, l = 64, as_cmap = True),
124 | bg_map=fsaverage['sulc_left'], bg_on_data=True,
125 | darkness=.5)
126 | ```
127 | ```{figure} figs/py_nilearn_leftlateral.png
128 | ```
129 | ````
130 |
131 | [^footnote1]: The Gordon atlas has been requested for addition into `nilearn.datasets`, but has not yet been implemented pending larger decisions for how to add new atlases into the library. https://github.com/nilearn/nilearn/issues/1167
132 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: /Users/mthieu/Repos/now-i-know-my-abcd/env
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - alabaster=0.7.12=py_0
7 | - anyio=3.6.1=py310h2ec42d9_0
8 | - appnope=0.1.3=pyhd8ed1ab_0
9 | - argon2-cffi=21.3.0=pyhd8ed1ab_0
10 | - argon2-cffi-bindings=21.2.0=py310h1961e1f_2
11 | - asttokens=2.0.5=pyhd8ed1ab_0
12 | - attrs=21.4.0=pyhd8ed1ab_0
13 | - babel=2.10.3=pyhd8ed1ab_0
14 | - backcall=0.2.0=pyh9f0ad1d_0
15 | - backports=1.0=py_2
16 | - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0
17 | - beautifulsoup4=4.11.1=pyha770c72_0
18 | - bleach=5.0.1=pyhd8ed1ab_0
19 | - brotli=1.0.9=h5eb16cf_7
20 | - brotli-bin=1.0.9=h5eb16cf_7
21 | - brotlipy=0.7.0=py310h1961e1f_1004
22 | - bzip2=1.0.8=h0d85af4_4
23 | - c-ares=1.18.1=h0d85af4_0
24 | - ca-certificates=2022.6.15=h033912b_0
25 | - cached-property=1.5.2=hd8ed1ab_1
26 | - cached_property=1.5.2=pyha770c72_1
27 | - certifi=2022.6.15=py310h2ec42d9_0
28 | - cffi=1.15.1=py310h96bbf6e_0
29 | - charset-normalizer=2.1.0=pyhd8ed1ab_0
30 | - click=8.1.3=py310h2ec42d9_0
31 | - click-completion=0.5.2=py310h2ec42d9_4
32 | - click-log=0.3.2=pyh9f0ad1d_0
33 | - colorama=0.4.5=pyhd8ed1ab_0
34 | - cryptography=37.0.4=py310h52c3658_0
35 | - cycler=0.11.0=pyhd8ed1ab_0
36 | - dataclasses=0.8=pyhc8e2a94_3
37 | - debugpy=1.6.0=py310h9d931ec_0
38 | - decorator=5.1.1=pyhd8ed1ab_0
39 | - defusedxml=0.7.1=pyhd8ed1ab_0
40 | - docutils=0.17.1=py310h2ec42d9_2
41 | - entrypoints=0.4=pyhd8ed1ab_0
42 | - executing=0.9.1=pyhd8ed1ab_0
43 | - flit-core=3.7.1=pyhd8ed1ab_0
44 | - fonttools=4.34.4=py310h6c45266_0
45 | - freetype=2.10.4=h4cff582_1
46 | - giflib=5.2.1=hbcb3906_2
47 | - gitdb=4.0.9=pyhd8ed1ab_0
48 | - gitpython=3.1.27=pyhd8ed1ab_0
49 | - greenlet=1.1.2=py310h9d931ec_2
50 | - h5py=3.7.0=nompi_py310haa22bcd_100
51 | - hdf5=1.12.1=nompi_ha60fbc9_104
52 | - icu=70.1=h96cf925_0
53 | - idna=3.3=pyhd8ed1ab_0
54 | - imagesize=1.4.1=pyhd8ed1ab_0
55 | - importlib-metadata=4.11.4=py310h2ec42d9_0
56 | - importlib_resources=5.9.0=pyhd8ed1ab_0
57 | - ipykernel=6.15.1=pyh736e0ef_0
58 | - ipython=8.4.0=py310h2ec42d9_0
59 | - ipython_genutils=0.2.0=py_1
60 | - ipywidgets=7.7.1=pyhd8ed1ab_0
61 | - jedi=0.18.1=py310h2ec42d9_1
62 | - jinja2=3.1.2=pyhd8ed1ab_1
63 | - joblib=1.1.0=pyhd8ed1ab_0
64 | - jpeg=9e=hac89ed1_2
65 | - jsonschema=3.2.0=pyhd8ed1ab_3
66 | - jupyter-book=0.13.0=pyhd8ed1ab_0
67 | - jupyter-cache=0.4.3=pyhd8ed1ab_0
68 | - jupyter-server-mathjax=0.2.6=pyhc268e32_0
69 | - jupyter-sphinx=0.3.2=pyhd8ed1ab_1
70 | - jupyter_client=7.3.4=pyhd8ed1ab_0
71 | - jupyter_core=4.11.1=py310h2ec42d9_0
72 | - jupyter_server=1.18.1=pyhd8ed1ab_0
73 | - jupyterlab_pygments=0.2.2=pyhd8ed1ab_0
74 | - jupyterlab_widgets=1.1.1=pyhd8ed1ab_0
75 | - kiwisolver=1.4.4=py310habb735a_0
76 | - krb5=1.19.3=hb49756b_0
77 | - latexcodec=2.0.1=pyh9f0ad1d_0
78 | - lcms2=2.12=h577c468_0
79 | - lerc=4.0.0=hb486fe8_0
80 | - libblas=3.9.0=15_osx64_openblas
81 | - libbrotlicommon=1.0.9=h5eb16cf_7
82 | - libbrotlidec=1.0.9=h5eb16cf_7
83 | - libbrotlienc=1.0.9=h5eb16cf_7
84 | - libcblas=3.9.0=15_osx64_openblas
85 | - libcurl=7.83.1=h372c54d_0
86 | - libcxx=14.0.6=hce7ea42_0
87 | - libdeflate=1.12=hac89ed1_0
88 | - libedit=3.1.20191231=h0678c8f_2
89 | - libev=4.33=haf1e3a3_1
90 | - libffi=3.4.2=h0d85af4_5
91 | - libgfortran=5.0.0=9_3_0_h6c81a4c_23
92 | - libgfortran5=9.3.0=h6c81a4c_23
93 | - libiconv=1.16=haf1e3a3_0
94 | - liblapack=3.9.0=15_osx64_openblas
95 | - libnghttp2=1.47.0=h942079c_0
96 | - libopenblas=0.3.20=openmp_h4e9756f_1
97 | - libpng=1.6.37=h5a3d3bf_3
98 | - libsodium=1.0.18=hbcb3906_1
99 | - libssh2=1.10.0=h52ee1ee_2
100 | - libtiff=4.4.0=h4aaeabe_2
101 | - libwebp=1.2.3=hf64df63_1
102 | - libwebp-base=1.2.3=hac89ed1_2
103 | - libxcb=1.13=h0d85af4_1004
104 | - libxml2=2.9.14=h08a9926_3
105 | - libxslt=1.1.35=heaa0ce8_0
106 | - libzlib=1.2.12=hfe4f2af_2
107 | - linkify-it-py=1.0.3=pyhd8ed1ab_0
108 | - llvm-openmp=14.0.4=ha654fa7_0
109 | - lxml=4.9.1=py310h6c45266_0
110 | - lz4-c=1.9.3=he49afe7_1
111 | - markdown-it-py=1.1.0=pyhd8ed1ab_0
112 | - markupsafe=2.1.1=py310h1961e1f_1
113 | - matplotlib-base=3.5.2=py310h4510841_1
114 | - matplotlib-inline=0.1.3=pyhd8ed1ab_0
115 | - mdit-py-plugins=0.2.8=pyhd8ed1ab_0
116 | - mistune=0.8.4=py310he24745e_1005
117 | - munkres=1.1.4=pyh9f0ad1d_0
118 | - myst-nb=0.13.2=pyhd8ed1ab_0
119 | - myst-parser=0.15.2=pyhd8ed1ab_0
120 | - nbclient=0.5.13=pyhd8ed1ab_0
121 | - nbconvert=6.5.0=pyhd8ed1ab_0
122 | - nbconvert-core=6.5.0=pyhd8ed1ab_0
123 | - nbconvert-pandoc=6.5.0=pyhd8ed1ab_0
124 | - nbdime=3.1.1=pyhd8ed1ab_0
125 | - nbformat=5.4.0=pyhd8ed1ab_0
126 | - ncurses=6.3=h96cf925_1
127 | - nest-asyncio=1.5.5=pyhd8ed1ab_0
128 | - nibabel=4.0.1=pyhd8ed1ab_0
129 | - nilearn=0.9.1=pyhd8ed1ab_0
130 | - notebook=6.4.12=pyha770c72_0
131 | - numpy=1.23.1=py310ha3f357c_0
132 | - openjpeg=2.4.0=h6e7aa92_1
133 | - openssl=1.1.1q=hfe4f2af_0
134 | - packaging=21.3=pyhd8ed1ab_0
135 | - pandas=1.4.3=py310h3099161_0
136 | - pandoc=2.18=h694c41f_0
137 | - pandocfilters=1.5.0=pyhd8ed1ab_0
138 | - parso=0.8.3=pyhd8ed1ab_0
139 | - patsy=0.5.2=pyhd8ed1ab_0
140 | - pexpect=4.8.0=pyh9f0ad1d_2
141 | - pickleshare=0.7.5=py_1003
142 | - pillow=9.2.0=py310hb3240ae_0
143 | - pip=22.2.1=pyhd8ed1ab_0
144 | - prometheus_client=0.14.1=pyhd8ed1ab_0
145 | - prompt-toolkit=3.0.30=pyha770c72_0
146 | - psutil=5.9.1=py310h6c45266_0
147 | - pthread-stubs=0.4=hc929b4f_1001
148 | - ptyprocess=0.7.0=pyhd3deb0d_0
149 | - pure_eval=0.2.2=pyhd8ed1ab_0
150 | - pybtex=0.24.0=pyhd8ed1ab_2
151 | - pybtex-docutils=1.0.2=py310h2ec42d9_1
152 | - pycparser=2.21=pyhd8ed1ab_0
153 | - pydata-sphinx-theme=0.8.1=pyhd8ed1ab_0
154 | - pydicom=2.3.0=pyh6c4a22f_0
155 | - pygments=2.12.0=pyhd8ed1ab_0
156 | - pyopenssl=22.0.0=pyhd8ed1ab_0
157 | - pyparsing=3.0.9=pyhd8ed1ab_0
158 | - pyrsistent=0.18.1=py310h1961e1f_1
159 | - pysocks=1.7.1=py310h2ec42d9_5
160 | - python=3.10.5=hdaaf3db_0_cpython
161 | - python-dateutil=2.8.2=pyhd8ed1ab_0
162 | - python-fastjsonschema=2.16.1=pyhd8ed1ab_0
163 | - python_abi=3.10=2_cp310
164 | - pytz=2022.1=pyhd8ed1ab_0
165 | - pyyaml=6.0=py310h1961e1f_4
166 | - pyzmq=23.2.0=py310h85fb675_0
167 | - readline=8.1.2=h3899abd_0
168 | - requests=2.28.1=pyhd8ed1ab_0
169 | - scikit-learn=1.1.1=py310hfc06b38_0
170 | - scipy=1.9.0=py310h70707f4_0
171 | - seaborn=0.11.2=hd8ed1ab_0
172 | - seaborn-base=0.11.2=pyhd8ed1ab_0
173 | - send2trash=1.8.0=pyhd8ed1ab_0
174 | - setuptools=63.3.0=py310h2ec42d9_0
175 | - shellingham=1.4.0=pyh44b312d_0
176 | - six=1.16.0=pyh6c4a22f_0
177 | - smmap=3.0.5=pyh44b312d_0
178 | - sniffio=1.2.0=py310h2ec42d9_3
179 | - snowballstemmer=2.2.0=pyhd8ed1ab_0
180 | - soupsieve=2.3.2.post1=pyhd8ed1ab_0
181 | - sphinx=4.5.0=pyh6c4a22f_0
182 | - sphinx-book-theme=0.3.3=pyhd8ed1ab_0
183 | - sphinx-comments=0.0.3=pyh9f0ad1d_0
184 | - sphinx-copybutton=0.5.0=pyhd8ed1ab_0
185 | - sphinx-design=0.1.0=pyhd8ed1ab_0
186 | - sphinx-external-toc=0.2.4=pyhd8ed1ab_0
187 | - sphinx-jupyterbook-latex=0.4.6=pyhd8ed1ab_0
188 | - sphinx-multitoc-numbering=0.1.3=pyhd8ed1ab_0
189 | - sphinx-thebe=0.1.2=pyhd8ed1ab_0
190 | - sphinx-togglebutton=0.3.1=pyhd8ed1ab_0
191 | - sphinxcontrib-applehelp=1.0.2=py_0
192 | - sphinxcontrib-bibtex=2.4.2=pyhd8ed1ab_0
193 | - sphinxcontrib-devhelp=1.0.2=py_0
194 | - sphinxcontrib-htmlhelp=2.0.0=pyhd8ed1ab_0
195 | - sphinxcontrib-jsmath=1.0.1=py_0
196 | - sphinxcontrib-qthelp=1.0.3=py_0
197 | - sphinxcontrib-serializinghtml=1.1.5=pyhd8ed1ab_2
198 | - sqlalchemy=1.4.39=py310h6c45266_0
199 | - sqlite=3.39.2=hd9f0692_0
200 | - stack_data=0.3.0=pyhd8ed1ab_0
201 | - statsmodels=0.13.2=py310h1bbcd0e_0
202 | - terminado=0.15.0=py310h2ec42d9_0
203 | - threadpoolctl=3.1.0=pyh8a188c0_0
204 | - tinycss2=1.1.1=pyhd8ed1ab_0
205 | - tk=8.6.12=h5dbffcc_0
206 | - tornado=6.2=py310h6c45266_0
207 | - traitlets=5.3.0=pyhd8ed1ab_0
208 | - typing_extensions=4.3.0=pyha770c72_0
209 | - tzdata=2022a=h191b570_0
210 | - uc-micro-py=1.0.1=pyhd8ed1ab_0
211 | - unicodedata2=14.0.0=py310h1961e1f_1
212 | - urllib3=1.26.11=pyhd8ed1ab_0
213 | - wcwidth=0.2.5=pyh9f0ad1d_2
214 | - webencodings=0.5.1=py_1
215 | - websocket-client=1.3.3=pyhd8ed1ab_0
216 | - wheel=0.37.1=pyhd8ed1ab_0
217 | - widgetsnbextension=3.6.1=pyha770c72_0
218 | - xorg-libxau=1.0.9=h35c211d_0
219 | - xorg-libxdmcp=1.1.3=h35c211d_0
220 | - xz=5.2.5=haf1e3a3_1
221 | - yaml=0.2.5=h0d85af4_2
222 | - zeromq=4.3.4=he49afe7_1
223 | - zipp=3.8.0=pyhd8ed1ab_0
224 | - zlib=1.2.12=hfe4f2af_2
225 | - zstd=1.5.2=ha9df2e0_2
226 | prefix: /Users/mthieu/Repos/now-i-know-my-abcd/env
227 |
--------------------------------------------------------------------------------
/now-i-know-my-abcd.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: Default
4 | SaveWorkspace: Default
5 | AlwaysSaveHistory: Default
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: Sweave
13 | LaTeX: pdfLaTeX
14 |
--------------------------------------------------------------------------------
/python/docs-examples/nilearn-plotting.py:
--------------------------------------------------------------------------------
1 | # %%
2 | # Retrieve destrieux parcellation in fsaverage5 space from nilearn
3 | from nilearn import datasets, plotting, surface
4 | import numpy as np
5 | import pandas as pd
6 | import random
7 | from matplotlib import pyplot as plt
8 | from seaborn import diverging_palette
9 | 
10 | random.seed(17)  # fixed seed so the simulated statistics are reproducible across runs
11 | 
12 | destrieux_atlas = datasets.fetch_atlas_surf_destrieux()
13 | 
14 | # The parcellation is already loaded into memory
15 | parcellation = destrieux_atlas['map_left']  # 1D array: one ROI integer code per surface vertex
16 | 
17 | # Retrieve fsaverage5 surface dataset for the plotting background. It contains
18 | # the surface template as pial and inflated version and a sulcal depth maps
19 | # which is used for shading
20 | fsaverage = datasets.fetch_surf_fsaverage()
21 | 
22 | 
23 | # %%
24 | 
25 | # Display Destrieux parcellation on fsaverage5 pial surface using nilearn
26 | plotting.plot_surf_roi(fsaverage['infl_left'], roi_map=parcellation,
27 |                        hemi='left', view='ventral',
28 |                        bg_map=fsaverage['sulc_left'], bg_on_data=True,
29 |                        darkness=.5)
30 | 
31 | # %%
32 | destrieux_df_left = pd.DataFrame({'roi_val': destrieux_atlas.map_left})  # one row per surface vertex
33 | destrieux_df_right = pd.DataFrame({'roi_val': destrieux_atlas.map_right})
34 | 
35 | destrieux_df_left['roi_label'] = [destrieux_atlas.labels[i] for i in destrieux_df_left['roi_val']]  # map ROI code -> label name
36 | destrieux_df_right['roi_label'] = [destrieux_atlas.labels[i] for i in destrieux_df_right['roi_val']]
37 | 
38 | sim_stats = [random.uniform(-100, 100) for i in range(len(destrieux_atlas.labels))]  # one simulated value per atlas ROI
39 | 
40 | destrieux_df_left['sim_stat'] = [sim_stats[i] for i in destrieux_df_left['roi_val']]  # broadcast ROI-wise stats to every vertex
41 | destrieux_df_right['sim_stat'] = [sim_stats[i] for i in destrieux_df_right['roi_val']]
42 | 
43 | 
44 | # %%
45 | def plot_my_surf_stat_map(hemi, view, axes = None):
46 | if hemi == 'left':
47 | this_atlas_df = destrieux_df_left
48 | else:
49 | this_atlas_df = destrieux_df_right
50 | plotting.plot_surf_stat_map(fsaverage['infl_' + hemi],
51 | stat_map=np.array(this_atlas_df['sim_stat'], dtype = np.int32),
52 | hemi=hemi, view=view,
53 | cmap = diverging_palette(194, 327, s = 50, l = 64, as_cmap = True),
54 | bg_map=fsaverage['sulc_left'], bg_on_data=True,
55 | darkness=.5,
56 | axes = axes)
57 | 
58 | # %%
59 | plot_my_surf_stat_map('left', 'lateral')  # render the left-lateral snapshot used in docs/visualization.md
60 | plt.savefig('../../docs/figs/py_nilearn_leftlateral.png')
61 | 
62 | plot_my_surf_stat_map('right', 'lateral')  # right-lateral counterpart figure
63 | plt.savefig('../../docs/figs/py_nilearn_rightlateral.png')
64 | 
65 | 
66 | 
--------------------------------------------------------------------------------
/python/docs-examples/scatterplot-matrix.py:
--------------------------------------------------------------------------------
1 | # %%
2 | from operator import concat
3 | import pandas as pd
4 | import seaborn as sns
5 | import random
6 | from matplotlib.pyplot import savefig
7 | 
8 | random.seed(17)  # fixed seed so the simulated data are reproducible across runs
9 | 
10 | f = open("../../data/abcd_smri_colnames.txt", "r")
11 | abcd_smri_colnames = f.readlines()
12 | abcd_smri_colnames = [colname.strip() for colname in abcd_smri_colnames]  # drop trailing newlines from each column name
13 | 
14 | data = pd.DataFrame({'src_subject_id': range(1000)})  # 1000 simulated subjects
15 | 
16 | # Python slice stop is exclusive, so [0:5] keeps only the first 5 column names
17 | for col in abcd_smri_colnames[0:5]:
18 |     data[col] = [random.gauss(0, 1) for i in range(1000)]  # standard-normal draws per column
19 | 
20 | data = data.set_index('src_subject_id')  # move the ID into the index so it is not treated as a data column
21 | 
22 | sns.pairplot(data)
23 | 
24 | savefig('../../docs/figs/py_seaborn_pairs.png')  # figure embedded in docs/visualization.md
25 | 
--------------------------------------------------------------------------------
/renv.lock:
--------------------------------------------------------------------------------
1 | {
2 | "R": {
3 | "Version": "4.2.1",
4 | "Repositories": [
5 | {
6 | "Name": "CRAN",
7 | "URL": "https://cran.rstudio.com"
8 | }
9 | ]
10 | },
11 | "Packages": {
12 | "DBI": {
13 | "Package": "DBI",
14 | "Version": "1.1.3",
15 | "Source": "Repository",
16 | "Repository": "CRAN",
17 | "Hash": "b2866e62bab9378c3cc9476a1954226b",
18 | "Requirements": []
19 | },
20 | "GGally": {
21 | "Package": "GGally",
22 | "Version": "2.1.2",
23 | "Source": "Repository",
24 | "Repository": "CRAN",
25 | "Hash": "022f78c8698724b326f1838b1a98cafa",
26 | "Requirements": [
27 | "RColorBrewer",
28 | "dplyr",
29 | "forcats",
30 | "ggplot2",
31 | "gtable",
32 | "lifecycle",
33 | "plyr",
34 | "progress",
35 | "reshape",
36 | "rlang",
37 | "scales",
38 | "tidyr"
39 | ]
40 | },
41 | "MASS": {
42 | "Package": "MASS",
43 | "Version": "7.3-57",
44 | "Source": "Repository",
45 | "Repository": "CRAN",
46 | "Hash": "71476c1d88d1ebdf31580e5a257d5d31",
47 | "Requirements": []
48 | },
49 | "Matrix": {
50 | "Package": "Matrix",
51 | "Version": "1.4-1",
52 | "Source": "Repository",
53 | "Repository": "CRAN",
54 | "Hash": "699c47c606293bdfbc9fd78a93c9c8fe",
55 | "Requirements": [
56 | "lattice"
57 | ]
58 | },
59 | "R6": {
60 | "Package": "R6",
61 | "Version": "2.5.1",
62 | "Source": "Repository",
63 | "Repository": "CRAN",
64 | "Hash": "470851b6d5d0ac559e9d01bb352b4021",
65 | "Requirements": []
66 | },
67 | "RColorBrewer": {
68 | "Package": "RColorBrewer",
69 | "Version": "1.1-3",
70 | "Source": "Repository",
71 | "Repository": "CRAN",
72 | "Hash": "45f0398006e83a5b10b72a90663d8d8c",
73 | "Requirements": []
74 | },
75 | "Rcpp": {
76 | "Package": "Rcpp",
77 | "Version": "1.0.9",
78 | "Source": "Repository",
79 | "Repository": "CRAN",
80 | "Hash": "e9c08b94391e9f3f97355841229124f2",
81 | "Requirements": []
82 | },
83 | "askpass": {
84 | "Package": "askpass",
85 | "Version": "1.1",
86 | "Source": "Repository",
87 | "Repository": "CRAN",
88 | "Hash": "e8a22846fff485f0be3770c2da758713",
89 | "Requirements": [
90 | "sys"
91 | ]
92 | },
93 | "assertthat": {
94 | "Package": "assertthat",
95 | "Version": "0.2.1",
96 | "Source": "Repository",
97 | "Repository": "CRAN",
98 | "Hash": "50c838a310445e954bc13f26f26a6ecf",
99 | "Requirements": []
100 | },
101 | "backports": {
102 | "Package": "backports",
103 | "Version": "1.4.1",
104 | "Source": "Repository",
105 | "Repository": "CRAN",
106 | "Hash": "c39fbec8a30d23e721980b8afb31984c",
107 | "Requirements": []
108 | },
109 | "base64enc": {
110 | "Package": "base64enc",
111 | "Version": "0.1-3",
112 | "Source": "Repository",
113 | "Repository": "CRAN",
114 | "Hash": "543776ae6848fde2f48ff3816d0628bc",
115 | "Requirements": []
116 | },
117 | "bit": {
118 | "Package": "bit",
119 | "Version": "4.0.4",
120 | "Source": "Repository",
121 | "Repository": "CRAN",
122 | "Hash": "f36715f14d94678eea9933af927bc15d",
123 | "Requirements": []
124 | },
125 | "bit64": {
126 | "Package": "bit64",
127 | "Version": "4.0.5",
128 | "Source": "Repository",
129 | "Repository": "CRAN",
130 | "Hash": "9fe98599ca456d6552421db0d6772d8f",
131 | "Requirements": [
132 | "bit"
133 | ]
134 | },
135 | "blob": {
136 | "Package": "blob",
137 | "Version": "1.2.3",
138 | "Source": "Repository",
139 | "Repository": "CRAN",
140 | "Hash": "10d231579bc9c06ab1c320618808d4ff",
141 | "Requirements": [
142 | "rlang",
143 | "vctrs"
144 | ]
145 | },
146 | "broom": {
147 | "Package": "broom",
148 | "Version": "1.0.0",
149 | "Source": "Repository",
150 | "Repository": "CRAN",
151 | "Hash": "c948329889c7b24a4201df3aef5058c2",
152 | "Requirements": [
153 | "backports",
154 | "dplyr",
155 | "ellipsis",
156 | "generics",
157 | "ggplot2",
158 | "glue",
159 | "purrr",
160 | "rlang",
161 | "stringr",
162 | "tibble",
163 | "tidyr"
164 | ]
165 | },
166 | "bslib": {
167 | "Package": "bslib",
168 | "Version": "0.4.0",
169 | "Source": "Repository",
170 | "Repository": "CRAN",
171 | "Hash": "be5ee090716ce1671be6cd5d7c34d091",
172 | "Requirements": [
173 | "cachem",
174 | "htmltools",
175 | "jquerylib",
176 | "jsonlite",
177 | "memoise",
178 | "rlang",
179 | "sass"
180 | ]
181 | },
182 | "cachem": {
183 | "Package": "cachem",
184 | "Version": "1.0.6",
185 | "Source": "Repository",
186 | "Repository": "CRAN",
187 | "Hash": "648c5b3d71e6a37e3043617489a0a0e9",
188 | "Requirements": [
189 | "fastmap",
190 | "rlang"
191 | ]
192 | },
193 | "callr": {
194 | "Package": "callr",
195 | "Version": "3.7.1",
196 | "Source": "Repository",
197 | "Repository": "CRAN",
198 | "Hash": "2fda237f24bc56508f31394beaa56877",
199 | "Requirements": [
200 | "R6",
201 | "processx"
202 | ]
203 | },
204 | "cellranger": {
205 | "Package": "cellranger",
206 | "Version": "1.1.0",
207 | "Source": "Repository",
208 | "Repository": "CRAN",
209 | "Hash": "f61dbaec772ccd2e17705c1e872e9e7c",
210 | "Requirements": [
211 | "rematch",
212 | "tibble"
213 | ]
214 | },
215 | "cli": {
216 | "Package": "cli",
217 | "Version": "3.3.0",
218 | "Source": "Repository",
219 | "Repository": "CRAN",
220 | "Hash": "23abf173c2b783dcc43379ab9bba00ee",
221 | "Requirements": [
222 | "glue"
223 | ]
224 | },
225 | "clipr": {
226 | "Package": "clipr",
227 | "Version": "0.8.0",
228 | "Source": "Repository",
229 | "Repository": "CRAN",
230 | "Hash": "3f038e5ac7f41d4ac41ce658c85e3042",
231 | "Requirements": []
232 | },
233 | "colorspace": {
234 | "Package": "colorspace",
235 | "Version": "2.0-3",
236 | "Source": "Repository",
237 | "Repository": "CRAN",
238 | "Hash": "bb4341986bc8b914f0f0acf2e4a3f2f7",
239 | "Requirements": []
240 | },
241 | "cpp11": {
242 | "Package": "cpp11",
243 | "Version": "0.4.2",
244 | "Source": "Repository",
245 | "Repository": "CRAN",
246 | "Hash": "fa53ce256cd280f468c080a58ea5ba8c",
247 | "Requirements": []
248 | },
249 | "crayon": {
250 | "Package": "crayon",
251 | "Version": "1.5.1",
252 | "Source": "Repository",
253 | "Repository": "CRAN",
254 | "Hash": "8dc45fd8a1ee067a92b85ef274e66d6a",
255 | "Requirements": []
256 | },
257 | "curl": {
258 | "Package": "curl",
259 | "Version": "4.3.2",
260 | "Source": "Repository",
261 | "Repository": "CRAN",
262 | "Hash": "022c42d49c28e95d69ca60446dbabf88",
263 | "Requirements": []
264 | },
265 | "data.table": {
266 | "Package": "data.table",
267 | "Version": "1.14.2",
268 | "Source": "Repository",
269 | "Repository": "CRAN",
270 | "Hash": "36b67b5adf57b292923f5659f5f0c853",
271 | "Requirements": []
272 | },
273 | "dbplyr": {
274 | "Package": "dbplyr",
275 | "Version": "2.2.1",
276 | "Source": "Repository",
277 | "Repository": "CRAN",
278 | "Hash": "f6c7eb9617e4d2a86bb7182fff99c805",
279 | "Requirements": [
280 | "DBI",
281 | "R6",
282 | "assertthat",
283 | "blob",
284 | "cli",
285 | "dplyr",
286 | "glue",
287 | "lifecycle",
288 | "magrittr",
289 | "pillar",
290 | "purrr",
291 | "rlang",
292 | "tibble",
293 | "tidyselect",
294 | "vctrs",
295 | "withr"
296 | ]
297 | },
298 | "digest": {
299 | "Package": "digest",
300 | "Version": "0.6.29",
301 | "Source": "Repository",
302 | "Repository": "CRAN",
303 | "Hash": "cf6b206a045a684728c3267ef7596190",
304 | "Requirements": []
305 | },
306 | "dplyr": {
307 | "Package": "dplyr",
308 | "Version": "1.0.9",
309 | "Source": "Repository",
310 | "Repository": "CRAN",
311 | "Hash": "f0bda1627a7f5d3f9a0b5add931596ac",
312 | "Requirements": [
313 | "R6",
314 | "generics",
315 | "glue",
316 | "lifecycle",
317 | "magrittr",
318 | "pillar",
319 | "rlang",
320 | "tibble",
321 | "tidyselect",
322 | "vctrs"
323 | ]
324 | },
325 | "dtplyr": {
326 | "Package": "dtplyr",
327 | "Version": "1.2.1",
328 | "Source": "Repository",
329 | "Repository": "CRAN",
330 | "Hash": "f5d195cd5fcc0a77499d9da698ef2ea3",
331 | "Requirements": [
332 | "crayon",
333 | "data.table",
334 | "dplyr",
335 | "ellipsis",
336 | "glue",
337 | "lifecycle",
338 | "rlang",
339 | "tibble",
340 | "tidyselect",
341 | "vctrs"
342 | ]
343 | },
344 | "ellipsis": {
345 | "Package": "ellipsis",
346 | "Version": "0.3.2",
347 | "Source": "Repository",
348 | "Repository": "CRAN",
349 | "Hash": "bb0eec2fe32e88d9e2836c2f73ea2077",
350 | "Requirements": [
351 | "rlang"
352 | ]
353 | },
354 | "evaluate": {
355 | "Package": "evaluate",
356 | "Version": "0.15",
357 | "Source": "Repository",
358 | "Repository": "CRAN",
359 | "Hash": "699a7a93d08c962d9f8950b2d7a227f1",
360 | "Requirements": []
361 | },
362 | "fansi": {
363 | "Package": "fansi",
364 | "Version": "1.0.3",
365 | "Source": "Repository",
366 | "Repository": "CRAN",
367 | "Hash": "83a8afdbe71839506baa9f90eebad7ec",
368 | "Requirements": []
369 | },
370 | "farver": {
371 | "Package": "farver",
372 | "Version": "2.1.1",
373 | "Source": "Repository",
374 | "Repository": "CRAN",
375 | "Hash": "8106d78941f34855c440ddb946b8f7a5",
376 | "Requirements": []
377 | },
378 | "fastmap": {
379 | "Package": "fastmap",
380 | "Version": "1.1.0",
381 | "Source": "Repository",
382 | "Repository": "CRAN",
383 | "Hash": "77bd60a6157420d4ffa93b27cf6a58b8",
384 | "Requirements": []
385 | },
386 | "forcats": {
387 | "Package": "forcats",
388 | "Version": "0.5.1",
389 | "Source": "Repository",
390 | "Repository": "CRAN",
391 | "Hash": "81c3244cab67468aac4c60550832655d",
392 | "Requirements": [
393 | "ellipsis",
394 | "magrittr",
395 | "rlang",
396 | "tibble"
397 | ]
398 | },
399 | "fs": {
400 | "Package": "fs",
401 | "Version": "1.5.2",
402 | "Source": "Repository",
403 | "Repository": "CRAN",
404 | "Hash": "7c89603d81793f0d5486d91ab1fc6f1d",
405 | "Requirements": []
406 | },
407 | "gargle": {
408 | "Package": "gargle",
409 | "Version": "1.2.0",
410 | "Source": "Repository",
411 | "Repository": "CRAN",
412 | "Hash": "9d234e6a87a6f8181792de6dc4a00e39",
413 | "Requirements": [
414 | "cli",
415 | "fs",
416 | "glue",
417 | "httr",
418 | "jsonlite",
419 | "rappdirs",
420 | "rlang",
421 | "rstudioapi",
422 | "withr"
423 | ]
424 | },
425 | "generics": {
426 | "Package": "generics",
427 | "Version": "0.1.3",
428 | "Source": "Repository",
429 | "Repository": "CRAN",
430 | "Hash": "15e9634c0fcd294799e9b2e929ed1b86",
431 | "Requirements": []
432 | },
433 | "ggplot2": {
434 | "Package": "ggplot2",
435 | "Version": "3.3.6",
436 | "Source": "Repository",
437 | "Repository": "CRAN",
438 | "Hash": "0fb26d0674c82705c6b701d1a61e02ea",
439 | "Requirements": [
440 | "MASS",
441 | "digest",
442 | "glue",
443 | "gtable",
444 | "isoband",
445 | "mgcv",
446 | "rlang",
447 | "scales",
448 | "tibble",
449 | "withr"
450 | ]
451 | },
452 | "glue": {
453 | "Package": "glue",
454 | "Version": "1.6.2",
455 | "Source": "Repository",
456 | "Repository": "CRAN",
457 | "Hash": "4f2596dfb05dac67b9dc558e5c6fba2e",
458 | "Requirements": []
459 | },
460 | "googledrive": {
461 | "Package": "googledrive",
462 | "Version": "2.0.0",
463 | "Source": "Repository",
464 | "Repository": "CRAN",
465 | "Hash": "c3a25adbbfbb03f12e6f88c5fb1f3024",
466 | "Requirements": [
467 | "cli",
468 | "gargle",
469 | "glue",
470 | "httr",
471 | "jsonlite",
472 | "lifecycle",
473 | "magrittr",
474 | "pillar",
475 | "purrr",
476 | "rlang",
477 | "tibble",
478 | "uuid",
479 | "vctrs",
480 | "withr"
481 | ]
482 | },
483 | "googlesheets4": {
484 | "Package": "googlesheets4",
485 | "Version": "1.0.0",
486 | "Source": "Repository",
487 | "Repository": "CRAN",
488 | "Hash": "9a6564184dc4a81daea4f1d7ce357c6a",
489 | "Requirements": [
490 | "cellranger",
491 | "cli",
492 | "curl",
493 | "gargle",
494 | "glue",
495 | "googledrive",
496 | "httr",
497 | "ids",
498 | "magrittr",
499 | "purrr",
500 | "rematch2",
501 | "rlang",
502 | "tibble",
503 | "vctrs"
504 | ]
505 | },
506 | "gtable": {
507 | "Package": "gtable",
508 | "Version": "0.3.0",
509 | "Source": "Repository",
510 | "Repository": "CRAN",
511 | "Hash": "ac5c6baf7822ce8732b343f14c072c4d",
512 | "Requirements": []
513 | },
514 | "haven": {
515 | "Package": "haven",
516 | "Version": "2.5.0",
517 | "Source": "Repository",
518 | "Repository": "CRAN",
519 | "Hash": "e3058e4ac77f4fa686f68a1838d5b715",
520 | "Requirements": [
521 | "cli",
522 | "cpp11",
523 | "forcats",
524 | "hms",
525 | "lifecycle",
526 | "readr",
527 | "rlang",
528 | "tibble",
529 | "tidyselect",
530 | "vctrs"
531 | ]
532 | },
533 | "here": {
534 | "Package": "here",
535 | "Version": "1.0.1",
536 | "Source": "Repository",
537 | "Repository": "CRAN",
538 | "Hash": "24b224366f9c2e7534d2344d10d59211",
539 | "Requirements": [
540 | "rprojroot"
541 | ]
542 | },
543 | "highr": {
544 | "Package": "highr",
545 | "Version": "0.9",
546 | "Source": "Repository",
547 | "Repository": "CRAN",
548 | "Hash": "8eb36c8125038e648e5d111c0d7b2ed4",
549 | "Requirements": [
550 | "xfun"
551 | ]
552 | },
553 | "hms": {
554 | "Package": "hms",
555 | "Version": "1.1.1",
556 | "Source": "Repository",
557 | "Repository": "CRAN",
558 | "Hash": "5b8a2dd0fdbe2ab4f6081e6c7be6dfca",
559 | "Requirements": [
560 | "ellipsis",
561 | "lifecycle",
562 | "pkgconfig",
563 | "rlang",
564 | "vctrs"
565 | ]
566 | },
567 | "htmltools": {
568 | "Package": "htmltools",
569 | "Version": "0.5.3",
570 | "Source": "Repository",
571 | "Repository": "CRAN",
572 | "Hash": "6496090a9e00f8354b811d1a2d47b566",
573 | "Requirements": [
574 | "base64enc",
575 | "digest",
576 | "fastmap",
577 | "rlang"
578 | ]
579 | },
580 | "httr": {
581 | "Package": "httr",
582 | "Version": "1.4.3",
583 | "Source": "Repository",
584 | "Repository": "CRAN",
585 | "Hash": "88d1b310583777edf01ccd1216fb0b2b",
586 | "Requirements": [
587 | "R6",
588 | "curl",
589 | "jsonlite",
590 | "mime",
591 | "openssl"
592 | ]
593 | },
594 | "ids": {
595 | "Package": "ids",
596 | "Version": "1.0.1",
597 | "Source": "Repository",
598 | "Repository": "CRAN",
599 | "Hash": "99df65cfef20e525ed38c3d2577f7190",
600 | "Requirements": [
601 | "openssl",
602 | "uuid"
603 | ]
604 | },
605 | "isoband": {
606 | "Package": "isoband",
607 | "Version": "0.2.5",
608 | "Source": "Repository",
609 | "Repository": "CRAN",
610 | "Hash": "7ab57a6de7f48a8dc84910d1eca42883",
611 | "Requirements": []
612 | },
613 | "jquerylib": {
614 | "Package": "jquerylib",
615 | "Version": "0.1.4",
616 | "Source": "Repository",
617 | "Repository": "CRAN",
618 | "Hash": "5aab57a3bd297eee1c1d862735972182",
619 | "Requirements": [
620 | "htmltools"
621 | ]
622 | },
623 | "jsonlite": {
624 | "Package": "jsonlite",
625 | "Version": "1.8.0",
626 | "Source": "Repository",
627 | "Repository": "CRAN",
628 | "Hash": "d07e729b27b372429d42d24d503613a0",
629 | "Requirements": []
630 | },
631 | "knitr": {
632 | "Package": "knitr",
633 | "Version": "1.39",
634 | "Source": "Repository",
635 | "Repository": "CRAN",
636 | "Hash": "029ab7c4badd3cf8af69016b2ba27493",
637 | "Requirements": [
638 | "evaluate",
639 | "highr",
640 | "stringr",
641 | "xfun",
642 | "yaml"
643 | ]
644 | },
645 | "labeling": {
646 | "Package": "labeling",
647 | "Version": "0.4.2",
648 | "Source": "Repository",
649 | "Repository": "CRAN",
650 | "Hash": "3d5108641f47470611a32d0bdf357a72",
651 | "Requirements": []
652 | },
653 | "lattice": {
654 | "Package": "lattice",
655 | "Version": "0.20-45",
656 | "Source": "Repository",
657 | "Repository": "CRAN",
658 | "Hash": "b64cdbb2b340437c4ee047a1f4c4377b",
659 | "Requirements": []
660 | },
661 | "lifecycle": {
662 | "Package": "lifecycle",
663 | "Version": "1.0.1",
664 | "Source": "Repository",
665 | "Repository": "CRAN",
666 | "Hash": "a6b6d352e3ed897373ab19d8395c98d0",
667 | "Requirements": [
668 | "glue",
669 | "rlang"
670 | ]
671 | },
672 | "lubridate": {
673 | "Package": "lubridate",
674 | "Version": "1.8.0",
675 | "Source": "Repository",
676 | "Repository": "CRAN",
677 | "Hash": "2ff5eedb6ee38fb1b81205c73be1be5a",
678 | "Requirements": [
679 | "cpp11",
680 | "generics"
681 | ]
682 | },
683 | "magrittr": {
684 | "Package": "magrittr",
685 | "Version": "2.0.3",
686 | "Source": "Repository",
687 | "Repository": "CRAN",
688 | "Hash": "7ce2733a9826b3aeb1775d56fd305472",
689 | "Requirements": []
690 | },
691 | "memoise": {
692 | "Package": "memoise",
693 | "Version": "2.0.1",
694 | "Source": "Repository",
695 | "Repository": "CRAN",
696 | "Hash": "e2817ccf4a065c5d9d7f2cfbe7c1d78c",
697 | "Requirements": [
698 | "cachem",
699 | "rlang"
700 | ]
701 | },
702 | "mgcv": {
703 | "Package": "mgcv",
704 | "Version": "1.8-40",
705 | "Source": "Repository",
706 | "Repository": "CRAN",
707 | "Hash": "c6b2fdb18cf68ab613bd564363e1ba0d",
708 | "Requirements": [
709 | "Matrix",
710 | "nlme"
711 | ]
712 | },
713 | "mime": {
714 | "Package": "mime",
715 | "Version": "0.12",
716 | "Source": "Repository",
717 | "Repository": "CRAN",
718 | "Hash": "18e9c28c1d3ca1560ce30658b22ce104",
719 | "Requirements": []
720 | },
721 | "modelr": {
722 | "Package": "modelr",
723 | "Version": "0.1.8",
724 | "Source": "Repository",
725 | "Repository": "CRAN",
726 | "Hash": "9fd59716311ee82cba83dc2826fc5577",
727 | "Requirements": [
728 | "broom",
729 | "magrittr",
730 | "purrr",
731 | "rlang",
732 | "tibble",
733 | "tidyr",
734 | "tidyselect",
735 | "vctrs"
736 | ]
737 | },
738 | "munsell": {
739 | "Package": "munsell",
740 | "Version": "0.5.0",
741 | "Source": "Repository",
742 | "Repository": "CRAN",
743 | "Hash": "6dfe8bf774944bd5595785e3229d8771",
744 | "Requirements": [
745 | "colorspace"
746 | ]
747 | },
748 | "nlme": {
749 | "Package": "nlme",
750 | "Version": "3.1-157",
751 | "Source": "Repository",
752 | "Repository": "CRAN",
753 | "Hash": "dbca60742be0c9eddc5205e5c7ca1f44",
754 | "Requirements": [
755 | "lattice"
756 | ]
757 | },
758 | "openssl": {
759 | "Package": "openssl",
760 | "Version": "2.0.2",
761 | "Source": "Repository",
762 | "Repository": "CRAN",
763 | "Hash": "6d3bef2e305f55c705c674653c7d7d3d",
764 | "Requirements": [
765 | "askpass"
766 | ]
767 | },
768 | "pillar": {
769 | "Package": "pillar",
770 | "Version": "1.8.0",
771 | "Source": "Repository",
772 | "Repository": "CRAN",
773 | "Hash": "f95cf85794546c4ac2b9a6ca42e671ff",
774 | "Requirements": [
775 | "cli",
776 | "fansi",
777 | "glue",
778 | "lifecycle",
779 | "rlang",
780 | "utf8",
781 | "vctrs"
782 | ]
783 | },
784 | "pkgconfig": {
785 | "Package": "pkgconfig",
786 | "Version": "2.0.3",
787 | "Source": "Repository",
788 | "Repository": "CRAN",
789 | "Hash": "01f28d4278f15c76cddbea05899c5d6f",
790 | "Requirements": []
791 | },
792 | "plyr": {
793 | "Package": "plyr",
794 | "Version": "1.8.7",
795 | "Source": "Repository",
796 | "Repository": "CRAN",
797 | "Hash": "9c17c6ee41639ebdc1d7266546d3b627",
798 | "Requirements": [
799 | "Rcpp"
800 | ]
801 | },
802 | "prettyunits": {
803 | "Package": "prettyunits",
804 | "Version": "1.1.1",
805 | "Source": "Repository",
806 | "Repository": "CRAN",
807 | "Hash": "95ef9167b75dde9d2ccc3c7528393e7e",
808 | "Requirements": []
809 | },
810 | "processx": {
811 | "Package": "processx",
812 | "Version": "3.7.0",
813 | "Source": "Repository",
814 | "Repository": "CRAN",
815 | "Hash": "f91df0f5f31ffdf88bc0b624f5ebab0f",
816 | "Requirements": [
817 | "R6",
818 | "ps"
819 | ]
820 | },
821 | "progress": {
822 | "Package": "progress",
823 | "Version": "1.2.2",
824 | "Source": "Repository",
825 | "Repository": "CRAN",
826 | "Hash": "14dc9f7a3c91ebb14ec5bb9208a07061",
827 | "Requirements": [
828 | "R6",
829 | "crayon",
830 | "hms",
831 | "prettyunits"
832 | ]
833 | },
834 | "ps": {
835 | "Package": "ps",
836 | "Version": "1.7.1",
837 | "Source": "Repository",
838 | "Repository": "CRAN",
839 | "Hash": "8b93531308c01ad0e56d9eadcc0c4fcd",
840 | "Requirements": []
841 | },
842 | "purrr": {
843 | "Package": "purrr",
844 | "Version": "0.3.4",
845 | "Source": "Repository",
846 | "Repository": "CRAN",
847 | "Hash": "97def703420c8ab10d8f0e6c72101e02",
848 | "Requirements": [
849 | "magrittr",
850 | "rlang"
851 | ]
852 | },
853 | "rappdirs": {
854 | "Package": "rappdirs",
855 | "Version": "0.3.3",
856 | "Source": "Repository",
857 | "Repository": "CRAN",
858 | "Hash": "5e3c5dc0b071b21fa128676560dbe94d",
859 | "Requirements": []
860 | },
861 | "readr": {
862 | "Package": "readr",
863 | "Version": "2.1.2",
864 | "Source": "Repository",
865 | "Repository": "CRAN",
866 | "Hash": "9c59de1357dc209868b5feb5c9f0fe2f",
867 | "Requirements": [
868 | "R6",
869 | "cli",
870 | "clipr",
871 | "cpp11",
872 | "crayon",
873 | "hms",
874 | "lifecycle",
875 | "rlang",
876 | "tibble",
877 | "tzdb",
878 | "vroom"
879 | ]
880 | },
881 | "readxl": {
882 | "Package": "readxl",
883 | "Version": "1.4.0",
884 | "Source": "Repository",
885 | "Repository": "CRAN",
886 | "Hash": "170c35f745563bb307e963bde0197e4f",
887 | "Requirements": [
888 | "cellranger",
889 | "cpp11",
890 | "progress",
891 | "tibble"
892 | ]
893 | },
894 | "rematch": {
895 | "Package": "rematch",
896 | "Version": "1.0.1",
897 | "Source": "Repository",
898 | "Repository": "CRAN",
899 | "Hash": "c66b930d20bb6d858cd18e1cebcfae5c",
900 | "Requirements": []
901 | },
902 | "rematch2": {
903 | "Package": "rematch2",
904 | "Version": "2.1.2",
905 | "Source": "Repository",
906 | "Repository": "CRAN",
907 | "Hash": "76c9e04c712a05848ae7a23d2f170a40",
908 | "Requirements": [
909 | "tibble"
910 | ]
911 | },
912 | "renv": {
913 | "Package": "renv",
914 | "Version": "0.15.5",
915 | "Source": "Repository",
916 | "Repository": "CRAN",
917 | "Hash": "6a38294e7d12f5d8e656b08c5bd8ae34",
918 | "Requirements": []
919 | },
920 | "reprex": {
921 | "Package": "reprex",
922 | "Version": "2.0.1",
923 | "Source": "Repository",
924 | "Repository": "CRAN",
925 | "Hash": "911d101becedc0fde495bd910984bdc8",
926 | "Requirements": [
927 | "callr",
928 | "cli",
929 | "clipr",
930 | "fs",
931 | "glue",
932 | "knitr",
933 | "rlang",
934 | "rmarkdown",
935 | "rstudioapi",
936 | "withr"
937 | ]
938 | },
939 | "reshape": {
940 | "Package": "reshape",
941 | "Version": "0.8.9",
942 | "Source": "Repository",
943 | "Repository": "CRAN",
944 | "Hash": "603d56041d7d4fa3ceb1864b3f6ee6b1",
945 | "Requirements": [
946 | "plyr"
947 | ]
948 | },
949 | "rlang": {
950 | "Package": "rlang",
951 | "Version": "1.0.4",
952 | "Source": "Repository",
953 | "Repository": "CRAN",
954 | "Hash": "6539dd8c651e67e3b55b5ffea106362b",
955 | "Requirements": []
956 | },
957 | "rmarkdown": {
958 | "Package": "rmarkdown",
959 | "Version": "2.14",
960 | "Source": "Repository",
961 | "Repository": "CRAN",
962 | "Hash": "31b60a882fabfabf6785b8599ffeb8ba",
963 | "Requirements": [
964 | "bslib",
965 | "evaluate",
966 | "htmltools",
967 | "jquerylib",
968 | "jsonlite",
969 | "knitr",
970 | "stringr",
971 | "tinytex",
972 | "xfun",
973 | "yaml"
974 | ]
975 | },
976 | "rprojroot": {
977 | "Package": "rprojroot",
978 | "Version": "2.0.3",
979 | "Source": "Repository",
980 | "Repository": "CRAN",
981 | "Hash": "1de7ab598047a87bba48434ba35d497d",
982 | "Requirements": []
983 | },
984 | "rstudioapi": {
985 | "Package": "rstudioapi",
986 | "Version": "0.13",
987 | "Source": "Repository",
988 | "Repository": "CRAN",
989 | "Hash": "06c85365a03fdaf699966cc1d3cf53ea",
990 | "Requirements": []
991 | },
992 | "rvest": {
993 | "Package": "rvest",
994 | "Version": "1.0.2",
995 | "Source": "Repository",
996 | "Repository": "CRAN",
997 | "Hash": "bb099886deffecd6f9b298b7d4492943",
998 | "Requirements": [
999 | "httr",
1000 | "lifecycle",
1001 | "magrittr",
1002 | "rlang",
1003 | "selectr",
1004 | "tibble",
1005 | "xml2"
1006 | ]
1007 | },
1008 | "sass": {
1009 | "Package": "sass",
1010 | "Version": "0.4.2",
1011 | "Source": "Repository",
1012 | "Repository": "CRAN",
1013 | "Hash": "1b191143d7d3444d504277843f3a95fe",
1014 | "Requirements": [
1015 | "R6",
1016 | "fs",
1017 | "htmltools",
1018 | "rappdirs",
1019 | "rlang"
1020 | ]
1021 | },
1022 | "scales": {
1023 | "Package": "scales",
1024 | "Version": "1.2.0",
1025 | "Source": "Repository",
1026 | "Repository": "CRAN",
1027 | "Hash": "6e8750cdd13477aa440d453da93d5cac",
1028 | "Requirements": [
1029 | "R6",
1030 | "RColorBrewer",
1031 | "farver",
1032 | "labeling",
1033 | "lifecycle",
1034 | "munsell",
1035 | "rlang",
1036 | "viridisLite"
1037 | ]
1038 | },
1039 | "selectr": {
1040 | "Package": "selectr",
1041 | "Version": "0.4-2",
1042 | "Source": "Repository",
1043 | "Repository": "CRAN",
1044 | "Hash": "3838071b66e0c566d55cc26bd6e27bf4",
1045 | "Requirements": [
1046 | "R6",
1047 | "stringr"
1048 | ]
1049 | },
1050 | "stringi": {
1051 | "Package": "stringi",
1052 | "Version": "1.7.8",
1053 | "Source": "Repository",
1054 | "Repository": "CRAN",
1055 | "Hash": "a68b980681bcbc84c7a67003fa796bfb",
1056 | "Requirements": []
1057 | },
1058 | "stringr": {
1059 | "Package": "stringr",
1060 | "Version": "1.4.0",
1061 | "Source": "Repository",
1062 | "Repository": "CRAN",
1063 | "Hash": "0759e6b6c0957edb1311028a49a35e76",
1064 | "Requirements": [
1065 | "glue",
1066 | "magrittr",
1067 | "stringi"
1068 | ]
1069 | },
1070 | "sys": {
1071 | "Package": "sys",
1072 | "Version": "3.4",
1073 | "Source": "Repository",
1074 | "Repository": "CRAN",
1075 | "Hash": "b227d13e29222b4574486cfcbde077fa",
1076 | "Requirements": []
1077 | },
1078 | "tibble": {
1079 | "Package": "tibble",
1080 | "Version": "3.1.8",
1081 | "Source": "Repository",
1082 | "Repository": "CRAN",
1083 | "Hash": "56b6934ef0f8c68225949a8672fe1a8f",
1084 | "Requirements": [
1085 | "fansi",
1086 | "lifecycle",
1087 | "magrittr",
1088 | "pillar",
1089 | "pkgconfig",
1090 | "rlang",
1091 | "vctrs"
1092 | ]
1093 | },
1094 | "tidyr": {
1095 | "Package": "tidyr",
1096 | "Version": "1.2.0",
1097 | "Source": "Repository",
1098 | "Repository": "CRAN",
1099 | "Hash": "d8b95b7fee945d7da6888cf7eb71a49c",
1100 | "Requirements": [
1101 | "cpp11",
1102 | "dplyr",
1103 | "ellipsis",
1104 | "glue",
1105 | "lifecycle",
1106 | "magrittr",
1107 | "purrr",
1108 | "rlang",
1109 | "tibble",
1110 | "tidyselect",
1111 | "vctrs"
1112 | ]
1113 | },
1114 | "tidyselect": {
1115 | "Package": "tidyselect",
1116 | "Version": "1.1.2",
1117 | "Source": "Repository",
1118 | "Repository": "CRAN",
1119 | "Hash": "17f6da8cfd7002760a859915ce7eef8f",
1120 | "Requirements": [
1121 | "ellipsis",
1122 | "glue",
1123 | "purrr",
1124 | "rlang",
1125 | "vctrs"
1126 | ]
1127 | },
1128 | "tidyverse": {
1129 | "Package": "tidyverse",
1130 | "Version": "1.3.2",
1131 | "Source": "Repository",
1132 | "Repository": "CRAN",
1133 | "Hash": "972389aea7fa1a34739054a810d0c6f6",
1134 | "Requirements": [
1135 | "broom",
1136 | "cli",
1137 | "crayon",
1138 | "dbplyr",
1139 | "dplyr",
1140 | "dtplyr",
1141 | "forcats",
1142 | "ggplot2",
1143 | "googledrive",
1144 | "googlesheets4",
1145 | "haven",
1146 | "hms",
1147 | "httr",
1148 | "jsonlite",
1149 | "lubridate",
1150 | "magrittr",
1151 | "modelr",
1152 | "pillar",
1153 | "purrr",
1154 | "readr",
1155 | "readxl",
1156 | "reprex",
1157 | "rlang",
1158 | "rstudioapi",
1159 | "rvest",
1160 | "stringr",
1161 | "tibble",
1162 | "tidyr",
1163 | "xml2"
1164 | ]
1165 | },
1166 | "tinytex": {
1167 | "Package": "tinytex",
1168 | "Version": "0.40",
1169 | "Source": "Repository",
1170 | "Repository": "CRAN",
1171 | "Hash": "e7b654da5e77bc4e5435a966329cd25f",
1172 | "Requirements": [
1173 | "xfun"
1174 | ]
1175 | },
1176 | "tzdb": {
1177 | "Package": "tzdb",
1178 | "Version": "0.3.0",
1179 | "Source": "Repository",
1180 | "Repository": "CRAN",
1181 | "Hash": "b2e1cbce7c903eaf23ec05c58e59fb5e",
1182 | "Requirements": [
1183 | "cpp11"
1184 | ]
1185 | },
1186 | "utf8": {
1187 | "Package": "utf8",
1188 | "Version": "1.2.2",
1189 | "Source": "Repository",
1190 | "Repository": "CRAN",
1191 | "Hash": "c9c462b759a5cc844ae25b5942654d13",
1192 | "Requirements": []
1193 | },
1194 | "uuid": {
1195 | "Package": "uuid",
1196 | "Version": "1.1-0",
1197 | "Source": "Repository",
1198 | "Repository": "CRAN",
1199 | "Hash": "f1cb46c157d080b729159d407be83496",
1200 | "Requirements": []
1201 | },
1202 | "vctrs": {
1203 | "Package": "vctrs",
1204 | "Version": "0.4.1",
1205 | "Source": "Repository",
1206 | "Repository": "CRAN",
1207 | "Hash": "8b54f22e2a58c4f275479c92ce041a57",
1208 | "Requirements": [
1209 | "cli",
1210 | "glue",
1211 | "rlang"
1212 | ]
1213 | },
1214 | "viridisLite": {
1215 | "Package": "viridisLite",
1216 | "Version": "0.4.0",
1217 | "Source": "Repository",
1218 | "Repository": "CRAN",
1219 | "Hash": "55e157e2aa88161bdb0754218470d204",
1220 | "Requirements": []
1221 | },
1222 | "vroom": {
1223 | "Package": "vroom",
1224 | "Version": "1.5.7",
1225 | "Source": "Repository",
1226 | "Repository": "CRAN",
1227 | "Hash": "976507b5a105bc3bdf6a5a5f29e0684f",
1228 | "Requirements": [
1229 | "bit64",
1230 | "cli",
1231 | "cpp11",
1232 | "crayon",
1233 | "glue",
1234 | "hms",
1235 | "lifecycle",
1236 | "progress",
1237 | "rlang",
1238 | "tibble",
1239 | "tidyselect",
1240 | "tzdb",
1241 | "vctrs",
1242 | "withr"
1243 | ]
1244 | },
1245 | "withr": {
1246 | "Package": "withr",
1247 | "Version": "2.5.0",
1248 | "Source": "Repository",
1249 | "Repository": "CRAN",
1250 | "Hash": "c0e49a9760983e81e55cdd9be92e7182",
1251 | "Requirements": []
1252 | },
1253 | "xfun": {
1254 | "Package": "xfun",
1255 | "Version": "0.31",
1256 | "Source": "Repository",
1257 | "Repository": "CRAN",
1258 | "Hash": "a318c6f752b8dcfe9fb74d897418ab2b",
1259 | "Requirements": []
1260 | },
1261 | "xml2": {
1262 | "Package": "xml2",
1263 | "Version": "1.3.3",
1264 | "Source": "Repository",
1265 | "Repository": "CRAN",
1266 | "Hash": "40682ed6a969ea5abfd351eb67833adc",
1267 | "Requirements": []
1268 | },
1269 | "yaml": {
1270 | "Package": "yaml",
1271 | "Version": "2.3.5",
1272 | "Source": "Repository",
1273 | "Repository": "CRAN",
1274 | "Hash": "458bb38374d73bf83b1bb85e353da200",
1275 | "Requirements": []
1276 | }
1277 | }
1278 | }
1279 |
--------------------------------------------------------------------------------
/renv/activate.R:
--------------------------------------------------------------------------------
1 |
# renv bootstrap autoloader: sourced from .Rprofile at R startup to activate
# the project's renv library (downloading and installing renv if necessary).
local({

  # the requested version of renv
  version <- "0.15.5"

  # the project directory
  project <- getwd()

  # figure out whether the autoloader is enabled
  enabled <- local({

    # first, check config option
    override <- getOption("renv.config.autoloader.enabled")
    if (!is.null(override))
      return(override)

    # next, check environment variables
    # TODO: prefer using the configuration one in the future
    envvars <- c(
      "RENV_CONFIG_AUTOLOADER_ENABLED",
      "RENV_AUTOLOADER_ENABLED",
      "RENV_ACTIVATE_PROJECT"
    )

    # the first environment variable that is set wins; truthy values are
    # "true", "t" and "1" (case-insensitive)
    for (envvar in envvars) {
      envval <- Sys.getenv(envvar, unset = NA)
      if (!is.na(envval))
        return(tolower(envval) %in% c("true", "t", "1"))
    }

    # enable by default
    TRUE

  })

  if (!enabled)
    return(FALSE)

  # avoid recursion
  if (identical(getOption("renv.autoloader.running"), TRUE)) {
    warning("ignoring recursive attempt to run renv autoloader")
    return(invisible(TRUE))
  }

  # signal that we're loading renv during R startup
  options(renv.autoloader.running = TRUE)
  on.exit(options(renv.autoloader.running = NULL), add = TRUE)

  # signal that we've consented to use renv
  options(renv.consent = TRUE)

  # load the 'utils' package eagerly -- this ensures that renv shims, which
  # mask 'utils' packages, will come first on the search path
  library(utils, lib.loc = .Library)

  # unload renv if it's already been loaded
  if ("renv" %in% loadedNamespaces())
    unloadNamespace("renv")
60 |
61 | # load bootstrap tools
62 | `%||%` <- function(x, y) {
63 | if (is.environment(x) || length(x)) x else y
64 | }
65 |
66 | bootstrap <- function(version, library) {
67 |
68 | # attempt to download renv
69 | tarball <- tryCatch(renv_bootstrap_download(version), error = identity)
70 | if (inherits(tarball, "error"))
71 | stop("failed to download renv ", version)
72 |
73 | # now attempt to install
74 | status <- tryCatch(renv_bootstrap_install(version, tarball, library), error = identity)
75 | if (inherits(status, "error"))
76 | stop("failed to install renv ", version)
77 |
78 | }
79 |
  # report the value of the "renv.tests.running" option (FALSE when unset);
  # used below to decide whether test repositories should be re-used
  renv_bootstrap_tests_running <- function() {
    getOption("renv.tests.running", default = FALSE)
  }
83 |
  # resolve the package repositories used to bootstrap renv, trying in
  # order: an explicit environment-variable override, the repositories
  # recorded in the lockfile, the test repositories (when renv's tests are
  # running), and finally the session's 'repos' option plus a CRAN fallback
  renv_bootstrap_repos <- function() {

    # check for repos override
    repos <- Sys.getenv("RENV_CONFIG_REPOS_OVERRIDE", unset = NA)
    if (!is.na(repos))
      return(repos)

    # check for lockfile repositories
    repos <- tryCatch(renv_bootstrap_repos_lockfile(), error = identity)
    if (!inherits(repos, "error") && length(repos))
      return(repos)

    # if we're testing, re-use the test repositories
    if (renv_bootstrap_tests_running())
      return(getOption("renv.tests.repos"))

    # retrieve current repos
    repos <- getOption("repos")

    # ensure @CRAN@ entries are resolved
    repos[repos == "@CRAN@"] <- getOption(
      "renv.repos.cran",
      "https://cloud.r-project.org"
    )

    # add in renv.bootstrap.repos if set
    default <- c(FALLBACK = "https://cloud.r-project.org")
    extra <- getOption("renv.bootstrap.repos", default = default)
    repos <- c(repos, extra)

    # remove duplicates that might've snuck in
    dupes <- duplicated(repos) | duplicated(names(repos))
    repos[!dupes]

  }
119 |
  # read the repositories recorded in the project lockfile ("renv.lock" by
  # default, or the file named by RENV_PATHS_LOCKFILE); returns a named
  # character vector mapping repository names to URLs, or NULL when the
  # lockfile is missing, unreadable, or records no repositories
  renv_bootstrap_repos_lockfile <- function() {

    lockpath <- Sys.getenv("RENV_PATHS_LOCKFILE", unset = "renv.lock")
    if (!file.exists(lockpath))
      return(NULL)

    lockfile <- tryCatch(renv_json_read(lockpath), error = identity)
    if (inherits(lockfile, "error")) {
      warning(lockfile)
      return(NULL)
    }

    repos <- lockfile$R$Repositories
    if (length(repos) == 0)
      return(NULL)

    # extract the Name / URL fields from each repository record
    keys <- vapply(repos, `[[`, "Name", FUN.VALUE = character(1))
    vals <- vapply(repos, `[[`, "URL", FUN.VALUE = character(1))
    names(vals) <- keys

    return(vals)

  }
143 |
144 | renv_bootstrap_download <- function(version) {
145 |
146 | # if the renv version number has 4 components, assume it must
147 | # be retrieved via github
148 | nv <- numeric_version(version)
149 | components <- unclass(nv)[[1]]
150 |
151 | # if this appears to be a development version of 'renv', we'll
152 | # try to restore from github
153 | dev <- length(components) == 4L
154 |
155 | # begin collecting different methods for finding renv
156 | methods <- c(
157 | renv_bootstrap_download_tarball,
158 | if (dev)
159 | renv_bootstrap_download_github
160 | else c(
161 | renv_bootstrap_download_cran_latest,
162 | renv_bootstrap_download_cran_archive
163 | )
164 | )
165 |
166 | for (method in methods) {
167 | path <- tryCatch(method(version), error = identity)
168 | if (is.character(path) && file.exists(path))
169 | return(path)
170 | }
171 |
172 | stop("failed to download renv ", version)
173 |
174 | }
175 |
176 | renv_bootstrap_download_impl <- function(url, destfile) {
177 |
178 | mode <- "wb"
179 |
180 | # https://bugs.r-project.org/bugzilla/show_bug.cgi?id=17715
181 | fixup <-
182 | Sys.info()[["sysname"]] == "Windows" &&
183 | substring(url, 1L, 5L) == "file:"
184 |
185 | if (fixup)
186 | mode <- "w+b"
187 |
188 | utils::download.file(
189 | url = url,
190 | destfile = destfile,
191 | mode = mode,
192 | quiet = TRUE
193 | )
194 |
195 | }
196 |
  # download the requested version of renv from a repository currently
  # offering it; returns the downloaded package path, or FALSE on failure
  renv_bootstrap_download_cran_latest <- function(version) {

    # locate a repository + package type offering this version
    # (errors if none is found)
    spec <- renv_bootstrap_download_cran_latest_find(version)

    message("* Downloading renv ", version, " ... ", appendLF = FALSE)

    type <- spec$type
    repos <- spec$repos

    info <- tryCatch(
      utils::download.packages(
        pkgs = "renv",
        destdir = tempdir(),
        repos = repos,
        type = type,
        quiet = TRUE
      ),
      condition = identity
    )

    if (inherits(info, "condition")) {
      message("FAILED")
      return(FALSE)
    }

    # report success and return
    # (download.packages() returns a matrix; column 2 holds the file path)
    message("OK (downloaded ", type, ")")
    info[1, 2]

  }
227 |
  # search the declared package repositories for the requested version of
  # renv, preferring binary packages when the platform supports them;
  # returns a list(entry, type, repos) describing where it was found, or
  # stops with an error when the version is unavailable
  renv_bootstrap_download_cran_latest_find <- function(version) {

    # check whether binaries are supported on this system
    binary <-
      getOption("renv.bootstrap.binary", default = TRUE) &&
      !identical(.Platform$pkgType, "source") &&
      !identical(getOption("pkgType"), "source") &&
      Sys.info()[["sysname"]] %in% c("Darwin", "Windows")

    types <- c(if (binary) "binary", "source")

    # iterate over types + repositories
    for (type in types) {
      for (repos in renv_bootstrap_repos()) {

        # retrieve package database
        # (skip this repository if the database cannot be fetched)
        db <- tryCatch(
          as.data.frame(
            utils::available.packages(type = type, repos = repos),
            stringsAsFactors = FALSE
          ),
          error = identity
        )

        if (inherits(db, "error"))
          next

        # check for compatible entry
        entry <- db[db$Package %in% "renv" & db$Version %in% version, ]
        if (nrow(entry) == 0)
          next

        # found it; return spec to caller
        spec <- list(entry = entry, type = type, repos = repos)
        return(spec)

      }
    }

    # if we got here, we failed to find renv
    fmt <- "renv %s is not available from your declared package repositories"
    stop(sprintf(fmt, version))

  }
272 |
  # download the requested version of renv from the CRAN source archive of
  # each declared repository; returns the tarball path, or FALSE on failure
  renv_bootstrap_download_cran_archive <- function(version) {

    name <- sprintf("renv_%s.tar.gz", version)
    repos <- renv_bootstrap_repos()
    urls <- file.path(repos, "src/contrib/Archive/renv", name)
    destfile <- file.path(tempdir(), name)

    message("* Downloading renv ", version, " ... ", appendLF = FALSE)

    # try each repository in turn, stopping at the first success
    for (url in urls) {

      status <- tryCatch(
        renv_bootstrap_download_impl(url, destfile),
        condition = identity
      )

      # download.file() signals success with a 0 status code
      if (identical(status, 0L)) {
        message("OK")
        return(destfile)
      }

    }

    message("FAILED")
    return(FALSE)

  }
300 |
301 | renv_bootstrap_download_tarball <- function(version) {
302 |
303 | # if the user has provided the path to a tarball via
304 | # an environment variable, then use it
305 | tarball <- Sys.getenv("RENV_BOOTSTRAP_TARBALL", unset = NA)
306 | if (is.na(tarball))
307 | return()
308 |
309 | # allow directories
310 | info <- file.info(tarball, extra_cols = FALSE)
311 | if (identical(info$isdir, TRUE)) {
312 | name <- sprintf("renv_%s.tar.gz", version)
313 | tarball <- file.path(tarball, name)
314 | }
315 |
316 | # bail if it doesn't exist
317 | if (!file.exists(tarball)) {
318 |
319 | # let the user know we weren't able to honour their request
320 | fmt <- "* RENV_BOOTSTRAP_TARBALL is set (%s) but does not exist."
321 | msg <- sprintf(fmt, tarball)
322 | warning(msg)
323 |
324 | # bail
325 | return()
326 |
327 | }
328 |
329 | fmt <- "* Bootstrapping with tarball at path '%s'."
330 | msg <- sprintf(fmt, tarball)
331 | message(msg)
332 |
333 | tarball
334 |
335 | }
336 |
  # download the requested version of renv from GitHub (used for
  # development builds); returns the tarball path, or FALSE on failure or
  # when disabled via RENV_BOOTSTRAP_FROM_GITHUB
  renv_bootstrap_download_github <- function(version) {

    enabled <- Sys.getenv("RENV_BOOTSTRAP_FROM_GITHUB", unset = "TRUE")
    if (!identical(enabled, "TRUE"))
      return(FALSE)

    # prepare download options -- when a GitHub PAT and curl/wget are
    # available, pass the token as an Authorization header; the previous
    # download options are restored on exit
    pat <- Sys.getenv("GITHUB_PAT")
    if (nzchar(Sys.which("curl")) && nzchar(pat)) {
      fmt <- "--location --fail --header \"Authorization: token %s\""
      extra <- sprintf(fmt, pat)
      saved <- options("download.file.method", "download.file.extra")
      options(download.file.method = "curl", download.file.extra = extra)
      on.exit(do.call(base::options, saved), add = TRUE)
    } else if (nzchar(Sys.which("wget")) && nzchar(pat)) {
      fmt <- "--header=\"Authorization: token %s\""
      extra <- sprintf(fmt, pat)
      saved <- options("download.file.method", "download.file.extra")
      options(download.file.method = "wget", download.file.extra = extra)
      on.exit(do.call(base::options, saved), add = TRUE)
    }

    message("* Downloading renv ", version, " from GitHub ... ", appendLF = FALSE)

    url <- file.path("https://api.github.com/repos/rstudio/renv/tarball", version)
    name <- sprintf("renv_%s.tar.gz", version)
    destfile <- file.path(tempdir(), name)

    status <- tryCatch(
      renv_bootstrap_download_impl(url, destfile),
      condition = identity
    )

    if (!identical(status, 0L)) {
      message("FAILED")
      return(FALSE)
    }

    message("OK")
    return(destfile)

  }
379 |
  # install the downloaded renv tarball into `library` by shelling out to
  # R CMD INSTALL; returns the process exit status (NULL when system2
  # reports no failure), printing captured output to stderr on error
  renv_bootstrap_install <- function(version, tarball, library) {

    # attempt to install it into project library
    message("* Installing renv ", version, " ... ", appendLF = FALSE)
    dir.create(library, showWarnings = FALSE, recursive = TRUE)

    # invoke using system2 so we can capture and report output
    bin <- R.home("bin")
    exe <- if (Sys.info()[["sysname"]] == "Windows") "R.exe" else "R"
    r <- file.path(bin, exe)

    # --vanilla keeps user profiles/environments from interfering
    args <- c(
      "--vanilla", "CMD", "INSTALL", "--no-multiarch",
      "-l", shQuote(path.expand(library)),
      shQuote(path.expand(tarball))
    )

    output <- system2(r, args, stdout = TRUE, stderr = TRUE)
    message("Done!")

    # check for successful install
    # (system2 attaches a non-zero "status" attribute on failure)
    status <- attr(output, "status")
    if (is.numeric(status) && !identical(status, 0L)) {
      header <- "Error installing renv:"
      lines <- paste(rep.int("=", nchar(header)), collapse = "")
      text <- c(header, lines, output)
      writeLines(text, con = stderr())
    }

    status

  }
412 |
  # construct the platform-specific path prefix under which renv stores
  # platform-dependent artefacts, built from the R version, the platform
  # string, and an optional user-configured prefix
  renv_bootstrap_platform_prefix <- function() {

    # construct version prefix (major.minor only, e.g. "R-4.2")
    version <- paste(R.version$major, R.version$minor, sep = ".")
    prefix <- paste("R", numeric_version(version)[1, 1:2], sep = "-")

    # include SVN revision for development versions of R
    # (to avoid sharing platform-specific artefacts with released versions of R)
    devel <-
      identical(R.version[["status"]], "Under development (unstable)") ||
      identical(R.version[["nickname"]], "Unsuffered Consequences")

    if (devel)
      prefix <- paste(prefix, R.version[["svn rev"]], sep = "-r")

    # build list of path components
    components <- c(prefix, R.version$platform)

    # include prefix if provided by user
    prefix <- renv_bootstrap_platform_prefix_impl()
    if (!is.na(prefix) && nzchar(prefix))
      components <- c(prefix, components)

    # build prefix
    paste(components, collapse = "/")

  }
440 |
441 | renv_bootstrap_platform_prefix_impl <- function() {
442 |
443 | # if an explicit prefix has been supplied, use it
444 | prefix <- Sys.getenv("RENV_PATHS_PREFIX", unset = NA)
445 | if (!is.na(prefix))
446 | return(prefix)
447 |
448 | # if the user has requested an automatic prefix, generate it
449 | auto <- Sys.getenv("RENV_PATHS_PREFIX_AUTO", unset = NA)
450 | if (auto %in% c("TRUE", "True", "true", "1"))
451 | return(renv_bootstrap_platform_prefix_auto())
452 |
453 | # empty string on failure
454 | ""
455 |
456 | }
457 |
  # infer an automatic path prefix from the operating system, warning
  # (but still returning the result) when the OS cannot be determined
  renv_bootstrap_platform_prefix_auto <- function() {

    prefix <- tryCatch(renv_bootstrap_platform_os(), error = identity)
    if (inherits(prefix, "error") || prefix %in% "unknown") {

      msg <- paste(
        "failed to infer current operating system",
        "please file a bug report at https://github.com/rstudio/renv/issues",
        sep = "; "
      )

      warning(msg)

    }

    prefix

  }
476 |
477 | renv_bootstrap_platform_os <- function() {
478 |
479 | sysinfo <- Sys.info()
480 | sysname <- sysinfo[["sysname"]]
481 |
482 | # handle Windows + macOS up front
483 | if (sysname == "Windows")
484 | return("windows")
485 | else if (sysname == "Darwin")
486 | return("macos")
487 |
488 | # check for os-release files
489 | for (file in c("/etc/os-release", "/usr/lib/os-release"))
490 | if (file.exists(file))
491 | return(renv_bootstrap_platform_os_via_os_release(file, sysinfo))
492 |
493 | # check for redhat-release files
494 | if (file.exists("/etc/redhat-release"))
495 | return(renv_bootstrap_platform_os_via_redhat_release())
496 |
497 | "unknown"
498 |
499 | }
500 |
# Derive an os-id-version string (e.g. "linux-ubuntu-focal") from an
# os-release style file.
#
# @param file Path to an os-release file (KEY=VALUE lines).
# @param sysinfo The result of Sys.info() (anything supporting
#   sysinfo[["sysname"]]).
# @return A single string of the form "<os>-<id>-<version>"; missing
#   fields become "unknown".
renv_bootstrap_platform_os_via_os_release <- function(file, sysinfo) {

  # parse KEY=VALUE pairs (quotes stripped; '#' lines are comments)
  fields <- utils::read.table(
    file             = file,
    sep              = "=",
    quote            = c("\"", "'"),
    col.names        = c("Key", "Value"),
    comment.char     = "#",
    stringsAsFactors = FALSE
  )

  entries <- as.list(fields$Value)
  names(entries) <- fields$Key

  # helper: first non-empty entry among a set of candidate keys
  lookup <- function(keys) {
    for (key in keys) {
      if (key %in% names(entries) && nzchar(entries[[key]]))
        return(entries[[key]])
    }
    "unknown"
  }

  os <- tolower(sysinfo[["sysname"]])
  id <- lookup(c("ID", "ID_LIKE"))
  version <- lookup(c("UBUNTU_CODENAME", "VERSION_CODENAME", "VERSION_ID", "BUILD_ID"))

  # join together
  paste(c(os, id, version), collapse = "-")

}
541 |
# Derive a "linux-<id>-<version>" platform string from a RedHat-style
# release file (e.g. "CentOS Linux release 7.9.2009 (Core)").
#
# @param file Path to the release file; defaults to the conventional
#   location so existing zero-argument callers are unaffected.
# @return A string of the form "linux-<id>-<version>"; the id and/or
#   version components are "unknown" when they cannot be inferred.
renv_bootstrap_platform_os_via_redhat_release <- function(file = "/etc/redhat-release") {

  # collapse to a single string so that a multi-line (or empty) release
  # file cannot hand grepl()/if a condition of length != 1
  contents <- paste(readLines(file, warn = FALSE), collapse = " ")

  # infer id
  id <- if (grepl("centos", contents, ignore.case = TRUE))
    "centos"
  else if (grepl("redhat", contents, ignore.case = TRUE))
    "redhat"
  else
    "unknown"

  # find the first whitespace-delimited token that parses as a version;
  # only its major component is kept (very hacky, as in upstream renv)
  version <- "unknown"

  parts <- strsplit(contents, "[[:space:]]")[[1L]]
  for (part in parts) {

    nv <- tryCatch(numeric_version(part), error = identity)
    if (inherits(nv, "error"))
      next

    version <- nv[1, 1]
    break

  }

  paste(c("linux", id, version), collapse = "-")

}
573 |
# Compute the name of the folder used for this project's library within
# the renv library root.
#
# @param project Path to the project directory.
# @return The project's basename, either as-is (when the
#   RENV_PATHS_LIBRARY_ROOT_ASIS environment variable is truthy) or
#   suffixed with an 8-character hash of the project path, to
#   disambiguate projects that share a basename.
renv_bootstrap_library_root_name <- function(project) {

  # use project name as-is if requested
  #
  # note: Sys.getenv() returns a string; the previous `if (asis)` relied on
  # R's implicit string->logical coercion and errored for values such as
  # "1" -- isTRUE(as.logical(...)) treats unrecognized values as FALSE
  asis <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT_ASIS", unset = "FALSE")
  if (isTRUE(as.logical(asis)))
    return(basename(project))

  # otherwise, disambiguate based on project's path
  id <- substring(renv_bootstrap_hash_text(project), 1L, 8L)
  paste(basename(project), id, sep = "-")

}
586 |
# Resolve the on-disk root directory for this project's renv library.
#
# Resolution order: RENV_PATHS_LIBRARY (used verbatim), then a shared
# library root (suffixed with a project-specific folder name), then the
# project-local renv/library folder.
renv_bootstrap_library_root <- function(project) {

  prefix <- renv_bootstrap_profile_prefix()

  # an explicitly-configured library path wins outright
  override <- Sys.getenv("RENV_PATHS_LIBRARY", unset = NA)
  if (!is.na(override))
    return(paste(c(override, prefix), collapse = "/"))

  # otherwise, try a shared library root
  root <- renv_bootstrap_library_root_impl(project)
  if (!is.null(root)) {
    folder <- renv_bootstrap_library_root_name(project)
    return(paste(c(root, prefix, folder), collapse = "/"))
  }

  # fall back to the project-local library
  renv_bootstrap_paths_renv("library", project = project)

}
604 |
# Find a shared library root for the project, if one applies.
#
# @return The value of RENV_PATHS_LIBRARY_ROOT when set; a user-level
#   cache directory for package-type projects; NULL otherwise (callers
#   then fall back to a project-local library).
renv_bootstrap_library_root_impl <- function(project) {

  override <- Sys.getenv("RENV_PATHS_LIBRARY_ROOT", unset = NA)
  if (!is.na(override))
    return(override)

  # R packages keep their renv libraries under the user cache directory
  if (identical(renv_bootstrap_project_type(project), "package")) {
    userdir <- renv_bootstrap_user_dir()
    return(file.path(userdir, "library"))
  }

}
618 |
# Check that the loaded version of renv matches the version this project
# is configured (via its lockfile) to use.
#
# @param version The renv version recorded for the project.
# @return TRUE when the loaded version matches; otherwise FALSE, after a
#   warning explaining how to reconcile the two versions.
renv_bootstrap_validate_version <- function(version) {

  loadedversion <- utils::packageDescription("renv", fields = "Version")
  if (version == loadedversion)
    return(TRUE)

  # four-component versions are assumed to come from GitHub;
  # three-component versions from CRAN
  parts <- strsplit(loadedversion, "[.-]")[[1]]
  remote <- if (length(parts) == 4L)
    paste("rstudio/renv", loadedversion, sep = "@")
  else
    paste("renv", loadedversion, sep = "@")

  fmt <- paste(
    "renv %1$s was loaded from project library, but this project is configured to use renv %2$s.",
    "Use `renv::record(\"%3$s\")` to record renv %1$s in the lockfile.",
    "Use `renv::restore(packages = \"renv\")` to install renv %2$s into the project library.",
    sep = "\n"
  )

  warning(sprintf(fmt, loadedversion, version, remote), call. = FALSE)

  FALSE

}
646 |
# Hash text by writing it to a temporary file and taking the file's MD5
# checksum.
#
# @param text Character vector to hash (written one element per line).
# @return The MD5 checksum as reported by tools::md5sum() -- a named
#   character vector of length one (name = the temporary path).
renv_bootstrap_hash_text <- function(text) {

  # write to a scratch file, removed again when this function exits
  path <- tempfile("renv-hash-")
  on.exit(unlink(path), add = TRUE)

  writeLines(text, con = path)
  tools::md5sum(path)

}
656 |
# Attempt to load renv from the project library and activate the project.
#
# @return TRUE if renv was loaded and the project activated; FALSE if
#   renv could not be loaded from `libpath` (callers then bootstrap it).
renv_bootstrap_load <- function(project, libpath, version) {

  # bail if renv is not installed in the project library
  loaded <- requireNamespace("renv", lib.loc = libpath, quietly = TRUE)
  if (!loaded)
    return(FALSE)

  # warn (but continue) if the loaded version does not match
  renv_bootstrap_validate_version(version)

  renv::load(project)

  TRUE

}
672 |
# Determine (and set) the renv profile for a project.
#
# An already-set RENV_PROFILE environment variable wins; otherwise the
# project's profile file is consulted and RENV_PROFILE is set from its
# first line.
#
# @return The active profile name, or NULL when no profile applies.
renv_bootstrap_profile_load <- function(project) {

  # an explicit RENV_PROFILE environment variable wins
  current <- Sys.getenv("RENV_PROFILE", unset = NA)
  if (!is.na(current) && nzchar(current))
    return(current)

  # otherwise, look for a profile file (nothing to do if absent)
  path <- renv_bootstrap_paths_renv("profile", profile = FALSE)
  if (!file.exists(path))
    return(NULL)

  contents <- readLines(path, warn = FALSE)
  if (length(contents) == 0L)
    return(NULL)

  # the first line names the profile; "" and "default" are no-ops
  profile <- contents[[1L]]
  if (!profile %in% c("", "default"))
    Sys.setenv(RENV_PROFILE = profile)

  profile

}
698 |
# Library-path prefix for the active profile ("profiles/<name>/renv"),
# or NULL when no profile is active.
renv_bootstrap_profile_prefix <- function() {
  active <- renv_bootstrap_profile_get()
  if (is.null(active))
    return(NULL)
  file.path("profiles", active, "renv")
}
704 |
# The normalized value of RENV_PROFILE (NULL when unset or "default").
renv_bootstrap_profile_get <- function() {
  renv_bootstrap_profile_normalize(Sys.getenv("RENV_PROFILE", unset = ""))
}
709 |
# Set (or, for empty/default profiles, clear) RENV_PROFILE.
renv_bootstrap_profile_set <- function(profile) {
  normalized <- renv_bootstrap_profile_normalize(profile)
  if (is.null(normalized)) {
    Sys.unsetenv("RENV_PROFILE")
  } else {
    Sys.setenv(RENV_PROFILE = normalized)
  }
}
717 |
# Map NULL, "" and "default" to NULL; leave any other profile name as-is.
renv_bootstrap_profile_normalize <- function(profile) {

  if (is.null(profile))
    return(NULL)

  if (profile %in% c("", "default"))
    return(NULL)

  profile

}
726 |
# Is `path` absolute? TRUE for paths starting with "~", "/" or "\\",
# and for Windows drive-letter paths such as "C:/..." or "C:\\...".
renv_bootstrap_path_absolute <- function(path) {

  first <- substr(path, 1L, 1L)

  # POSIX-style (or home-relative) absolute paths
  if (first %in% c("~", "/", "\\"))
    return(TRUE)

  # Windows drive-letter paths
  first %in% c(letters, LETTERS) && substr(path, 2L, 3L) %in% c(":/", ":\\")

}
735 |
# Build a path into the project's renv folder, honoring RENV_PATHS_RENV
# and (unless profile = FALSE) the active profile prefix.
renv_bootstrap_paths_renv <- function(..., profile = TRUE, project = NULL) {
  folder <- Sys.getenv("RENV_PATHS_RENV", unset = "renv")
  # an absolute RENV_PATHS_RENV is used as-is; otherwise it is rooted at
  # the project directory
  root <- if (renv_bootstrap_path_absolute(folder)) NULL else project
  prefix <- if (profile) renv_bootstrap_profile_prefix()
  paste(c(root, folder, prefix, ...), collapse = "/")
}
743 |
# Infer the type of a project from its DESCRIPTION file.
#
# @param path Path to the project directory.
# @return The lowercased Type field when present; "package" when the
#   DESCRIPTION declares a Package but no Type; "unknown" otherwise
#   (including when there is no readable DESCRIPTION).
renv_bootstrap_project_type <- function(path) {

  descpath <- file.path(path, "DESCRIPTION")
  if (!file.exists(descpath))
    return("unknown")

  desc <- tryCatch(read.dcf(descpath, all = TRUE), error = identity)
  if (inherits(desc, "error"))
    return("unknown")

  # an explicit Type field wins
  if (!is.null(desc$Type))
    return(tolower(desc$Type))

  # a Package field implies a package project
  if (!is.null(desc$Package))
    return("package")

  "unknown"

}
769 |
# User-level renv directory, normalized to forward slashes and with "~"
# expanded.
renv_bootstrap_user_dir <- function() {
  raw <- renv_bootstrap_user_dir_impl()
  path.expand(chartr("\\", "/", raw))
}
774 |
# Resolve the user cache directory used for renv, in priority order:
# the "renv.userdir.override" option, tools::R_user_dir() (R >= 4.0),
# the R_USER_CACHE_DIR / XDG_CACHE_HOME environment variables, and
# finally a platform-specific default.
renv_bootstrap_user_dir_impl <- function() {

  # explicit local override
  override <- getOption("renv.userdir.override")
  if (!is.null(override))
    return(override)

  # newer versions of R provide tools::R_user_dir()
  tools <- asNamespace("tools")
  if (is.function(tools$R_user_dir))
    return(tools$R_user_dir("renv", "cache"))

  # backfill for older R: the standard cache environment variables
  for (envvar in c("R_USER_CACHE_DIR", "XDG_CACHE_HOME")) {
    root <- Sys.getenv(envvar, unset = NA)
    if (!is.na(root))
      return(file.path(root, "R/renv"))
  }

  # platform-specific default fallbacks
  sysname <- Sys.info()[["sysname"]]
  if (sysname == "Windows")
    file.path(Sys.getenv("LOCALAPPDATA"), "R/cache/R/renv")
  else if (sysname == "Darwin")
    "~/Library/Caches/org.R-project.R/R/renv"
  else
    "~/.cache/R/renv"

}
804 |
805 |
# Minimal JSON reader used during bootstrap (so no JSON package is
# required). The JSON text is rewritten into R syntax -- "[" and "{"
# become "list(", "]" and "}" become ")", ":" becomes "=" -- and the
# result is parsed and evaluated. String literals that contain any of
# those structural characters are first swapped for placeholders so the
# rewrites cannot corrupt them, then restored via renv_json_remap().
#
# NOTE(review): `read` and `%||%` are helpers assumed to be defined
# earlier in this file -- `read(file)` presumably returns the file's
# lines; verify against the full script.
renv_json_read <- function(file = NULL, text = NULL) {

  # prefer explicit text; otherwise read from file, then flatten to one string
  text <- paste(text %||% read(file), collapse = "\n")

  # find all double-quoted JSON strings (handles backslash escapes)
  pattern <- '["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'
  locs <- gregexpr(pattern, text, perl = TRUE)[[1]]

  # if any are found, replace them with placeholders
  replaced <- text
  strings <- character()
  replacements <- character()

  if (!identical(c(locs), -1L)) {

    # get the string values
    starts <- locs
    ends <- locs + attr(locs, "match.length") - 1L
    strings <- substring(text, starts, ends)

    # only strings containing structural characters ([ ] { } :) could be
    # corrupted by the rewrites below; leave all others untouched
    strings <- grep("[[\\]{}:]", strings, perl = TRUE, value = TRUE)

    # placeholder of the form "\032<i>\032" for each such string
    replacements <- sprintf('"\032%i\032"', seq_along(strings))

    # swap each string for its placeholder (first occurrence only;
    # `replaced` is updated in the enclosing scope via <<-)
    mapply(function(string, replacement) {
      replaced <<- sub(string, replacement, replaced, fixed = TRUE)
    }, strings, replacements)

  }

  # transform the JSON into something the R parser understands
  transformed <- replaced
  transformed <- gsub("[[{]", "list(", transformed)
  transformed <- gsub("[]}]", ")", transformed)
  transformed <- gsub(":", "=", transformed, fixed = TRUE)
  text <- paste(transformed, collapse = "\n")

  # parse (but do not yet evaluate) the rewritten text
  json <- parse(text = text, keep.source = FALSE, srcfile = NULL)[[1L]]

  # construct map between placeholder strings and their source strings
  map <- as.character(parse(text = strings))
  names(map) <- as.character(parse(text = replacements))

  # convert to list
  map <- as.list(map)

  # substitute the original strings back into the parsed expression
  remapped <- renv_json_remap(json, map)

  # evaluate the list(...) calls to build the final R object
  eval(remapped, envir = baseenv())

}
863 |
# Recursively substitute placeholder strings in a parsed JSON expression
# with their original values, and translate the bare symbols true /
# false / null into TRUE / FALSE / NULL.
renv_json_remap <- function(json, map) {

  # remap any names that are placeholders
  if (!is.null(names(json))) {
    lhs <- match(names(json), names(map), nomatch = 0L)
    rhs <- match(names(map), names(json), nomatch = 0L)
    names(json)[rhs] <- map[lhs]
  }

  # character values: swap a placeholder for its original string
  if (is.character(json))
    return(map[[json]] %||% json)

  # bare symbols: handle JSON's true / false / null literals
  if (is.name(json)) {
    text <- as.character(json)
    if (text == "true")
      return(TRUE)
    if (text == "false")
      return(FALSE)
    if (text == "null")
      return(NULL)
  }

  # recurse into calls and lists
  if (is.recursive(json)) {
    for (index in seq_along(json)) {
      json[index] <- list(renv_json_remap(json[[index]], map))
    }
  }

  json

}
898 |
899 | # load the renv profile, if any
900 | renv_bootstrap_profile_load(project)
901 |
902 | # construct path to library root
903 | root <- renv_bootstrap_library_root(project)
904 |
905 | # construct library prefix for platform
906 | prefix <- renv_bootstrap_platform_prefix()
907 |
908 | # construct full libpath
909 | libpath <- file.path(root, prefix)
910 |
911 | # attempt to load
912 | if (renv_bootstrap_load(project, libpath, version))
913 | return(TRUE)
914 |
915 | # load failed; inform user we're about to bootstrap
916 | prefix <- paste("# Bootstrapping renv", version)
917 | postfix <- paste(rep.int("-", 77L - nchar(prefix)), collapse = "")
918 | header <- paste(prefix, postfix)
919 | message(header)
920 |
921 | # perform bootstrap
922 | bootstrap(version, libpath)
923 |
924 | # exit early if we're just testing bootstrap
925 | if (!is.na(Sys.getenv("RENV_BOOTSTRAP_INSTALL_ONLY", unset = NA)))
926 | return(TRUE)
927 |
928 | # try again to load
929 | if (requireNamespace("renv", lib.loc = libpath, quietly = TRUE)) {
930 | message("* Successfully installed and loaded renv ", version, ".")
931 | return(renv::load())
932 | }
933 |
934 | # failed to download or load renv; warn the user
935 | msg <- c(
936 | "Failed to find an renv installation: the project will not be loaded.",
937 | "Use `renv::activate()` to re-initialize the project."
938 | )
939 |
940 | warning(paste(msg, collapse = "\n"), call. = FALSE)
941 |
942 | })
943 |
--------------------------------------------------------------------------------