├── .Rbuildignore ├── .gitattributes ├── .github ├── .gitignore └── workflows │ ├── R-CMD-check.yaml │ └── pkgdown.yaml ├── .gitignore ├── DESCRIPTION ├── License.md ├── NAMESPACE ├── NEWS.md ├── R ├── data.R ├── eSDM-internal.R ├── eSDM-package.R ├── eSDM_GUI.R ├── ensemble_create.R ├── ensemble_rescale.R ├── evaluation_metrics.R ├── model_abundance.R ├── overlay_sdm.R ├── pts2poly_centroids.R └── pts2poly_vertices.R ├── README.md ├── _pkgdown.yml ├── cran-comments.md ├── data-raw ├── GSHHG_shp.R ├── README.txt ├── create_data.R ├── create_ens_sf.R ├── create_extdata.R ├── eSDM_ME&E_final.RDATA ├── eSDM_shpExport (1) │ ├── eSDM_Sample_predictions_1_csv__PredModel1__orig.dbf │ ├── eSDM_Sample_predictions_1_csv__PredModel1__orig.prj │ ├── eSDM_Sample_predictions_1_csv__PredModel1__orig.shp │ └── eSDM_Sample_predictions_1_csv__PredModel1__orig.shx ├── eSDM_shpExport (2) │ ├── eSDM_Sample_predictions_4_gdb__Pred1__orig.dbf │ ├── eSDM_Sample_predictions_4_gdb__Pred1__orig.prj │ ├── eSDM_Sample_predictions_4_gdb__Pred1__orig.shp │ └── eSDM_Sample_predictions_4_gdb__Pred1__orig.shx ├── eSDM_shpExport │ ├── eSDM_Sample_predictions_2_csv__pred__orig.dbf │ ├── eSDM_Sample_predictions_2_csv__pred__orig.prj │ ├── eSDM_Sample_predictions_2_csv__pred__orig.shp │ └── eSDM_Sample_predictions_2_csv__pred__orig.shx ├── ens_sf_unw.rda ├── ens_sf_wtss.rda ├── figure2_overlay.R ├── figure3.R ├── figure4.R ├── figure5.R ├── figure_plot.R └── valid_sf_pres.rda ├── data ├── gshhg.l.L16.rda ├── preds.1.rda ├── preds.2.rda ├── preds.3.rda └── validation.data.rda ├── eSDM.Rproj ├── inst ├── CITATION ├── eSDM_vignette_helper.R ├── extdata │ ├── Predictions_Beckeretal2016.rds │ ├── Predictions_Beckeretal2016_overlaid.rds │ ├── Predictions_Hazenetal2017.rds │ ├── Predictions_Hazenetal2017_overlaid.rds │ ├── Predictions_Redfernetal2017.rds │ ├── README.txt │ ├── Shapefiles │ │ ├── Study_Area_CCE.dbf │ │ ├── Study_Area_CCE.prj │ │ ├── Study_Area_CCE.sbn │ │ ├── Study_Area_CCE.sbx │ │ ├── 
Study_Area_CCE.shp │ │ ├── Study_Area_CCE.shp.xml │ │ └── Study_Area_CCE.shx │ ├── Table3.csv │ └── eSDM_Validation_data_all.rds └── shiny │ ├── app.R │ ├── server_1_loadModels │ ├── server_1_loadModels.R │ ├── server_1_loadModels_create_local.R │ ├── server_1_loadModels_csv.R │ ├── server_1_loadModels_funcs.R │ ├── server_1_loadModels_raster.R │ ├── server_1_loadModels_renderUI.R │ ├── server_1_loadModels_shpgdb.R │ └── server_1_loadModels_shpgdb_create_local.R │ ├── server_2_overlay │ ├── server_2_overlay.R │ ├── server_2_overlay_funcs.R │ ├── server_2_overlay_loadPoly_csv.R │ ├── server_2_overlay_loadPoly_provided.R │ ├── server_2_overlay_loadPoly_shpgdb.R │ ├── server_2_overlay_overlayModels.R │ ├── server_2_overlay_overlayModels_base.R │ └── server_2_overlay_renderUI.R │ ├── server_3_createEns │ ├── server_3_createEns.R │ ├── server_3_createEns_create.R │ ├── server_3_createEns_create_regexc.R │ ├── server_3_createEns_create_weighted.R │ └── server_3_createEns_renderUI.R │ ├── server_4_evalMetrics │ ├── server_4_evalMetrics.R │ ├── server_4_evalMetrics_funcs.R │ ├── server_4_evalMetrics_loadData.R │ └── server_4_evalMetrics_renderUI.R │ ├── server_5_prettyPlot │ ├── server_5_prettyPlot.R │ ├── server_5_prettyPlot_addobj.R │ ├── server_5_prettyPlot_addobj_renderUI.R │ ├── server_5_prettyPlot_addobj_update.R │ ├── server_5_prettyPlot_download.R │ ├── server_5_prettyPlot_funcs.R │ ├── server_5_prettyPlot_plot.R │ ├── server_5_prettyPlot_prep.R │ ├── server_5_prettyPlot_renderUI.R │ ├── server_5_prettyPlot_toplot.R │ ├── server_5_prettyPlot_update.R │ └── server_5_prettyPlot_update_renderUI.R │ ├── server_6_export │ ├── server_6_export.R │ └── server_6_export_renderUI.R │ ├── server_other │ ├── server_checks.R │ ├── server_funcs.R │ ├── server_funcs_preview360.R │ ├── server_plots.R │ ├── server_plots_download.R │ ├── server_plots_funcs.R │ ├── server_reactiveValues.R │ ├── server_render.R │ ├── server_roadmap_download.R │ ├── server_tables.R │ └── 
server_workspace.R │ ├── ui_files │ ├── ui_0_roadmap.R │ ├── ui_1_loadModels.R │ ├── ui_2_overlay.R │ ├── ui_3_createEns.R │ ├── ui_4_evalMetrics.R │ ├── ui_5_prettyPlot.R │ ├── ui_6_export.R │ ├── ui_7_manual.R │ ├── ui_common.R │ └── ui_funcs.R │ └── www │ ├── eSDM_manual.pdf │ └── noaa_logo.png ├── man ├── eSDM-package.Rd ├── eSDM_GUI.Rd ├── ensemble_create.Rd ├── ensemble_rescale.Rd ├── evaluation_metrics.Rd ├── gshhg.l.L16.Rd ├── model_abundance.Rd ├── overlay_sdm.Rd ├── preds.Rd ├── pts2poly_centroids.Rd ├── pts2poly_vertices.Rd └── validation.data.Rd ├── pkgdown └── extra.css ├── tests ├── testthat.R └── testthat │ ├── test-ensemble.R │ ├── test-internals.R │ ├── test-model_abundance.R │ ├── test-overlay_sdm.R │ └── test-pts2poly.R └── vignettes ├── .gitignore └── example-analysis.Rmd /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^data-raw$ 4 | ^LICENSE$ 5 | ^\.travis\.yml$ 6 | ^appveyor\.yml$ 7 | ^cran-comments\.md$ 8 | ^inst/shiny/rsconnect$ 9 | ^README\.Rmd$ 10 | README.md 11 | License.md 12 | 13 | ^CRAN-RELEASE$ 14 | ^_pkgdown\.yml$ 15 | ^docs$ 16 | ^pkgdown$ 17 | ^tic\.R$ 18 | ^\.github$ 19 | ^CRAN-SUBMISSION$ 20 | ^LICENSE\.md$ 21 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | data/* binary 3 | src/* text=lf 4 | R/* text=lf 5 | -------------------------------------------------------------------------------- /.github/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help 
debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | 9 | name: R-CMD-check 10 | 11 | permissions: read-all 12 | 13 | jobs: 14 | R-CMD-check: 15 | runs-on: ${{ matrix.config.os }} 16 | 17 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 18 | 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | config: 23 | - {os: macos-latest, r: 'release'} 24 | - {os: windows-latest, r: 'release'} 25 | - {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'} 26 | - {os: ubuntu-latest, r: 'release'} 27 | - {os: ubuntu-latest, r: 'oldrel-1'} 28 | 29 | env: 30 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 31 | R_KEEP_PKG_SOURCE: yes 32 | 33 | steps: 34 | - uses: actions/checkout@v4 35 | 36 | - uses: r-lib/actions/setup-pandoc@v2 37 | 38 | - uses: r-lib/actions/setup-r@v2 39 | with: 40 | r-version: ${{ matrix.config.r }} 41 | http-user-agent: ${{ matrix.config.http-user-agent }} 42 | use-public-rspm: true 43 | 44 | - uses: r-lib/actions/setup-r-dependencies@v2 45 | with: 46 | extra-packages: any::rcmdcheck 47 | needs: check 48 | 49 | - uses: r-lib/actions/check-r-package@v2 50 | with: 51 | upload-snapshots: true 52 | build_args: 'c("--no-manual","--compact-vignettes=gs+qpdf")' 53 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | release: 9 | types: [published] 10 | workflow_dispatch: 11 | 12 | name: pkgdown 13 | 14 | permissions: read-all 15 | 16 | jobs: 17 | pkgdown: 18 | runs-on: ubuntu-latest 19 | # Only restrict concurrency for non-PR jobs 20 | concurrency: 21 | group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }} 22 | env: 23 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 24 | permissions: 25 | contents: write 26 | steps: 27 | - uses: actions/checkout@v4 28 | 29 | - uses: r-lib/actions/setup-pandoc@v2 30 | 31 | - uses: r-lib/actions/setup-r@v2 32 | with: 33 | use-public-rspm: true 34 | 35 | - uses: r-lib/actions/setup-r-dependencies@v2 36 | with: 37 | extra-packages: any::pkgdown, local::. 38 | needs: website 39 | 40 | - name: Build site 41 | run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE) 42 | shell: Rscript {0} 43 | 44 | - name: Deploy to GitHub pages 🚀 45 | if: github.event_name != 'pull_request' 46 | uses: JamesIves/github-pages-deploy-action@v4.5.0 47 | with: 48 | clean: false 49 | branch: gh-pages 50 | folder: docs 51 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Ruserdata 5 | .httr-oauth 6 | .dcf 7 | inst/doc 8 | docs/ 9 | docs 10 | 11 | inst/shiny/rsconnect/ 12 | rsconnect/ 13 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: eSDM 2 | Title: Ensemble Tool for Predictions from Species Distribution Models 3 | Description: A tool which allows users to create and evaluate ensembles 4 | of species distribution model (SDM) predictions. 
5 | Functionality is offered through R functions or a GUI (R Shiny app). 6 | This tool can assist users in identifying spatial uncertainties and 7 | making informed conservation and management decisions. The package is 8 | further described in Woodman et al (2019) <doi:10.1111/2041-210X.13283>. 9 | Version: 0.4.4 10 | Authors@R: person("Sam", "Woodman", email = "sam.woodman@noaa.gov", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-6071-8186")) 11 | URL: https://github.com/swfsc/eSDM/, https://swfsc.github.io/eSDM/ 12 | BugReports: https://github.com/swfsc/eSDM/issues/ 13 | Depends: R (>= 4.0.0) 14 | Imports: 15 | dplyr (>= 1.1), 16 | magrittr, 17 | methods, 18 | purrr, 19 | rlang, 20 | ROCR, 21 | sf (>= 1.0), 22 | shiny, 23 | stats, 24 | units 25 | Suggests: 26 | colorRamps, 27 | colourpicker, 28 | dichromat, 29 | DT, 30 | knitr, 31 | leafem, 32 | leaflet, 33 | maps, 34 | raster, 35 | RColorBrewer, 36 | rmarkdown, 37 | shinybusy, 38 | shinydashboard, 39 | shinyjs, 40 | testthat (>= 2.1.0), 41 | tmap (>= 2.3), 42 | viridis, 43 | zip 44 | License: Apache License (== 2) 45 | Encoding: UTF-8 46 | LazyData: true 47 | RoxygenNote: 7.3.2 48 | VignetteBuilder: knitr 49 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(ensemble_create,data.frame) 4 | S3method(ensemble_create,sf) 5 | export(eSDM_GUI) 6 | export(ensemble_create) 7 | export(ensemble_rescale) 8 | export(evaluation_metrics) 9 | export(model_abundance) 10 | export(overlay_sdm) 11 | export(pts2poly_centroids) 12 | export(pts2poly_vertices) 13 | importFrom(ROCR,performance) 14 | importFrom(ROCR,prediction) 15 | importFrom(dplyr,all_of) 16 | importFrom(dplyr,arrange) 17 | importFrom(dplyr,between) 18 | importFrom(dplyr,bind_rows) 19 | importFrom(dplyr,filter) 20 | importFrom(dplyr,group_by) 21 | importFrom(dplyr,left_join) 22 | 
importFrom(dplyr,mutate) 23 | importFrom(dplyr,rename) 24 | importFrom(dplyr,select) 25 | importFrom(dplyr,summarise) 26 | importFrom(magrittr,"%>%") 27 | importFrom(methods,slot) 28 | importFrom(purrr,map) 29 | importFrom(purrr,map2_df) 30 | importFrom(purrr,set_names) 31 | importFrom(rlang,.data) 32 | importFrom(rlang,sym) 33 | importFrom(sf,st_agr) 34 | importFrom(sf,st_area) 35 | importFrom(sf,st_bbox) 36 | importFrom(sf,st_crop) 37 | importFrom(sf,st_crs) 38 | importFrom(sf,st_geometry) 39 | importFrom(sf,st_intersection) 40 | importFrom(sf,st_intersects) 41 | importFrom(sf,st_is_valid) 42 | importFrom(sf,st_make_valid) 43 | importFrom(sf,st_polygon) 44 | importFrom(sf,st_set_agr) 45 | importFrom(sf,st_set_geometry) 46 | importFrom(sf,st_sf) 47 | importFrom(sf,st_sfc) 48 | importFrom(shiny,runApp) 49 | importFrom(stats,na.omit) 50 | importFrom(units,set_units) 51 | importFrom(utils,head) 52 | importFrom(utils,tail) 53 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # eSDM 0.4.4 2 | 3 | * Updated all [eSDM-data](https://github.com/SWFSC/eSDM-data) links after transferring to the [SWFSC](https://github.com/SWFSC) organization. 4 | 5 | * Corrected several remaining smwoodman/eSDM links, particularly in the Shiny app 6 | 7 | 8 | # eSDM 0.4.3 9 | 10 | * `overlay_sdm` maintains data.frame/tbl/tbl_df class consistency [#17] 11 | 12 | * `eSDM` now officially requires `dplyr>=1.1` and `sf>=1.0` 13 | 14 | 15 | # eSDM 0.4.2 16 | 17 | * Moved repo to https://github.com/SWFSC, and updated all associated documentation 18 | 19 | * Changed license to Apache License (== 2), based on NMFS guidance at https://nmfs-opensci.github.io/GitHub-Guide 20 | 21 | 22 | # eSDM 0.4.1 23 | 24 | * Updated shiny app URL to https://connect.fisheries.noaa.gov/eSDM/ 25 | 26 | 27 | # eSDM 0.4.0 28 | 29 | * Removed `maptools` as a dependency (#13). 
This Import was a relic as `eSDM` did not actually use `maptools` functions, so no functionality should change. 30 | 31 | * eSDM depends on R >= 4.0.0 32 | 33 | 34 | # eSDM 0.3.7 35 | 36 | * Updated `overlay_sdm` to stay current with `sf` 1.0 (#12) 37 | 38 | 39 | # eSDM 0.3.6 40 | 41 | * Fixed a bug to stay current with the `shiny` package (not using the `%OR%` function) - no changes to user functionality 42 | 43 | 44 | # eSDM 0.3.5 45 | 46 | * Use `==` rather than `identical` to check CRS equivalence (#10) 47 | 48 | * Fixed bug in the GUI where static previews downloaded with "Dimensions of 'Static Preview' window" did not have exact same dimensions as the window 49 | 50 | 51 | # eSDM 0.3.4 52 | 53 | * Fixed bug in eSDM GUI - invalid error message printed if packages not installed 54 | 55 | 56 | # eSDM 0.3.3 57 | 58 | * Import `st_make_valid` from `sf` rather than `lwgeom` (#9) 59 | 60 | * Use `shinybusy` rather than `shinycssloaders` in eSDM GUI because of unknown error 61 | 62 | * Move packages only used in the GUI (shiny app) to Suggests 63 | 64 | 65 | # eSDM 0.3.2 66 | 67 | * Updated citation details with MEE issue and page numbers 68 | 69 | * Updated vignette formatting 70 | 71 | 72 | # eSDM 0.3.1 73 | 74 | * Fixed bug in eSDM GUI 'update map range' button 75 | 76 | * Fixed bug in evaluation_metrics when validation data contains NA values 77 | 78 | * When subsetting for a single column, replaced `[,]` with `[[]]` so both data frames and tibbles return a vector 79 | 80 | * Improved documentation and messages in GUI 81 | 82 | * Added eSDM paper citation details (doi, etc) where applicable 83 | 84 | 85 | # eSDM 0.3.0 86 | 87 | * Updated citation to 'in press' in Methods in Ecology and Evolution 88 | 89 | * Added button to download manuscript example analysis data through the GUI 90 | 91 | * Added ability to have tick marks on high quality maps (`tmap` update) 92 | 93 | * Explicitly call `dplyr::select`, in case it is masked by `raster::select` 94 | 95 | * Fixed 
bug when exporting predictions from the GUI as a shapefile (now use `zip` package) 96 | 97 | * Fixed bug when downloading evaluation metrics from the GUI 98 | 99 | * Now call all `addMouseCoordinates` from `leafem` rather than `mapview` 100 | 101 | 102 | # eSDM 0.2.1 103 | 104 | * Initial release 105 | -------------------------------------------------------------------------------- /R/data.R: -------------------------------------------------------------------------------- 1 | #' Low resolution GSHHG world map 2 | #' 3 | #' Low resolution GSHHG world map, includes hierarchical levels 4 | #' L1 and L6. Processed using \code{\link[sf:valid]{st_make_valid}} 5 | #' 6 | #' @format An object of class \code{\link[sf]{sfc}} 7 | #' @source \url{http://www.soest.hawaii.edu/pwessel/gshhg/} 8 | "gshhg.l.L16" 9 | 10 | #' Sample SDM density predictions 11 | #' 12 | #' \code{preds.1}, \code{preds.2}, and \code{preds.3} are objects of class \code{\link[sf]{sf}} that serve as 13 | #' sample sets of SDM density predictions for the \code{eSDM} package 14 | #' 15 | #' @details 16 | #' \code{preds.1} sample SDM density predictions created by importing 17 | #' Sample_predictions_2.csv into the eSDM GUI, exporting predictions, and then 18 | #' clipping them to the SoCal_bite.csv region. 
19 | #' Also manually added two variance columns (numbers are randomly generated with a max of 0.01) 20 | #' 21 | #' \code{preds.2} sample SDM density predictions created by importing 22 | #' Sample_predictions_1.csv into the eSDM GUI, exporting predictions, and then 23 | #' clipping them to the SoCal_bite.csv region 24 | #' 25 | #' \code{preds.3} is a set of sample SDM density predictions created by importing 26 | #' Sample_predictions_4_gdb into the eSDM GUI, exporting predictions, and then 27 | #' clipping them to the SoCal_bite.csv region 28 | #' 29 | #' @format Objects of class \code{sf} with a column of density predictions (name: \code{Density}) and 30 | #' a simple feature list column (name: \code{geometry}). 31 | #' \code{preds.1} also has a second column of sample density predictions (name: \code{Density2}), 32 | #' as well as \code{Var1} and \code{Var2}, representing the variance 33 | #' 34 | #' \code{preds1}: An object of class sf (inherits from data.frame) with 325 rows and 5 columns. 35 | #' 36 | #' \code{preds2}: An object of class sf (inherits from data.frame) with 1891 rows and 2 columns. 37 | #' 38 | #' \code{preds3}: An object of class sf (inherits from data.frame) with 1445 rows and 2 columns. 39 | #' 40 | #' @name preds 41 | #' @aliases preds.1 preds.2 preds.3 42 | 43 | 44 | #' @rdname preds 45 | "preds.1" 46 | 47 | #' @rdname preds 48 | "preds.2" 49 | 50 | #' @rdname preds 51 | "preds.3" 52 | 53 | 54 | #' Sample validation data 55 | #' 56 | #' Sample validation data created by cropping Validation_data.csv to the SoCal_bite.csv region 57 | #' (.csv files from ...) 
58 | #' 59 | #' @format An object of class \code{\link[sf]{sf}} with 8 rows and 3 variables 60 | #' \describe{ 61 | #' \item{sight}{1's and 0's indicating species presence/absence} 62 | #' \item{count}{number of individuals observed at each point} 63 | #' \item{geometry}{simple feature list column representing validation data points} 64 | #' } 65 | "validation.data" 66 | -------------------------------------------------------------------------------- /R/eSDM-internal.R: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Calculate root mean squared error 3 | esdm_rmse <- function(x, y, ...) { 4 | lst <- list(...) 5 | z.lgl <- if ("na.rm" %in% names(lst)) lst$na.rm else TRUE 6 | 7 | sqrt(mean((x - y) ^ 2, na.rm = z.lgl)) 8 | } 9 | 10 | 11 | ############################################################################### 12 | # Weighted mean function for eSDM 13 | # Different from stats::weighted.mean in that NA values of w can also be ignored 14 | esdm_weighted_mean <- function(x, w, ...) { 15 | ### Inputs: 16 | # x: numeric vector of values for which to calculate the weighted mean 17 | # w: numeric vector of weights 18 | # ...: for passing na.rm argument 19 | 20 | stopifnot( 21 | inherits(x, c("numeric", "integer")), 22 | inherits(w, c("numeric", "integer")) 23 | ) 24 | 25 | lst <- list(...) 
26 | z.lgl <- if ("na.rm" %in% names(lst)) lst$na.rm else TRUE 27 | 28 | if (z.lgl) { 29 | idx <- which(!is.na(x) & !is.na(w)) 30 | x <- x[idx] 31 | w <- w[idx] 32 | } else { 33 | idx <- seq_along(x) 34 | } 35 | 36 | if (length(idx) == 0) { 37 | NA 38 | 39 | } else { 40 | w <- w / sum(w, na.rm = z.lgl) 41 | sum(x * w, na.rm = z.lgl) 42 | } 43 | } 44 | 45 | 46 | ############################################################################### 47 | # Weighted variance function for eSDM 48 | 49 | #------------------------------------------------------------------------------ 50 | ### Calculate among-model variance from weights and values 51 | ### used to calculate weighted mean 52 | ### Applicable formula (Price 1972; extension): 53 | ### wvar(x) = sum(wi * (xi - xmean)); sum(wi = 1) 54 | esdm_weighted_var_amv <- function(x, x.mean, w, ...) { 55 | ### Inputs: 56 | # x: numeric vector of values that were used to calculate weighted mean 57 | # x.mean: weighted mean of values in x, calculated using w 58 | # w: numeric vector of weights used to calculate weighted mean 59 | # ...: for passing na.rm argument 60 | 61 | stopifnot( 62 | inherits(x, c("numeric", "integer")), 63 | inherits(x.mean, c("numeric", "integer")), 64 | inherits(w, c("numeric", "integer")) 65 | ) 66 | 67 | lst <- list(...) 
68 | z.lgl <- if ("na.rm" %in% names(lst)) lst$na.rm else TRUE 69 | 70 | if (z.lgl) { 71 | idx <- which(!is.na(x) & !is.na(w)) 72 | x <- x[idx] 73 | w <- w[idx] 74 | } else { 75 | idx <- seq_along(x) 76 | } 77 | 78 | if (length(idx) == 0) { 79 | NA 80 | 81 | } else { 82 | w <- w / sum(w, na.rm = z.lgl) 83 | sum(w * (x - x.mean)^2, na.rm = z.lgl) 84 | } 85 | } 86 | 87 | 88 | #------------------------------------------------------------------------------ 89 | ### Calculate within-model variance from weights and variances of 90 | ### values used to calculate weighted mean 91 | ### Applicable formulas (Wade and Angliss 1996): 92 | ### var(c * x) = c^2 * var(x) 93 | ### var(wtdmean(x1, x2, ...)) = w1^2 * var(x1) + w2^2 * var(x2) + ...; sum(wi = 1) 94 | esdm_weighted_var_wmv <- function(x.var, w, ...) { 95 | ### Inputs: 96 | # x: numeric vector of variances of values used to calculate weighted mean 97 | # w: numeric vector of weights used to calculate weighted mean 98 | # ...: for passing na.rm argument 99 | 100 | stopifnot( 101 | inherits(x.var, c("numeric", "integer")), 102 | inherits(w, c("numeric", "integer")) 103 | ) 104 | 105 | lst <- list(...) 
106 | z.lgl <- if ("na.rm" %in% names(lst)) lst$na.rm else TRUE 107 | 108 | if (z.lgl) { 109 | idx <- which(!is.na(x.var) & !is.na(w)) 110 | x.var <- x.var[idx] 111 | w <- w[idx] 112 | } else { 113 | idx <- seq_along(x.var) 114 | } 115 | 116 | if (length(idx) == 0) { 117 | NA 118 | 119 | } else { 120 | w <- w / sum(w, na.rm = z.lgl) 121 | sum(w^2 * x.var, na.rm = z.lgl) 122 | } 123 | } 124 | 125 | ############################################################################### 126 | -------------------------------------------------------------------------------- /R/eSDM-package.R: -------------------------------------------------------------------------------- 1 | #' eSDM: A tool for creating and exploring ensembles of predictions from Species 2 | #' Distribution Models 3 | #' 4 | #' eSDM provides functionality for overlaying SDM predictions onto a single base 5 | #' geometry and creating and evaluating ensemble predictions. This can be done 6 | #' manually in R, or using the eSDM GUI (an R Shiny app) opened through 7 | #' \link{eSDM_GUI} 8 | #' 9 | #' @details eSDM allows users to overlay SDM predictions onto a single base 10 | #' geometry, create ensembles of these predictions via weighted or unweighted 11 | #' averages, calculate performance metrics for each set of predictions and for 12 | #' resulting ensembles, and visually compare ensemble predictions with 13 | #' original predictions. The information provided by this tool can assist 14 | #' users in understanding spatial uncertainties and making informed 15 | #' conservation decisions. 16 | #' 17 | #' The GUI ensures that the tool is accessible to non-R users, while also 18 | #' providing a user-friendly environment for functionality such as loading 19 | #' other polygons to use and visualizing predictions. However, user choices 20 | #' are restricted to the workflow provided by the GUI. 
21 | #' 22 | #' @name eSDM-package 23 | #' @aliases eSDM 24 | #' @title Ensemble tool for predictions from Species Distribution Models 25 | #' @author Sam Woodman \email{sam.woodman@@noaa.gov} 26 | #' @seealso \url{https://swfsc.github.io/eSDM/} 27 | #' 28 | #' @importFrom dplyr all_of arrange between bind_rows filter group_by left_join 29 | #' mutate rename select summarise 30 | #' @importFrom magrittr %>% 31 | #' @importFrom methods slot 32 | #' @importFrom purrr map map2_df set_names 33 | #' @importFrom rlang .data sym 34 | #' @importFrom ROCR performance prediction 35 | #' @importFrom sf st_agr st_set_agr st_area st_bbox st_crop st_crs st_geometry 36 | #' st_intersection st_intersects st_polygon st_set_geometry st_sf st_sfc 37 | #' st_is_valid st_make_valid 38 | #' @importFrom shiny runApp 39 | #' @importFrom stats na.omit 40 | #' @importFrom units set_units 41 | #' @importFrom utils head tail 42 | #' 43 | #' @keywords package 44 | "_PACKAGE" 45 | -------------------------------------------------------------------------------- /R/eSDM_GUI.R: -------------------------------------------------------------------------------- 1 | #' Open the eSDM GUI 2 | #' 3 | #' Open the eSDM graphical user interface (GUI); 4 | #' an R Shiny app for creating ensemble predictions using SDM predictions. 5 | #' 6 | #' @param launch.browser Logical with default of \code{TRUE}; passed to \code{launch.browser} 7 | #' argument of \code{\link[shiny]{runApp}} 8 | #' 9 | #' @usage eSDM_GUI(launch.browser = TRUE) 10 | #' 11 | #' @export 12 | eSDM_GUI <- function(launch.browser = TRUE) { 13 | appDir <- system.file("shiny", package = "eSDM") 14 | if (appDir == "") { 15 | stop("The eSDM GUI folder could not be found. Try re-installing 'eSDM'", 16 | call. 
= FALSE) 17 | } 18 | 19 | runApp(appDir, launch.browser = launch.browser, display.mode = "normal") 20 | } 21 | -------------------------------------------------------------------------------- /R/ensemble_rescale.R: -------------------------------------------------------------------------------- 1 | #' Rescale SDM predictions 2 | #' 3 | #' Rescale SDM predictions and (if applicable) associated uncertainties 4 | #' 5 | #' @param x object of class \code{sf} 6 | #' @param x.idx vector of column names or column indices; 7 | #' indicates columns in \code{x} with prediction values that will be rescaled 8 | #' @param y rescaling method; must be either "abundance" or "sumto1". 9 | #' See 'Details' section for descriptions of the rescaling methods 10 | #' @param y.abund numeric value; ignored if \code{y} is not \code{"abundance"} 11 | #' @param x.var.idx vector of column names or column indices; 12 | #' indicates columns in \code{x} with variance values that will be rescaled. 13 | #' If \code{x.var.idx} is specified, it must be the same length as \code{x.idx}. 14 | #' Use \code{x.var.idx = NULL} (the default) if none of the predictions have associated uncertainty values; 15 | #' see the 'Details' section for more information 16 | #' 17 | #' @details \code{ensemble_rescale} is intended to be used after overlaying predictions with 18 | #' \code{\link{overlay_sdm}} and before creating ensembles with \code{\link{ensemble_create}}. 19 | #' The provided rescaling methods are: 20 | #' \itemize{ 21 | #' \item'abundance' - Rescale the density values so that the predicted abundance is \code{y.abund} 22 | #' \item'sumto1' - Rescale the density values so their sum is 1 23 | #' } 24 | #' 25 | #' SDM uncertainty values must be rescaled differently than the prediction values. 26 | #' Columns specified in \code{x.var.idx} must contain variance values. 
27 | #' These values will be rescaled using the formula \code{var(c * x) = c^2 * var(x)}, 28 | #' where \code{c} is the rescaling factor for the associated predictions. 29 | #' 30 | #' If \code{x.var.idx} is not \code{NULL}, then the function assumes 31 | #' \code{x.var.idx[1]} contains the variance values associated with the predictions in \code{x.idx[1]}, 32 | #' \code{x.var.idx[2]} contains the variance values associated with the predictions in \code{x.idx[2]}, etc. 33 | #' Use \code{NA} in \code{x.var.idx} to indicate a set of predictions that does not have 34 | #' associated uncertainty values (e.g., \code{x.var.idx = c(4, NA, 5)}) 35 | #' 36 | #' @return The \code{sf} object \code{x} with the columns specified by \code{x.idx} and \code{x.var.idx} rescaled. 37 | #' The \code{agr} attributes of \code{x} will be conserved 38 | #' 39 | #' @examples 40 | #' ensemble_rescale(preds.1, c("Density", "Density2"), "abundance", 50) 41 | #' ensemble_rescale(preds.1, c(1, 2), "sumto1") 42 | #' 43 | #' ensemble_rescale( 44 | #' preds.1, c("Density", "Density2"), "abundance", 100, c(3,4) 45 | #' ) 46 | #' 47 | #' 48 | #' @export 49 | ensemble_rescale <- function(x, x.idx, y, y.abund = NULL, x.var.idx = NULL) { 50 | #---------------------------------------------------------------------------- 51 | # Check inputs 52 | 53 | #-------------------------------------------------------- 54 | ### General checks 55 | stopifnot( 56 | inherits(x, "sf"), 57 | length(x.idx) < ncol(x) 58 | ) 59 | 60 | #-------------------------------------------------------- 61 | ### Check/process x.idx 62 | x.df <- st_set_geometry(x, NULL) 63 | x.geom <- st_geometry(x) 64 | if (inherits(x.idx, "character")) { 65 | if (!all(x.idx %in% names(x.df))) { 66 | stop("If x.idx is a character vector, then all elements of x.idx must ", 67 | "be the name of a column of x (and not the geometry list-column)") 68 | } 69 | 70 | } else if (inherits(x.idx, c("integer", "numeric"))) { 71 | if (!(max(x.idx) < ncol(x))) { 72 
| stop("If x.idx is a numeric vector, then all values of x must be ", 73 | "less than ncol(x)") 74 | } 75 | x.idx <- names(x)[x.idx] 76 | 77 | } else { 78 | stop("x.idx must be a vector of class character, integer, or numeric") 79 | } 80 | 81 | #-------------------------------------------------------- 82 | ### Check/process x.var.idx 83 | if (!is.null(x.var.idx)) { 84 | if (length(x.idx) != length(x.var.idx)) { 85 | stop("If specified, x.var.idx must be the same length as x.idx. ", 86 | "Use 'NA' in in x.var.idx for predictions that ", 87 | "do not have uncertainty values") 88 | } 89 | 90 | if (inherits(x.var.idx, "character")) { 91 | if (!all(x.var.idx %in% c(names(x.df), NA))) { 92 | stop("If x.var.idx is a character vector, then all elements of x.var.idx must ", 93 | "be the name of a column of x (not including the geometry list-column)") 94 | } 95 | 96 | } else if (inherits(x.var.idx, c("integer", "numeric"))) { 97 | if (!(max(x.var.idx, na.rm = TRUE) < ncol(x))) { 98 | stop("If x.var.idx is a numeric vector, then all values of x must be ", 99 | "less than ncol(x)") 100 | } 101 | x.var.idx <- names(x)[x.var.idx] 102 | 103 | } else { 104 | stop("If it is not NULL, x.var.idx must be a vector of class ", 105 | "character, integer, or numeric") 106 | } 107 | } 108 | 109 | # Check that x.idx and x.var.idx do not share any elements 110 | if (any(x.idx %in% x.var.idx)) 111 | stop("x.idx and x.var.idx cannot point to any of the same columns of x") 112 | 113 | #-------------------------------------------------------- 114 | ### Check/process y 115 | if (!(y %in% c("abundance", "sumto1"))) { 116 | stop("y must be either 'abundance' or 'sumto1'") 117 | } 118 | 119 | #---------------------------------------------------------------------------- 120 | # Rescale values 121 | 122 | #-------------------------------------------------------- 123 | ### Prep 124 | # Use select rather than [,] to ensure object is data frame 125 | x.df.idx <- x.df %>% select(!!x.idx) 126 | # 
x.df.var.idx <- x.df %>% select(!x.var.idx) 127 | x.df.var.idx <- data.frame(lapply(x.var.idx, function(i) { 128 | if (is.na(i)) NA else x.df %>% select(!!i) 129 | })) 130 | 131 | #-------------------------------------------------------- 132 | ### Get rescaling factors 133 | if (y == "abundance") { 134 | if (!length(y.abund) == 1) 135 | stop ("If y is 'abundance', y.abund must be a single number greater than 0") 136 | if (!(y.abund > 0 & inherits(y.abund, c("integer", "numeric")))) 137 | stop ("If y is 'abundance', y.abund must be a single number greater than 0") 138 | 139 | 140 | d <- vapply(x.df.idx, function(i, j, k) { 141 | tmp.sf <- st_sf(pred = i, geometry = j, agr = "constant") 142 | eSDM::model_abundance(tmp.sf, "pred") / k 143 | }, 1, j = x.geom, k = y.abund, USE.NAMES = FALSE) 144 | 145 | } else { #y == "sumto1" 146 | d <- vapply(x.df.idx, sum, 1, na.rm = TRUE, USE.NAMES = FALSE) 147 | } 148 | 149 | #-------------------------------------------------------- 150 | ### Replace original values with rescaled values 151 | x.df.idx.rescaled <- map2_df(x.df.idx, d, function(i, j) i / j) 152 | x.df[, x.idx] <- x.df.idx.rescaled 153 | 154 | if (!is.null(x.var.idx)) { 155 | x.df.var.idx.rescaled <- map2_df(x.df.var.idx, d, function(i, j) i / (j^2)) #%>% select(which(!is.na(x.var.idx))) 156 | x.df[, x.var.idx] <- x.df.var.idx.rescaled 157 | } 158 | st_sf(x.df, geometry = x.geom, agr = st_agr(x)) 159 | } 160 | -------------------------------------------------------------------------------- /R/evaluation_metrics.R: -------------------------------------------------------------------------------- 1 | #' Calculate SDM evaluation metrics 2 | #' 3 | #' Calculate AUC, TSS, and RMSE for given density predictions and validation data 4 | #' 5 | #' @param x object of class sf; SDM predictions 6 | #' @param x.idx name or index of column in \code{x} with prediction values 7 | #' @param y object of class sf; validation data 8 | #' @param y.idx name or index of column in \code{y} 
with validation data. 9 | #' This validation data column must have at least two unique values, e.g. 0 and 1 10 | #' @param count.flag logical; \code{TRUE} indicates that the data in column \code{y.idx} is count data, 11 | #' while \code{FALSE} indicates that the data is presence/absence. 12 | #' See details for differences in data processing based on this flag. 13 | #' 14 | #' @details If \code{count.flag == TRUE}, then \code{eSDM::model_abundance(x, x.idx, FALSE)} will be run 15 | #' to calculate predicted abundance and thus calculate RMSE. 16 | #' Note that this assumes the data in column \code{x.idx} of \code{x} are density values. 17 | #' 18 | #' If \code{count.flag == FALSE}, then all of the values in column \code{y.idx} of \code{y} must be \code{0} or \code{1}. 19 | #' 20 | #' All rows of \code{x} with a value of \code{NA} in column \code{x.idx} and 21 | #' all rows of \code{y} with a value of \code{NA} in column \code{y.idx} are removed before calculating metrics 22 | #' 23 | #' @return A numeric vector with AUC, TSS and RMSE values, respectively. 
24 | #' If \code{count.flag == FALSE}, the RMSE value will be \code{NA} 25 | #' 26 | #' @examples 27 | #' evaluation_metrics(preds.1, 2, validation.data, "sight") 28 | #' 29 | #' evaluation_metrics(preds.1, "Density2", validation.data, "count", TRUE) 30 | #' 31 | #' @export 32 | evaluation_metrics <- function(x, x.idx, y, y.idx, count.flag = FALSE) { 33 | #------------------------------------------------------------------ 34 | # Input checks and some processing 35 | if (!all(vapply(list(x, y), inherits, TRUE, "sf"))) { 36 | stop("x and y must both be objects of class sf") 37 | } 38 | if (st_crs(x) != st_crs(y)) { 39 | stop("x and y must have equivalent coordinate reference systems") 40 | } 41 | stopifnot( 42 | length(x.idx) == 1, 43 | length(y.idx) == 1, 44 | inherits(count.flag, "logical") 45 | ) 46 | 47 | x.dens <- st_set_geometry(x, NULL)[[x.idx]] 48 | x.dens.nona <- !is.na(x.dens) 49 | x.dens <- x.dens[x.dens.nona] 50 | 51 | y.data <- st_set_geometry(y, NULL)[[y.idx]] 52 | y.data.nona <- !is.na(y.data) 53 | y.data <- y.data[y.data.nona] 54 | 55 | if (!is.numeric(x.dens)) { 56 | stop("The data in column x.idx of object x must all be numbers") 57 | } 58 | if (!is.numeric(y.data)) { 59 | stop("The data in column y.idx of object y must all be numbers") 60 | } 61 | if (length(unique(y.data)) < 2) { 62 | stop("The data in column y.idx of object y must have at least ", 63 | "two unqiue values, e.g. 0 and 1. 
", 64 | "Calculating metrics using presence-only or absence-only data ", 65 | "is not currently supported by this function") 66 | } 67 | 68 | #------------------------------------------------------------------ 69 | # Remove NAs, then get intersection of predictions and validation data 70 | x <- x[x.dens.nona, ] 71 | y <- y[y.data.nona, ] 72 | yx.sgbp <- suppressMessages(st_intersects(y, x)) 73 | 74 | temp <- sapply(yx.sgbp, length) 75 | temp0 <- sum(temp == 0) 76 | temp2 <- sum(temp > 1) 77 | if (temp0 > 0) { 78 | base::message( 79 | "There were ", temp0, " validation points ", 80 | "that did not overlap with a non-NA prediction polygon" 81 | ) 82 | } 83 | if (temp2 > 0) { 84 | base::message( 85 | "There were ", temp2, " validation points ", 86 | "that were on the boundary of two or more non-NA prediction polygons" 87 | ) 88 | } 89 | rm(temp, temp0, temp2) 90 | 91 | 92 | #------------------------------------------------------------------ 93 | # Data kept as separate vectors because in mapply() accessing several vector 94 | # objects is faster than accessing one data.frame 95 | if (count.flag) { 96 | y.sight <- ifelse(y.data >= 1, 1, 0) 97 | x.abund <- unname(unlist(eSDM::model_abundance(x, x.idx, sum.abund = FALSE))) 98 | y.count <- y.data 99 | 100 | } else { 101 | if (!all(y.data %in% c(0, 1))) { 102 | stop("The data in column y.idx of object y must all be numbers 0 or 1") 103 | } 104 | 105 | y.sight <- y.data 106 | x.abund <- as.numeric(NA) 107 | y.count <- as.numeric(NA) 108 | } 109 | 110 | stopifnot( 111 | is.numeric(y.sight), is.numeric(x.abund), is.numeric(y.count) 112 | ) 113 | 114 | 115 | #------------------------------------------------------------------ 116 | # Make data frame with corresponding density and validation data values 117 | xy.data.overlap.list <- mapply(function(i, j) { 118 | if (length(j) == 0) { 119 | NULL 120 | } else { 121 | c(mean(x.dens[i]), y.sight[j], mean(x.abund[i]), y.count[j]) 122 | } 123 | }, yx.sgbp, seq_along(yx.sgbp), SIMPLIFY 
= FALSE) 124 | 125 | xy.data.overlap <- data.frame(do.call(rbind, xy.data.overlap.list)) %>% 126 | set_names(c("dens", "sight", "abund", "count")) %>% 127 | filter(!is.na(.data$dens), !is.na(.data$sight)) 128 | 129 | 130 | #------------------------------------------------------------------ 131 | # AUC and TSS 132 | pred.out <- prediction(xy.data.overlap[[1]], xy.data.overlap[[2]]) 133 | 134 | m1 <- slot(performance(pred.out, measure = "auc"), "y.values")[[1]] 135 | 136 | sens <- slot(performance(pred.out, "sens"), "y.values")[[1]] 137 | spec <- slot(performance(pred.out, "spec"), "y.values")[[1]] 138 | m2 <- max(sens + spec - 1) 139 | 140 | # RMSE 141 | m3 <- ifelse( 142 | count.flag, 143 | esdm_rmse(xy.data.overlap[[3]], xy.data.overlap[[4]], na.rm = TRUE), 144 | NA 145 | ) 146 | 147 | #------------------------------------------------------------------ 148 | c(m1, m2, m3) 149 | } 150 | -------------------------------------------------------------------------------- /R/model_abundance.R: -------------------------------------------------------------------------------- 1 | #' Calculate predicted abundance 2 | #' 3 | #' Calculates the predicted abundance by multiplying the density prediction values by prediction polygon areas 4 | #' 5 | #' @param x object of class \code{sf}; SDM with density predictions. 6 | #' Must have a valid crs code 7 | #' @param dens.idx name or index of column(s) in \code{x} with density predictions. 8 | #' Can be a character vector (column names) or numeric vector (column indices) 9 | #' @param sum.abund logical; whether or not to sum all of the predicted abundances 10 | #' 11 | #' @details Multiplies the values in the specified column(s) (i.e. the density predictions) 12 | #' by the area in square kilometers of their corresponding prediction polygon. 13 | #' The area of each prediction polygon is calculated using \code{st_area} from \code{\link[sf]{geos_measures}}. 
14 | #' x must have a valid crs code to calculate area for these abundance calculations. 15 | #' 16 | #' @return If \code{sum.abund == TRUE}, then a vector of the same length as \code{dens.idx} 17 | #' representing the predicted abundance for the density values in each column. 18 | #' 19 | #' If \code{sum.abund == FALSE} and the length of \code{dens.idx} is 1, 20 | #' then a numeric vector with the predicted abundance of each prediction polygon of \code{x}. 21 | #' 22 | #' If \code{sum.abund == FALSE} and the length of \code{dens.idx} is greater than 1, 23 | #' then a data frame with \code{length(dens.idx)} columns of the predicted abundance of prediction polygons 24 | #' 25 | #' @examples 26 | #' model_abundance(preds.1, "Density") 27 | #' model_abundance(preds.1, c(1, 1)) 28 | #' model_abundance(preds.1, c(1, 1), FALSE) 29 | #' 30 | #' @export 31 | model_abundance <- function(x, dens.idx, sum.abund = TRUE) { 32 | stopifnot( 33 | inherits(x, "sf"), 34 | inherits(dens.idx, "character") | inherits(dens.idx, "numeric"), 35 | inherits(sum.abund, "logical") 36 | ) 37 | 38 | if (is.na(st_crs(x))) stop("x must have a valid crs code") 39 | 40 | x.area <- st_area(x) 41 | if (!all(units(x.area)[[1]] == c("m", "m"))) { 42 | stop("Units of st_area(x) must be m^2; please ensure that x has a valid crs code") 43 | } 44 | x.area <- as.numeric(x.area) / 1e+06 45 | 46 | x.df <- st_set_geometry(x, NULL) 47 | 48 | if (sum.abund) { 49 | sapply(dens.idx, function(j) {sum(x.df[[j]] * x.area, na.rm = TRUE)}) 50 | 51 | } else { 52 | if (length(dens.idx) == 1) { 53 | x.df[, dens.idx] * x.area 54 | } else { 55 | data.frame(sapply(dens.idx, function(j) {x.df[[j]] * x.area})) %>% 56 | set_names(paste0(dens.idx, ".abund")) 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /R/pts2poly_centroids.R: -------------------------------------------------------------------------------- 1 | #' Create polygons from centroid coordinates 2 | #' 3 | #' 
Create polygon(s) from a data frame with coordinates of the polygon centroid(s) 4 | #' 5 | #' @param x data frame with at least two columns; 6 | #' the first two columns must contain longitude and latitude coordinates, respectively. 7 | #' See 'Details' section for how additional columns are handled 8 | #' @param y numeric; the perpendicular distance from the polygon centroid (center) to its edge 9 | #' (i.e. half the length of one side of a polygon) 10 | #' @param ... passed to \link[sf:sf]{st_sf} or to \link[sf:sfc]{st_sfc}, 11 | #' e.g. for passing named arguments \code{crs} and \code{agr} 12 | #' 13 | #' @details This function was designed for someone who reads in a .csv file 14 | #' with a grid of coordinates representing SDM prediction points and needs to create 15 | #' prediction polygons with the .csv file coordinates as the polygon centroids. 16 | #' However, the function can be used to create square polygons of any size around the provided points, 17 | #' regardless of if those polygons touch or overlap. 18 | #' The created polygons are oriented so that, in a 2D plane, their edges are parallel to either the x or the y axis. 19 | #' 20 | #' If \code{x} contains more than two column, then additional columns will be treated as simple feature attributes, 21 | #' i.e. passed along as the first argument to \link[sf:sf]{st_sf} 22 | #' 23 | #' If a \code{crs} is not specified in \code{...}, 24 | #' then the \code{crs} attribute of the polygon(s) will be \code{NULL}. 25 | #' 26 | #' @return Object of class \code{sfc} (if \code{x} has exactly two columns) or class \code{sf} 27 | #' (if \code{x} has exactly more than two columns). The object will have a geometry type of \code{POLYGON}. 
28 | #' If the object is of class \code{sf}, the name of the geometry list-column will be "geometry" 29 | #' 30 | #' @examples 31 | #' # Create an sfc object from a data frame of two columns 32 | #' x <- data.frame( 33 | #' lon = c(5, 10, 15, 20, 5, 10, 15, 20), 34 | #' lat = c(5, 5, 5, 5, 10, 10, 10, 10) 35 | #' ) 36 | #' pts2poly_centroids(x, 2.5, crs = 4326) 37 | #' 38 | #' # Create an sf object from a data frame of more than two columns 39 | #' x <- data.frame( 40 | #' lon = c(5, 10, 15, 20, 5, 10, 15, 20), 41 | #' lat = c(5, 5, 5, 5, 10, 10, 10, 10), 42 | #' sdm.pred = runif(8), 43 | #' sdm.pred2 = runif(8) 44 | #' ) 45 | #' pts2poly_centroids(x, 2.5, crs = 4326, agr = "constant") 46 | #' 47 | #' @export 48 | pts2poly_centroids <- function(x, y, ...) { 49 | # Input checks 50 | stopifnot( 51 | inherits(x, "data.frame"), 52 | ncol(x) >= 2, 53 | is.numeric(y) 54 | ) 55 | 56 | if (ncol(x) == 2 & ("agr" %in% names(list(...)))) 57 | stop("agr cannot be passed to st_sfc(), ", 58 | "meaning when x only has two columns") 59 | 60 | 61 | # Use first two (lon and lat) columns to create list of sfg objects 62 | x.lonlat <- x %>% 63 | select(c(1, 2)) %>% 64 | set_names(c("lon", "lat")) 65 | 66 | sfg.list <- unname(apply(x.lonlat, 1, function(i, j) { 67 | st_polygon(list(matrix( 68 | c(i[1] + j, i[1] - j, i[1] - j, i[1] + j, i[1] + j, 69 | i[2] + j, i[2] + j, i[2] - j, i[2] - j, i[2] + j), 70 | ncol = 2 71 | ))) 72 | }, j = y)) 73 | 74 | # Create sf or sfc object, as appropriate 75 | if (ncol(x) > 2) { 76 | x %>% 77 | select(-c(1, 2)) %>% 78 | st_sf(geometry = st_sfc(sfg.list), ...) 79 | } else { 80 | st_sfc(sfg.list, ...) 
81 | } 82 | } 83 | -------------------------------------------------------------------------------- /R/pts2poly_vertices.R: -------------------------------------------------------------------------------- 1 | #' Create polygons from vertex coordinates 2 | #' 3 | #' Create polygon(s) from a data frame with the coordinates of the polygon vertices 4 | #' 5 | #' @param x data frame with at least two columns; 6 | #' the first two columns must contain longitude and latitude coordinates, respectively. 7 | #' See 'Details' section for how additional columns are handled 8 | #' @param ... passed to \link[sf:sfc]{st_sfc}, 9 | #' e.g. for passing named argument \code{crs} 10 | # 11 | #' @details Vertices of different polygons must be demarcated by rows with values of \code{NA} 12 | #' in both the first and second columns (i.e. the longitude and latitude columns). 13 | #' 14 | #' All columns in \code{x} besides the first two columns are ignored. 15 | #' 16 | #' If a \code{crs} is not specified in \code{...}, 17 | #' then the \code{crs} attribute of the polygon(s) will be \code{NULL}. 18 | #' 19 | #' @return Object of class \code{sfc} with the geometry type \code{POLYGON} 20 | #' 21 | #' @examples 22 | #' x <- data.frame( 23 | #' lon = c(40, 40, 50, 50, 40), 24 | #' lat = c(0, 10, 10, 0, 0) 25 | #' ) 26 | #' pts2poly_vertices(x, crs = 4326) 27 | #' 28 | #' # Create an sf object 29 | #' x <- data.frame( 30 | #' lon = c(40, 40, 50, 50, 40, NA, 20, 20, 30, 30, 20), 31 | #' lat = c(0, 10, 10, 0, 0, NA, 0, 10, 10, 0, 0) 32 | #' ) 33 | #' sf::st_sf(Pred = 1:2, geometry = pts2poly_vertices(x, crs = 4326)) 34 | #' 35 | #' @export 36 | pts2poly_vertices <- function(x, ...) 
{ 37 | stopifnot( 38 | inherits(x, "data.frame"), 39 | ncol(x) >= 2, 40 | identical(is.na(x[[1]]), is.na(x[[2]])) 41 | ) 42 | 43 | x <- x %>% 44 | select(c(1, 2)) %>% 45 | set_names(c("lon", "lat")) 46 | 47 | if (anyNA(x$lon)) { 48 | obj.list <- x %>% 49 | mutate(na_sum = cumsum(is.na(.data$lon) & is.na(.data$lat))) %>% 50 | filter(!is.na(.data$lon) & !is.na(.data$lat)) %>% 51 | group_by(.data$na_sum) %>% 52 | summarise(temp = list( 53 | st_polygon(list(matrix(c(.data$lon, .data$lat), ncol = 2))) 54 | )) 55 | 56 | st_sfc(obj.list$temp, ...) 57 | 58 | } else { 59 | st_sfc(st_polygon(list(matrix(c(x$lon, x$lat), ncol = 2))), ...) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # eSDM 2 | 3 | 4 | [![CRAN version](https://www.r-pkg.org/badges/version/eSDM)](https://cran.r-project.org/package=eSDM) 5 | [![R-CMD-check](https://github.com/SWFSC/eSDM/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/SWFSC/eSDM/actions/workflows/R-CMD-check.yaml) 6 | 7 | 8 | `eSDM` is an R package designed to allow users to create ensembles of predictions from species distribution models (SDMs) made at different spatial scales or with different prediction units. Included in the package is the eSDM GUI, an R Shiny tool that provides the user with a graphical user interface that they can use to import, overlay, and create ensembles of SDM predictions. 9 | 10 | ## Installation 11 | 12 | You can install the released version of eSDM from [CRAN](https://CRAN.R-project.org) with: 13 | 14 | ``` r 15 | install.packages('eSDM') 16 | ``` 17 | 18 | To install the latest version from [GitHub](https://github.com): 19 | 20 | ``` r 21 | # install.packages('devtools') 22 | devtools::install_github('SWFSC/eSDM', build_vignettes = TRUE) 23 | ``` 24 | 25 | ## eSDM GUI 26 | 27 | You can access the GUI online at . 
You do not need to have R or RStudio installed to access the GUI online. 28 | 29 | To run the GUI locally: install `eSDM` as described above, and then run the following code in your RStudio console: 30 | 31 | ``` r 32 | eSDM::eSDM_GUI() 33 | ``` 34 | 35 | Depending on your internet connection, running the GUI locally may be faster than running it online. If text or images overlap within the GUI, please make the browser window full screen or adjust the text size in your browser (e.g., Ctrl - minus (‘-’) on Windows systems) 36 | 37 | ## Vignettes 38 | 39 | ``` r 40 | # To see the list of available vignettes 41 | browseVignettes("eSDM") 42 | 43 | # To open a specific vignette 44 | vignette("example-analysis") 45 | ``` 46 | 47 | ## Manuscript reference 48 | 49 | The paper can be obtained [here](https://doi.org/10.1111/2041-210X.13283), and is cited as (preferred): 50 | 51 | Woodman, S.M., Forney, K.A., Becker, E.A., DeAngelis, M.L., Hazen, E.L., Palacios, D.M., Redfern, J.V. (2019). *eSDM*: A tool for creating and exploring ensembles of predictions from species distribution and abundance models. *Methods Ecol Evol*. 2019;10:1923-1933. 52 | 53 | For data used in the example analysis, see 54 | 55 | For code used to create applicable figures from the manuscript: [Figure 2](https://github.com/swfsc/eSDM/blob/master/data-raw/figure2_overlay.R), [Figure 3](https://github.com/swfsc/eSDM/blob/master/data-raw/figure3.R), [Figure 4](https://github.com/swfsc/eSDM/blob/master/data-raw/figure4.R), [Figure 5](https://github.com/swfsc/eSDM/blob/master/data-raw/figure5.R) 56 | 57 | ## Disclaimer 58 | 59 | This repository is a scientific product and is not official communication of the National Oceanic and Atmospheric Administration, or the United States Department of Commerce. All NOAA GitHub project code is provided on an ‘as is’ basis and the user assumes responsibility for its use. 
Any claims against the Department of Commerce or Department of Commerce bureaus stemming from the use of this GitHub project will be governed by all applicable Federal law. Any reference to specific commercial products, processes, or services by service mark, trademark, manufacturer, or otherwise, does not constitute or imply their endorsement, recommendation or favoring by the Department of Commerce. The Department of Commerce seal and logo, or the seal and logo of a DOC bureau, shall not be used in any manner to imply endorsement of any commercial product or activity by DOC or the United States Government. 60 | 61 | NOAA Fisheries Logo 62 | 63 | [U.S. Department of Commerce](https://www.commerce.gov/) \| [National Oceanographic and Atmospheric Administration](https://www.noaa.gov) \| [NOAA Fisheries](https://www.fisheries.noaa.gov/) 64 | -------------------------------------------------------------------------------- /_pkgdown.yml: -------------------------------------------------------------------------------- 1 | url: https://swfsc.github.io/eSDM/ 2 | template: 3 | bootstrap: 5 4 | 5 | home: 6 | links: 7 | - text: eSDM manuscript 8 | href: https://doi.org/10.1111/2041-210X.13283 9 | -------------------------------------------------------------------------------- /cran-comments.md: -------------------------------------------------------------------------------- 1 | ## Release summary 2 | This is a minor update to update documentation links after transferring a related repository to a new GitHub organization 3 | 4 | ## Test environments (R-release = v4.4.1) 5 | * Windows 10, R-release (local) 6 | * win-builder (devel) 7 | * windows-latest (on github actions) R-release 8 | * macOS-latest (on github actions) R-release 9 | * ubuntu-latest (on github actions) R-devel and release and oldrel-1 10 | 11 | ## R CMD check results 12 | There were no ERRORs, WARNINGs, or NOTEs. 
13 | 14 | ## Downstream dependencies 15 | No downstream dependencies 16 | -------------------------------------------------------------------------------- /data-raw/GSHHG_shp.R: -------------------------------------------------------------------------------- 1 | # Save GUI default land file as .RDATA files 2 | # Shapefiles details: http://www.soest.hawaii.edu/pwessel/gshhg/ 3 | # Didn't make sense to put GSHHG_shp folder in data-raw because it's so big 4 | # Only put low resolution file in package - others too big 5 | 6 | library(sf) 7 | library(usethis) 8 | 9 | # Load L1 (world except for Antarctica) and L6 (Antarctica ground line) 10 | # gshhg.f.L1 <- st_read("../Data files/GSHHG_shp/f/GSHHS_f_L1.shp") 11 | # gshhg.h.L1 <- st_read("../Data files/GSHHG_shp/h/GSHHS_h_L1.shp") 12 | # gshhg.i.L1 <- st_read("../Data files/GSHHG_shp/i/GSHHS_i_L1.shp") 13 | gshhg.l.L1 <- st_read("../Data files/GSHHG_shp/l/GSHHS_l_L1.shp") 14 | # gshhg.c.L1 <- st_read("../Data files/GSHHG_shp/c/GSHHS_c_L1.shp") 15 | 16 | # gshhg.f.L6 <- st_read("../Data files/GSHHG_shp/f/GSHHS_f_L6.shp") 17 | # gshhg.h.L6 <- st_read("../Data files/GSHHG_shp/h/GSHHS_h_L6.shp") 18 | # gshhg.i.L6 <- st_read("../Data files/GSHHG_shp/i/GSHHS_i_L6.shp") 19 | gshhg.l.L6 <- st_read("../Data files/GSHHG_shp/l/GSHHS_l_L6.shp") 20 | # gshhg.c.L6 <- st_read("../Data files/GSHHG_shp/c/GSHHS_c_L6.shp") 21 | 22 | # Combine L1 and L6 23 | # gshhg.f.L16 <- st_geometry(rbind(gshhg.f.L1, gshhg.f.L6)) 24 | # gshhg.h.L16 <- st_geometry(rbind(gshhg.h.L1, gshhg.h.L6)) 25 | # gshhg.i.L16 <- st_geometry(rbind(gshhg.i.L1, gshhg.i.L6)) 26 | gshhg.l.L16 <- st_geometry(rbind(gshhg.l.L1, gshhg.l.L6)) 27 | # gshhg.c.L16 <- st_geometry(rbind(gshhg.c.L1, gshhg.c.L6)) 28 | # rm(gshhg.f.L1, gshhg.f.L6, gshhg.h.L1, gshhg.h.L6, gshhg.i.L1, gshhg.i.L6, 29 | # gshhg.l.L1, gshhg.l.L6, gshhg.c.L1, gshhg.c.L6) 30 | rm(gshhg.l.L1, gshhg.l.L6) 31 | 32 | # # Check validity of polygons; only gshhg.l.L16 is valid 33 | # all(st_is_valid(gshhg.f.L16)) 
34 | # all(st_is_valid(gshhg.h.L16)) 35 | # all(st_is_valid(gshhg.i.L16)) 36 | all(st_is_valid(gshhg.l.L16)) 37 | # all(st_is_valid(gshhg.c.L16)) 38 | 39 | # Make polygons valid 40 | # gshhg.f.L16 <- st_make_valid(gshhg.f.L16) 41 | # gshhg.h.L16 <- st_make_valid(gshhg.h.L16) 42 | # gshhg.i.L16 <- st_make_valid(gshhg.i.L16) 43 | # gshhg.l.L16 <- st_make_valid(gshhg.l.L16) # Already valid, adds 'srid' attribute 44 | # gshhg.c.L16 <- st_make_valid(gshhg.c.L16) 45 | 46 | # Check polygons validity again 47 | # all(st_is_valid(gshhg.f.L16)) 48 | # all(st_is_valid(gshhg.h.L16)) 49 | # all(st_is_valid(gshhg.i.L16)) 50 | all(st_is_valid(gshhg.l.L16)) 51 | # all(st_is_valid(gshhg.c.L16)) 52 | 53 | # Save to data folder 54 | # devtools::use_data(gshhg.f.L16) 55 | # devtools::use_data(gshhg.h.L16) 56 | # devtools::use_data(gshhg.i.L16) 57 | usethis::use_data(gshhg.l.L16) 58 | # devtools::use_data(gshhg.c.L16) 59 | 60 | 61 | -------------------------------------------------------------------------------- /data-raw/README.txt: -------------------------------------------------------------------------------- 1 | File descriptions: 2 | 3 | eSDM_shpExport*: Shapefiles exported from the GUI and process to create sample objects included in eSDM package 4 | 5 | eSDM_ME&E_final: Saved workspace from GUI session where we performed the example analysis from Woodman et al. 
-------------------------------------------------------------------------------- /data-raw/create_data.R: -------------------------------------------------------------------------------- 1 | library(dplyr) 2 | library(eSDM) 3 | library(sf) 4 | 5 | ### Create sample predictions to use in examples 6 | # sample_predictions_2 7 | x <- st_read("data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.shp", agr = "constant") 8 | # sample_predictions_1 9 | y <- st_read("data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.shp", agr = "constant") 10 | # sample_predictions_4 11 | z <- st_transform( 12 | st_read("data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.shp", agr = "constant"), 13 | 4326 14 | ) 15 | bite <- eSDM::pts2poly_vertices(read.csv("../eSDM-data/Ignore/eSDM_data_sample/SoCal_bight.csv"), crs = 4326) 16 | 17 | 18 | preds.1 <- st_set_agr(st_crop(x, bite), "constant") 19 | preds.2 <- st_set_agr(st_crop(y, bite), "constant") 20 | preds.3 <- st_set_agr(st_crop(z, bite), "constant") 21 | rm(x, y, z) 22 | 23 | row.names(preds.1) <- 1:nrow(preds.1) 24 | row.names(preds.2) <- 1:nrow(preds.2) 25 | row.names(preds.3) <- 1:nrow(preds.3) 26 | 27 | # To handle 'old CRS method' warning 28 | st_crs(preds.1) <- st_crs(preds.1) 29 | st_crs(preds.2) <- st_crs(preds.2) 30 | st_crs(preds.3) <- st_crs(preds.3) 31 | 32 | # Spice up preds.1 for the sake of examples 33 | preds.1.geom <- st_geometry(preds.1) 34 | preds.1 <- data.frame(st_set_geometry(preds.1, NULL)) %>% 35 | mutate(Density2 = runif(325), 36 | Var1 = runif(325) / 100, 37 | Var2 = runif(325) / 100) %>% 38 | st_sf(geometry = preds.1.geom, agr = "constant") 39 | 40 | # plot(bite, axes = TRUE) 41 | # plot(st_geometry(preds.1), add = TRUE, col = NA, border = "red") 42 | # plot(st_geometry(preds.2), add = TRUE, col = NA, border = "blue") 43 | # plot(st_geometry(preds.3), add = TRUE, col = NA, border = "green") 44 | 45 | usethis::use_data(preds.1) #, overwrite = TRUE) 46 | 
usethis::use_data(preds.2) 47 | usethis::use_data(preds.3) 48 | 49 | 50 | ### Create sample validation data to use in examples 51 | x <- read.csv("../eSDM-data/Ignore/eSDM_data_sample/Validation_data.csv") 52 | validation.data <- x %>% 53 | select(lon, lat, sight, count) %>% 54 | st_as_sf(coords = c("lon","lat"), crs = st_crs(4326), agr = "constant") 55 | validation.data <- st_crop(validation.data, bite) 56 | 57 | # plot(bite, axes = TRUE) 58 | # plot(st_geometry(validation.data), add = TRUE) 59 | # plot(validation.data[2], pch = 19) 60 | 61 | usethis::use_data(validation.data) 62 | -------------------------------------------------------------------------------- /data-raw/create_ens_sf.R: -------------------------------------------------------------------------------- 1 | # Create ens_sf_...rda objects to use in scripts that create ME&E paper figs 2 | # Loaded .RDATA file is saved GUI environment when using data from 3 | # eSDM_data_manuscript.zip 4 | 5 | library(dplyr) 6 | library(sf) 7 | 8 | # Load saved GUI environment 9 | load("data-raw/eSDM_ME&E_final.RDATA") 10 | 11 | # Check that ensembles are at expected indices 12 | stopifnot( 13 | vals.save$ensemble.specs[[1]]["ensmethod"] == "Unweighted", 14 | vals.save$ensemble.specs[[3]]["ensmethod"] == "Weighted - TSS-based" 15 | ) 16 | 17 | # Unweighted ensemble 18 | ens.sf.unw <- vals.save$ensemble.models[[1]] %>% 19 | mutate(CV_ens = SE_ens / Pred_ens) %>% 20 | st_sf(geometry = vals.save$overlay.base.sfc,agr = "constant") 21 | save(ens.sf.unw, file = "data-raw/ens_sf_unw.rda") 22 | 23 | # Ensemble weighted by TSS 24 | ens.sf.wtss <- vals.save$ensemble.models[[3]] %>% 25 | st_sf(geometry = vals.save$overlay.base.sfc,agr = "constant") 26 | save(ens.sf.wtss, file = "data-raw/ens_sf_wtss.rda") 27 | 28 | # Validation presence data 29 | valid.sf.pres <- vals.save$eval.data %>% 30 | filter(sight == 1) 31 | save(valid.sf.pres, file = "data-raw/valid_sf_pres.rda") 32 | 
-------------------------------------------------------------------------------- /data-raw/create_extdata.R: -------------------------------------------------------------------------------- 1 | # Code for creating RDA files stored in inst/extdata and used in the 2 | # example-analysis vignette 3 | 4 | 5 | ############################################################################### 6 | # Prep 7 | require(sf) 8 | 9 | files.path <- "../package_file_old" 10 | 11 | 12 | ############################################################################### 13 | # Load data and save as rds file in inst/extdata 14 | 15 | ### Predictions - Becker et al. 2016 16 | model.b <- read.csv(paste(files.path, "Predictions_Beckeretal2016.csv", sep = "/")) 17 | saveRDS(model.b, file = "inst/extdata/Predictions_Beckeretal2016.rds") 18 | # save(model.b, file = "inst/extdata/Predictions_Beckeretal2016.rda") 19 | 20 | ### Predictions - Hazen et al. 2017 21 | model.h <- read.csv(paste(files.path, "Predictions_Hazenetal2017.csv", sep = "/")) 22 | saveRDS(model.h, file = "inst/extdata/Predictions_Hazenetal2017.rds") 23 | # save(model.h, file = "inst/extdata/Predictions_Hazenetal2017.rda") 24 | 25 | ### Predictions - Redfern et al. 
2017 26 | model.r <- st_read(paste(files.path, "Predictions_Redfernetal2017.shp", sep = "/")) 27 | saveRDS(model.r, file = "inst/extdata/Predictions_Redfernetal2017.rds") 28 | # save(model.r, file = "inst/extdata/Predictions_Redfernetal2017.rda") 29 | 30 | 31 | ### Validation data 32 | valid.data <- read.csv( 33 | paste(files.path, "eSDM_Validation_data_all.csv", sep = "/"), 34 | stringsAsFactors = FALSE 35 | ) 36 | saveRDS(valid.data, file = "inst/extdata/eSDM_Validation_data_all.rds") 37 | # save(valid.data, file = "inst/extdata/eSDM_Validation_data_all.rda") 38 | 39 | ############################################################################### 40 | -------------------------------------------------------------------------------- /data-raw/eSDM_ME&E_final.RDATA: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_ME&E_final.RDATA -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.dbf -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.shp: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.shp -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (1)/eSDM_Sample_predictions_1_csv__PredModel1__orig.shx -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.dbf -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.prj: -------------------------------------------------------------------------------- 1 | PROJCS["Cylindrical_Equal_Area",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Cylindrical_Equal_Area"],PARAMETER["standard_parallel_1",0],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["Meter",1]] -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.shp: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.shp -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport (2)/eSDM_Sample_predictions_4_gdb__Pred1__orig.shx -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.shp -------------------------------------------------------------------------------- /data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/eSDM_shpExport/eSDM_Sample_predictions_2_csv__pred__orig.shx -------------------------------------------------------------------------------- /data-raw/ens_sf_unw.rda: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/ens_sf_unw.rda -------------------------------------------------------------------------------- /data-raw/ens_sf_wtss.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/ens_sf_wtss.rda -------------------------------------------------------------------------------- /data-raw/figure2_overlay.R: -------------------------------------------------------------------------------- 1 | # Code for creating Figure 2 for eSDM manuscript 2 | 3 | ################################################################################################### 4 | library(eSDM) 5 | library(RColorBrewer) 6 | library(sf) 7 | library(tmap) 8 | 9 | ### Prep work 10 | x <- st_geometry(eSDM::preds.2) 11 | y <- st_geometry(eSDM::preds.3) 12 | 13 | x.mid <- x[1496] #x[1496] is the 'current' base geometry polygon 14 | overlap <- st_intersection(x.mid, y) 15 | y.which <- st_intersects(x.mid, y)[[1]] 16 | 17 | lim.xmin <- -120.022 18 | lim.xmax <- -119.777 19 | lim.ymin <- 34.3 20 | lim.ymax <- 34.5 21 | 22 | lim.bbox <- c(lim.xmin, lim.xmax, lim.ymin, lim.ymax) 23 | lim.poly <- c(lim.xmin, lim.xmax, lim.xmax, lim.xmin, lim.xmin, 24 | lim.ymin, lim.ymin, lim.ymax, lim.ymax, lim.ymin) 25 | rm(lim.xmin, lim.xmax, lim.ymin, lim.ymax) 26 | 27 | area.poly <- st_sfc(st_polygon(list(matrix(lim.poly, ncol = 2))), crs = st_crs(x)) 28 | 29 | y.clip <- st_crop(y, area.poly) 30 | x.mid.clip <- st_intersection(x.mid, y.clip) 31 | 32 | 33 | ################################################################################################### 34 | ### Create tmap pieces of maps (polygons and other objects) 35 | b.pal <- brewer.pal(9, "YlGnBu") 36 | 37 | z.base <- tm_shape(x, bbox = matrix(lim.bbox, nrow = 2, byrow = TRUE)) + 38 | tm_polygons(col = "navajowhite2", border.col = "black", lwd = 2) 39 | 40 
| z.base.mid <- tm_shape(x.mid) + 41 | tm_borders(col = "dodgerblue", lwd = 3) 42 | 43 | z.over <- tm_shape(y.clip, bbox = matrix(lim.bbox, nrow = 2, byrow = TRUE)) + 44 | tm_polygons(col = "grey", alpha = 0.55) + 45 | tm_shape(y) + 46 | tm_borders(col = "red", lwd = 2) 47 | 48 | z.int.poly <- tm_shape(overlap) + 49 | tm_polygons(col = b.pal[2], border.col = NA, border.alpha = 0) + 50 | tm_shape(x.mid) + 51 | tm_borders(col = "dodgerblue", lwd = 3, alpha = 1) + 52 | tm_shape(x.mid.clip) + 53 | tm_borders(col = "grey", lwd = 3, alpha = 0.45) + 54 | tm_shape(y.clip) + 55 | tm_borders(col = "red", lwd = 2) 56 | 57 | z.label <- function(x) tm_credits(x, size = 1.3, position = c("LEFT", "TOP")) 58 | 59 | # Making inner.margins anything other than 0 creates awkward space at map edges 60 | z.layout <- tm_layout(frame = "white", frame.lwd = 6, inner.margins = 0.0) 61 | 62 | 63 | ################################################################################################### 64 | ### Create tmap maps 65 | f1 <- z.base + z.base.mid + z.layout + z.label("(a)") 66 | f2 <- z.over + z.layout + z.label("(b)") 67 | f3 <- z.base + z.base.mid + z.over + z.layout + z.label("(c)") 68 | f4 <- z.base + z.base.mid + z.over + z.int.poly + z.layout + z.label("(d)") 69 | 70 | 71 | ################################################################################################### 72 | ### Save tmap maps 73 | f1234 <- tmap_arrange(list(f1, f2, f3, f4), ncol = 2) 74 | tmap_save(f1234, filename = "../eSDM paper/Figures/Fig 2.png", width = 8, height = 8) 75 | 76 | # # Save individual panels 77 | # tmap_save(f1, filename = "../eSDM paper/Figures/Overlay1.png", width = 4, height = 4) 78 | # tmap_save(f2, filename = "../eSDM paper/Figures/Overlay2.png", width = 4, height = 4) 79 | # tmap_save(f3, filename = "../eSDM paper/Figures/Overlay3.png", width = 4, height = 4) 80 | # tmap_save(f4, filename = "../eSDM paper/Figures/Overlay4.png", width = 4, height = 4) 81 | 82 | 
################################################################################################### 83 | -------------------------------------------------------------------------------- /data-raw/figure3.R: -------------------------------------------------------------------------------- 1 | # Code for creating Figure 3 for eSDM manuscript 2 | 3 | ############################################################################### 4 | library(RColorBrewer) 5 | library(sf) 6 | library(tmap) 7 | 8 | 9 | ############################################################################### 10 | # Helper functions and values 11 | 12 | ### Base map 13 | load("data/gshhg.l.L16.rda") 14 | map.world <- gshhg.l.L16 15 | 16 | ### Study area 17 | study.area <- st_read("inst/extdata/Shapefiles/Study_Area_CCE.shp") 18 | 19 | ### Plot extent 20 | range.poly <- st_sfc( 21 | st_polygon(list(matrix( 22 | c(-136, -136, -115, -115, -136, 29, 51, 51, 29, 29), ncol = 2 23 | ))), 24 | crs = 4326 25 | ) 26 | rpoly.mat <- matrix(st_bbox(range.poly), ncol = 2) 27 | 28 | ### Size of text and legend width 29 | main.size <- 1.4 30 | leg.size <- 1 31 | leg.width <- 1 32 | grid.size <- 1 33 | txt.size <- 1.2 34 | 35 | ### Plotting functions 36 | source("data-raw/figure_plot.R", local = TRUE, echo = FALSE) 37 | 38 | 39 | ############################################################################### 40 | # Create and save figure 41 | 42 | ### Load and extract original SDMs 43 | load("data-raw/eSDM_ME&E_final.RDATA") 44 | model.b <- st_transform(vals.save$models.orig[[1]], 4326) 45 | model.h <- st_transform(vals.save$models.orig[[2]], 4326) 46 | model.r <- st_transform(vals.save$models.orig[[3]], 4326) 47 | 48 | 49 | #------------------------------------------------------------------------------ 50 | ### Model_B 51 | blp.b1 <- tmap_sdm_help(model.b, "Pred") 52 | blp.b2 <- tmap_sdm_help_perc(model.b, "Pred") 53 | 54 | # Plot of predictions (whales / km^-2) 55 | tmap.b.obj1 <- tmap_sdm( 56 | model.b, "Pred", 
blp.b1, map.world, rpoly.mat, "Model_B", 57 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 58 | ) + 59 | tm_credits(expression("Predictions (whales km"^-2*")"), 60 | fontfamily = "sans", size = txt.size, 61 | position = c("left", "BOTTOM")) 62 | 63 | # Plot of SE values (with same color scheme as predictions) 64 | tmap.b.obj2 <- tmap_sdm( 65 | model.b, "SE", blp.b1, map.world, rpoly.mat, NA, 66 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 67 | ) + 68 | tm_credits(expression("SE of predictions (whales km"^-2*")"), 69 | fontfamily = "sans", size = txt.size, 70 | position = c("left", "BOTTOM")) 71 | 72 | # Plot of predictions (percentiles) 73 | tmap.b.obj3 <- tmap_sdm( 74 | model.b, "Pred", blp.b2, map.world, rpoly.mat, NA, 75 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 76 | ) + 77 | tm_credits(expression("Predictions (percentiles)"), 78 | fontfamily = "sans", size = txt.size, 79 | position = c("left", "BOTTOM")) 80 | 81 | 82 | #------------------------------------------------------------------------------ 83 | ### Model_H 84 | blp.h1 <- tmap_sdm_help(model.h, "Pred") 85 | blp.h2 <- tmap_sdm_help_perc(model.h, "Pred") 86 | 87 | # Plot of predictions (whales / km^-2) 88 | tmap.h.obj1 <- tmap_sdm( 89 | model.h, "Pred", blp.h1, map.world, rpoly.mat, "Model_H", 90 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 91 | ) + 92 | tm_credits(expression("Predictions (habitat preference)"), 93 | fontfamily = "sans", size = txt.size, 94 | position = c("left", "BOTTOM")) 95 | 96 | # Plot of SE values (with same color scheme as predictions) 97 | tmap.h.obj2 <- tmap_sdm( 98 | model.h, "SE", blp.h1, map.world, rpoly.mat, NA, 99 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 100 | ) + 101 | tm_credits(expression("SE of predictions (habitat preference)"), 102 | fontfamily = "sans", size = txt.size, 103 | position = c("left", "BOTTOM")) 104 | 105 | # Plot of predictions 
(percentiles) 106 | tmap.h.obj3 <- tmap_sdm( 107 | model.h, "Pred", blp.h2, map.world, rpoly.mat, NA, 108 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 109 | ) + 110 | tm_credits(expression("Predictions (percentiles)"), 111 | fontfamily = "sans", size = txt.size, 112 | position = c("left", "BOTTOM")) 113 | 114 | #------------------------------------------------------------------------------ 115 | ### Model_R 116 | blp.r1 <- tmap_sdm_help(model.r, "Pred") 117 | blp.r2 <- tmap_sdm_help_perc(model.r, "Pred") 118 | 119 | # Plot of predictions (whales / km^-2) 120 | tmap.r.obj1 <- tmap_sdm( 121 | model.r, "Pred", blp.r1, map.world, rpoly.mat, "Model_R", 122 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 123 | ) + 124 | tm_credits(expression("Predictions (whales km"^-2*")"), 125 | fontfamily = "sans", size = txt.size, 126 | position = c("left", "BOTTOM")) 127 | 128 | # Plot of SE values (with same color scheme as predictions) 129 | tmap.r.obj2 <- tmap_sdm( 130 | model.r, "SE", blp.r1, map.world, rpoly.mat, NA, 131 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 132 | ) + 133 | tm_credits(expression("SE of predictions (whales km"^-2*")"), 134 | fontfamily = "sans", size = txt.size, 135 | position = c("left", "BOTTOM")) 136 | 137 | # Plot of predictions (percentiles) 138 | tmap.r.obj3 <- tmap_sdm( 139 | model.r, "Pred", blp.r2, map.world, rpoly.mat, NA, 140 | main.size, leg.size, leg.width, grid.size, t.studyarea = study.area 141 | ) + 142 | tm_credits(expression("Predictions (percentiles)"), 143 | fontfamily = "sans", size = txt.size, 144 | position = c("left", "BOTTOM")) 145 | 146 | #------------------------------------------------------------------------------ 147 | ### Generate and save plot 148 | tmap.list <- list( 149 | tmap.b.obj1, tmap.h.obj1, tmap.r.obj1, 150 | tmap.b.obj2, tmap.h.obj2, tmap.r.obj2, 151 | tmap.b.obj3, tmap.h.obj3, tmap.r.obj3 152 | ) 153 | 154 | png("../eSDM 
paper/Figures_working/Fig3.png", height = 18, width = 13, 155 | units = "in", res = 300) 156 | tmap_arrange(tmap.list, ncol = 3, nrow = 3, asp = NULL, outer.margins = 0.02) 157 | dev.off() 158 | ############################################################################### 159 | -------------------------------------------------------------------------------- /data-raw/figure4.R: -------------------------------------------------------------------------------- 1 | # Code for creating Figure 4 for eSDM manuscript 2 | 3 | ############################################################################### 4 | library(RColorBrewer) 5 | library(sf) 6 | library(tmap) 7 | 8 | 9 | ############################################################################### 10 | # Helper functions and values 11 | 12 | ### Base map 13 | load("data/gshhg.l.L16.rda") 14 | map.world <- gshhg.l.L16 15 | 16 | ### Plot extent 17 | range.poly <- st_sfc( 18 | st_polygon(list(matrix( 19 | c(-132, -132, -116, -116, -132, 29.5, 49, 49, 29.5, 29.5), ncol = 2 20 | ))), 21 | crs = 4326 22 | ) 23 | rpoly.mat <- matrix(st_bbox(range.poly), ncol = 2) 24 | 25 | ### Size of text and legend width 26 | main.size <- 1.4 27 | leg.size <- 1 28 | leg.width <- 1 29 | grid.size <- 1 30 | txt.size <- 1.2 31 | 32 | ### Plotting functions 33 | source("data-raw/figure_plot.R", local = TRUE, echo = FALSE) 34 | 35 | 36 | ############################################################################### 37 | # Create and save figure 38 | 39 | ### Load unweighted ensemble and prep plots 40 | # rda file created in data-raw/create_ens_sf.R 41 | load("data-raw/ens_sf_unw.rda") 42 | 43 | blp1 <- tmap_sdm_help(ens.sf.unw, "Pred_ens") 44 | blp2 <- tmap_sdm_help(ens.sf.unw, "CV_ens") 45 | 46 | # Plot of predictions (whales / km^-2) 47 | tmap.obj1 <- tmap_sdm( 48 | ens.sf.unw, "Pred_ens", blp1, map.world, rpoly.mat, NA, 49 | main.size, leg.size, leg.width, grid.size 50 | ) + 51 | tm_credits(expression("Predictions\n(whales km"^-2*")"), 52 
| fontfamily = "sans", size = txt.size, 53 | position = c("left", "bottom")) 54 | 55 | # Plot of SE values (with same color scheme as predictions) 56 | tmap.obj2 <- tmap_sdm( 57 | ens.sf.unw, "SE_ens", blp1, map.world, rpoly.mat, NA, 58 | main.size, leg.size, leg.width, grid.size 59 | ) + 60 | tm_credits(expression("SE of\npredictions\n(whales km"^-2*")"), 61 | fontfamily = "sans", size = txt.size, 62 | position = c("left", "bottom")) 63 | 64 | # Plot of CV values 65 | tmap.obj3 <- tmap_sdm( 66 | ens.sf.unw, "CV_ens", blp2, map.world, rpoly.mat, NA, 67 | main.size, leg.size, leg.width, grid.size 68 | ) + 69 | tm_credits(expression("CV of\npredictions\n(whales km"^-2*")"), 70 | fontfamily = "sans", size = txt.size, 71 | position = c("left", "bottom")) 72 | 73 | 74 | ### Generate and save plot 75 | png("../eSDM paper/Figures_working/Fig4.png", height = 6.9, width = 13, 76 | units = "in", res = 300) 77 | tmap_arrange( 78 | list(tmap.obj1, tmap.obj2, tmap.obj3), ncol = 3, asp = NULL, 79 | outer.margins = 0.02 80 | ) 81 | dev.off() 82 | 83 | ############################################################################### 84 | -------------------------------------------------------------------------------- /data-raw/figure5.R: -------------------------------------------------------------------------------- 1 | # Code for creating Figure 5 for eSDM manuscript 2 | 3 | ############################################################################### 4 | library(RColorBrewer) 5 | library(sf) 6 | library(tmap) 7 | 8 | 9 | ############################################################################### 10 | # Helper functions and values 11 | 12 | ### Base map 13 | load("data/gshhg.l.L16.rda") 14 | map.world <- gshhg.l.L16 15 | 16 | ### Plot extent 17 | range.poly <- st_sfc( 18 | st_polygon(list(matrix( 19 | c(-132, -132, -116, -116, -132, 29.5, 49, 49, 29.5, 29.5), ncol = 2 20 | ))), 21 | crs = 4326 22 | ) 23 | rpoly.mat <- matrix(st_bbox(range.poly), ncol = 2) 24 | 25 | ### Size 
of text and legend width 26 | main.size <- 1.4 27 | leg.size <- 1 28 | leg.width <- 1 29 | grid.size <- 1 30 | txt.size <- 1.2 31 | 32 | ### Plotting functions 33 | source("data-raw/figure_plot.R", local = TRUE, echo = FALSE) 34 | 35 | 36 | ############################################################################### 37 | # Create and save figure 38 | 39 | ### Load tss-weighted ensemble and validation data 40 | load("data-raw/ens_sf_wtss.rda") 41 | load("data-raw/valid_sf_pres.rda") 42 | 43 | blp1 <- tmap_sdm_help(ens.sf.wtss, "Pred_ens") 44 | blp2 <- tmap_sdm_help_perc(ens.sf.wtss, "Pred_ens") 45 | 46 | #------------------------------------------------------------------------------ 47 | # Plot of predictions (whales / km^-2) 48 | tmap.obj1 <- tmap_sdm( 49 | ens.sf.wtss, "Pred_ens", blp1, map.world, rpoly.mat, NA, 50 | main.size, leg.size, leg.width, grid.size 51 | ) + 52 | tm_credits(expression("Predictions\n(whales km"^-2*")"), 53 | fontfamily = "sans", size = txt.size, 54 | position = c("left", "bottom")) 55 | 56 | # Plot of SE values (with same color scheme as predictions) 57 | tmap.obj2 <- tmap_sdm( 58 | ens.sf.wtss, "SE_ens", blp1, map.world, rpoly.mat, NA, 59 | main.size, leg.size, leg.width, grid.size 60 | ) + 61 | tm_credits(expression("SE of\npredictions\n(whales km"^-2*")"), 62 | fontfamily = "sans", size = txt.size, 63 | position = c("left", "bottom")) 64 | 65 | # Plot of predictions (percentiles) 66 | tmap.obj3 <- tmap_sdm( 67 | ens.sf.wtss, "Pred_ens", blp2, map.world, rpoly.mat, NA, 68 | main.size, leg.size, leg.width, grid.size 69 | ) + 70 | tm_credits("Predictions\n(percentiles)", 71 | fontfamily = "sans", size = txt.size, 72 | position = c("left", "bottom")) 73 | 74 | # Plot of predictions (percentiles) with presence points 75 | tmap.obj4 <- tmap_sdm( 76 | ens.sf.wtss, "Pred_ens", blp2, map.world, rpoly.mat, NA, 77 | main.size, leg.size, leg.width, grid.size 78 | ) + 79 | tm_shape(st_geometry(valid.sf.pres)) + 80 | tm_dots(col = "black", shape = 
19, size = 0.1, legend.show = FALSE) + 81 | tm_credits("Predictions\n(percentiles)", 82 | fontfamily = "sans", size = txt.size, 83 | position = c("left", "bottom")) 84 | 85 | 86 | #------------------------------------------------------------------------------ 87 | ### Generate and save plot 88 | png("../eSDM paper/Figures_working/Fig5.png", height = 13.5, width = 8.6, 89 | units = "in", res = 300) 90 | tmap_arrange( 91 | list(tmap.obj1, tmap.obj2, tmap.obj3, tmap.obj4), 92 | ncol = 2, nrow = 2, asp = NULL, outer.margins = 0.02 93 | ) 94 | dev.off() 95 | 96 | ############################################################################### 97 | -------------------------------------------------------------------------------- /data-raw/figure_plot.R: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Code for generating Figs 3, 4, and 5 from Woodman et al. 3 | # Uses tmap package to generate plots 4 | 5 | # tmap_sdm_help* are copied directly from eSDM_vignette_helper.R 6 | # tmap_sdm was adapted to be able to plot study area polygon 7 | 8 | #------------------------------------------------------------------------------ 9 | # Return a list with the break point values, legend text, and color palette 10 | tmap_sdm_help <- function(x, x.col, pal.len = 10) { 11 | x <- st_set_geometry(x, NULL) 12 | b.val <- seq( 13 | from = min(x[, x.col], na.rm = TRUE), to = max(x[, x.col], na.rm = TRUE), 14 | length.out = pal.len + 1 15 | ) 16 | b.val[1] <- 0 17 | col.pal <- rev(RColorBrewer::brewer.pal(pal.len, "Spectral")) 18 | 19 | b.val.txt <- format(round(b.val, 3), justify = "right") #round(b.val, 5) 20 | leg.txt <- paste(head(b.val.txt, -1), tail(b.val.txt, -1), sep = " - ") 21 | 22 | list(b.val, leg.txt, col.pal) 23 | } 24 | 25 | 26 | #------------------------------------------------------------------------------ 27 | # Calcualte break points for relative 
percentages (percentiles) maps 28 | breaks_calc <- function(x, breaks = c(seq(0.4, 0.05, by = -0.05), 0.02)) { 29 | x <- stats::na.omit(x) 30 | x <- sort(x, decreasing = TRUE) 31 | 32 | c(-Inf, x[ceiling(breaks * length(x))], Inf) 33 | } 34 | 35 | 36 | #------------------------------------------------------------------------------ 37 | # Return a list with the break point values, legend text, and color palette 38 | # For pecentile plot 39 | tmap_sdm_help_perc <- function(x, x.col, pal.len = 10) { 40 | col.pal <- c( 41 | "#313695", "#4575b4", "#74add1", "#abd9e9", "#d1e5f0", 42 | "#fee090", "#fdae61", "#f46d43", "#d73027", "#a50026" 43 | ) 44 | leg.txt <- c( 45 | "Lowest 60%", "35 - 40%", "30 - 35%", "25 - 30%", "20 - 25%", 46 | "15 - 20%", "10 - 15%", "5 - 10%", "2 - 5%", "Highest 2%" 47 | ) 48 | 49 | x <- st_set_geometry(x, NULL) 50 | b.val <- breaks_calc(x[, x.col]) 51 | 52 | list(b.val, leg.txt, col.pal) 53 | } 54 | 55 | 56 | #------------------------------------------------------------------------------ 57 | # Create tmap maps 58 | tmap_sdm <- function(tmap.obj, t.col, t.blp, t.map, t.mat, t.title, 59 | t.main.size, t.leg.size, t.leg.width, t.grid.size, 60 | t.alpha = 1, t.grid.col = "black", 61 | t.ticks = TRUE, t.lines = FALSE, t.studyarea = NULL) { 62 | 63 | tmap.curr <- tm_shape(tmap.obj, bbox = t.mat, projection = 4326) + 64 | tm_fill(col = t.col, border.col = "transparent", alpha = t.alpha, 65 | style = "fixed", breaks = t.blp[[1]], palette = t.blp[[3]], 66 | showNA = FALSE, title = "", labels = t.blp[[2]], 67 | legend.is.portrait = TRUE, legend.reverse = TRUE) + 68 | tm_layout(bg.color = "white", legend.bg.color = "white", 69 | main.title = t.title, 70 | main.title.position = "center", 71 | main.title.size = t.main.size, 72 | inner.margins = c(0.02, 0.02, 0, 0), outer.margins = 0.03) + 73 | tm_legend(show = TRUE, outside = FALSE, position = c("right", "top"), 74 | text.size = t.leg.size, 75 | width = t.leg.width, 76 | frame = "black") + 77 | 
tm_graticules(x = seq(-135, -120, by = 5), y = seq(30, 50, by = 5), 78 | col = t.grid.col, lwd = 1, alpha = 1, 79 | ticks = t.ticks, lines = t.lines, 80 | labels.inside.frame = FALSE, labels.size = t.grid.size, 81 | labels.rot = c(0, 90)) 82 | 83 | if (!is.null(t.studyarea)) { 84 | tmap.curr + 85 | tm_shape(t.studyarea, bbox = t.mat, projection = 4326) + 86 | tm_polygons(col = NA, border.col = "red", alpha = 0, lty = 1, lwd = 1) + 87 | tm_shape(t.map, bbox = t.mat, projection = 4326) + 88 | tm_polygons(col = "tan", border.col = NA, border.alpha = 0, alpha = 1, lty = 1, lwd = 1) 89 | } else { 90 | tmap.curr + 91 | tm_shape(t.map, bbox = t.mat, projection = 4326) + 92 | tm_polygons(col = "tan", border.col = NA, border.alpha = 0, alpha = 1, lty = 1, lwd = 1) 93 | } 94 | } 95 | 96 | ############################################################################### 97 | -------------------------------------------------------------------------------- /data-raw/valid_sf_pres.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data-raw/valid_sf_pres.rda -------------------------------------------------------------------------------- /data/gshhg.l.L16.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data/gshhg.l.L16.rda -------------------------------------------------------------------------------- /data/preds.1.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data/preds.1.rda -------------------------------------------------------------------------------- /data/preds.2.rda: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data/preds.2.rda -------------------------------------------------------------------------------- /data/preds.3.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data/preds.3.rda -------------------------------------------------------------------------------- /data/validation.data.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/data/validation.data.rda -------------------------------------------------------------------------------- /eSDM.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: No 4 | SaveWorkspace: No 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | AutoAppendNewline: Yes 16 | StripTrailingWhitespace: Yes 17 | 18 | BuildType: Package 19 | PackageUseDevtools: Yes 20 | PackageInstallArgs: --no-multiarch --with-keep.source 21 | PackageCheckArgs: --as-cran 22 | PackageRoxygenize: rd,collate,namespace 23 | -------------------------------------------------------------------------------- /inst/CITATION: -------------------------------------------------------------------------------- 1 | citHeader("To cite eSDM in publications use:") 2 | 3 | bibentry( 4 | bibtype = "Article", 5 | title = "eSDM: A tool for creating and exploring ensembles of predictions from species distribution and abundance models", 6 | year = "2019", 7 | author = c( 8 | as.person("Samuel M. Woodman"), 9 | as.person("Karin A. Forney"), 10 | as.person("Elizabeth A. Becker"), 11 | as.person("Monica L. DeAngelis"), 12 | as.person("Elliott L. 
Hazen"), 13 | as.person("Daniel M. Palacios"), 14 | as.person("Jessica V. Redfern") 15 | ), 16 | journal = "Methods Ecol Evol", 17 | doi = "10.1111/2041-210X.13283", 18 | url = "https://doi.org/10.1111/2041-210X.13283", 19 | textVersion = paste("Woodman, S.M., Forney, K.A., Becker, E.A., DeAngelis, M.L., Hazen, E.L., Palacios, D.M., Redfern, J.V. (2019). eSDM: A tool for creating and exploring ensembles of predictions from species distribution and abundance models. Methods Ecol Evol. 2019;10:1923-1933. doi:10.1111/2041-210X.13283") 20 | ) 21 | -------------------------------------------------------------------------------- /inst/extdata/Predictions_Beckeretal2016.rds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Predictions_Beckeretal2016.rds -------------------------------------------------------------------------------- /inst/extdata/Predictions_Beckeretal2016_overlaid.rds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Predictions_Beckeretal2016_overlaid.rds -------------------------------------------------------------------------------- /inst/extdata/Predictions_Hazenetal2017.rds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Predictions_Hazenetal2017.rds -------------------------------------------------------------------------------- /inst/extdata/Predictions_Hazenetal2017_overlaid.rds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Predictions_Hazenetal2017_overlaid.rds 
-------------------------------------------------------------------------------- /inst/extdata/Predictions_Redfernetal2017.rds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Predictions_Redfernetal2017.rds -------------------------------------------------------------------------------- /inst/extdata/README.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/README.txt -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.dbf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Shapefiles/Study_Area_CCE.dbf -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.prj: -------------------------------------------------------------------------------- 1 | GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]] -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.sbn: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Shapefiles/Study_Area_CCE.sbn -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.sbx: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Shapefiles/Study_Area_CCE.sbx -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Shapefiles/Study_Area_CCE.shp -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.shp.xml: -------------------------------------------------------------------------------- 1 | 2 | 20190205094955001.0FALSEStudy_Area_CCE002-131.000000-117.01298130.05000148.60000010.001file://\\SWC-SWOODMAN-L\C$\SMW\Ensemble Tool\eSDM-data\Ignore\eSDM_data_manuscript\Shapefiles\Study_Area_CCE.shpLocal Area NetworkGeographicGCS_WGS_1984Angular Unit: Degree (0.017453)<GeographicCoordinateSystem xsi:type='typens:GeographicCoordinateSystem' xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' xmlns:xs='http://www.w3.org/2001/XMLSchema' xmlns:typens='http://www.esri.com/schemas/ArcGIS/10.1'><WKT>GEOGCS[&quot;GCS_WGS_1984&quot;,DATUM[&quot;D_WGS_1984&quot;,SPHEROID[&quot;WGS_1984&quot;,6378137.0,298.257223563]],PRIMEM[&quot;Greenwich&quot;,0.0],UNIT[&quot;Degree&quot;,0.0174532925199433],AUTHORITY[&quot;EPSG&quot;,4326]]</WKT><XOrigin>-400</XOrigin><YOrigin>-400</YOrigin><XYScale>11258999068426.238</XYScale><ZOrigin>-100000</ZOrigin><ZScale>10000</ZScale><MOrigin>-100000</MOrigin><MScale>10000</MScale><XYTolerance>8.983152841195215e-009</XYTolerance><ZTolerance>0.001</ZTolerance><MTolerance>0.001</MTolerance><HighPrecision>true</HighPrecision><LeftLongitude>-180</LeftLongitude><WKID>4326</WKID><LatestWKID>4326</LatestWKID></GeographicCoordinateSystem>20190205094955002019020509495500Microsoft Windows 7 Version 6.1 (Build 7601) Service Pack 1; Esri ArcGIS 
10.2.2.3552Study_Area_CCE1-131.000000-117.01298148.60000030.050001Shapefile0.001datasetEPSG8.2.61SimpleFALSE1TRUEFALSEStudy_Area_CCEFeature Class1FIDFIDOID400Internal feature number.EsriSequential unique whole numbers that are automatically generated.ShapeShapeGeometry000Feature geometry.EsriCoordinates defining the features.OBJECTIDOBJECTIDInteger990acronymacronymString2000Shape_LengShape_LengDouble1900Shape_AreaShape_AreaDouble1900Area of feature in internal units squared.EsriPositive real numbers that are automatically generated.Area_eaArea_eaDouble1900Area_llArea_llDouble190020190205 3 | -------------------------------------------------------------------------------- /inst/extdata/Shapefiles/Study_Area_CCE.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/Shapefiles/Study_Area_CCE.shx -------------------------------------------------------------------------------- /inst/extdata/Table3.csv: -------------------------------------------------------------------------------- 1 | Predictions,AUC,TSS,AUC-LT,TSS-LT,AUC-HR,TSS-HR 2 | Model_B - original,0.912,0.717,0.732,0.374,0.963,0.824 3 | Model_H - original,0.734,0.414,0.62,0.284,0.772,0.471 4 | Model_R - original,0.919,0.756,0.684,0.29,0.98,0.882 5 | Model_B - overlaid,0.916,0.742,0.732,0.38,0.967,0.856 6 | Model_H - overlaid,0.735,0.406,0.62,0.286,0.772,0.46 7 | Model_R - overlaid,0.919,0.756,0.684,0.29,0.98,0.882 8 | Ensemble - unweighted,0.915,0.772,0.699,0.345,0.972,0.888 9 | Ensemble - AUC-based weights,0.917,0.777,0.703,0.349,0.973,0.893 10 | Ensemble - TSS-based weights,0.92,0.785,0.708,0.352,0.975,0.9 11 | Ensemble - variance-based weights,0.888,0.67,0.713,0.344,0.936,0.764 12 | -------------------------------------------------------------------------------- /inst/extdata/eSDM_Validation_data_all.rds: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/extdata/eSDM_Validation_data_all.rds -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels.R: -------------------------------------------------------------------------------- 1 | ### Code for importing and converting them to sf objects 2 | # Note 'importing predictions' was 'loading models' when code was first written 3 | 4 | 5 | ############################################################################### 6 | ### Flag for if any predictions are imported 7 | output$loadModels_display_flag <- reactive({ 8 | length(vals$models.ll) > 0 9 | }) 10 | outputOptions(output, "loadModels_display_flag", suspendWhenHidden = FALSE) 11 | 12 | ### Flag for if any model predictions are selected in the table 13 | output$loaded_models_selected_flag <- reactive({ 14 | isTruthy(input$models_loaded_table_rows_selected) 15 | }) 16 | outputOptions(output, "loaded_models_selected_flag", suspendWhenHidden = FALSE) 17 | 18 | 19 | ############################################################################### 20 | ### Delete selected predictions 21 | model_remove <- eventReactive(input$model_remove_execute, { 22 | idx <- as.numeric(input$models_loaded_table_rows_selected) 23 | req(length(idx) > 0) 24 | 25 | ######################################################### 26 | ### Remove the reactiveValue info for selected set(s) of predicitons 27 | vals$models.ll <- vals$models.ll[-idx] 28 | vals$models.orig <- vals$models.orig[-idx] 29 | vals$models.names <- vals$models.names[-idx] 30 | vals$models.data.names <- vals$models.data.names[-idx] 31 | vals$models.pred.type <- vals$models.pred.type[-idx] 32 | vals$models.specs <- vals$models.specs[-idx] 33 | 34 | if (length(vals$models.names) == 0) vals$models.names <- NULL 35 | if 
(length(vals$models.data.names) == 0) vals$models.data.names <- NULL 36 | if (length(vals$models.pred.type) == 0) vals$models.pred.type <- NULL 37 | if (length(vals$models.specs) == 0) vals$models.specs <- NULL 38 | 39 | 40 | ######################################################### 41 | # Handle relevant places these preds was used/displayed 42 | 43 | ### If these predictions were interactively previewed, remove preview 44 | ### Else, adjust vals idx 45 | if (isTruthy(vals$models.plot.leaf.idx)) { 46 | if (isTruthy(any(idx %in% vals$models.plot.leaf.idx))) { 47 | vals$models.plot.leaf <- NULL 48 | vals$models.plot.leaf.idx <- NULL 49 | 50 | } else { 51 | idx.adjust <- sapply(vals$models.plot.leaf.idx, function(i) sum(idx < i)) 52 | vals$models.plot.leaf.idx <- vals$models.plot.leaf.idx - idx.adjust 53 | validate( 54 | need(all(vals$models.plot.leaf.idx > 0), 55 | "Error: While deleting original model(s), error 1") 56 | ) 57 | } 58 | } 59 | 60 | ### If these predictions were staticly previewed, remove preview 61 | ### Else, adjust vals idx 62 | if (isTruthy(vals$models.plot.idx)) { 63 | if (isTruthy(any(idx %in% vals$models.plot.idx))) { 64 | vals$models.plot <- NULL 65 | vals$models.plot.idx <- NULL 66 | 67 | } else { 68 | idx.adjust <- sapply(vals$models.plot.idx, function(i) sum(idx < i)) 69 | vals$models.plot.idx <- vals$models.plot.idx - idx.adjust 70 | validate( 71 | need(all(vals$models.plot.idx > 0), 72 | "Error: While deleting original predictions(s), error 1b") 73 | ) 74 | } 75 | } 76 | 77 | ### Remove evaluation metrics if they're calculated for original preds 78 | # TODO: make this so it only removes the metrics of preds being removed 79 | if (isTruthy(vals$eval.models.idx)) { 80 | if (!is.null(vals$eval.models.idx[[1]])){ 81 | vals$eval.models.idx <- NULL 82 | vals$eval.metrics <- NULL 83 | vals$eval.metrics.names <- NULL 84 | } 85 | } 86 | 87 | "" 88 | }) 89 | 90 | 91 | ############################################################################### 92 
| ### Reset 'Prediction value type' to 'Relative' if new file is uploaded 93 | observe({ 94 | input$model_csv_file 95 | updateSelectInput(session, "model_csv_pred_type", selected = 2) 96 | updateSelectInput(session, "model_csv_pt_loc", selected = 1) 97 | }) 98 | observe({ 99 | input$model_gis_raster_file 100 | updateSelectInput(session, "model_gis_raster_pred_type", selected = 2) 101 | }) 102 | observe({ 103 | input$model_gis_shp_files 104 | updateSelectInput(session, "model_gis_shp_pred_type", selected = 2) 105 | }) 106 | observe({ 107 | input$model_gis_gdb_load 108 | updateSelectInput(session, "model_gis_gdb_pred_type", selected = 2) 109 | }) 110 | 111 | ############################################################################### 112 | -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels_create_local.R: -------------------------------------------------------------------------------- 1 | ### Code for final processing steps and adding data to vals$models... 
2 | 3 | ############################################################################### 4 | ### Inputs: objects that must be defined before this code chunk is called 5 | # sf.load.ll # Predictions with crs = crs.ll 6 | # sf.load.orig # Predictions in original projection 7 | # pred.type # Prediction type 8 | # var.type # Uncertainty value type 9 | # model.res # Resolution of predictions 10 | # model.name # File name of predictions 11 | # data.names # Names of data columns with predictions and weights 12 | 13 | 14 | ############################################################################### 15 | #------------------------------------------------------------------------------ 16 | if (!exists("sf.load.orig")) sf.load.orig <- sf.load.ll 17 | 18 | validate( 19 | need(inherits(sf.load.ll, "sf") & inherits(sf.load.orig, "sf"), 20 | "Error 1 in create_local prep") 21 | ) 22 | validate( 23 | need(ncol(sf.load.ll) == 5 & ncol(sf.load.orig) == 5, 24 | "Error 2 in create_local prep") 25 | ) 26 | 27 | 28 | #------------------------------------------------------------------------------ 29 | ### Set names and agr for sf objects; (if nec) convert CV to SE 30 | tmp.geom.ll <- st_geometry(sf.load.ll) 31 | tmp.geom.orig <- st_geometry(sf.load.orig) 32 | 33 | sf.load.ll <- st_set_geometry(sf.load.ll, NULL) %>% 34 | purrr::set_names(c("Pred", "SE", "Weight", "idx")) %>% 35 | mutate(SE = if(var.type == 1) SE / Pred else SE) %>% 36 | st_sf(geometry = tmp.geom.ll, agr = "constant") 37 | 38 | sf.load.orig <- st_set_geometry(sf.load.orig, NULL) %>% 39 | purrr::set_names(c("Pred", "SE", "Weight", "idx")) %>% 40 | mutate(SE = if(var.type == 1) SE / Pred else SE) %>% 41 | st_sf(geometry = tmp.geom.orig, agr = "constant") 42 | rm(tmp.geom.ll, tmp.geom.orig) 43 | 44 | 45 | #------------------------------------------------------------------------------ 46 | ### Process prediction values based on prediction type 47 | if (pred.type == 1) { 48 | abund <- 
unname(round(eSDM::model_abundance(sf.load.orig, "Pred"), 1)) 49 | 50 | } else if (pred.type == 2) { 51 | abund <- "N/A" 52 | 53 | } else if (pred.type == 3) { 54 | abund <- round(sum(sf.load.orig$Pred), 1) 55 | 56 | sdm.area.m2 <- st_area(sf.load.orig) 57 | validate( 58 | need(all(units(sdm.area.m2)$numerator == c("m", "m")), 59 | paste("Error: The GUI could not properly calculate the area of", 60 | "the prediction polygons; please ensure the predictions", 61 | "are properly formatted and have a defined coordinate system")) 62 | ) 63 | 64 | sdm.area.km2 <- esdm_area_km2(sf.load.orig) 65 | sf.load.ll$Pred <- sf.load.ll$Pred / sdm.area.km2 66 | sf.load.orig$Pred <- sf.load.orig$Pred / sdm.area.km2 67 | 68 | sf.load.ll <- st_set_agr(sf.load.ll, "constant") 69 | sf.load.orig <- st_set_agr(sf.load.orig, "constant") 70 | } 71 | 72 | 73 | ### Create vector of specs about the predictions, added to list of vectors 74 | # Specs are: resolution, cell count, non-NA prediction count, abundance, 75 | # and lat/long extent 76 | specs.curr <- c( 77 | model.res, nrow(sf.load.ll), sum(!is.na(sf.load.ll$Pred)), abund, 78 | paste0( 79 | "(", paste(round(st_bbox(sf.load.ll), 0)[c(1, 3)], collapse = ", "), 80 | "), (", 81 | paste(round(st_bbox(sf.load.ll), 0)[c(2, 4)], collapse = ", "), ")" 82 | ) 83 | ) 84 | 85 | 86 | ############################################################################### 87 | ### Save objects to reactiveValues 88 | vals$models.ll <- c(vals$models.ll, list(sf.load.ll)) 89 | vals$models.orig <- c(vals$models.orig, list(sf.load.orig)) 90 | vals$models.names <- c(vals$models.names, model.name) 91 | vals$models.data.names <- c(vals$models.data.names, data.names) 92 | vals$models.pred.type <- c(vals$models.pred.type, pred.type) 93 | vals$models.specs <- c(vals$models.specs, list(specs.curr)) 94 | -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels_funcs.R: 
-------------------------------------------------------------------------------- 1 | # Functions specific to 'Import predictions' section 2 | 3 | ############################################################################### 4 | # Generate info message about NA values for renderUIs 5 | model_NA_info_func <- function(pred.type, pred.na.idx, 6 | var.idx, var.na.idx, 7 | weight.idx, weight.na.idx) { 8 | temp <- ifelse( 9 | pred.type != 3, NA, 10 | paste("Abundance value type: All prediction values will be divided", 11 | "by their prediction polygon area") 12 | ) 13 | 14 | #-------------------------------------------------------- 15 | if (as.numeric(weight.idx) > 1 & as.numeric(var.idx) > 1) { 16 | if (is.na(temp)) { 17 | HTML( 18 | na_message_pred(pred.na.idx), "
", "
", 19 | na_message(var.na.idx, pred.na.idx, "uncertainty"), "
", "
", 20 | na_message(weight.na.idx, pred.na.idx, "weight") 21 | ) 22 | } else { 23 | HTML( 24 | temp, "
", "
", 25 | na_message_pred(pred.na.idx), "
", "
", 26 | na_message(var.na.idx, pred.na.idx, "uncertainty"), "
", "
", 27 | na_message(weight.na.idx, pred.na.idx, "weight") 28 | ) 29 | } 30 | 31 | #------------------------------------------------------ 32 | } else if (as.numeric(weight.idx) > 1) { 33 | if (is.na(temp)) { 34 | HTML( 35 | na_message_pred(pred.na.idx), "
", "
", 36 | na_message(weight.na.idx, pred.na.idx, "weight") 37 | ) 38 | } else { 39 | HTML( 40 | temp, "
", "
", 41 | na_message_pred(pred.na.idx), "
", "
", 42 | na_message(weight.na.idx, pred.na.idx, "weight") 43 | ) 44 | } 45 | 46 | #------------------------------------------------------ 47 | } else if (as.numeric(var.idx) > 1) { 48 | if (is.na(temp)) { 49 | HTML( 50 | na_message_pred(pred.na.idx), "
", "
", 51 | na_message(var.na.idx, pred.na.idx, "uncertainty") 52 | ) 53 | } else { 54 | HTML( 55 | temp, "
", "
", 56 | na_message_pred(pred.na.idx), "
", "
", 57 | na_message(var.na.idx, pred.na.idx, "uncertainty") 58 | ) 59 | } 60 | 61 | #------------------------------------------------------ 62 | } else { 63 | if (is.na(temp)) { 64 | HTML(na_message_pred(pred.na.idx)) 65 | } else { 66 | HTML(temp, "
", "
", na_message_pred(pred.na.idx)) 67 | } 68 | } 69 | } 70 | 71 | ############################################################################### 72 | # Attempt to determine the resolution of provided GIS predictions 73 | gis_res_calc <- function(sf.ll, sf.orig) { 74 | #---------------------------------------------------------------------------- 75 | # Checks 76 | validate( 77 | need(inherits(sf.ll, "sf") & inherits(sf.orig, "sf"), 78 | "Error: gis.res.calc(): inputs must be sf objects"), 79 | need(identical(st_crs(sf.ll), crs.ll), 80 | "Error: gis.res.calc(): first input must have crs = crs.ll") 81 | ) 82 | 83 | #---------------------------------------------------------------------------- 84 | ### Get extents of individual polys in original projection and units if apl 85 | crs.orig <- st_crs(sf.orig)$proj4string 86 | crs.orig.m <- grepl("+units=m", crs.orig) 87 | crs.orig.ll <- st_is_longlat(crs.orig) 88 | 89 | if ((crs.orig.m | crs.orig.ll) & (crs.orig.m != crs.orig.ll) & 90 | !identical(st_crs(sf.orig), crs.ll)) { 91 | res.orig <- sapply(list(sf.orig), function(sf.curr, div.val) { 92 | sf.bbox <- lapply(st_geometry(sf.curr), st_bbox) 93 | sf.lon.diff <- sapply(sf.bbox, function(i) round(i["xmax"] - i["xmin"], 3)) / div.val 94 | sf.lat.diff <- sapply(sf.bbox, function(j) round(j["ymax"] - j["ymin"], 3)) / div.val 95 | 96 | sf.table.lon <- table(sf.lon.diff) 97 | sf.check1 <- (max(sf.table.lon) / nrow(sf.curr)) > 0.8 98 | sf.lon.val <- as.numeric(names(sf.table.lon)[which.max(sf.table.lon)]) 99 | 100 | sf.table.lat <- table(sf.lat.diff) 101 | sf.check2 <- (max(sf.table.lat) / nrow(sf.curr)) > 0.8 102 | sf.lat.val <- as.numeric(names(sf.table.lat)[which.max(sf.table.lat)]) 103 | 104 | if (sf.check1 & sf.check2 & (sf.lon.val == sf.lat.val)) { 105 | paste(sf.lon.val, ifelse(crs.orig.m, "km", "degrees")) 106 | } else { 107 | NA 108 | } 109 | }, div.val = ifelse(crs.orig.m, 1e+03, 1)) 110 | 111 | } else { 112 | res.orig <- NA 113 | } 114 | 115 | 
#---------------------------------------------------------------------------- 116 | ### Get extents of individual polygons in original crs.ll (WGS84) 117 | res.ll <- sapply(list(sf.ll), function(sf.curr) { 118 | sf.bbox <- lapply(st_geometry(sf.curr), st_bbox) 119 | sf.lon.diff <- sapply(sf.bbox, function(i) round(i["xmax"] - i["xmin"], 3)) 120 | sf.lat.diff <- sapply(sf.bbox, function(j) round(j["ymax"] - j["ymin"], 3)) 121 | 122 | sf.table.lon <- table(sf.lon.diff) 123 | sf.check1 <- (max(sf.table.lon) / nrow(sf.curr)) > 0.8 124 | sf.lon.val <- as.numeric(names(sf.table.lon)[which.max(sf.table.lon)]) 125 | 126 | sf.table.lat <- table(sf.lat.diff) 127 | sf.check2 <- (max(sf.table.lat) / nrow(sf.curr)) > 0.8 128 | sf.lat.val <- as.numeric(names(sf.table.lat)[which.max(sf.table.lat)]) 129 | 130 | if (sf.check1 & sf.check2 & (sf.lon.val == sf.lat.val)) { 131 | paste(sf.lon.val, "degrees") 132 | } else { 133 | NA 134 | } 135 | }) 136 | 137 | #---------------------------------------------------------------------------- 138 | ### Return appropriate object 139 | if (is.na(res.orig) & !is.na(res.ll)) { 140 | res.ll 141 | 142 | } else if (!is.na(res.orig) & is.na(res.ll)) { 143 | res.orig 144 | 145 | } else { 146 | "Unk" 147 | } 148 | } 149 | 150 | ############################################################################### 151 | -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels_raster.R: -------------------------------------------------------------------------------- 1 | ### Code for importing predictions from a raster (.tif file) 2 | 3 | 4 | ############################################################################### 5 | ### Get indices of predictions that area NA's using na_which() 6 | model_gis_raster_NA_idx_pred <- reactive({ 7 | na_which(req(read_model_gis_raster())[[1]]$Pred) 8 | }) 9 | 10 | 11 | ############################################################################### 12 
| # Load and process data from raster 13 | 14 | ### Read raster data and start processing 15 | read_model_gis_raster <- reactive({ 16 | file.in <- req(input$model_gis_raster_file) 17 | 18 | ### Ensure file extension is .tif or .img (recognized as "") 19 | if (!(file.in$type %in% c("image/tiff", ""))) return() 20 | 21 | ### Read raster and start processing 22 | withProgress(message = "Uploading raster", value = 0.4, { 23 | gis.file.raster <- try( 24 | raster(file.in$datapath, band = input$model_gis_raster_band), 25 | silent = TRUE 26 | ) 27 | gis.file.success <- inherits(gis.file.raster, "RasterLayer") 28 | incProgress(0.3) 29 | 30 | if (gis.file.success) { 31 | sf.load.raster <- st_as_sf( 32 | as(gis.file.raster, "SpatialPolygonsDataFrame") 33 | ) 34 | stopifnot(ncol(sf.load.raster) == 2) 35 | } 36 | incProgress(0.3) 37 | }) 38 | 39 | ### Return appropriate objects 40 | if (gis.file.success) { 41 | list(sf.load.raster, gis.file.raster) 42 | } else { 43 | NULL 44 | } 45 | }) 46 | 47 | 48 | ### Flag for if the raster was properly read and processed 49 | output$read_model_gis_raster_flag <- reactive({ 50 | if (is.null(read_model_gis_raster())) FALSE else TRUE 51 | }) 52 | outputOptions(output, "read_model_gis_raster_flag", suspendWhenHidden = FALSE) 53 | 54 | 55 | ####################################### 56 | ### Process data and add it to vals 57 | create_sf_gis_raster <- eventReactive(input$model_create_gis_raster, { 58 | sf.load.raster <- read_model_gis_raster()[[1]] 59 | 60 | withProgress(message = "Importing predictions from raster", value = 0.2, { 61 | # Check that pred and weight data are valid 62 | sf.load.raster <- check_pred_var_weight( 63 | sf.load.raster, 1, NA, NA, model_gis_raster_NA_idx_pred(), NA, NA 64 | ) 65 | 66 | # Check long extent, polygon validity, and create crs.ll version if nec 67 | sf.load.raster <- check_dateline(sf.load.raster, progress.detail = TRUE) 68 | incProgress(0.2) 69 | sf.load.raster <- check_valid(sf.load.raster, progress.detail = 
TRUE) 70 | incProgress(0.2) 71 | sf.list <- check_gis_crs(sf.load.raster) 72 | incProgress(0.2) 73 | 74 | # Determine resolution of raster cells 75 | crs.orig <- st_crs(sf.load.raster)$proj4string 76 | crs.orig.m <- grepl("+units=m", crs.orig) 77 | crs.orig.ll <- st_is_longlat(crs.orig) 78 | 79 | if (crs.orig.ll & crs.orig.m) { 80 | model.res <- NA 81 | 82 | } else if (crs.orig.ll) { 83 | z <- read_model_gis_raster()[[2]] 84 | z.1 <- round((z@extent@xmax - z@extent@xmin) / z@ncols, 3) 85 | z.2 <- round((z@extent@ymax - z@extent@ymin) / z@nrows, 3) 86 | model.res <- ifelse(z.1 == z.2, paste(z.1, "degrees"), NA) 87 | rm(z, z.1, z.2) 88 | 89 | } else if (crs.orig.m) { 90 | z <- read_model_gis_raster()[[2]] 91 | z.1 <- round((z@extent@xmax - z@extent@xmin) / z@ncols / 1e+3, 3) 92 | z.2 <- round((z@extent@ymax - z@extent@ymin) / z@nrows / 1e+3, 3) 93 | model.res <- ifelse(z.1 == z.2, paste(z.1, "km"), NA) 94 | rm(z, z.1, z.2) 95 | 96 | } else { 97 | model.res <- NA 98 | } 99 | incProgress(0.1) 100 | 101 | # Prepare for 'local' code 102 | sf.load.llo <- sf.list[[1]] 103 | sf.load.origo <- sf.list[[2]] 104 | 105 | sf.load.ll <- sf.list[[1]] %>% 106 | st_set_geometry(NULL) %>% 107 | dplyr::mutate(SE = as.numeric(NA), Weight = as.numeric(NA), 108 | idx = 1:nrow(sf.list[[1]])) %>% 109 | st_sf(geometry = st_geometry(sf.list[[1]]), agr = "constant") 110 | sf.load.orig <- sf.list[[2]] %>% 111 | st_set_geometry(NULL) %>% 112 | dplyr::mutate(SE = as.numeric(NA), Weight = as.numeric(NA), 113 | idx = 1:nrow(sf.list[[1]])) %>% 114 | st_sf(geometry = st_geometry(sf.list[[2]]), agr = "constant") 115 | 116 | data.names <- list(c(names(sf.load.ll)[1], NA, NA)) 117 | pred.type <- input$model_gis_raster_pred_type 118 | var.type <- 2 119 | model.name <- input$model_gis_raster_file$name 120 | incProgress(0.1) 121 | 122 | 123 | ###### Code common to all importing functions ###### 124 | source( 125 | file.path("server_1_loadModels", "server_1_loadModels_create_local.R"), 126 | local = TRUE, 
echo = FALSE, chdir = TRUE 127 | ) 128 | #################################################### 129 | }) 130 | 131 | "Predictions imported from raster" 132 | }) 133 | 134 | ############################################################################### 135 | -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels_shpgdb.R: -------------------------------------------------------------------------------- 1 | ### Code for importing predictions from .shp or .gdb file 2 | 3 | 4 | ############################################################################### 5 | # gis.model.check() and gis.res.calc() are in '..._loadModels_func.R' 6 | 7 | ############################################################################### 8 | # Reactive functions for renderUIs 9 | 10 | #---------------------------------------------------------- 11 | ### Get names of data columns 12 | shp_names_choice_input <- reactive({ 13 | req(read_model_gis_shp()) 14 | choice.input.names <- head(names(read_model_gis_shp()[[1]]), -1) 15 | choice.input <- (1:length(choice.input.names)) 16 | names(choice.input) <- choice.input.names 17 | 18 | choice.input 19 | }) 20 | 21 | gdb_names_choice_input <- reactive({ 22 | choice.input.names <- head(names(read_model_gis_gdb()[[1]]), -1) 23 | choice.input <- (1:length(choice.input.names)) 24 | names(choice.input) <- choice.input.names 25 | 26 | choice.input 27 | }) 28 | 29 | 30 | #---------------------------------------------------------- 31 | ### Identify NA prediction values 32 | model_gis_shp_NA_idx_pred <- reactive({ 33 | data.shp <- st_set_geometry(req(read_model_gis_shp())[[1]], NULL) 34 | 35 | data.col <- as.numeric(req(input$model_gis_shp_names_pred)) 36 | req(data.col <= ncol(data.shp)) 37 | 38 | na_which(data.shp[, data.col]) 39 | }) 40 | 41 | model_gis_gdb_NA_idx_pred <- reactive({ 42 | data.gdb <- st_set_geometry(req(read_model_gis_gdb())[[1]], NULL) 43 | 44 | data.col <- 
as.numeric(req(input$model_gis_gdb_names_pred)) 45 | req(data.col <= ncol(data.gdb)) 46 | 47 | na_which(data.gdb[, data.col]) 48 | }) 49 | 50 | 51 | #---------------------------------------------------------- 52 | ### Identify NA uncertainty values 53 | model_gis_shp_NA_idx_var <- reactive({ 54 | data.shp <- st_set_geometry(req(read_model_gis_shp())[[1]], NULL) 55 | var.col <- as.numeric(req(input$model_gis_shp_names_var)) 56 | req((var.col - 1) <= ncol(data.shp)) 57 | 58 | if (var.col > 1) { 59 | na_which(data.shp[, var.col - 1]) 60 | } else { 61 | NA 62 | } 63 | }) 64 | 65 | model_gis_gdb_NA_idx_var <- reactive({ 66 | data.gdb <- st_set_geometry(req(read_model_gis_gdb())[[1]], NULL) 67 | var.col <- as.numeric(req(input$model_gis_gdb_names_var)) 68 | req((var.col - 1) <= ncol(data.gdb)) 69 | 70 | if (var.col > 1) { 71 | na_which(data.gdb[, var.col - 1]) 72 | } else { 73 | NA 74 | } 75 | }) 76 | 77 | 78 | #---------------------------------------------------------- 79 | ### Identify NA weight values 80 | model_gis_shp_NA_idx_weight <- reactive({ 81 | data.shp <- st_set_geometry(req(read_model_gis_shp())[[1]], NULL) 82 | weight.col <- as.numeric(req(input$model_gis_shp_names_weight)) 83 | req((weight.col - 1) <= ncol(data.shp)) 84 | 85 | if (weight.col > 1) { 86 | na_which(data.shp[, weight.col - 1]) 87 | } else { 88 | NA 89 | } 90 | }) 91 | 92 | model_gis_gdb_NA_idx_weight <- reactive({ 93 | data.gdb <- st_set_geometry(req(read_model_gis_gdb())[[1]], NULL) 94 | weight.col <- as.numeric(req(input$model_gis_gdb_names_weight)) 95 | req((weight.col - 1) <= ncol(data.gdb)) 96 | 97 | if (weight.col > 1) { 98 | na_which(data.gdb[, weight.col - 1]) 99 | } else { 100 | NA 101 | } 102 | }) 103 | 104 | 105 | ############################################################################### 106 | # Upload and process data from shapefile 107 | 108 | #------------------------------------------------------------------------------ 109 | ### Read in data and return sf object 110 | 
read_model_gis_shp <- reactive({ 111 | req(input$model_gis_shp_files) 112 | 113 | withProgress(message = "Uploading shapefile", value = 0.4, { 114 | gis.file.shp <- read.shp.shiny(input$model_gis_shp_files) 115 | incProgress(0.6) 116 | }) 117 | 118 | if(isTruthy(gis.file.shp)) { 119 | list( 120 | gis.file.shp, strsplit(input$model_gis_shp_files$name[1], "\\.")[[1]][1] 121 | ) 122 | } else { 123 | NULL 124 | } 125 | }) 126 | 127 | ### Flag for if the shapefile was properly uploaded 128 | output$read_model_gis_shp_flag <- reactive({ 129 | isTruthy(read_model_gis_shp()) 130 | }) 131 | outputOptions(output, "read_model_gis_shp_flag", suspendWhenHidden = FALSE) 132 | 133 | 134 | #------------------------------------------------------------------------------ 135 | ### Process shapefile data 136 | create_sf_gis_shp <- eventReactive(input$model_create_gis_shp, { 137 | # Prep for create_local code 138 | gis.file <- read_model_gis_shp()[[1]] 139 | 140 | pred.idx <- as.numeric(input$model_gis_shp_names_pred) 141 | var.idx <- as.numeric(input$model_gis_shp_names_var) 142 | weight.idx <- as.numeric(input$model_gis_shp_names_weight) 143 | 144 | # Check that pred and weight data are valid 145 | gis.file <- check_pred_var_weight( 146 | gis.file, pred.idx, ifelse(var.idx == 1, NA, var.idx - 1), 147 | ifelse(weight.idx == 1, NA, weight.idx - 1), 148 | model_gis_shp_NA_idx_pred(), model_gis_shp_NA_idx_var(), 149 | model_gis_shp_NA_idx_weight() 150 | ) 151 | 152 | # Continue create_local code prep 153 | model.name <-read_model_gis_shp()[[2]] 154 | pred.type <- input$model_gis_shp_pred_type 155 | var.type <- input$model_gis_shp_var_type 156 | prog.message <- "Importing predictions from shapefile" 157 | 158 | #### The code from this file is the same as in create_sf_gis_gdb() #### 159 | source(file.path( 160 | "server_1_loadModels", "server_1_loadModels_shpgdb_create_local.R" 161 | ), local = TRUE, echo = FALSE, chdir = TRUE) 162 | 163 | "Predictions imported from shapefile" 164 | }) 165 
| 166 | 167 | ############################################################################### 168 | # Upload and process data from file geodatabase (.gdb) feature class 169 | 170 | ### Read in data and return sf object 171 | read_model_gis_gdb <- eventReactive(input$model_gis_gdb_load, { 172 | gdb.path <- input$model_gis_gdb_path 173 | gdb.name <- input$model_gis_gdb_name 174 | 175 | withProgress(message = "Uploading feature class", value = 0.4, { 176 | gis.file.gdb <- try(st_read(gdb.path, gdb.name, quiet = TRUE), 177 | silent = TRUE) 178 | incProgress(0.6) 179 | }) 180 | 181 | if (isTruthy(gis.file.gdb)) { 182 | list(gis.file.gdb, gdb.name) 183 | } else { 184 | NULL 185 | } 186 | }) 187 | 188 | output$read_model_gis_gdb_flag <- reactive({ 189 | isTruthy(read_model_gis_gdb()) 190 | }) 191 | outputOptions(output, "read_model_gis_gdb_flag", suspendWhenHidden = FALSE) 192 | 193 | 194 | #------------------------------------------------------------------------------ 195 | ### Process feature class data 196 | create_sf_gis_gdb <- eventReactive(input$model_create_gis_gdb, { 197 | # Prep for create_local code 198 | gis.file <- read_model_gis_gdb()[[1]] 199 | 200 | pred.idx <- as.numeric(input$model_gis_gdb_names_pred) 201 | var.idx <- as.numeric(input$model_gis_gdb_names_var) 202 | weight.idx <- as.numeric(input$model_gis_gdb_names_weight) 203 | 204 | # Check that pred and weight data are valid 205 | gis.file <- check_pred_var_weight( 206 | gis.file, pred.idx, ifelse(var.idx == 1, NA, var.idx - 1), 207 | ifelse(weight.idx == 1, NA, weight.idx - 1), 208 | model_gis_gdb_NA_idx_pred(), model_gis_gdb_NA_idx_var(), 209 | model_gis_gdb_NA_idx_weight() 210 | ) 211 | 212 | # Continue create_local code prep 213 | model.name <- read_model_gis_gdb()[[2]] 214 | pred.type <- input$model_gis_gdb_pred_type 215 | var.type <- input$model_gis_gdb_var_type 216 | prog.message <- "Importing predictions from feature class" 217 | 218 | #### The code from this file is the same as in 
create_sf_gis_shp() #### 219 | source(file.path( 220 | "server_1_loadModels", "server_1_loadModels_shpgdb_create_local.R" 221 | ), local = TRUE, echo = FALSE, chdir = TRUE) 222 | 223 | "Predictions imported from feature class" 224 | }) 225 | -------------------------------------------------------------------------------- /inst/shiny/server_1_loadModels/server_1_loadModels_shpgdb_create_local.R: -------------------------------------------------------------------------------- 1 | ### Code for importing predicitons for both .shp and .gdb inputs 2 | ### Called in create_sf_gis_shp() and create_sf_gis_gdb() 3 | 4 | ### req() checks 5 | req(pred.idx <= (ncol(gis.file) - 1)) 6 | req((weight.idx - 1) <= (ncol(gis.file) - 1)) 7 | 8 | withProgress(message = prog.message, value = 0.3, { 9 | ### Check long extent, polygon validity, and generate crs.ll version if nec 10 | gis.file <- check_dateline(gis.file, progress.detail = TRUE) 11 | gis.file <- check_valid(gis.file, progress.detail = TRUE) 12 | sf.list <- check_gis_crs(gis.file) 13 | incProgress(0.4) 14 | 15 | ### Process spatial data 16 | sf.load.ll <- sf.list[[1]] 17 | sf.load.orig <- sf.list[[2]] 18 | sf.load.df <- st_set_geometry(sf.load.ll, NULL) 19 | 20 | 21 | var.idx <- ifelse(var.idx == 1, NA, var.idx - 1) 22 | if(!is.na(var.idx)) { 23 | toadd.v <- sf.load.df[, var.idx] 24 | } else { 25 | toadd.v <- as.numeric(NA) 26 | } 27 | 28 | weight.idx <- ifelse(weight.idx == 1, NA, weight.idx - 1) 29 | if(!is.na(weight.idx)) { 30 | toadd.w <- sf.load.df[, weight.idx] 31 | } else { 32 | toadd.w <- as.numeric(NA) 33 | } 34 | 35 | 36 | # Names of sf object columns set in other create_local code 37 | sf.load.ll <- sf.load.ll %>% 38 | st_set_geometry(NULL) %>% 39 | dplyr::select(all_of(pred.idx)) %>% 40 | dplyr::mutate(toadd.v, toadd.w, idx = seq_along(toadd.v)) %>% 41 | st_sf(geometry = st_geometry(sf.load.ll), agr = "constant") 42 | 43 | sf.load.orig <- sf.load.orig %>% 44 | st_set_geometry(NULL) %>% 45 | 
dplyr::select(all_of(pred.idx)) %>% 46 | dplyr::mutate(toadd.v, toadd.w, idx = seq_along(toadd.v)) %>% 47 | st_sf(geometry = st_geometry(sf.load.orig), agr = "constant") 48 | incProgress(0.1) 49 | 50 | # Calculate resolution of the predictions 51 | model.res <- gis_res_calc(sf.load.ll, sf.load.orig) 52 | incProgress(0.2) 53 | 54 | # Need names from sf.list[[1]] since sf.load.ll names will be different 55 | data.names <- list(names(sf.list[[1]])[c(pred.idx, var.idx, weight.idx)]) 56 | 57 | 58 | ###### Code common to all importing functions ###### 59 | source("server_1_loadModels_create_local.R", 60 | local = TRUE, echo = FALSE, chdir = TRUE) 61 | #################################################### 62 | }) 63 | -------------------------------------------------------------------------------- /inst/shiny/server_2_overlay/server_2_overlay.R: -------------------------------------------------------------------------------- 1 | ### Select base geometry and perform overlay 2 | 3 | 4 | ############################################################################### 5 | ### Flag for whether or not to display overlay tab items 6 | output$overlay_display_flag <- reactive({ 7 | (length(vals$models.ll) > 0) | length(vals$overlaid.models) > 0 8 | }) 9 | outputOptions(output, "overlay_display_flag", suspendWhenHidden = FALSE) 10 | 11 | ### Flag for if overlaid predictions have been created 12 | output$overlay_preview_display_flag <- reactive({ 13 | length(vals$overlaid.models) > 0 14 | }) 15 | outputOptions( 16 | output, "overlay_preview_display_flag", suspendWhenHidden = FALSE 17 | ) 18 | 19 | 20 | ############################################################################### 21 | ### Remove study area (boundary) poly if 'import study area' box is unchecked 22 | observeEvent(input$overlay_bound, { 23 | if (!input$overlay_bound) { 24 | vals$overlay.bound <- NULL 25 | 26 | shinyjs::reset("overlay_bound_csv_file") 27 | shinyjs::reset("overlay_bound_gis_shp_files") 28 | 
shinyjs::reset("overlay_bound_gis_gdb_path") 29 | shinyjs::reset("overlay_bound_gis_gdb_name") 30 | } 31 | }) 32 | 33 | ### Remove erasing (land) poly if 'import erasing polygon' box is unchecked 34 | observeEvent(input$overlay_land, { 35 | if (!input$overlay_land){ 36 | vals$overlay.land <- NULL 37 | 38 | shinyjs::reset("overlay_land_csv_file") 39 | shinyjs::reset("overlay_land_gis_shp_files") 40 | shinyjs::reset("overlay_land_gis_gdb_path") 41 | shinyjs::reset("overlay_land_gis_gdb_name") 42 | } 43 | }) 44 | 45 | ### Reset erasing polygon widget info as necessary 46 | observeEvent(input$overlay_land_load_type, { 47 | shinyjs::reset("overlay_land_csv_file") 48 | shinyjs::reset("overlay_land_gis_shp_files") 49 | }) 50 | 51 | observeEvent(input$overlay_land_file_type, { 52 | shinyjs::reset("overlay_land_csv_file") 53 | shinyjs::reset("overlay_land_gis_shp_files") 54 | }) 55 | 56 | 57 | ############################################################################### 58 | # Prep for base geometry preview 59 | 60 | ########################################################### 61 | ### Get predictions selected for base geometry in crs.ll 62 | overlay_preview_base_model <- reactive({ 63 | base.which <- as.numeric(input$overlay_loaded_table_rows_selected) 64 | 65 | validate( 66 | need(length(base.which) == 1, 67 | paste("Error: Please select exactly one set of predictions from the", 68 | "table to use as the base geometry")) 69 | ) 70 | 71 | vals$models.ll[[base.which]] 72 | }) 73 | 74 | ### Crop (clip) erasing polygon by bbox of base geometry 75 | overlay_preview_base_land <- reactive({ 76 | base.bbox <- st_bbox(overlay_preview_base_model()) 77 | base.bbox[1:2] <- base.bbox[1:2] - 2 78 | base.bbox[3:4] <- base.bbox[3:4] + 2 79 | 80 | suppressMessages( 81 | st_crop(vals$overlay.land, base.bbox) 82 | ) 83 | }) 84 | 85 | ############################################################################### 86 | 
# --- inst/shiny/server_2_overlay/server_2_overlay_funcs.R ---

# Check that provided sf object has a valid crs, and return crs.ll version
overlay_gis_check <- function(gis.loaded) {
  validate(
    need(inherits(gis.loaded, "sfc"),
         "Error: Error in import, please report this as an issue")
  )
  validate(
    need(!is.na(st_crs(gis.loaded)$proj4string),
         "Error: The provided object does not have a defined coordinate system")
  )

  # Transform to lat/long, wrap across the dateline if needed, then
  # run the validity check (returned value)
  check_valid(check_dateline(st_transform(gis.loaded, crs.ll)))
}


# --- inst/shiny/server_2_overlay/server_2_overlay_loadPoly_csv.R ---
### Code for creating a study area or erasing polygon from .csv data


############################################################################
# Study area polygon (aka boundary in code)

### Create sfc object with one polygon from csv points
overlay_bound_csv <- reactive({
  req(input$overlay_bound_csv_file)

  # Reset vals object here in case validate() is triggered
  vals$overlay.bound <- NULL

  # Read in .csv file
  validate(
    need(input$overlay_bound_csv_file$type %in%
           c("text/csv", "application/vnd.ms-excel"),
         "Error: Selected file is not an Excel .csv file")
  )
  bound.df <- read.csv(
    input$overlay_bound_csv_file$datapath, stringsAsFactors = FALSE
  )

  validate(
    need(ncol(bound.df) >= 2,
         paste("Error: The study area .csv file must have at least two",
               "columns (longitude and latitude, respectively)")),
    need(!anyNA(bound.df),
         paste("Error: The study area polygon must be one, single polygon.",
               "Please load a csv file without any invalid entries (e.g. NA)",
               "in the two columns"))
  )

  # Process input and make it into sfc object
  withProgress(message = 'Importing study area polygon', value = 0.7, {
    Sys.sleep(0.5)

    # Shift 0-360 longitudes into the -180 to 180 range
    if (min(bound.df[, 1]) > 180) bound.df[, 1] <- bound.df[, 1] - 360

    bound.sfc <- try(
      st_sfc(st_polygon(list(as.matrix(bound.df))), crs = 4326), silent = TRUE
    )
    validate(
      need(isTruthy(bound.sfc),
           paste("Error: The study area polygon could not be created",
                 "from the provided points.",
                 "Please ensure that the .csv file has the longitude points",
                 "in the first column, the latitude points in the second",
                 "column, and that the provided points form a closed",
                 "and valid polygon")))
    validate(
      need(st_is_valid(bound.sfc),
           paste("Error: The provided study area polygon is invalid;",
                 "please ensure that the provided points form a closed",
                 "and valid polygon (no self-intersections)"))
    )

    bound.sfc <- check_dateline(bound.sfc, 60, progress.detail = TRUE)
    incProgress(0.3)
  })

  # Add sfc object to vals, this sfc object will always be length 1
  vals$overlay.bound <- bound.sfc

  # Return empty string - success message printed elsewhere
  ""
})


############################################################################
# Erasing polygon (aka land in code)

### Create sfc object from csv points
overlay_land_csv <- reactive({
  req(input$overlay_land_csv_file)

  # Reset vals object here in case validate() is triggered
  vals$overlay.land <- NULL

  # Read in .csv file
  validate(
    need(input$overlay_land_csv_file$type %in%
           c("text/csv", "application/vnd.ms-excel"),
         "Error: Selected file is not an Excel .csv file")
  )
  land.df <- read.csv(
    input$overlay_land_csv_file$datapath, stringsAsFactors = FALSE
  )

  # Create sfc object for land polygon
  withProgress(message = 'Importing land polygon', value = 0.7, {
    Sys.sleep(0.5)

    # Shift 0-360 longitudes into the -180 to 180 range
    if (min(land.df[, 1], na.rm = TRUE) > 180) {
      land.df[, 1] <- land.df[, 1] - 360
    }

    # pts2poly_vertices_shiny() calls check_dateline() and check_valid()
    land.sfc <- pts2poly_vertices_shiny(land.df[, 1:2], crs.ll, TRUE)
    incProgress(0.3)
  })

  # Add sfc object to vals
  vals$overlay.land <- land.sfc

  # Return empty string - success message printed elsewhere
  ""
})

###############################################################################


# --- inst/shiny/server_2_overlay/server_2_overlay_loadPoly_provided.R ---
### Code for loading in GSHHG land polygons


# When user clicks button, load selected provided land poly(s)
overlay_land_prov <- eventReactive(input$overlay_land_provided, {
  withProgress(message = "Importing provided erasing polygon", value = 0.5, {
    vals$overlay.land <- NULL

    land.prov <- try(eSDM::gshhg.l.L16, silent = TRUE)

    validate(
      need(inherits(land.prov, "sfc"),
           paste("Error: The GUI was not able to access the provided erasing",
                 "polygon; try reinstalling eSDM or restarting the GUI"))
    )
    incProgress(0.5)
    vals$overlay.land <- land.prov
  })

  ""
})


# --- inst/shiny/server_2_overlay/server_2_overlay_loadPoly_shpgdb.R ---
### Code for importing a study area or erasing polygon from shp or gdb source
## overlaid.gis.crs is in ...overlay_funcs.R

###############################################################################
# Study area (aka boundary) polygon

### Shapefile
# Import a study area polygon from user-selected shapefile components.
# Stores the (unioned, crs.ll) polygon in vals$overlay.bound; returns "".
overlay_bound_gis_shp <- reactive({
  req(input$overlay_bound_gis_shp_files)

  isolate({
    # Reset vals object here in case validate() is triggered
    vals$overlay.bound <- NULL

    withProgress(message = 'Importing study area polygon', value = 0.3, {
      bound.sf <- read.shp.shiny(input$overlay_bound_gis_shp_files)

      validate(
        need(inherits(bound.sf, c("sf", "sfc")),
             "Error: Could not import shapefile using selected files")
      )
      incProgress(0.5)

      # Check crs and transform geometry to crs.ll
      temp.bound <- overlay_gis_check(st_geometry(bound.sf))

      # Store a single geometry; union multi-feature imports
      if (length(temp.bound) != 1) {
        vals$overlay.bound <- st_union(temp.bound)
      } else {
        vals$overlay.bound <- temp.bound
      }

      incProgress(0.2)
    })
  })

  # Return empty string - success message printed elsewhere
  ""
})


### .gdb
# Import a study area polygon from a file geodatabase (path + layer name).
overlay_bound_gis_gdb <- eventReactive(input$overlay_bound_gis_gdb_load, {
  # Reset vals object here in case validate() is triggered
  vals$overlay.bound <- NULL

  withProgress(message = 'Importing study area polygon', value = 0.3, {
    bound.sf <- try(
      st_read(input$overlay_bound_gis_gdb_path,
              input$overlay_bound_gis_gdb_name,
              quiet = TRUE),
      silent = TRUE
    )

    validate(
      need(inherits(bound.sf, c("sf", "sfc")),
           "Error: Could not import object using provided path and name")
    )
    incProgress(0.5)

    # Check crs and transform geometry to crs.ll
    temp.bound <- overlay_gis_check(st_geometry(bound.sf))

    # Store a single geometry; union multi-feature imports
    if (length(temp.bound) != 1) {
      vals$overlay.bound <- st_union(temp.bound)
    } else {
      vals$overlay.bound <- temp.bound
    }
    incProgress(0.2)
  })

  # Return empty string - success message printed elsewhere
  ""
})


###############################################################################
# Erasing (aka land) polygon

### Shapefile
# Import an erasing (land) polygon from user-selected shapefile components.
# Stores the (unioned, crs.ll) polygon in vals$overlay.land; returns "".
overlay_land_gis_shp <- reactive({
  req(input$overlay_land_gis_shp_files)

  isolate({
    # Reset vals object here in case validate() is triggered
    vals$overlay.land <- NULL

    withProgress(message = 'Importing erasing polygon', value = 0.3, {
      land.sf <- read.shp.shiny(input$overlay_land_gis_shp_files)

      validate(
        need(inherits(land.sf, c("sf", "sfc")),
             "Error: Could not import shapefile using selected files")
      )
      incProgress(0.5)

      # Check crs and transform geometry to crs.ll
      temp.land <- overlay_gis_check(st_geometry(land.sf))

      # Store a single geometry; union multi-feature imports
      if (length(temp.land) != 1) {
        vals$overlay.land <- st_union(temp.land)
      } else {
        vals$overlay.land <- temp.land
      }
      incProgress(0.2)
    })
  })

  # Return empty string - success message printed elsewhere
  ""
})


### .gdb
# Import an erasing (land) polygon from a file geodatabase (path + layer name).
# NOTE: removed unused locals land.gdb.path/land.gdb.name (dead code); the
# input values are read directly in st_read(), matching the bound version.
overlay_land_gis_gdb <- eventReactive(input$overlay_land_gis_gdb_load, {
  # Reset vals object here in case validate() is triggered
  vals$overlay.land <- NULL

  withProgress(message = 'Importing erasing polygon', value = 0.3, {
    land.sf <- try(
      st_read(input$overlay_land_gis_gdb_path, input$overlay_land_gis_gdb_name,
              quiet = TRUE),
      silent = TRUE
    )

    validate(
      need(inherits(land.sf, c("sf", "sfc")),
           "Error: Could not import object using provided path and name")
    )
    incProgress(0.5)

    # Check crs and transform geometry to crs.ll
    temp.land <- overlay_gis_check(st_geometry(land.sf))

    # Store a single geometry; union multi-feature imports
    if (length(temp.land) != 1) {
      vals$overlay.land <- st_union(temp.land)
    } else {
      vals$overlay.land <- temp.land
    }
    incProgress(0.2)
  })

  # Return empty string - success message printed elsewhere
  ""
})

###############################################################################


# --- inst/shiny/server_2_overlay/server_2_overlay_renderUI.R ---
# Items that are rendered for Overlay Predictions tab

###############################################################################
### Widget for user to select file with projection to use in overlay
output$overlay_proj_sdm_uiOut_select <- renderUI({
  req(!input$overlay_proj_native, input$overlay_proj_method == 2)

  # Choice values are indices; displayed names are the model names
  sdm.opts <- seq_along(vals$models.names)
  names(sdm.opts) <- vals$models.names

  selectInput("overlay_proj_sdm",
              tags$h5("Perform the overlay in the coordinate system of the",
                      "selected original predictions"),
              choices = sdm.opts, selected = 1)
})


###############################################################################
### Widget (button) to preview base geometry with erasing and study area polys
output$overlay_preview_base_execute_uiOut_button <- renderUI({
  validate(
    need(isTruthy(input$overlay_loaded_table_rows_selected),
         "Select a set of original predictions to use as the base geometry"),
    errorClass = "validation2"
  )

  # Each polygon checkbox that is checked must have its polygon imported
  validate(
    if (input$overlay_bound)
      need(isTruthy(vals$overlay.bound),
           paste("Either uncheck the 'study area polygon' checkbox or",
                 "import a study area polygon to preview the base geometry")),
    if (input$overlay_land)
      need(isTruthy(vals$overlay.land),
           paste("Either uncheck the 'erasing polygon' checkbox or",
                 "import an erasing polygon to preview the base geometry")),
    errorClass = "validation2"
  )

  actionButton("overlay_preview_base_execute", "Preview base geometry")
})


###############################################################################
### Widgets for user to select overlaid predictions and value type to plot
output$overlay_preview_overlaid_models_uiOut_selectize <- renderUI({
  validate(
    need(length(vals$overlaid.models) > 0,
         "Create overlaid predictions to use this section of the GUI"),
    errorClass = "validation2"
  )

  overlaid.opts <- seq_along(vals$overlaid.models)
  names(overlaid.opts) <- paste("Overlaid", overlaid.opts)

  selectizeInput(
    "overlay_preview_overlaid_models",
    tags$h5("Select overlaid predictions to preview; 'Overlaid' numbers",
            "correspond to 'Original' numbers in the table above"),
    choices = overlaid.opts, selected = NULL, multiple = TRUE
  )
})

output$overlay_preview_message <- renderUI({
  validate(
    "Create overlaid predictions to use this section of the GUI",
    errorClass = "validation2"
  )
})

###############################################################################


# --- inst/shiny/server_3_createEns/server_3_createEns.R ---
# Create a simple ensemble from overlaid predictions and process ensembles
# once they're created


###############################################################################
# Flags for conditionalPanel

#------------------------------------------------
### Flag for if overlaid predictions have been created
output$ens_display_flag <- reactive({
  length(vals$overlaid.models) > 0
})
outputOptions(output, "ens_display_flag", suspendWhenHidden = FALSE)


#------------------------------------------------
### Flag for if at least 2 overlaid predictions are selected to ensemble
output$ens_overlaid_selected_flag <- reactive({
  length(create_ens_overlaid_idx_num()) >= 2
})
outputOptions(output, "ens_overlaid_selected_flag", suspendWhenHidden = FALSE)


#------------------------------------------------
### Flag: any ensemble predictions created
output$ens_display_ens_flag <- reactive({
  length(vals$ensemble.models) > 0
})
outputOptions(output, "ens_display_ens_flag", suspendWhenHidden = FALSE)


#------------------------------------------------
### Flag: any created ensemble predictions selected to be used in action
output$ens_models_selected_flag <- reactive({
  isTruthy(input$ens_datatable_ensembles_rows_selected)
})
outputOptions(output, "ens_models_selected_flag", suspendWhenHidden = FALSE)


###############################################################################
# Remove selected ensemble(s)
ens_remove <- eventReactive(input$ens_remove_execute, {
  rows.sel <- input$ens_datatable_ensembles_rows_selected

  validate(
    need(length(rows.sel) > 0,
         paste("Error: Please select one or more sets of",
               "ensemble predictions to remove"))
  )

  #------------------------------------
  ### Remove the reactiveValue info for selected set(s) of ensemble predicitons
  vals$ensemble.models <- vals$ensemble.models[-rows.sel]
  vals$ensemble.overlaid.res <- vals$ensemble.overlaid.res[-rows.sel]
  vals$ensemble.specs <- vals$ensemble.specs[-rows.sel]

  if (length(vals$ensemble.overlaid.res) == 0) vals$ensemble.overlaid.res <- NULL
  if (length(vals$ensemble.specs) == 0) vals$ensemble.specs <- NULL


  # Handle other places this data was used
  #------------------------------------
  ### If predictions being removed were interactively previewed, remove preview
  ### Else, adjust vals idx
  if (isTruthy(vals$ensemble.plot.leaf.idx)) {
    if (isTruthy(any(rows.sel %in% vals$ensemble.plot.leaf.idx))) {
      vals$ensemble.plot.leaf <- NULL
      vals$ensemble.plot.leaf.idx <- NULL

    } else {
      # Shift each remaining preview index down by the number of removed
      # rows that preceded it
      leaf.shift <- sapply(vals$ensemble.plot.leaf.idx,
                           function(i) sum(rows.sel < i))
      vals$ensemble.plot.leaf.idx <- vals$ensemble.plot.leaf.idx - leaf.shift
      validate(
        need(all(vals$ensemble.plot.leaf.idx > 0),
             paste("Error: While deleting ensemble predictions, error 1.",
                   "Please report this as an issue"))
      )
    }
  }

  #------------------------------------
  ### If predictions being removed were staticly previewed, remove preview
  ### Else, adjust vals idx
  if (isTruthy(vals$ensemble.plot.idx)) {
    if (isTruthy(any(rows.sel %in% vals$ensemble.plot.idx))) {
      vals$ensemble.plot <- NULL
      vals$ensemble.plot.idx <- NULL

    } else {
      static.shift <- sapply(vals$ensemble.plot.idx,
                             function(i) sum(rows.sel < i))
      vals$ensemble.plot.idx <- vals$ensemble.plot.idx - static.shift
      validate(
        need(all(vals$ensemble.plot.idx > 0),
             paste("Error: While deleting ensemble predictions, error 1b.",
                   "Please report this as an issue"))
      )
    }
  }

  #------------------------------------
  ### Remove evaluation metrics if they're calculated for ensemble model preds
  # TODO: make this so it only removes the metrics of models being removed
  if (isTruthy(vals$eval.models.idx[[3]])) {
    vals$eval.models.idx <- NULL
    vals$eval.metrics <- NULL
    vals$eval.metrics.names <- NULL
  }

  #------------------------------------
  ""
})


###############################################################################
# Predicted abundance

#----------------------------------------------------------
### Calculate predicted abundance(s)
ens_abund_values <- reactive({
  validate(
    need(length(input$ens_datatable_ensembles_rows_selected) > 0,
         paste("Error: Please select at least one set of",
               "ensemble predictions from the table"))
  )

  ensemble.which <- sort(input$ens_datatable_ensembles_rows_selected)

  # Abundance (rounded to 1 decimal) for each selected ensemble
  ens.abund <- vapply(vals$ensemble.models[ensemble.which], function(i) {
    round(eSDM::model_abundance(
      st_sf(i, vals$overlay.base.sfc, agr = "constant"), "Pred_ens"
    ), 1)
  }, 1)
  names(ens.abund) <- paste("Ensemble", ensemble.which)

  ens.abund
})

#----------------------------------------------------------
### Generate table of calculated abundances
table_ens_abund <- eventReactive(input$ens_calc_abund_execute, {
  req(input$ens_select_action == 5)
  ens.abund <- ens_abund_values()

  data.frame(
    "Predictions" = names(ens.abund), "Abundance" = unname(ens.abund),
    stringsAsFactors = FALSE
  )
})

###############################################################################


# --- inst/shiny/server_3_createEns/server_3_createEns_create_weighted.R ---
### Code for creating weighted ensembles


###############################################################################
###############################################################################
# Weighted ensemble method 1: 'Manual entry'

### Process text inputs for weights and return vector of weights
create_ens_weights_manual <- reactive({
  preds.weights <- suppressWarnings(
    esdm_parse_num(req(input$create_ens_weight_manual))
  )

  validate(
    need(!anyNA(preds.weights),
         paste("Error: One or more of the weights was not recognized as",
               "a number; please ensure that all of the weights are numbers",
               "separated by a comma and a space"))
  )

  # Number of predictions the entered weights must correspond to
  if (input$create_ens_table_subset) {
    models.num <- length(input$create_ens_datatable_rows_selected)
  } else {
    models.num <- length(vals$overlaid.models)
  }

  # Validate weights input
  validate(
    need(length(preds.weights) == models.num,
         paste("Error: The number of entered weights does not",
               "match the number of selected overlaid predictions"))
  )
  validate(
    need(all(preds.weights > 0),
         "Error: All entered weights must be greater than zero")
  )
  validate(
    need(round(sum(preds.weights), 3) == 1,
         "Error: The entered weights do not sum to 1")
  )

  preds.weights
})


###############################################################################
###############################################################################
# Weighted ensemble method 2: 'Evaluation metric'

### Table of selected metrics
create_ens_weights_metric_table <- reactive({
  req(
    input$create_ens_weights_metric,
    all(create_ens_overlaid_idx() %in% vals$eval.models.idx[[2]])
  )

  # Get desired metric for desired overlaid models from eval metrics table
  eval.metrics <- table_eval_metrics()

  idx.col <- which(names(eval.metrics) == input$create_ens_weights_metric)
  idx.row <- grep("Overlaid", eval.metrics$Predictions)
  idx.row <- idx.row[vals$eval.models.idx[[2]] %in% create_ens_overlaid_idx()]

  weights.table <- eval.metrics[idx.row, c(1, idx.col)]

  # Prep for display: normalize metric values so weights sum to 1
  weights.table$R.weights <- weights.table[, 2] / sum(weights.table[, 2])
  names(weights.table)[3] <- "Weights"
  row.names(weights.table) <- 1:nrow(weights.table)

  weights.table
})

### Return vector of weights based on evaluation metrics
create_ens_weights_metric <- reactive({
  # Check that selected predictions have calculated metrics
  validate(
    need(all(create_ens_overlaid_idx() %in% vals$eval.models.idx[[2]]),
         paste("Error: You must calculate at least one metric for all",
               "selected overlaid predictions"))
  )
  create_ens_weights_metric_table()[, 3]
})


###############################################################################
###############################################################################
# Weighted ensemble method 3: 'Pixel-level spatial weights'

### Vector of idx of selected overlaid models that have spatial weights
create_ens_weights_pix_which <- reactive({
  which(!is.na(vapply(vals$overlaid.specs, function(i) i["col_weight"], "1")))
})

### Generate data frame of pixel weights - fed into table and ensemble_create
create_ens_weights_pix <- reactive({
  ens.which <- create_ens_overlaid_idx()
  ens.which.spatial <- create_ens_weights_pix_which()

  # Need validate() call here for ensemble function
  validate(
    need(any(ens.which.spatial %in% ens.which),
         paste("Error: At least one of the selected overlaid predictions",
               "must have pixel-level spatial weights"))
  )

  # Models without spatial weights get a constant weight of 1 per pixel
  n.pix <- nrow(vals$overlaid.models[[1]])
  w.list <- lapply(ens.which, function(i) {
    if (i %in% ens.which.spatial) {
      vals$overlaid.models[[i]]$Weight
    } else {
      rep(1, n.pix)
    }
  })

  purrr::set_names(data.frame(w.list), paste0("w", seq_along(ens.which)))
})

### Table summarizing pixel-level spatial weights of selected overlaid preds
create_ens_weights_pix_table <- reactive({
  ens.which <- create_ens_overlaid_idx()
  ens.which.spatial <- create_ens_weights_pix_which()

  # Before create_ens_weights_pix() call to avoid 'Error' validation
  validate(
    need(any(ens.which.spatial %in% ens.which),
         paste("At least one of the selected overlaid predictions must have",
               "pixel-level spatial weights to use this weighting method")),
    errorClass = "validation2"
  )

  ens.pix.w <- create_ens_weights_pix()

  data.frame(
    Predictions = paste("Overlaid", ens.which),
    Min = vapply(ens.pix.w, min, 1, na.rm = TRUE),
    Median = vapply(ens.pix.w, median, 1, na.rm = TRUE),
    Mean = vapply(ens.pix.w, mean, 1, na.rm = TRUE),
    Max = vapply(ens.pix.w, max, 1, na.rm = TRUE),
    NAs = vapply(ens.pix.w, function(i) sum(is.na(i)), 1)
  )
})


###############################################################################
###############################################################################
# Weighted ensemble method 4: Weighting by the inverse of the variance

### Vector of idx of selected overlaid preds that have associated uncertainty
create_ens_weights_var_which <- reactive({
  which(!is.na(vapply(vals$overlaid.specs, function(i) i["col_se"], "1")))
})

### Table summarizing variance values of selected overlaid preds
create_ens_weights_var_table <- reactive({
  ens.which <- create_ens_overlaid_idx()
  ens.which.var <- create_ens_weights_var_which()

  # Need validate() call here for display in-app
  validate(
    need(all(ens.which %in% ens.which.var),
         paste("All of the selected overlaid predictions must have",
               "associated uncertainty values to use this weighting method")),
    errorClass = "validation2"
  )

  ens.varvalue <- create_ens_data_rescale()[[2]]

  data.frame(
    Predictions = paste("Overlaid", ens.which),
    Min = vapply(ens.varvalue, min, 1, na.rm = TRUE),
    Median = vapply(ens.varvalue, median, 1, na.rm = TRUE),
    Mean = vapply(ens.varvalue, mean, 1, na.rm = TRUE),
    Max = vapply(ens.varvalue, max, 1, na.rm = TRUE),
    NAs = vapply(ens.varvalue, function(i) sum(is.na(i)), 1)
  )
})

### Create data frame of weights (1 / var)
# ensemble_create() will normalize each row so it sums to 1
create_ens_weights_var <- reactive({
  ens.which <- create_ens_overlaid_idx()
  ens.which.var <- create_ens_weights_var_which()

  # Need validate() call here for ensemble function
  validate(
    need(all(ens.which %in% ens.which.var),
         paste("Error: All of the selected overlaid predictions must have",
               "associated uncertainty values to use this weighting method"))
  )

  purrr::set_names(
    1 / create_ens_data_rescale()[[2]],
    paste0("w", seq_along(ens.which))
  )
})

###############################################################################
###############################################################################


# --- inst/shiny/server_4_evalMetrics/server_4_evalMetrics_funcs.R ---
### Non-reactive functions for Evaluation Metrics tab

###############################################################################
# Process evaluation metric validation data

### Process data frame (x) with long, lat, and data column;
### processing method depends on data type (y): 1 = count, 2 = pres/abs
eval_proc_df <- function(x, y, p.codes, a.codes) {
  #----------------------------------------------------------------------------
  stopifnot(
    is.data.frame(x),
    ncol(x) == 3,
    y %in% c(1, 2)
  )

  if (y == 1) {
    #--------------------------------------------
    # Count data: derive presence/absence (sight) from count > 0
    validate(
      need(is.numeric(x[, 3]) | is.integer(x[, 3]),
           paste("Error: Selected validation data column is not numeric.",
                 "Consider importing data as 'Presence/absence' data"))
    )

    x <- x %>%
      dplyr::rename(lon = 1, lat = 2, count = 3) %>%
      dplyr::mutate(sight = as.numeric(count > 0)) %>%
      dplyr::select(1, 2, 4, 3)

  } else {
    #--------------------------------------------
    # Presence/absence data: map user-specified codes to 1/0
    x <- x %>%
      dplyr::rename(lon = 1, lat = 2, sight.temp = 3) %>%
      dplyr::mutate(count = NA)

    validate(
      need(!(is.null(p.codes) & is.null(a.codes)),
           paste("Error: Please select one or more",
                 "presence codes and absence codes")),
      need(all(!(p.codes %in% a.codes)),
           paste("Error: Please ensure that no presence and",
                 "absence codes are the same")),
      need(all(unique(x$sight.temp) %in% c(p.codes, a.codes)),
           paste("Error: Please ensure that all codes are classified",
                 "as either presence or absence codes"))
    )

    x <- x %>%
      dplyr::mutate(sight = ifelse(sight.temp %in% p.codes, 1, 0)) %>%
      dplyr::select(1, 2, 5, 4)
  }

  #----------------------------------------------------------------------------
  stopifnot(
    ncol(x) == 4,
    names(x) == c("lon", "lat", "sight", "count")
  )

  # Shift 0-360 longitudes into the -180 to 180 range
  if (min(x$lon, na.rm = TRUE) > 180) x$lon <- x$lon - 360

  # Sort by lat (primary) then long for bottom up sort and then create sf obj
  pts <- x %>%
    dplyr::arrange(lat, lon) %>%
    st_as_sf(coords = c("lon", "lat"), crs = crs.ll, agr = "constant")

  # Perform checks
  validate(
    need(inherits(st_geometry(pts), "sfc_POINT"),
         "Error processing validation data")
  )

  # Don't need check_valid() for pts
  check_dateline(pts)
}


###############################################################################
# Generate message detailing the number of validation pts on polygon boundaries
eval_overlap_message <- function(models.toeval, eval.data) {
  # For each model, count validation points that intersect more than one
  # prediction polygon (i.e. fall on a shared boundary)
  pt.over.len <- sapply(
    lapply(models.toeval, function(m) {
      eval.data <- st_transform(eval.data, st_crs(m))
      which(sapply(suppressMessages(st_intersects(eval.data, m)), length) > 1)
    }),
    length
  )

  # Make text pretty
  #--------------------------------------------------------
  if (all(pt.over.len == 0)) {
    paste(
      "The predictions being evaluated had 0 validation points",
      "that fell on the boundary between two or more prediction polygons"
    )

  #------------------------------------------------------
  } else if (length(pt.over.len) == 1) {
    paste(
      "The predictions being evaluated had", pt.over.len, "validation points",
      "that fell on the boundary between two or more prediction polygons;",
      "the predictions from these polygons were averaged for the evaluation.",
      "See Appendix 2 of the manual for more details."
    )

  #------------------------------------------------------
  } else {
    if (zero_range(pt.over.len)) {
      temp <- paste(
        "The predictions being evaluated each had", unique(pt.over.len),
        "validation points"
      )

    } else if (length(pt.over.len) == 2) {
      temp <- paste(
        "The predictions being evaluated had",
        paste(pt.over.len, collapse = " and "),
        "validation points, respectively,"
      )

    } else {
      temp <- paste(
        "The predictions being evaluated had",
        paste0(paste(head(pt.over.len, -1), collapse = ", "), ","),
        "and", tail(pt.over.len, 1), "validation points, respectively,"
      )
    }

    paste(
      temp,
      "that fell on the boundary between two or more prediction polygons;",
      "the predictions from these polygons were averaged for the evaluation.",
      "See Appendix 2 of the manual for more details."
    )
  }
}


###############################################################################


# --- inst/shiny/server_4_evalMetrics/server_4_evalMetrics_renderUI.R ---
# renderUI()'s for eval metrics tab
# Error messages for pres/abs code widgets is done via flags in ..._loadData.R


###############################################################################
# renderUI()'s for csv

### Select long, lat, and data column names
output$eval_csv_names_uiOut_select <- renderUI({
  csv.info <- eval_data_csv_load()
  req(csv.info)

  # Choice values are column indices; displayed names are column names
  col.opts <- seq_along(names(csv.info[[2]]))
  names(col.opts) <- names(csv.info[[2]])

  selectizeInput("eval_csv_names",
                 tags$h5("Select, in this order, the longitude, latitude, and",
                         "validation data column for the uploaded .csv file"),
                 choices = col.opts, selected = NULL, multiple = TRUE)
})

### Select presence codes
output$eval_csv_codes_p_uiOut_select <- renderUI({
  pa.codes <- eval_data_csv_pacodes()
  req(!any(c("error1", "error2") %in% eval_data_csv_pacodes()))
  req(length(pa.codes) > 1)

  selectizeInput("eval_csv_codes_p", tags$h5("Select presence code(s)"),
                 choices = pa.codes, selected = NULL,
                 multiple = TRUE)
})

### Select absence codes
output$eval_csv_codes_a_uiOut_select <- renderUI({
  pa.codes <- eval_data_csv_pacodes()
  req(!any(c("error1", "error2") %in% eval_data_csv_pacodes()))
  req(length(pa.codes) > 1)

  selectizeInput("eval_csv_codes_a", tags$h5("Select absence code(s)"),
                 choices = pa.codes, selected = NULL,
                 multiple = TRUE)
})
import csv validation data
output$eval_csv_execute_uiOut_button <- renderUI({
  # Require the pertinent inputs for the selected validation data type
  if (input$eval_data_type == 1) {
    req(eval_data_csv_load(), length(input$eval_csv_names) == 3)

  } else {
    req(!any(c("error1", "error2") %in% eval_data_csv_pacodes()))
  }

  # Check that validation data column has at least 2 unique, non-NA values
  col.pa <- as.numeric(input$eval_csv_names[3])
  req(col.pa <= ncol(eval_data_csv_load()[[2]]))
  choice.input.names <- na.omit(unique(eval_data_csv_load()[[2]][, col.pa]))
  validate(
    need(length(choice.input.names) >= 2,
         paste("The validation data column must contain at least two",
               "unique values to be used in the GUI")),
    errorClass = "validation2"
  )

  # Action
  actionButton("eval_csv_execute", "Import validation data")
})


###############################################################################
# renderUI()'s for GIS

### Select data column name
output$eval_gis_names_uiOut_select <- renderUI({
  req(vals$eval.data.gis.info)
  req(vals$eval.data.gis.info[[1]],
      vals$eval.data.gis.info[[3]] == input$eval_load_type)

  # Column names of the loaded GIS object (geometry column excluded)
  choice.input.names <- names(
    st_set_geometry(vals$eval.data.gis.info[[2]], NULL)
  )
  choice.input <- seq_along(choice.input.names)
  names(choice.input) <- choice.input.names

  selectInput("eval_gis_names",
              tags$h5("Select the validation data column for the",
                      "uploaded object"),
              choices = choice.input, selected = NULL)
})

### Select presence codes
output$eval_gis_codes_p_uiOut_select <- renderUI({
  choice.input.names <- eval_data_gis_pacodes()
  req(!("error2" %in% choice.input.names))
  req(length(choice.input.names) > 1)

  selectizeInput("eval_gis_codes_p", tags$h5("Select presence code(s)"),
                 choices = choice.input.names, selected = NULL,
                 multiple = TRUE)
})

### Select absence
codes 103 | output$eval_gis_codes_a_uiOut_select <- renderUI({ 104 | choice.input.names <- eval_data_gis_pacodes() 105 | req(!("error2" %in% choice.input.names)) 106 | req(length(choice.input.names) > 1) 107 | 108 | selectizeInput("eval_gis_codes_a", tags$h5("Select absence code(s)"), 109 | choices = choice.input.names, selected = NULL, 110 | multiple = TRUE) 111 | }) 112 | 113 | ### Button to click to import GIS validation data 114 | output$eval_gis_execute_uiOut_button <- renderUI({ 115 | if (input$eval_data_type == 1) { 116 | req(vals$eval.data.gis.info) 117 | req(vals$eval.data.gis.info[[1]], 118 | vals$eval.data.gis.info[[3]] == input$eval_load_type) 119 | 120 | } else { 121 | req(!("error2" %in% eval_data_gis_pacodes())) 122 | } 123 | 124 | # Check that validation data column has at least 2 unique, non-NA values 125 | col.pa <- as.numeric(req(input$eval_gis_names)) 126 | choice.input.names <- na.omit(unique( 127 | st_set_geometry(vals$eval.data.gis.info[[2]], NULL)[, col.pa] 128 | )) 129 | validate( 130 | need(length(choice.input.names) >= 2, 131 | paste("The validation data column must contain at least two", 132 | "unique values to be used to in the GUI")), 133 | errorClass = "validation2" 134 | ) 135 | 136 | # Action 137 | actionButton("eval_gis_execute", "Import validation data") 138 | }) 139 | 140 | 141 | ############################################################################### 142 | # Calculate metrics box 143 | 144 | ### Message about count data -> binary data 145 | output$table_eval_pts_countmessage_out <- renderText({ 146 | req(vals$eval.data.specs) 147 | 148 | if (vals$eval.data.specs[[2]] == 1) { 149 | paste( 150 | "To calculate AUC and TSS, the count data will be converted to", 151 | "presence/absence data by classifying", 152 | "points with counts greater than zero as presence points, and", 153 | "points with counts of zero as absence points" 154 | ) 155 | } else if (vals$eval.data.specs[[2]] == 2) { 156 | NULL 157 | } else { 158 | 
validate("Error: error in validation data processing") 159 | } 160 | 161 | }) 162 | 163 | 164 | ### Choice of metric(s) to calculate 165 | output$eval_metrics_which_uiOut_check <- renderUI({ 166 | req(vals$eval.data.specs) 167 | 168 | choices.list <- list("AUC", "TSS", "RMSE") 169 | if (vals$eval.data.specs[[2]] == 2) choices.list <- choices.list[1:2] 170 | 171 | input.lab <- tags$h5( 172 | helpText("See 'Metrics Descriptions and References'", 173 | "section below for metric information"), 174 | "Metric(s) to calculate" 175 | ) 176 | 177 | checkboxGroupInput("eval_metrics_which", input.lab, choices = choices.list) 178 | }) 179 | 180 | ############################################################################### 181 | -------------------------------------------------------------------------------- /inst/shiny/server_5_prettyPlot/server_5_prettyPlot_download.R: -------------------------------------------------------------------------------- 1 | # Download plots from 'High Quality Maps' tab 2 | 3 | 4 | ############################################################################### 5 | # Download handler - High Quality Maps 6 | output$pretty_download_execute <- downloadHandler( 7 | filename = function() { 8 | download_plot_ext( 9 | input$pretty_download_format, input$pretty_download_name 10 | ) 11 | }, 12 | 13 | content = function(file) { 14 | withProgress(message = "Downloading high quality map", value = 0.4, { 15 | #---------------------------------------------------- 16 | plot.which <- input$pretty_toplot_table_out_rows_selected 17 | plot.nrow <- input$pretty_nrow 18 | plot.ncol <- input$pretty_ncol 19 | plot.width <- input$pretty_width_inch 20 | plot.height <- input$pretty_height_inch 21 | 22 | req( 23 | plot.which, (plot.nrow * plot.ncol) >= length(plot.which), 24 | plot.width > 0, plot.height > 0 25 | ) 26 | 27 | plot.res <- ifelse(input$pretty_download_res == "1", 300, 72) 28 | plot.format <- input$pretty_download_format 29 | incProgress(0.2) 30 | 31 | 32 | 
#---------------------------------------------------- 33 | dims.vec <- c( 34 | nrow = plot.nrow, ncol = plot.ncol, width = plot.width, 35 | height = plot.height 36 | ) 37 | 38 | tmap.todownload <- plot_pretty_top( 39 | dims.vec, vals$pretty.toplot.idx[plot.which], 40 | vals$pretty.params.toplot[plot.which] 41 | ) 42 | 43 | 44 | #---------------------------------------------------- 45 | esdm.tmap_options.orig <- tmap_options()$show.messages 46 | tmap_options(show.messages = FALSE) 47 | 48 | tmap_save( 49 | tmap.todownload, file, dpi = plot.res, 50 | width = plot.width, height = plot.height, units = "in" 51 | ) 52 | incProgress(0.4) 53 | 54 | tmap_options(show.messages = esdm.tmap_options.orig) 55 | rm(esdm.tmap_options.orig) 56 | 57 | #---------------------------------------------------- 58 | }) 59 | } 60 | ) 61 | 62 | 63 | ############################################################################### 64 | # renderUI()'s 65 | 66 | ### Filename 67 | output$pretty_download_name_uiOut_text <- renderUI({ 68 | req(vals$pretty.params.toplot) 69 | 70 | maps.selected <- input$pretty_toplot_table_out_rows_selected 71 | 72 | res.txt <- ifelse(input$pretty_download_res == 1, "300ppi", "72ppi") 73 | 74 | if (length(maps.selected) == 1) { 75 | req(maps.selected <= length(vals$pretty.params.toplot)) 76 | id.txt <- paste( 77 | unlist(strsplit(vals$pretty.params.toplot[[maps.selected]]$id, " ")), 78 | collapse = "_" 79 | ) 80 | f.val <- paste0("eSDM_", id.txt, res.txt) 81 | 82 | } else { 83 | f.val <- paste0("eSDM_map_", res.txt) 84 | } 85 | 86 | input.lab <- "Filename (without file extension)" 87 | textInput("pretty_download_name", tags$h5(input.lab), value = f.val) 88 | }) 89 | 90 | ### Download button 91 | output$pretty_download_execute_uiOut_download <- renderUI({ 92 | req(vals$pretty.params.toplot) 93 | 94 | plot.which <- input$pretty_toplot_table_out_rows_selected 95 | plot.nrow <- input$pretty_nrow 96 | plot.ncol <- input$pretty_ncol 97 | plot.width <- 
input$pretty_width_inch 98 | plot.height <- input$pretty_height_inch 99 | 100 | validate( 101 | need(plot.which, 102 | "You must select at least one saved map to download"), 103 | errorClass = "validation2" 104 | ) 105 | 106 | validate( 107 | need(inherits(plot.nrow, "integer") && inherits(plot.ncol, "integer"), 108 | paste("'Number of rows' and 'Number of columns'", 109 | "must be whole numbers")), 110 | need(isTruthy(plot.width) && isTruthy(plot.height) && 111 | is.numeric(plot.width) && is.numeric(plot.height), 112 | paste("'Plot width (in)' and 'Plot height (in)'", 113 | "must be numbers")), 114 | errorClass = "validation2" 115 | ) 116 | 117 | validate( 118 | need((plot.nrow * plot.ncol) >= length(plot.which), 119 | paste("'Number of rows' * 'Number of columns' must be", 120 | "greater than or equal to the number of items", 121 | "selected from the to-plot list to plot")), 122 | need(plot.width > 0 && plot.height > 0, 123 | paste("'Plot width (in)' and 'Plot height (in)' must both", 124 | "be greater than 0")), 125 | errorClass = "validation2" 126 | ) 127 | 128 | downloadButton("pretty_download_execute", "Download map(s)") 129 | }) 130 | 131 | ############################################################################### 132 | -------------------------------------------------------------------------------- /inst/shiny/server_5_prettyPlot/server_5_prettyPlot_funcs.R: -------------------------------------------------------------------------------- 1 | # Functions used in pretty plot section 2 | 3 | ############################################################################### 4 | # Create a sfc object from vector of plot limits (e.g. 
output of st_bbox())
# x format: c(xmin, xmax, ymin, ymax)
pretty_range_poly_func <- function(x, poly.crs) {
  stopifnot(length(x) == 4)

  # Closed rectangle: five vertices, first and last identical
  corner.x <- x[c(1, 1, 2, 2, 1)]
  corner.y <- x[c(3, 4, 4, 3, 3)]

  st_sfc(
    st_polygon(list(cbind(corner.x, corner.y))), crs = poly.crs
  )
}


###############################################################################
# Check if x is completely within y, and if not then clip x by y
# tmap explanation
#   tmap can plot objects in [0, 360] range, but only if object has some
#   area in [0, 180] and 'bbox' arg of tm_shape() is specified
#   tmap can plot objects in (180, 360] range only when crs is NA,
#   although this causes warnings to be printed. One warning can be taken
#   care of using 'projection' arg of tm_shape()
pretty_int_func <- function(x, y, x.name) {
  UseMethod("pretty_int_func")
}

pretty_int_func.sf <- function(x, y, x.name) {
  # Only clip when at least one feature of x is not covered by y
  covered <- suppressMessages(st_covers(y, x))
  if (length(covered[[1]]) != nrow(x)) {
    x <- suppressMessages(st_intersection(x, y))
  }

  # Predictions will always be sf object
  msg <- if (identical(x.name, "selected predictions")) {
    paste(
      "Error: None of the geometry of the", x.name,
      "is within the specified map range;",
      "adjust the map range to plot these predictions"
    )
  } else {
    paste(
      "Error: None of the geometry of the", x.name,
      "is within the specified map range;",
      "either remove this object or adjust the map range"
    )
  }

  validate(
    need(nrow(x) > 0 && !inherits(st_geometry(x), "sfc_LINESTRING"),
         msg)
  )

  x
}

pretty_int_func.sfc <- function(x, y, x.name) {
  # Only clip when at least one geometry of x is not covered by y
  covered <- suppressMessages(st_covers(y, x))
  if (length(covered[[1]]) != length(x)) {
    x <- suppressMessages(st_intersection(x, y))
  }

  msg <- paste(
    "Error: None of the geometry of the", x.name,
    "is within the specified map range;",
    "either remove this object or adjust the map range"
  )

  validate(
    need(length(x) > 0 && !inherits(x, "sfc_LINESTRING"),
         msg)
  )

  x
}


###############################################################################
# Set crs of objects as NA if their long range is (180, 360] dec deg
pretty_crsNA_func <- function(x) {
  # Western-hemisphere polygon in WGS84, transformed to x's crs, locates
  # where longitude -180/180 falls in that crs
  hemi.west <- st_sfc(st_polygon(list(
    matrix(c(-180, 0, 0, -180, -180, -90, -90, 90, 90, -90), ncol = 2)
  )), crs = 4326)
  hemi.west <- st_transform(hemi.west, st_crs(x))
  lon.cutoff <- abs(unname(st_bbox(hemi.west))[1])

  if (st_bbox(x)[1] > lon.cutoff) st_set_crs(x, NA) else x
}


###############################################################################
# Calculate color scheme data breaks and legend labels
pretty_colorscheme_func <- function(x, data.name, map.range, perc, color.num,
                                    leg.perc.esdm, leg.round) {
  # Clip predictions to map range
  range.poly <- pretty_range_poly_func(map.range, st_crs(x))
  x <- pretty_int_func(x, range.poly, "selected predictions")
  pred.vals <- st_set_geometry(x, NULL)[, data.name]

  # Get color scheme info for clipped predictions
  if (perc) {
    # Percentages; legend labels are supplied by the caller
    data.breaks <- breaks_calc(pred.vals)
    labels.lab.pretty <- leg.perc.esdm

  } else {
    # Values; evenly spaced breaks with "lo - hi" legend labels
    data.breaks <- seq(
      min(pred.vals, na.rm = TRUE), max(pred.vals, na.rm = TRUE),
      length.out = (color.num + 1)
    )
    break.labs <- round(data.breaks, leg.round)
    labels.lab.pretty <- paste(
      format(head(break.labs, -1), nsmall = leg.round),
      format(tail(break.labs, -1), nsmall = leg.round),
      sep = " - "
    )
  }

  list(data.breaks, labels.lab.pretty)
}

###############################################################################
-------------------------------------------------------------------------------- /inst/shiny/server_6_export/server_6_export_renderUI.R: -------------------------------------------------------------------------------- 1 | ### renderUI code for Export Predictions tab 2 | 3 | 4 | ############################################################################### 5 | ### Predictions with coord system in which to export selected predictions 6 | output$export_proj_sdm_uiOut_select <- renderUI({ 7 | req(vals$models.names, !input$export_proj_native, 8 | input$export_proj_method == 2) 9 | 10 | choices.list.names <- vals$models.names 11 | choices.list <- seq_along(choices.list.names) 12 | names(choices.list) <- choices.list.names 13 | 14 | selectInput("export_proj_sdm", 15 | tags$h5("Export predictions in the coordinate system of the", 16 | "selected SDM predictions"), 17 | choices = choices.list, selected = 1) 18 | }) 19 | 20 | 21 | ############################################################################### 22 | ### Default filename of exported object 23 | output$export_filename_uiOut_text <- renderUI({ 24 | req(length(vals$models.ll) > 0) 25 | x <- input$export_table_orig_out_rows_selected 26 | y <- input$export_table_over_out_rows_selected 27 | z <- input$export_table_ens_out_rows_selected 28 | req(sum(!sapply(list(x, y, z), is.null)) == 1) 29 | 30 | #------------------------------------ 31 | ### Extract first term of filename 32 | if (isTruthy(x)) { 33 | # Original predictions 34 | table.info <- table_orig()[x, ] 35 | filename.value <- paste(table.info[, 1:2], collapse = "__") 36 | filename.value <- paste0(filename.value, "__orig") 37 | 38 | } else if (isTruthy(y)) { 39 | # Overlaid predictions 40 | table.info <- table_overlaid()[y, ] 41 | filename.value <- paste(table.info[, 1:2], collapse = "__") 42 | filename.value <- paste0(filename.value, "__overlaid") 43 | 44 | } else { # isTruthy(z) 45 | # Ensemble predictions 46 | table.info <- table_ensembles()[z, ] 47 | 48 | 
table.info$`Predictions used` <- switch( 49 | table.info$`Predictions used`, 50 | "All overlaid" = "All", substring( table.info$`Predictions used`, 10) 51 | ) 52 | 53 | rescale.txt <- table.info$`Rescaling method` 54 | rescale.txt <- ifelse( 55 | grepl("Abund", rescale.txt), 56 | paste0("Abund", strsplit(rescale.txt, ": ")[[1]][2]), 57 | switch( 58 | rescale.txt, "None" = "None", "Sum to 1" = "Sumto1" 59 | ) 60 | ) 61 | table.info$`Rescaling method` <- rescale.txt 62 | 63 | table.info$`Uncertainty method` <- ifelse( 64 | table.info$`Uncertainty method` == "Among-model", "AMV", "WMV" 65 | ) 66 | 67 | filename.value <- paste(table.info, collapse = "_") 68 | filename.value <- gsub(", ", "+", filename.value) #Change comma to '+' 69 | filename.value <- gsub(" ", "", filename.value) #Gets rid of spaces 70 | } 71 | 72 | #------------------------------------ 73 | ### Projection info 74 | if (input$export_proj_360) filename.value <- paste0(filename.value, "_360") 75 | 76 | #------------------------------------ 77 | ### Prefix 78 | filename.value <- paste0("eSDM_", gsub("\\.", "_", filename.value)) 79 | 80 | #------------------------------------ 81 | 82 | input.lab <- "Filename (without file extension)" 83 | textInput("export_filename", tags$h5(input.lab), value = filename.value) 84 | }) 85 | 86 | 87 | ############################################################################### 88 | ### Message about whether predictions have weight data 89 | output$export_weight_inc_uiOut_text <- renderUI({ 90 | req(length(vals$models.ll) > 0) 91 | x <- input$export_table_orig_out_rows_selected 92 | y <- input$export_table_over_out_rows_selected 93 | z <- input$export_table_ens_out_rows_selected 94 | req(sum(!sapply(list(x, y, z), is.null)) == 1) 95 | 96 | if (isTruthy(z)) { 97 | tags$h5("Ensemble predictions do not have any weight data to export,", 98 | "and thus the downloaded file will not contain any weight data") 99 | } else { 100 | NULL 101 | } 102 | }) 103 | 104 | 105 | 
############################################################################### 106 | ### Download button to export predictions 107 | output$export_out_uiOut_download <- renderUI({ 108 | validate( 109 | need(!identical(input$export_filename, ""), 110 | "Error: Please enter a filename") 111 | ) 112 | req(input$export_filename) 113 | 114 | export_crs() #to get validate() 115 | 116 | downloadButton("export_out", "Export predictions") 117 | }) 118 | 119 | ############################################################################### 120 | -------------------------------------------------------------------------------- /inst/shiny/server_other/server_roadmap_download.R: -------------------------------------------------------------------------------- 1 | # Download manuscript data, sample data, or manual 2 | # NOTEs: 3 | # The shiny app must finish fully loading/rendering before 4 | # the user can download files. 5 | # If user clicks download button too soon, they'll get html download 6 | 7 | ############################################################################### 8 | # Download manuscript data 9 | output$download_data_manuscript <- downloadHandler( 10 | filename = function() "eSDM_data_manuscript.zip", 11 | 12 | content = function(file) { 13 | withProgress(message = "Downloading manuscript data", value = 0.6, { 14 | sample.try <- try( 15 | download.file( 16 | "https://github.com/SWFSC/eSDM-data/raw/master/eSDM_data_manuscript.zip", 17 | destfile = file, quiet = TRUE 18 | ), 19 | silent = TRUE 20 | ) 21 | 22 | req(sample.try) 23 | incProgress(0.4) 24 | }) 25 | } 26 | ) 27 | 28 | 29 | ############################################################################### 30 | # Download sample data 31 | output$download_data_sample <- downloadHandler( 32 | filename = function() "eSDM_data_sample.zip", 33 | 34 | content = function(file) { 35 | withProgress(message = "Downloading sample data", value = 0.6, { 36 | sample.try <- try( 37 | download.file( 38 | 
"https://github.com/SWFSC/eSDM-data/raw/master/eSDM_data_sample.zip", 39 | destfile = file, quiet = TRUE 40 | ), 41 | silent = TRUE 42 | ) 43 | 44 | req(sample.try) 45 | incProgress(0.4) 46 | }) 47 | } 48 | ) 49 | 50 | 51 | ############################################################################### 52 | # Download manual 53 | output$download_manual <- downloadHandler( 54 | filename = function() "eSDM_manual.pdf", 55 | 56 | content = function(file) { 57 | withProgress(message = "Downloading manual", value = 0.6, { 58 | sample.try <- try( 59 | download.file( 60 | "https://github.com/SWFSC/eSDM/raw/master/inst/shiny/www/eSDM_manual.pdf", 61 | destfile = file, quiet = TRUE, mode = "wb" 62 | ), 63 | silent = TRUE 64 | ) 65 | 66 | req(sample.try) 67 | incProgress(0.4) 68 | }) 69 | } 70 | ) 71 | 72 | ############################################################################### 73 | -------------------------------------------------------------------------------- /inst/shiny/server_other/server_tables.R: -------------------------------------------------------------------------------- 1 | ### Reactive functions that return data frames for tables for various tabs 2 | # Most tables use 'if... 
return()' rather than 'req()' so that 3 | # subsequent reactive functions won't be stopped 4 | 5 | 6 | ############################################################################### 7 | # Original predictions 8 | 9 | #------------------------------------------------------------------------------ 10 | ### Table of original predictions 11 | table_orig <- reactive({ 12 | if (length(vals$models.ll) == 0) return() 13 | 14 | data.frame(vals$models.names, t(as.data.frame(vals$models.data.names)), 15 | vapply(as.numeric(vals$models.pred.type), function(i) { 16 | switch(i, "Absolute density", "Relative density", "Abundance") 17 | }, character(1)), 18 | stringsAsFactors = FALSE) %>% 19 | `rownames<-`(paste("Original", seq_along(vals$models.names))) %>% 20 | purrr::set_names( 21 | c("SDM filename", "Prediction", "Uncertainty", "Weight", 22 | "Pred value type")) 23 | }) 24 | 25 | 26 | #------------------------------------------------------------------------------ 27 | ### Table of original predictions with stats 28 | table_orig_stats <- reactive({ 29 | req(table_orig()) 30 | 31 | data.frame(vals$models.names, t(as.data.frame(vals$models.specs)), 32 | stringsAsFactors = FALSE) %>% 33 | `rownames<-`(paste("Original", seq_along(vals$models.names))) %>% 34 | purrr::set_names( 35 | c("SDM filename", "Resolution", "Polygon count", 36 | "Non-NA prediction count", "Abundance", "Long, lat range")) 37 | }) 38 | 39 | 40 | ############################################################################### 41 | ### Table of overlaid predictions in 'Create ensemble predictions' tab 42 | table_overlaid <- reactive({ 43 | if (length(vals$overlaid.models) == 0) return() 44 | 45 | data.frame(t(data.frame(vals$overlaid.specs)), stringsAsFactors = FALSE) %>% 46 | `rownames<-`(paste("Overlaid", seq_along(vals$overlaid.specs))) %>% 47 | purrr::set_names( 48 | c("SDM filename", "Prediction", "Uncertainty", "Weight", 49 | "Pred value type", "Resolution", "Polygon count", 50 | "Non-NA prediction count", 
"Abundance", "Long, lat range")) 51 | }) 52 | 53 | 54 | ############################################################################### 55 | # Ensemble predictions 56 | 57 | #------------------------------------------------------------------------------ 58 | ### Table of created ensemble predictions 59 | table_ensembles <- reactive({ 60 | if (length(vals$ensemble.models) == 0) return() 61 | 62 | data.frame(t(data.frame(vals$ensemble.specs)), stringsAsFactors = FALSE) %>% 63 | `rownames<-`(paste("Ensemble", seq_along(vals$ensemble.specs))) %>% 64 | dplyr::select(1, 2, 4, 6) %>% 65 | purrr::set_names( 66 | c("Predictions used", "Rescaling method", "Ensemble method", 67 | "Uncertainty method")) 68 | }) 69 | 70 | #------------------------------------------------------------------------------ 71 | ### Table of created ensemble predictions with stats 72 | table_ensembles_stats <- reactive({ 73 | if (length(vals$ensemble.models) == 0) return() 74 | 75 | data.frame(t(data.frame(vals$ensemble.specs)), stringsAsFactors = FALSE) %>% 76 | `rownames<-`(paste("Ensemble", seq_along(vals$ensemble.specs))) %>% 77 | dplyr::select(1, 3, 4, 5) %>% 78 | purrr::set_names( 79 | c("Predictions used", "Regional exclusion for", "Ensemble method", 80 | "Weights")) 81 | }) 82 | 83 | ############################################################################### 84 | # Other are rendered in their pertinent files 85 | 86 | ############################################################################### 87 | -------------------------------------------------------------------------------- /inst/shiny/ui_files/ui_7_manual.R: -------------------------------------------------------------------------------- 1 | ### UI code for the 'Manual' tab 2 | 3 | ui.manual <- function() { 4 | tabItem( 5 | tabName = "manual", 6 | fluidRow( 7 | column( 8 | width = 10, offset = 1, 9 | fluidRow( 10 | box( 11 | title = "eSDM GUI Manual", width = 12, 12 | tags$strong("Click the 'Fit to page' button in the pdf viewer 
(above the '+' and '-' buttons)", 13 | "once or twice to resize the display of the manual to fit to page and fit to width, respectively.", 14 | tags$br(), 15 | "You also can click the 'Download' button on the top bar of the pdf viewer", 16 | "to download the manual as a PDF."), 17 | tags$br(), 18 | tags$br(), 19 | tags$br(), 20 | tags$iframe(height = "700px", width = "100%", scrolling = "yes", src = "eSDM_manual.pdf") 21 | # The above code could also be in server.R within a renderUI(), with corresponding uiOutput() here 22 | ) 23 | ) 24 | ) 25 | ) 26 | ) 27 | } 28 | -------------------------------------------------------------------------------- /inst/shiny/ui_files/ui_funcs.R: -------------------------------------------------------------------------------- 1 | # Functions for common eSDM UI structures 2 | 3 | ############################################################################### 4 | # 'Actions to perform with selected ... predictions' 5 | 6 | #------------------------------------------------------------------------------ 7 | ### Interactive preview 8 | ui_interactive_preview <- function(x) { 9 | conditionalPanel( 10 | condition = paste0("input.", x, "_select_action == 1"), 11 | fluidRow( 12 | column(4, radioButtons(paste0(x, "_preview_interactive_perc"), 13 | tags$h5("Units"), 14 | choices = preview.static.perc, selected = 1)), 15 | column(8, tags$br(), tags$br(), 16 | uiOutput(paste0(x, "_preview_interactive_execute_uiOut_button"))) 17 | ), 18 | helpText("Note that if you are not connected to the internet", 19 | "then the background map will not display") 20 | ) 21 | } 22 | 23 | #------------------------------------------------------------------------------ 24 | ### Static preview 25 | # This is not called for overlaid preview because ui setup is different 26 | ui_static_preview <- function(x, over.flag = FALSE) { 27 | conditionalPanel( 28 | condition = paste0("input.", x, "_select_action == 2"), 29 | fluidRow( 30 | column(4, radioButtons(paste0(x, 
"_preview_perc"), tags$h5("Units"), 31 | choices = preview.static.perc, selected = 1)), 32 | column(8, radioButtons(paste0(x, "_preview_var"), tags$h5("Uncertainty"), 33 | choices = preview.static.var, selected = 1)) 34 | ), 35 | conditionalPanel( 36 | condition = paste0("input.", x, "_preview_var == 2"), 37 | helpText("Uncertainty plots will have \"- SE\" in their title.", 38 | "Uncertainty plots of units type 'values' will have the same", 39 | "color scale as their assocaited predictions.") 40 | ), 41 | actionButton(paste0(x, "_preview_execute"), "Plot static preview") 42 | ) 43 | } 44 | #------------------------------------------------------------------------------ 45 | ### Download preview 46 | ui_download_preview <- function(x) { 47 | conditionalPanel( 48 | condition = paste0("input.", x, "_select_action == 3"), 49 | fluidRow( 50 | column(4, radioButtons(paste0(x, "_download_preview_perc"), tags$h5("Units"), 51 | choices = preview.static.perc, selected = 1)), 52 | column(4, radioButtons(paste0(x, "_download_preview_var"), tags$h5("Uncertainty"), 53 | choices = preview.static.var, selected = 1)), 54 | column(4, radioButtons(paste0(x, "_download_preview_res"), tags$h5("Resolution"), 55 | choices = list("High (300 ppi)" = 1, "Low (72 ppi)" = 2), 56 | selected = 1)) 57 | ), 58 | fluidRow( 59 | column(4, radioButtons(paste0(x, "_download_preview_format"), tags$h5("File format"), 60 | choices = list("JPEG" = 1, "PDF" = 2, "PNG" = 3), 61 | selected = 3)), 62 | column(8, radioButtons(paste0(x, "_download_preview_dim"), tags$h5("File dimensions"), 63 | choices = preview.download.dim, selected = 1)) 64 | 65 | ), 66 | uiOutput(paste0(x, "_download_preview_name_uiOut_text")), 67 | uiOutput(paste0(x, "_download_preview_execute_uiOut_download")) 68 | ) 69 | } 70 | 71 | #------------------------------------------------------------------------------ 72 | ### Revove predictions 73 | ui_remove <- function(x) { 74 | conditionalPanel( 75 | condition = paste0("input.", x, 
"_select_action == 4"), 76 | actionButton(paste0(x, "_remove_execute"), 77 | "Remove selected ensemble predictions"), 78 | textOutput(paste0(x, "_remove_text")) 79 | ) 80 | } 81 | -------------------------------------------------------------------------------- /inst/shiny/www/eSDM_manual.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/shiny/www/eSDM_manual.pdf -------------------------------------------------------------------------------- /inst/shiny/www/noaa_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SWFSC/eSDM/be2f911c1fb18707e0f7c11973e8524510c2d3b5/inst/shiny/www/noaa_logo.png -------------------------------------------------------------------------------- /man/eSDM-package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/eSDM-package.R 3 | \docType{package} 4 | \name{eSDM-package} 5 | \alias{eSDM-package} 6 | \alias{eSDM} 7 | \title{Ensemble tool for predictions from Species Distribution Models} 8 | \description{ 9 | eSDM: A tool for creating and exploring ensembles of predictions from Species 10 | Distribution Models 11 | } 12 | \details{ 13 | eSDM provides functionality for overlaying SDM predictions onto a single base 14 | geometry and creating and evaluating ensemble predictions. This can be done 15 | manually in R, or using the eSDM GUI (an R Shiny app) opened through 16 | \link{eSDM_GUI} 17 | 18 | eSDM allows users to overlay SDM predictions onto a single base 19 | geometry, create ensembles of these predictions via weighted or unweighted 20 | averages, calculate performance metrics for each set of predictions and for 21 | resulting ensembles, and visually compare ensemble predictions with 22 | original predictions. 
The information provided by this tool can assist 23 | users in understanding spatial uncertainties and making informed 24 | conservation decisions. 25 | 26 | The GUI ensures that the tool is accessible to non-R users, while also 27 | providing a user-friendly environment for functionality such as loading 28 | other polygons to use and visualizing predictions. However, user choices 29 | are restricted to the workflow provided by the GUI. 30 | } 31 | \seealso{ 32 | \url{https://swfsc.github.io/eSDM/} 33 | } 34 | \author{ 35 | Sam Woodman \email{sam.woodman@noaa.gov} 36 | } 37 | \keyword{package} 38 | -------------------------------------------------------------------------------- /man/eSDM_GUI.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/eSDM_GUI.R 3 | \name{eSDM_GUI} 4 | \alias{eSDM_GUI} 5 | \title{Open the eSDM GUI} 6 | \usage{ 7 | eSDM_GUI(launch.browser = TRUE) 8 | } 9 | \arguments{ 10 | \item{launch.browser}{Logical with default of \code{TRUE}; passed to \code{launch.browser} 11 | argument of \code{\link[shiny]{runApp}}} 12 | } 13 | \description{ 14 | Open the eSDM graphical user interface (GUI); 15 | an R Shiny app for creating ensemble predictions using SDM predictions. 16 | } 17 | -------------------------------------------------------------------------------- /man/ensemble_create.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ensemble_create.R 3 | \name{ensemble_create} 4 | \alias{ensemble_create} 5 | \alias{ensemble_create.sf} 6 | \alias{ensemble_create.data.frame} 7 | \title{Create ensemble of SDM predictions} 8 | \usage{ 9 | ensemble_create(x, x.idx, w = NULL, x.var.idx = NULL, ...) 10 | 11 | \method{ensemble_create}{sf}(x, x.idx, w = NULL, x.var.idx = NULL, ...) 
12 | 13 | \method{ensemble_create}{data.frame}(x, x.idx, w = NULL, x.var.idx = NULL, ...) 14 | } 15 | \arguments{ 16 | \item{x}{object of class \code{sf} or class \code{data.frame}} 17 | 18 | \item{x.idx}{vector of column names or numerical indices; 19 | indicates which columns in \code{x} will be used to create the ensemble} 20 | 21 | \item{w}{weights for the ensemble; either a numeric vector the same length as \code{x} or 22 | a data frame (or tibble) with the same number of rows as \code{x} and \code{ncol(w) == length(x.idx)}. 23 | If w is a numeric vector, its values (i.e. the weights) must sum to 1. 24 | The default value is \code{1 / length(x.idx)}, i.e. an unweighted ensemble} 25 | 26 | \item{x.var.idx}{vector of column names or column indices; 27 | indicates columns in \code{x} with variance values with which to 28 | calculate uncertainty values for the ensemble. 29 | If \code{x.var.idx} is specified, it must be the same length as \code{x.idx}. 30 | Use \code{x.var.idx = NULL} (the default) if none of the predictions have associated uncertainty values; 31 | in this case the uncertainty values for the ensemble will be calculated using the among-model uncertainty. 32 | See the 'Details' section for more information} 33 | 34 | \item{...}{Arguments to be passed to methods; specifically designed for passing 35 | \code{na.rm} argument to \code{sum}} 36 | } 37 | \value{ 38 | An object of the same class as \code{x} with two columns appended to the data frame: 39 | \itemize{ 40 | \item 'Pred_ens' - The ensemble predictions 41 | \item 'Var_ens' - The variance of the ensemble predictions, 42 | calculated using either the within-model uncertainty (if \code{x.var.idx} is specified) or 43 | the among-model uncertainty (if \code{x.var.idx} is \code{NULL}) 44 | } 45 | Note that all other columns of \code{x} will be included in the returned object. 
46 | Also, if \code{x} is of class \code{sf} then 47 | 1) the geometry list-column will be the last column of the returned object and 48 | 2) the \code{agr} attribute will be set as 'constant' for 'Pred_ens' and 'Var_ens' 49 | } 50 | \description{ 51 | Create a weighted or unweighted ensemble of SDM predictions, including associated uncertainty values 52 | } 53 | \details{ 54 | \code{ensemble_create} is designed to be used after overlaying predictions with \code{\link{overlay_sdm}} and 55 | (if desired) rescaling the overlaid predictions with \code{\link{ensemble_rescale}}. 56 | 57 | This function implements ensemble methods provided in \link{eSDM_GUI}. 58 | Note that it does not implement regional exclusion, which must be done manually if not using the GUI. 59 | 60 | Ensemble uncertainty is calculated using either the within-model uncertainty (if \code{x.var.idx} is specified) or 61 | the among-model uncertainty (if \code{x.var.idx} is \code{NULL}). 62 | See the eSDM GUI manual for applicable formulas. 
63 | } 64 | \examples{ 65 | ensemble_create(preds.1, c("Density", "Density2"), c(0.2, 0.8)) 66 | ensemble_create(preds.1, 1:2, c(0.2, 0.8), c("Var1", "Var2")) 67 | ensemble_create(data.frame(a = 1:5, b = 3:7), c(1, 2)) 68 | 69 | weights.df <- data.frame(runif(325), c(rep(NA, 100), runif(225))) 70 | ensemble_create(preds.1, c("Density", "Density2"), weights.df, na.rm = TRUE) 71 | 72 | } 73 | -------------------------------------------------------------------------------- /man/ensemble_rescale.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ensemble_rescale.R 3 | \name{ensemble_rescale} 4 | \alias{ensemble_rescale} 5 | \title{Rescale SDM predictions} 6 | \usage{ 7 | ensemble_rescale(x, x.idx, y, y.abund = NULL, x.var.idx = NULL) 8 | } 9 | \arguments{ 10 | \item{x}{object of class \code{sf}} 11 | 12 | \item{x.idx}{vector of column names or column indices; 13 | indicates columns in \code{x} with prediction values that will be rescaled} 14 | 15 | \item{y}{rescaling method; must be either "abundance" or "sumto1". 16 | See 'Details' section for descriptions of the rescaling methods} 17 | 18 | \item{y.abund}{numeric value; ignored if \code{y} is not \code{"abundance"}} 19 | 20 | \item{x.var.idx}{vector of column names or column indices; 21 | indicates columns in \code{x} with variance values that will be rescaled. 22 | If \code{x.var.idx} is specified, it must be the same length as \code{x.idx}. 23 | Use \code{x.var.idx = NULL} (the default) if none of the predictions have associated uncertainty values; 24 | see the 'Details' section for more information} 25 | } 26 | \value{ 27 | The \code{sf} object \code{x} with the columns specified by \code{x.idx} and \code{x.var.idx} rescaled. 
28 | The \code{agr} attributes of \code{x} will be conserved 29 | } 30 | \description{ 31 | Rescale SDM predictions and (if applicable) associated uncertainties 32 | } 33 | \details{ 34 | \code{ensemble_rescale} is intended to be used after overlaying predictions with 35 | \code{\link{overlay_sdm}} and before creating ensembles with \code{\link{ensemble_create}}. 36 | The provided rescaling methods are: 37 | \itemize{ 38 | \item'abundance' - Rescale the density values so that the predicted abundance is \code{y.abund} 39 | \item'sumto1' - Rescale the density values so their sum is 1 40 | } 41 | 42 | SDM uncertainty values must be rescaled differently than the prediction values. 43 | Columns specified in \code{x.var.idx} must contain variance values. 44 | These values will be rescaled using the formula \code{var(c * x) = c^2 * var(x)}, 45 | where \code{c} is the rescaling factor for the associated predictions. 46 | 47 | If \code{x.var.idx} is not \code{NULL}, then the function assumes 48 | \code{x.var.idx[1]} contains the variance values associated with the predictions in \code{x.idx[1]}, 49 | \code{x.var.idx[2]} contains the variance values associated with the predictions in \code{x.idx[2]}, etc. 
50 | Use \code{NA} in \code{x.var.idx} to indicate a set of predictions that does not have 51 | associated uncertainty values (e.g., \code{x.var.idx = c(4, NA, 5)}) 52 | } 53 | \examples{ 54 | ensemble_rescale(preds.1, c("Density", "Density2"), "abundance", 50) 55 | ensemble_rescale(preds.1, c(1, 2), "sumto1") 56 | 57 | ensemble_rescale( 58 | preds.1, c("Density", "Density2"), "abundance", 100, c(3,4) 59 | ) 60 | 61 | 62 | } 63 | -------------------------------------------------------------------------------- /man/evaluation_metrics.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/evaluation_metrics.R 3 | \name{evaluation_metrics} 4 | \alias{evaluation_metrics} 5 | \title{Calculate SDM evaluation metrics} 6 | \usage{ 7 | evaluation_metrics(x, x.idx, y, y.idx, count.flag = FALSE) 8 | } 9 | \arguments{ 10 | \item{x}{object of class sf; SDM predictions} 11 | 12 | \item{x.idx}{name or index of column in \code{x} with prediction values} 13 | 14 | \item{y}{object of class sf; validation data} 15 | 16 | \item{y.idx}{name or index of column in \code{y} with validation data. 17 | This validation data column must have at least two unique values, e.g. 0 and 1} 18 | 19 | \item{count.flag}{logical; \code{TRUE} indicates that the data in column \code{y.idx} is count data, 20 | while \code{FALSE} indicates that the data is presence/absence. 21 | See details for differences in data processing based on this flag.} 22 | } 23 | \value{ 24 | A numeric vector with AUC, TSS and RMSE values, respectively. 25 | If \code{count.flag == FALSE}, the RMSE value will be \code{NA} 26 | } 27 | \description{ 28 | Calculate AUC, TSS, and RMSE for given density predictions and validation data 29 | } 30 | \details{ 31 | If \code{count.flag == TRUE}, then \code{eSDM::model_abundance(x, x.idx, FALSE)} will be run 32 | to calculate predicted abundance and thus calculate RMSE. 
33 | Note that this assumes the data in column \code{x.idx} of \code{x} are density values. 34 | 35 | If \code{count.flag == FALSE}, then all of the values in column \code{y.idx} of \code{y} must be \code{0} or \code{1}. 36 | 37 | All rows of \code{x} with a value of \code{NA} in column \code{x.idx} and 38 | all rows of \code{y} with a value of \code{NA} in column \code{y.idx} are removed before calculating metrics 39 | } 40 | \examples{ 41 | evaluation_metrics(preds.1, 2, validation.data, "sight") 42 | 43 | evaluation_metrics(preds.1, "Density2", validation.data, "count", TRUE) 44 | 45 | } 46 | -------------------------------------------------------------------------------- /man/gshhg.l.L16.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{gshhg.l.L16} 5 | \alias{gshhg.l.L16} 6 | \title{Low resolution GSHHG world map} 7 | \format{ 8 | An object of class \code{\link[sf]{sfc}} 9 | } 10 | \source{ 11 | \url{http://www.soest.hawaii.edu/pwessel/gshhg/} 12 | } 13 | \usage{ 14 | gshhg.l.L16 15 | } 16 | \description{ 17 | Low resolution GSHHG world map, includes hierarchical levels 18 | L1 and L6. Processed using \code{\link[sf:valid]{st_make_valid}} 19 | } 20 | \keyword{datasets} 21 | -------------------------------------------------------------------------------- /man/model_abundance.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/model_abundance.R 3 | \name{model_abundance} 4 | \alias{model_abundance} 5 | \title{Calculate predicted abundance} 6 | \usage{ 7 | model_abundance(x, dens.idx, sum.abund = TRUE) 8 | } 9 | \arguments{ 10 | \item{x}{object of class \code{sf}; SDM with density predictions. 
11 | Must have a valid crs code} 12 | 13 | \item{dens.idx}{name or index of column(s) in \code{x} with density predictions. 14 | Can be a character vector (column names) or numeric vector (column indices)} 15 | 16 | \item{sum.abund}{logical; whether or not to sum all of the predicted abundances} 17 | } 18 | \value{ 19 | If \code{sum.abund == TRUE}, then a vector of the same length as \code{dens.idx} 20 | representing the predicted abundance for the density values in each column. 21 | 22 | If \code{sum.abund == FALSE} and the length of \code{dens.idx} is 1, 23 | then a numeric vector with the predicted abundance of each prediction polygon of \code{x}. 24 | 25 | If \code{sum.abund == FALSE} and the length of \code{dens.idx} is greater than 1, 26 | then a data frame with \code{length(dens.idx)} columns of the predicted abundance of prediction polygons 27 | } 28 | \description{ 29 | Calculates the predicted abundance by multiplying the density prediction values by prediction polygon areas 30 | } 31 | \details{ 32 | Multiplies the values in the specified column(s) (i.e. the density predictions) 33 | by the area in square kilometers of their corresponding prediction polygon. 34 | The area of each prediction polygon is calculated using \code{st_area} from \code{\link[sf]{geos_measures}}. 35 | x must have a valid crs code to calculate area for these abundance calculations. 
36 | } 37 | \examples{ 38 | model_abundance(preds.1, "Density") 39 | model_abundance(preds.1, c(1, 1)) 40 | model_abundance(preds.1, c(1, 1), FALSE) 41 | 42 | } 43 | -------------------------------------------------------------------------------- /man/overlay_sdm.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/overlay_sdm.R 3 | \name{overlay_sdm} 4 | \alias{overlay_sdm} 5 | \title{Overlay SDM predictions onto base geometry} 6 | \usage{ 7 | overlay_sdm(base.geom, sdm, sdm.idx, overlap.perc) 8 | } 9 | \arguments{ 10 | \item{base.geom}{object of class \code{sfc}; base geometry} 11 | 12 | \item{sdm}{object of class \code{sf}; original SDM predictions} 13 | 14 | \item{sdm.idx}{names or indices of column(s) with data to be overlaid} 15 | 16 | \item{overlap.perc}{numeric; percent overlap threshold, 17 | i.e. percentage of each base geometry polygon must overlap with SDM 18 | prediction polygons for overlaid density value to be calculated and 19 | not set as NA} 20 | } 21 | \value{ 22 | Object of class \code{sf} with the geometry of \code{base.geom} and 23 | the data in the \code{sdm.idx} columns of \code{sdm} overlaid onto that 24 | geometry. Note that this means all columns of \code{sdm} not in 25 | \code{sdm.idx} will not be in the returned object. 26 | Because the data are considered spatially intensive, the \code{agr} 27 | attribute will be set as 'constant' for all columns in the returned object. 28 | 29 | Additionally, the output will match the class of \code{sdm}, with regards 30 | to the classes tbl_df, tbl, and data.frame. This means that, in addition to 31 | being an \code{sf} object, if \code{sdm} is a tibble then the output will 32 | also be a tibble, while if \code{sdm} is just a data frame then the output 33 | will not be a tibble. 
34 | } 35 | \description{ 36 | Overlay specified SDM predictions that meet the percent overlap threshold requirement onto base geometry 37 | } 38 | \details{ 39 | See the eSDM GUI manual for specifics about the overlay process. 40 | This process is equivalent to areal interpolation (Goodchild and Lam 1980), 41 | where \code{base.geom} is the target, \code{sdm} is the source, and the data 42 | specified by \code{sdm.idx} are spatially intensive. 43 | 44 | Note that \code{overlay_sdm} removes rows in \code{sdm} that have NA values 45 | in the first column specified in \code{sdm.idx} (i.e. \code{sdm.idx[1]}), 46 | before the overlay. 47 | Thus, for valid overlay results, all columns of \code{sdm} specified in 48 | \code{sdm.idx} must either have NA values in the same rows 49 | or contain only NAs. 50 | } 51 | \examples{ 52 | pol1.geom <- sf::st_sfc( 53 | sf::st_polygon(list(rbind(c(1,1), c(3,1), c(3,3), c(1,3), c(1,1)))), 54 | crs = sf::st_crs(4326) 55 | ) 56 | pol2.geom <- sf::st_sfc( 57 | sf::st_polygon(list(rbind(c(0,0), c(2,0), c(2,2), c(0,2), c(0,0)))), 58 | crs = sf::st_crs(4326) 59 | ) 60 | pol2.sf <- sf::st_sf(data.frame(Dens = 0.5), geometry = pol2.geom, 61 | crs = sf::st_crs(4326)) 62 | 63 | overlay_sdm(pol1.geom, pol2.sf, 1, 25) 64 | 65 | # Output 'Dens' value is NA because of higher overlap.perc value 66 | overlay_sdm(pol1.geom, pol2.sf, 1, 50) 67 | 68 | \dontshow{if (FALSE) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf} 69 | # These examples take longer to run 70 | overlay_sdm(sf::st_geometry(preds.1), preds.2, 1, 50) 71 | overlay_sdm(sf::st_geometry(preds.2), preds.1, "Density", 50) 72 | \dontshow{\}) # examplesIf} 73 | } 74 | \references{ 75 | Goodchild, M.F. & Lam, N.S.-N. (1980) Areal interpolation: 76 | a variant of the traditional spatial problem. Geo-Processing, 1, 297-312. 
77 | } 78 | -------------------------------------------------------------------------------- /man/preds.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{preds} 5 | \alias{preds} 6 | \alias{preds.1} 7 | \alias{preds.2} 8 | \alias{preds.3} 9 | \title{Sample SDM density predictions} 10 | \format{ 11 | Objects of class \code{sf} with a column of density predictions (name: \code{Density}) and 12 | a simple feature list column (name: \code{geometry}). 13 | \code{preds.1} also has a second column of sample density predictions (name: \code{Density2}), 14 | as well as \code{Var1} and \code{Var2}, representing the variance 15 | 16 | \code{preds1}: An object of class sf (inherits from data.frame) with 325 rows and 5 columns. 17 | 18 | \code{preds2}: An object of class sf (inherits from data.frame) with 1891 rows and 2 columns. 19 | 20 | \code{preds3}: An object of class sf (inherits from data.frame) with 1445 rows and 2 columns. 21 | 22 | An object of class \code{sf} (inherits from \code{data.frame}) with 1891 rows and 2 columns. 23 | 24 | An object of class \code{sf} (inherits from \code{data.frame}) with 1445 rows and 2 columns. 25 | } 26 | \usage{ 27 | preds.1 28 | 29 | preds.2 30 | 31 | preds.3 32 | } 33 | \description{ 34 | \code{preds.1}, \code{preds.2}, and \code{preds.3} are objects of class \code{\link[sf]{sf}} that serve as 35 | sample sets of SDM density predictions for the \code{eSDM} package 36 | } 37 | \details{ 38 | \code{preds.1} sample SDM density predictions created by importing 39 | Sample_predictions_2.csv into the eSDM GUI, exporting predictions, and then 40 | clipping them to the SoCal_bite.csv region. 
41 | Also manually added two variance columns (numbers are randomly generated with a max of 0.01) 42 | 43 | \code{preds.2} sample SDM density predictions created by importing 44 | Sample_predictions_1.csv into the eSDM GUI, exporting predictions, and then 45 | clipping them to the SoCal_bite.csv region 46 | 47 | \code{preds.3} is a set of sample SDM density predictions created by importing 48 | Sample_predictions_4_gdb into the eSDM GUI, exporting predictions, and then 49 | clipping them to the SoCal_bite.csv region 50 | } 51 | \keyword{datasets} 52 | -------------------------------------------------------------------------------- /man/pts2poly_centroids.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/pts2poly_centroids.R 3 | \name{pts2poly_centroids} 4 | \alias{pts2poly_centroids} 5 | \title{Create polygons from centroid coordinates} 6 | \usage{ 7 | pts2poly_centroids(x, y, ...) 8 | } 9 | \arguments{ 10 | \item{x}{data frame with at least two columns; 11 | the first two columns must contain longitude and latitude coordinates, respectively. 12 | See 'Details' section for how additional columns are handled} 13 | 14 | \item{y}{numeric; the perpendicular distance from the polygon centroid (center) to its edge 15 | (i.e. half the length of one side of a polygon)} 16 | 17 | \item{...}{passed to \link[sf:sf]{st_sf} or to \link[sf:sfc]{st_sfc}, 18 | e.g. for passing named arguments \code{crs} and \code{agr}} 19 | } 20 | \value{ 21 | Object of class \code{sfc} (if \code{x} has exactly two columns) or class \code{sf} 22 | (if \code{x} has exactly more than two columns). The object will have a geometry type of \code{POLYGON}. 
23 | If the object is of class \code{sf}, the name of the geometry list-column will be "geometry" 24 | } 25 | \description{ 26 | Create polygon(s) from a data frame with coordinates of the polygon centroid(s) 27 | } 28 | \details{ 29 | This function was designed for someone who reads in a .csv file 30 | with a grid of coordinates representing SDM prediction points and needs to create 31 | prediction polygons with the .csv file coordinates as the polygon centroids. 32 | However, the function can be used to create square polygons of any size around the provided points, 33 | regardless of if those polygons touch or overlap. 34 | The created polygons are oriented so that, in a 2D plane, their edges are parallel to either the x or the y axis. 35 | 36 | If \code{x} contains more than two column, then additional columns will be treated as simple feature attributes, 37 | i.e. passed along as the first argument to \link[sf:sf]{st_sf} 38 | 39 | If a \code{crs} is not specified in \code{...}, 40 | then the \code{crs} attribute of the polygon(s) will be \code{NULL}. 
41 | } 42 | \examples{ 43 | # Create an sfc object from a data frame of two columns 44 | x <- data.frame( 45 | lon = c(5, 10, 15, 20, 5, 10, 15, 20), 46 | lat = c(5, 5, 5, 5, 10, 10, 10, 10) 47 | ) 48 | pts2poly_centroids(x, 2.5, crs = 4326) 49 | 50 | # Create an sf object from a data frame of more than two columns 51 | x <- data.frame( 52 | lon = c(5, 10, 15, 20, 5, 10, 15, 20), 53 | lat = c(5, 5, 5, 5, 10, 10, 10, 10), 54 | sdm.pred = runif(8), 55 | sdm.pred2 = runif(8) 56 | ) 57 | pts2poly_centroids(x, 2.5, crs = 4326, agr = "constant") 58 | 59 | } 60 | -------------------------------------------------------------------------------- /man/pts2poly_vertices.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/pts2poly_vertices.R 3 | \name{pts2poly_vertices} 4 | \alias{pts2poly_vertices} 5 | \title{Create polygons from vertex coordinates} 6 | \usage{ 7 | pts2poly_vertices(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{data frame with at least two columns; 11 | the first two columns must contain longitude and latitude coordinates, respectively. 12 | See 'Details' section for how additional columns are handled} 13 | 14 | \item{...}{passed to \link[sf:sfc]{st_sfc}, 15 | e.g. for passing named argument \code{crs}} 16 | } 17 | \value{ 18 | Object of class \code{sfc} with the geometry type \code{POLYGON} 19 | } 20 | \description{ 21 | Create polygon(s) from a data frame with the coordinates of the polygon vertices 22 | } 23 | \details{ 24 | Vertices of different polygons must be demarcated by rows with values of \code{NA} 25 | in both the first and second columns (i.e. the longitude and latitude columns). 26 | 27 | All columns in \code{x} besides the first two columns are ignored. 28 | 29 | If a \code{crs} is not specified in \code{...}, 30 | then the \code{crs} attribute of the polygon(s) will be \code{NULL}. 
31 | } 32 | \examples{ 33 | x <- data.frame( 34 | lon = c(40, 40, 50, 50, 40), 35 | lat = c(0, 10, 10, 0, 0) 36 | ) 37 | pts2poly_vertices(x, crs = 4326) 38 | 39 | # Create an sf object 40 | x <- data.frame( 41 | lon = c(40, 40, 50, 50, 40, NA, 20, 20, 30, 30, 20), 42 | lat = c(0, 10, 10, 0, 0, NA, 0, 10, 10, 0, 0) 43 | ) 44 | sf::st_sf(Pred = 1:2, geometry = pts2poly_vertices(x, crs = 4326)) 45 | 46 | } 47 | -------------------------------------------------------------------------------- /man/validation.data.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/data.R 3 | \docType{data} 4 | \name{validation.data} 5 | \alias{validation.data} 6 | \title{Sample validation data} 7 | \format{ 8 | An object of class \code{\link[sf]{sf}} with 8 rows and 3 variables 9 | \describe{ 10 | \item{sight}{1's and 0's indicating species presence/absence} 11 | \item{count}{number of individuals observed at each point} 12 | \item{geometry}{simple feature list column representing validation data points} 13 | } 14 | } 15 | \usage{ 16 | validation.data 17 | } 18 | \description{ 19 | Sample validation data created by cropping Validation_data.csv to the SoCal_bite.csv region 20 | (.csv files from ...) 
21 | } 22 | \keyword{datasets} 23 | -------------------------------------------------------------------------------- /pkgdown/extra.css: -------------------------------------------------------------------------------- 1 | @import url("https://nmfs-fish-tools.github.io/nmfspalette/extra.css"); 2 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(eSDM) 3 | 4 | test_check("eSDM") 5 | -------------------------------------------------------------------------------- /tests/testthat/test-ensemble.R: -------------------------------------------------------------------------------- 1 | library(sf) 2 | 3 | test_that("rescale", { 4 | da <- data.frame( 5 | lon = c(45, 40, 40, 45, 45), lat = c(10, 10, 20, 20, 10) 6 | ) 7 | db <- data.frame( 8 | lon = c(50, 45, 45, 50, 50), lat = c(10, 10, 20, 20, 10) 9 | ) 10 | 11 | sfc1 <- st_sfc( 12 | st_polygon(list(matrix(unlist(da), ncol = 2))), st_polygon(list(matrix(unlist(db), ncol = 2))), 13 | crs = 4326 14 | ) 15 | 16 | df1 <- data.frame(pred = c(2, 3)) 17 | df1e <- data.frame(pred = c(2, 3) / 5) 18 | sf1 <- st_sf(df1, geometry = sfc1, agr = "constant") 19 | sf1.e <- st_sf(df1e, geometry = sfc1, agr = "constant") 20 | 21 | df2 <- data.frame(pred = c(2, 3), var = c(0.2, 0.3)) 22 | df2e <- data.frame(pred = c(0.4, 0.6), var = c(0.2, 0.3) * 1/25) 23 | sf2 <- st_sf(df2, geometry = sfc1, agr = "constant") 24 | sf2.e <- st_sf(df2e, geometry = sfc1, agr = "constant") 25 | 26 | expect_equal(ensemble_rescale(sf1, 1, "sumto1"), sf1.e) 27 | expect_equal(ensemble_rescale(sf2, 1, "sumto1", x.var.idx = 2), sf2.e) 28 | 29 | expect_equal(model_abundance(ensemble_rescale(sf1, 1, "abundance", 42), 1), 42) 30 | }) 31 | 32 | 33 | test_that("create ens", { 34 | d <- data.frame( 35 | lon = c(50, 40, 40, 50, 50), lat = c(10, 10, 0, 0, 10) 36 | ) 37 | da <- data.frame( 38 | lon = c(45, 40, 40, 
45, 45), lat = c(10, 10, 0, 0, 10) 39 | ) 40 | db <- data.frame( 41 | lon = c(50, 45, 45, 50, 50), lat = c(10, 10, 0, 0, 10) 42 | ) 43 | 44 | sfc1 <- st_sfc( 45 | st_polygon(list(matrix(unlist(da), ncol = 2))), st_polygon(list(matrix(unlist(db), ncol = 2))), 46 | crs = 4326 47 | ) 48 | sfc2 <- st_sfc(st_polygon(list(matrix(unlist(d), ncol = 2))), crs = 4326) 49 | 50 | # Basic data frame and sf object 51 | df1 <- data.frame(pred1 = c(2, 3), pred2 = c(4, 6), pred3 = c(3, 5), 52 | var1 = c(0.2, 0.3), var2 = c(0.4, 0.6), var3 = c(0.3, 0.5)) 53 | sf1 <- st_sf(df1, geometry = sfc1, agr = "constant") 54 | sf1c <- st_sf(df1, geometry = sfc1, agr = "identity") 55 | 56 | # Mean of first two pred columns with amv 57 | df1e <- cbind( 58 | df1, Pred_ens = c(mean(c(2, 4)), mean(c(3, 6))), Var_ens = c(1, 2.25) 59 | ) 60 | sf1.e <- st_sf(df1e, geometry = sfc1, agr = "constant") 61 | 62 | # Mean of all 3 pred columns with amv 63 | df1eb.mean <- c(mean(c(2, 4, 3)), mean(c(3, 6, 5))) 64 | df1eb.var <- apply(cbind(df1[, 1:3], df1eb.mean), 1, function(i, w){ 65 | esdm_weighted_var_amv(i[1:3], i[4], w) 66 | }, w = rep(1/3, 3)) 67 | df1eb <- cbind(df1, Pred_ens = df1eb.mean, Var_ens = c(2/3, 14/9)) 68 | sf1.eb <- st_sf(df1eb, geometry = sfc1, agr = "constant") 69 | 70 | # Mean of first two pred columns with wmv 71 | df1e2.mean <- c(mean(c(2, 4)), mean(c(3, 6))) 72 | df1e2.var <- apply(df1[, 4:5], 1, esdm_weighted_var_wmv, w = c(0.5, 0.5)) 73 | df1e2 <- cbind(df1, Pred_ens = df1e2.mean, Var_ens = df1e2.var) 74 | sf1.ec <- st_sf(df1e, geometry = sfc1, agr = c(rep("identity", 6), rep("constant", 2))) 75 | sf1.e2 <- st_sf(df1e2, geometry = sfc1, agr = "constant") 76 | 77 | # Tests 78 | expect_equal(ensemble_create(sf1, 1:2, c(0.5, 0.5)), sf1.e) 79 | expect_equal(ensemble_create(sf1, 1:3, c(1/3, 1/3, 1/3)), sf1.eb) 80 | expect_equal(ensemble_create(sf1c, 1:2, c(0.5, 0.5)), sf1.ec) 81 | 82 | expect_equal(ensemble_create(sf1, 1:2, c(0.5, 0.5), x.var.idx = 4:5), sf1.e2) 83 | 84 | }) 85 | 
-------------------------------------------------------------------------------- /tests/testthat/test-internals.R: -------------------------------------------------------------------------------- 1 | 2 | d.na <- as.numeric(NA) 3 | 4 | test_that("internal-rmse", { 5 | expect_equal(esdm_rmse(c(1, NA), c(2, 3), na.rm = TRUE), 1) 6 | expect_equal(esdm_rmse(c(1, 2), c(2, NA), na.rm = TRUE), 1) 7 | expect_equal(esdm_rmse(c(1, NA), c(2, 3), na.rm = FALSE), d.na) 8 | expect_equal(esdm_rmse(c(1, 2), c(2, NA), na.rm = FALSE), d.na) 9 | 10 | expect_equal(esdm_rmse(c(1:5), c(2:6)), 1) 11 | expect_equal(esdm_rmse(1, 2), 1) 12 | }) 13 | 14 | 15 | test_that("internal-wtdmean", { #Values calculated using stats::weighted.mean() 16 | expect_equal(esdm_weighted_mean(c(NA, 2, 3), c(1, 2, 3), na.rm = TRUE), 2.6) 17 | expect_equal(esdm_weighted_mean(c(NA, 2, 3), c(1, 2, 3), na.rm = FALSE), d.na) 18 | expect_equal(esdm_weighted_mean(c(5, 2, 3), c(1, 2, 3), na.rm = TRUE), 3) 19 | expect_equal(esdm_weighted_mean(c(4, 2, 3), c(1, 2, NA), na.rm = TRUE), 8/3) 20 | expect_equal(esdm_weighted_mean(c(4, 2, 3), c(1, 2, NA), na.rm = FALSE), d.na) 21 | }) 22 | 23 | 24 | test_that("internal-amv", { 25 | expect_equal(esdm_weighted_var_amv(d.na, 2, 1, na.rm = FALSE), d.na) 26 | expect_equal(esdm_weighted_var_amv(1:4, 2.5, rep(0.25, 4), na.rm = TRUE), mean((1:4 - 2.5) ^ 2)) 27 | expect_equal(esdm_weighted_var_amv(1:4, 2.5, rep(0.25, 4), na.rm = FALSE), mean((1:4 - 2.5) ^ 2)) 28 | expect_equal(esdm_weighted_var_amv(c(1, 5, NA), 3, c(0.2, 0.8, NA), na.rm = TRUE), 4) 29 | expect_equal(esdm_weighted_var_amv(c(1, 5, NA), 3, c(0.1, 0.4, 0.5), na.rm = TRUE), 4) 30 | expect_equal(esdm_weighted_var_amv(c(1, 5, NA), 3, c(0.2, 0.8, NA), na.rm = FALSE), d.na) 31 | }) 32 | 33 | 34 | test_that("internal-wmv", { 35 | expect_equal(esdm_weighted_var_wmv(c(NA, 0.1), c(0.5, 0.5), na.rm = FALSE), d.na) 36 | expect_equal(esdm_weighted_var_wmv(c(NA, 0.1), c(0.5, 0.5), na.rm = TRUE), 0.1) 37 | 
expect_equal(esdm_weighted_var_wmv(c(0.4, 0.1), c(0.5, 0.5), na.rm = TRUE), 0.4 * 0.25 + 0.1 * 0.25) 38 | }) 39 | -------------------------------------------------------------------------------- /tests/testthat/test-model_abundance.R: -------------------------------------------------------------------------------- 1 | library(sf) 2 | 3 | test_that("abundance", { 4 | da <- data.frame( 5 | lon = c(45, 40, 40, 45, 45), lat = c(10, 10, 20, 20, 10) 6 | ) 7 | db <- data.frame( 8 | lon = c(50, 45, 45, 50, 50), lat = c(10, 10, 20, 20, 10) 9 | ) 10 | 11 | sfc1 <- st_sfc( 12 | st_polygon(list(matrix(unlist(da), ncol = 2))), st_polygon(list(matrix(unlist(db), ncol = 2))), 13 | crs = 4326 14 | ) 15 | sfc2 <- st_sfc( 16 | st_polygon(list(matrix(unlist(da), ncol = 2))), st_polygon(list(matrix(unlist(db), ncol = 2))) 17 | ) 18 | 19 | sf1 <- st_sf(data.frame(dens = c(2, 2)), geometry = sfc1, agr = "constant") 20 | sf2 <- st_sf(data.frame(dens = c(2, 2)), geometry = sfc2, agr = "constant") 21 | 22 | area1 <- as.numeric(units::set_units(st_area(sfc1), "km^2")) * 2 23 | 24 | expect_equal(model_abundance(sf1, 1), sum(area1)) 25 | expect_equal(model_abundance(sf1, 1, sum.abund = FALSE), area1) 26 | expect_error(model_abundance(sf2, 1)) 27 | }) 28 | -------------------------------------------------------------------------------- /tests/testthat/test-overlay_sdm.R: -------------------------------------------------------------------------------- 1 | library(sf) 2 | library(dplyr) 3 | 4 | test_that("simple overlay", { 5 | d <- data.frame( 6 | lon = c(40, 40, 50, 50, 40), lat = c(10, 20, 20, 10, 10) 7 | ) 8 | da <- data.frame( 9 | lon = c(40, 40, 45, 45, 40), lat = c(10, 20, 20, 10, 10) 10 | ) 11 | db <- data.frame( 12 | lon = c(45, 45, 50, 50, 45), lat = c(10, 20, 20, 10, 10) 13 | ) 14 | 15 | sf1.df <- data.frame(pred = c(2, 2)) 16 | sf2.df <- data.frame(pred = 2) 17 | 18 | sfc1 <- st_sfc( 19 | st_polygon(list(as.matrix(da))), st_polygon(list(as.matrix(db))), 20 | crs = 4326 21 | ) 22 | 
sfc2 <- st_sfc(st_polygon(list(as.matrix(d))), crs = 4326) 23 | 24 | sf1 <- st_sf(sf1.df, geometry = sfc1, agr = "constant") 25 | sf1b <- st_sf(data.frame(pred = c(1, 2)), geometry = sfc1, agr = "constant") 26 | sf2 <- st_sf(sf2.df, geometry = sfc2, agr = "constant") 27 | sf2b <- st_sf(data.frame(pred = 1.5), geometry = sfc2, agr = "constant") 28 | 29 | expect_equal(overlay_sdm(sfc2, sf1, 1, 100), sf2) 30 | expect_equal(overlay_sdm(sfc2, sf1b, 1, 100), sf2b) 31 | 32 | expect_error(overlay_sdm(sf1, sf2, 1, 100)) 33 | expect_error(overlay_sdm(sfc1, sfc2, 1, 100)) 34 | 35 | # Test that tibble classes are brought through 36 | expect_equal( 37 | overlay_sdm(sfc2, st_sf(as_tibble(sf1.df), geometry = sfc1, agr = "constant"), 38 | 1, 100), 39 | st_sf(as_tibble(sf2.df), geometry = sfc2, agr = "constant") 40 | ) 41 | }) 42 | -------------------------------------------------------------------------------- /tests/testthat/test-pts2poly.R: -------------------------------------------------------------------------------- 1 | library(sf) 2 | 3 | test_that("pts2poly", { 4 | # Need to go counter-clockwise form top right point for pts2poly_centroid 5 | d <- data.frame( 6 | lon = c(50, 40, 40, 50, 50), lat = c(10, 10, 0, 0, 10) 7 | ) 8 | da <- data.frame( 9 | lon = c(45, 40, 40, 45, 45), lat = c(10, 10, 0, 0, 10) 10 | ) 11 | db <- data.frame( 12 | lon = c(50, 45, 45, 50, 50), lat = c(10, 10, 0, 0, 10) 13 | ) 14 | 15 | sfc1 <- st_sfc( 16 | st_polygon(list(matrix(unlist(da), ncol = 2))), st_polygon(list(matrix(unlist(db), ncol = 2))), 17 | crs = 4326 18 | ) 19 | sfc2 <- st_sfc(st_polygon(list(matrix(unlist(d), ncol = 2))), crs = 4326) 20 | 21 | sf1 <- st_sf(data.frame(pred = c(2, 2)), geometry = sfc1, agr = "constant") 22 | sf1b <- st_sf(data.frame(pred = c(1, 2)), geometry = sfc1, agr = "constant") 23 | sf2 <- st_sf(data.frame(pred = 2), geometry = sfc2, agr = "constant") 24 | sf2b <- st_sf(data.frame(pred = 1.5), geometry = sfc2, agr = "constant") 25 | 26 | 
expect_equal(pts2poly_vertices(d, crs = 4326), sfc2) 27 | expect_equal(pts2poly_vertices(rbind(d, NA), crs = 4326), sfc2) 28 | expect_equal(pts2poly_vertices(rbind(da, NA, db), crs = 4326), sfc1) 29 | 30 | pt1 <- data.frame(lon = 45, lat = 5, pred = 2) 31 | 32 | expect_equal(pts2poly_centroids(pt1, 5, crs = 4326, agr = "constant"), sf2) 33 | expect_equal(pts2poly_centroids(pt1[, 1:2], 5, crs = 4326), sfc2) 34 | expect_equal(pts2poly_centroids(pt1, 5, crs = 4326, agr = "constant", precision = 10), 35 | st_set_precision(sf2, 10)) 36 | expect_equal(pts2poly_centroids(pt1[, 1:2], 5, crs = 4326, precision = 10), 37 | st_set_precision(sfc2, 10)) 38 | 39 | expect_error(pts2poly_centroids(pt1[, 1:2], 5, crs = 4326, agr = "constant")) 40 | }) 41 | -------------------------------------------------------------------------------- /vignettes/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | *.R 3 | --------------------------------------------------------------------------------