├── .Rbuildignore ├── .github ├── issue_template.md └── workflows │ └── tic.yml ├── .gitignore ├── CONDUCT.md ├── CONTRIBUTING.md ├── CRAN-RELEASE ├── DESCRIPTION ├── NAMESPACE ├── NEWS.md ├── R ├── download_smap.R ├── extract_smap.R ├── find_smap.R ├── list_smap.R ├── set_smap_credentials.R ├── smapr-package.R └── zzz.R ├── README.Rmd ├── README.md ├── codecov.yml ├── codemeta.json ├── cran-comments.md ├── docs ├── CONDUCT.html ├── CONTRIBUTING.html ├── articles │ ├── index.html │ ├── smapr-intro.html │ └── smapr-intro_files │ │ └── figure-html │ │ ├── crop-raster-1.png │ │ ├── get-mean-1.png │ │ ├── inverse-mask-1.png │ │ ├── mask-raster-1.png │ │ ├── plot-raster-1.png │ │ └── surface-vs-rootzone-1.png ├── authors.html ├── docsearch.css ├── docsearch.js ├── index.html ├── issue_template.html ├── link.svg ├── news │ └── index.html ├── pkgdown.css ├── pkgdown.js ├── pkgdown.yml └── reference │ ├── download_smap.html │ ├── extract_smap.html │ ├── figures │ └── extract-data-1.png │ ├── find_smap.html │ ├── index.html │ ├── list_smap.html │ ├── set_smap_credentials.html │ └── smapr-package.html ├── man ├── download_smap.Rd ├── extract_smap.Rd ├── figures │ └── extract-data-1.png ├── find_smap.Rd ├── list_smap.Rd ├── set_smap_credentials.Rd └── smapr-package.Rd ├── tests ├── testthat.R └── testthat │ ├── test-download_smap.R │ ├── test-extract_smap.R │ ├── test-find_smap.R │ ├── test-list_smap.R │ ├── test-set_smap_credentials.R │ └── test-zzz.R ├── tic.R └── vignettes ├── smapr-intro-crop-raster-1.png ├── smapr-intro-get-mean-1.png ├── smapr-intro-inverse-mask-1.png ├── smapr-intro-mask-raster-1.png ├── smapr-intro-plot-raster-1.png ├── smapr-intro-surface-vs-rootzone-1.png ├── smapr-intro.Rmd └── smapr-intro.Rmd.orig /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | ^\.travis\.yml$ 4 | ^README\.Rmd$ 5 | ^README-.*\.png$ 6 | cran-comments.md 7 | CONTRIBUTING.md 8 | ^appveyor\.yml$ 9 
| ^CONDUCT\.md$ 10 | ^codemeta\.json$ 11 | ^\.github$ 12 | ^docs$ 13 | ^codecov\.yml$ 14 | ^revdep$ 15 | ^CRAN-RELEASE$ 16 | ^\.ccache$ 17 | ^tic\.R$ 18 | -------------------------------------------------------------------------------- /.github/issue_template.md: -------------------------------------------------------------------------------- 1 | # Before posting 2 | 3 | The GitHub issue tracker is intended for bug reports and feature requests. 4 | Do not post your NASA Earthdata username or password with your issue! 5 | 6 | When you post, please include a minimal reproducible example of the problem 7 | and/or desired behavior, if applicable. 8 | -------------------------------------------------------------------------------- /.github/workflows/tic.yml: -------------------------------------------------------------------------------- 1 | ## tic GitHub Actions template: linux-deploy 2 | ## revision date: 2022-11-23 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: 7 | - main 8 | - master 9 | - cran-* 10 | pull_request: 11 | branches: 12 | - main 13 | - master 14 | # for now, CRON jobs only run on the default branch of the repo (i.e. 
usually on master) 15 | schedule: 16 | # * is a special character in YAML so you have to quote this string 17 | - cron: "0 4 * * *" 18 | 19 | name: tic 20 | 21 | jobs: 22 | all: 23 | runs-on: ${{ matrix.config.os }} 24 | 25 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 26 | 27 | strategy: 28 | fail-fast: false 29 | matrix: 30 | config: 31 | # use a different tic template type if you do not want to build on all listed platforms 32 | - { os: ubuntu-latest, r: "release", pkgdown: "true" } 33 | 34 | env: 35 | # make sure to run `tic::use_ghactions_deploy()` to set up deployment 36 | TIC_DEPLOY_KEY: ${{ secrets.TIC_DEPLOY_KEY }} 37 | # earthdata creds 38 | ed_un: ${{ secrets.EARTHDATA_USER }} 39 | ed_pw: ${{ secrets.EARTHDATA_PW }} 40 | # prevent rgl issues because no X11 display is available 41 | RGL_USE_NULL: true 42 | # if you use bookdown or blogdown, replace "PKGDOWN" by the respective 43 | # capitalized term. This also might need to be done in tic.R 44 | BUILD_PKGDOWN: ${{ matrix.config.pkgdown }} 45 | # use GITHUB_TOKEN from GitHub to workaround rate limits in {remotes} 46 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 47 | 48 | steps: 49 | - uses: actions/checkout@v3 50 | 51 | - uses: r-lib/actions/setup-r@v2 52 | with: 53 | r-version: ${{ matrix.config.r }} 54 | Ncpus: 4 55 | 56 | - uses: r-lib/actions/setup-tinytex@v2 57 | if: matrix.config.latex == 'true' 58 | 59 | - uses: r-lib/actions/setup-pandoc@v2 60 | 61 | # set date/week for use in cache creation 62 | # https://github.community/t5/GitHub-Actions/How-to-set-and-access-a-Workflow-variable/m-p/42970 63 | # - cache R packages daily 64 | - name: "[Cache] Prepare daily timestamp for cache" 65 | if: runner.os != 'Windows' 66 | id: date 67 | run: echo "date=$(date '+%d-%m')" >> $GITHUB_OUTPUT 68 | 69 | - name: "[Cache] Cache R packages" 70 | if: runner.os != 'Windows' 71 | uses: pat-s/always-upload-cache@v3 72 | with: 73 | path: ${{ env.R_LIBS_USER }} 74 | key: ${{ runner.os }}-r-${{ matrix.config.r 
}}-${{steps.date.outputs.date}} 75 | restore-keys: ${{ runner.os }}-r-${{ matrix.config.r }}-${{steps.date.outputs.date}} 76 | 77 | - name: "[Stage] Install pak" 78 | run: Rscript -e "install.packages('pak', repos = 'https://r-lib.github.io/p/pak/stable')" 79 | 80 | - name: "[Stage] Install" 81 | run: Rscript -e "if (grepl('Ubuntu', Sys.info()[['version']]) && !grepl('Under development', R.version[['status']])) {options(repos = c(CRAN = sprintf('https://packagemanager.rstudio.com/all/__linux__/%s/latest', system('lsb_release -cs', intern = TRUE))))}else{options(repos = c(CRAN = 'cloud.r-project.org'))}; pak::pkg_install('ropensci/tic')" -e "print(tic::dsl_load())" -e "tic::prepare_all_stages()" -e "tic::before_install()" -e "tic::install()" 82 | 83 | - name: "[Stage] Script" 84 | run: Rscript -e 'tic::script()' 85 | 86 | - name: "[Stage] After Success" 87 | run: Rscript -e "tic::after_success()" 88 | 89 | - name: "[Stage] Upload R CMD check artifacts" 90 | if: failure() 91 | uses: actions/upload-artifact@v2 92 | with: 93 | name: ${{ runner.os }}-r${{ matrix.config.r }}-results 94 | path: check 95 | - name: "[Stage] Before Deploy" 96 | run: | 97 | Rscript -e "tic::before_deploy()" 98 | 99 | - name: "[Stage] Deploy" 100 | run: Rscript -e "tic::deploy()" 101 | 102 | - name: "[Stage] After Deploy" 103 | run: Rscript -e "tic::after_deploy()" 104 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # History files 2 | .Rhistory 3 | .Rapp.history 4 | # Session Data files 5 | .RData 6 | # Example code in package build process 7 | *-Ex.R 8 | # RStudio files 9 | .Rproj.user/ 10 | *.Rproj 11 | # produced vignettes 12 | vignettes/*.html 13 | vignettes/*.pdf 14 | # OAuth2 token, see https://github.com/hadley/httr/releases/tag/v0.3 15 | .httr-oauth 16 | .Rproj.user 17 | revdep 18 | data 19 | docs/ 20 | 
-------------------------------------------------------------------------------- /CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Code of Conduct 2 | 3 | As contributors and maintainers of this project, we pledge to respect all people who 4 | contribute through reporting issues, posting feature requests, updating documentation, 5 | submitting pull requests or patches, and other activities. 6 | 7 | We are committed to making participation in this project a harassment-free experience for 8 | everyone, regardless of level of experience, gender, gender identity and expression, 9 | sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion. 10 | 11 | Examples of unacceptable behavior by participants include the use of sexual language or 12 | imagery, derogatory comments or personal attacks, trolling, public or private harassment, 13 | insults, or other unprofessional conduct. 14 | 15 | Project maintainers have the right and responsibility to remove, edit, or reject comments, 16 | commits, code, wiki edits, issues, and other contributions that are not aligned to this 17 | Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed 18 | from the project team. 19 | 20 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by 21 | opening an issue or contacting one or more of the project maintainers. 22 | 23 | This Code of Conduct is adapted from the Contributor Covenant 24 | (http://contributor-covenant.org), version 1.0.0, available at 25 | http://contributor-covenant.org/version/1/0/0/ 26 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # CONTRIBUTING # 2 | 3 | ### Please contribute! 4 | We love collaboration. 5 | 6 | ### Found a Bug?
7 | 8 | * Submit an issue on our Issues page [here](https://github.com/earthlab/smapr/issues). 9 | 10 | ### Code contributions? 11 | 12 | * **Fork** this repo to your Github account. 13 | * **Clone** your version on your account down to your machine from your account, e.g., `git clone https://github.com/<your-username>/smapr.git`. 14 | * Make sure to **track upstream** progress (i.e., on our version of `smapr` at `earthlab/smapr`) by doing `git remote add upstream https://github.com/earthlab/smapr.git`. Before making changes make sure to pull changes in from upstream by doing either `git fetch upstream` then merge later or `git pull upstream` to fetch and merge in one step 15 | * Make your **changes** (bonus points for making changes on a new branch). 16 | * **Push** up to your account. 17 | * Submit a **pull request** to home base at `earthlab/smapr`. 18 | 19 | Please follow [this](http://adv-r.had.co.nz/Style.html) styleguide for your contributions. 20 | 21 | ### Questions? 22 | 23 | Get in touch: [maxwell.b.joseph@colorado.edu](mailto:maxwell.b.joseph@colorado.edu) 24 | 25 | ### Thanks for contributing! 26 | -------------------------------------------------------------------------------- /CRAN-RELEASE: -------------------------------------------------------------------------------- 1 | This package was submitted to CRAN on 2019-04-21. 2 | Once it is accepted, delete this file and tag the release (commit cf93ca7679).
3 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: smapr 2 | Type: Package 3 | Title: Acquisition and Processing of NASA Soil Moisture Active-Passive (SMAP) Data 4 | Version: 0.2.1 5 | Authors@R: c( 6 | person("Maxwell", "Joseph", email = "maxwell.b.joseph@colorado.edu", 7 | role = c("aut", "cre")), 8 | person("Matthew", "Oakley", email = "matthew.oakley@colorado.edu", 9 | role = "aut"), 10 | person("Zachary", "Schira", email = "zasc3143@colorado.edu", 11 | role = "aut") 12 | ) 13 | Depends: 14 | R (>= 3.2.5) 15 | Imports: 16 | hdf5r, 17 | httr (>= 1.1.0), 18 | methods, 19 | rappdirs (>= 0.3.1), 20 | rvest, 21 | terra, 22 | xml2 23 | Maintainer: Maxwell Joseph <maxwell.b.joseph@colorado.edu> 24 | Description: 25 | Facilitates programmatic access to NASA Soil Moisture Active 26 | Passive (SMAP) data with R. It includes functions to search for, acquire, 27 | and extract SMAP data.
28 | License: GPL-3 29 | LazyData: TRUE 30 | RoxygenNote: 7.2.3 31 | Suggests: 32 | knitr, 33 | rmarkdown, 34 | roxygen2, 35 | testthat, 36 | utils, 37 | covr 38 | VignetteBuilder: knitr 39 | URL: https://docs.ropensci.org/smapr, https://github.com/ropensci/smapr 40 | BugReports: https://github.com/ropensci/smapr/issues 41 | Encoding: UTF-8 42 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | export(download_smap) 4 | export(extract_smap) 5 | export(find_smap) 6 | export(list_smap) 7 | export(set_smap_credentials) 8 | importFrom(httr,GET) 9 | importFrom(httr,authenticate) 10 | importFrom(httr,write_disk) 11 | importFrom(methods,is) 12 | importFrom(rappdirs,user_cache_dir) 13 | importFrom(terra,crs) 14 | importFrom(terra,ext) 15 | importFrom(terra,merge) 16 | importFrom(terra,project) 17 | importFrom(terra,rast) 18 | importFrom(terra,writeRaster) 19 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # smapr 0.2.1 2 | 3 | * patch to skip test on cran that requires internet 4 | * updated SMAP data versions 5 | 6 | # smapr 0.2.0 7 | 8 | * added set_smap_credentials() function for NASA EarthData portal 9 | * expanded vignettes to include cropping, masking, etc. 
10 | * added verbose argument to download_smap 11 | * performance improvements to extract_smap() 12 | 13 | # smapr 0.1.2 14 | 15 | * added support for SMAP/sentinel hybrid soil moisture product 16 | * added a code of conduct 17 | * added a vignette to show a complete workflow 18 | 19 | # smapr 0.1.1 20 | 21 | * added patch for searching date ranges containing missing collections 22 | * added unit tests for user specified download directories 23 | * updates to examples for new data versions 24 | * adding a CONTRIBUTING.md file 25 | 26 | # smapr 0.1.0 27 | 28 | * updating remote data location (previous ftp server was removed) 29 | * using NASA Earthdata authentication 30 | 31 | # smapr 0.0.1 32 | 33 | * first submission to CRAN 34 | -------------------------------------------------------------------------------- /R/download_smap.R: -------------------------------------------------------------------------------- 1 | #' Download SMAP data 2 | #' 3 | #' This function downloads SMAP data in HDF5 format. 4 | #' 5 | #' This function requires a username and password from NASA's Earthdata portal. 6 | #' If you have an Earthdata username and password, pass them in using the 7 | #' \code{\link[=set_smap_credentials]{set_smap_credentials()}} function. 8 | #' 9 | #' If you do not yet have a username and password, register for one here: 10 | #' \url{https://urs.earthdata.nasa.gov/} 11 | #' 12 | #' 13 | #' @param files A \code{data.frame} produced by \code{find_smap()} 14 | #' that specifies data files to download. 15 | #' @param directory A local directory path in which to save data, specified as a 16 | #' character string. If left as \code{NULL}, data are stored in a user's cache 17 | #' directory. 18 | #' @param overwrite TRUE or FALSE: should existing data files be overwritten? 19 | #' @param verbose TRUE or FALSE: should messages be printed to indicate that 20 | #' files are being downloaded? 
21 | #' @return Returns a \code{data.frame} that appends a column called 22 | #' \code{local_dir} to the input data frame, which consists of a character 23 | #' vector specifying the local directory containing the downloaded files. 24 | #' @examples 25 | #' \dontrun{ 26 | #' files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 27 | #' # files[1, ] refers to the first available data file 28 | #' downloads <- download_smap(files[1, ]) 29 | #' } 30 | #' @export 31 | 32 | download_smap <- function(files, directory = NULL, 33 | overwrite = TRUE, verbose = TRUE) { 34 | check_creds() 35 | directory <- validate_directory(directory) 36 | validate_input_df(files) 37 | local_files <- fetch_all(files, directory, overwrite, verbose) 38 | verify_download_success(files, local_files) 39 | downloads_df <- bundle_to_df(files, local_files, directory) 40 | downloads_df 41 | } 42 | 43 | validate_input_df <- function(df) { 44 | if (any(is.na(df))) { 45 | stop( 46 | paste( 47 | "Argument 'files' must be a data frame with no NA values.", 48 | "First, be sure that the input data frame was produced by", 49 | "the find_smap() function. NA values will result when data", 50 | "are missing (e.g., if there was no data collection", 51 | "on a particular day. You must omit all NA values from the", 52 | "input data.frame 'files' to use download_smap(), e.g.,", 53 | "with the na.omit() function." 
54 | ) 55 | ) 56 | } 57 | } 58 | 59 | bundle_to_df <- function(desired_files, downloaded_files, local_dir) { 60 | names_without_paths <- gsub(".*/", "", downloaded_files) 61 | names_without_extensions <- gsub("\\..*", "", names_without_paths) 62 | name <- unique(names_without_extensions) 63 | downloads <- data.frame(name, local_dir, stringsAsFactors = FALSE) 64 | merged_df <- merge(desired_files, downloads, by = 'name') 65 | merged_df 66 | } 67 | 68 | fetch_all <- function(files, directory, overwrite, verbose) { 69 | n_downloads <- nrow(files) 70 | local_files <- vector(mode = 'list', length = n_downloads) 71 | for (i in 1:n_downloads) { 72 | local_files[[i]] <- maybe_download(files[i, ], directory, 73 | overwrite, verbose) 74 | } 75 | downloaded_files <- unlist(local_files) 76 | downloaded_files 77 | } 78 | 79 | #' @importFrom rappdirs user_cache_dir 80 | validate_directory <- function(destination_directory) { 81 | if (is.null(destination_directory)) { 82 | destination_directory <- user_cache_dir("smap") 83 | } 84 | if (!dir.exists(destination_directory)) { 85 | dir.create(destination_directory, recursive = TRUE) 86 | } 87 | destination_directory 88 | } 89 | 90 | maybe_download <- function(file, local_directory, overwrite, verbose) { 91 | target_files <- get_rel_paths(file) 92 | full_target_paths <- file.path(local_directory, target_files) 93 | all_files_exist <- all(file.exists(full_target_paths)) 94 | if (!all_files_exist | overwrite) { 95 | https_locations <- paste0(https_prefix(), file$dir, target_files) 96 | for (i in seq_along(full_target_paths)) { 97 | if (verbose) { 98 | message(paste('Downloading', https_locations[i])) 99 | } 100 | remote_to_local(full_target_paths, https_locations, i) 101 | } 102 | } 103 | full_target_paths 104 | } 105 | 106 | get_rel_paths <- function(file) { 107 | id <- toString(file[3]) 108 | if (grepl("SPL4CMDL", id) == TRUE) { 109 | target_files <- paste0(file$name, min_extensions()) 110 | } 111 | else { 112 | target_files <- 
paste0(file$name, extensions()) 113 | } 114 | target_files 115 | } 116 | 117 | #' @importFrom httr authenticate 118 | #' @importFrom httr write_disk 119 | #' @importFrom httr GET 120 | remote_to_local <- function(local_paths, https_locations, i) { 121 | write_loc <- write_disk(local_paths[i], overwrite = TRUE) 122 | GET(https_locations[i], write_loc, auth()) 123 | } 124 | 125 | verify_download_success <- function(files, downloaded_files) { 126 | expected_downloads <- get_rel_paths(files) 127 | actual_downloads <- gsub(".*/", "", downloaded_files) 128 | stopifnot(all(expected_downloads %in% actual_downloads)) 129 | } 130 | -------------------------------------------------------------------------------- /R/extract_smap.R: -------------------------------------------------------------------------------- 1 | #' Extracts contents of SMAP data 2 | #' 3 | #' Extracts datasets from SMAP data files. 4 | #' 5 | #' The arguments \code{group} and \code{dataset} must refer specifically the 6 | #' group and name within group for the input file, such as can be obtained with 7 | #' \code{list_smap()}. This function will extract that particular dataset, 8 | #' returning a Raster object. 9 | #' 10 | #' @param data A data frame produced by \code{download_smap()} that specifies 11 | #' input files from which to extract data. 12 | #' @param name The path in the HDF5 file pointing to data to extract. 13 | #' @return Returns a SpatRaster object. 
14 | #' @examples 15 | #' \dontrun{ 16 | #' files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 17 | #' downloads <- download_smap(files[1, ]) 18 | #' sm_raster <- extract_smap(downloads, name = '/Geophysical_Data/sm_surface') 19 | #' } 20 | #' @importFrom terra crs 21 | #' @importFrom terra ext 22 | #' @importFrom terra merge 23 | #' @importFrom terra project 24 | #' @importFrom terra rast 25 | #' @importFrom terra writeRaster 26 | #' @importFrom rappdirs user_cache_dir 27 | #' @export 28 | 29 | extract_smap <- function(data, name) { 30 | validate_data(data) 31 | h5_files <- local_h5_paths(data) 32 | n_files <- length(h5_files) 33 | rasters <- vector("list", length = n_files) 34 | for (i in 1:n_files) { 35 | rasters[[i]] <- rasterize_smap(h5_files[i], name) 36 | } 37 | output <- bundle_rasters(rasters, data) 38 | output 39 | } 40 | 41 | validate_data <- function(data) { 42 | # ensure that all data are of equal data product ID 43 | dir_name_splits <- strsplit(data$dir, split = "\\.") 44 | data_product_ids <- unlist(lapply(dir_name_splits, `[`, 1)) 45 | if (length(unique(data_product_ids)) > 1) { 46 | stop('extract_smap() requires all data IDs to be the same! 
\n 47 | Only one data product type can be extracted at once, \n 48 | e.g., SPL3SMP data cannot be extracted with SPL2SMAP_S data.') 49 | } 50 | } 51 | 52 | bundle_rasters <- function(rasters, data) { 53 | filenames <- data$name 54 | all_L2SMSP <- mean(is_L2SMSP(filenames)) 55 | if (all_L2SMSP == 1) { 56 | if (length(rasters) > 1) { 57 | # place data on common reference grid to enable stacking 58 | proj_rasters <- lapply( 59 | rasters, 60 | project, 61 | y = terra::crs(rasters[[1]]), 62 | res = 3000 63 | ) 64 | raster_collection <- terra::sprc(proj_rasters) 65 | total_extent <- terra::ext(raster_collection) 66 | 67 | reference_grid <- rast( 68 | extent = total_extent, 69 | crs = terra::crs(rasters[[1]]), 70 | resolution = 3000 71 | ) 72 | 73 | rasters <- lapply(rasters, project, y = reference_grid) 74 | } 75 | } 76 | output <- rast(rasters) 77 | names(output) <- write_layer_names(filenames) 78 | output 79 | } 80 | 81 | rasterize_smap <- function(file, name) { 82 | # Load the h5 file 83 | f <- hdf5r::H5File$new(file, mode="r") 84 | 85 | h5_in <- hdf5r::readDataSet(f[[name]]) 86 | 87 | if (is_cube(h5_in)) { 88 | r <- rasterize_cube(h5_in, file, name) 89 | } else { 90 | r <- rasterize_matrix(h5_in, file, name) 91 | } 92 | r 93 | } 94 | 95 | rasterize_cube <- function(cube, file, name) { 96 | layers <- vector("list", length = dim(cube)[3]) 97 | for (i in seq_along(layers)) { 98 | slice <- cube[, , i] 99 | layers[[i]] <- rasterize_matrix(slice, file, name) 100 | } 101 | stack <- rast(layers) 102 | stack 103 | } 104 | 105 | rasterize_matrix <- function(matrix, file, name) { 106 | fill_value <- find_fill_value(file, name) 107 | matrix[matrix == fill_value] <- NA 108 | raster_layer <- rast(t(matrix)) 109 | raster_layer <- project_smap(file, raster_layer) 110 | raster_layer 111 | } 112 | 113 | is_cube <- function(array) { 114 | d <- length(dim(array)) 115 | stopifnot(d < 4) 116 | is_3d <- d == 3 117 | is_3d 118 | } 119 | 120 | find_fill_value <- function(file, name) { 121 | 
122 | # Load the h5 file 123 | f <- hdf5r::H5File$new(file, mode="r") 124 | 125 | if(f[[name]]$attr_exists("_FillValue")) { 126 | fill_value <- hdf5r::h5attr(f[[name]],"_FillValue") 127 | } else { 128 | fill_value <- -9999 129 | } 130 | fill_value 131 | } 132 | 133 | project_smap <- function(file, smap_raster) { 134 | terra::ext(smap_raster) <- compute_extent(file) 135 | terra::crs(smap_raster) <- smap_crs(file) 136 | smap_raster 137 | } 138 | 139 | compute_extent <- function(h5_file) { 140 | latlon_extent <- compute_latlon_extent(h5_file) 141 | latlon_raster <- rast(latlon_extent, crs = latlon_crs()) 142 | pr_extent <- project(latlon_raster, smap_crs(h5_file)) 143 | if (is_L3FT(h5_file)) { 144 | # extent must be corrected for EASE-grid 2.0 North 145 | terra::ext(pr_extent)[3] <- -terra::ext(pr_extent)[4] 146 | } 147 | smap_extent <- terra::ext(pr_extent) 148 | smap_extent 149 | } 150 | 151 | compute_latlon_extent <- function(h5_file) { 152 | if (is_L3FT(h5_file)) { 153 | # b/c metadata are incorrect in L3_FT data files 154 | extent_vector <- c(-180, 180, 0, 90) 155 | } else { 156 | extent_vector <- extent_vector_from_metadata(h5_file) 157 | } 158 | latlon_extent <- terra::ext(extent_vector) 159 | latlon_extent 160 | } 161 | 162 | extent_vector_from_metadata <- function(h5_file) { 163 | f <- hdf5r::H5File$new(h5_file, mode="r") 164 | if (is_L2SMSP(h5_file)) { 165 | # extent specification is explained here: 166 | # https://nsidc.org/data/smap/spl1btb/md-fields 167 | vertices <- hdf5r::h5attr(f[["Metadata/Extent"]], "polygonPosList") 168 | vertex_coords <- matrix(vertices, nrow = 2, 169 | dimnames = list(c('lat', 'lon'))) 170 | extent_vec <- c(min(vertex_coords['lon', ]), 171 | max(vertex_coords['lon', ]), 172 | min(vertex_coords['lat', ]), 173 | max(vertex_coords['lat', ])) 174 | } else { 175 | # if not L2 data, metadata already contains values we need 176 | extent_vec <- c(hdf5r::h5attr(f[["Metadata/Extent"]], 177 | "westBoundLongitude"), 178 | 
hdf5r::h5attr(f[["Metadata/Extent"]], 179 | "eastBoundLongitude"), 180 | hdf5r::h5attr(f[["Metadata/Extent"]], 181 | "southBoundLatitude"), 182 | hdf5r::h5attr(f[["Metadata/Extent"]], 183 | "northBoundLatitude")) 184 | } 185 | extent_vec 186 | } 187 | 188 | is_L3FT <- function(filename) { 189 | grepl("L3_FT", filename) 190 | } 191 | 192 | is_L2SMSP <- function(filename) { 193 | grepl('L2_SM_SP', filename) 194 | } 195 | 196 | write_layer_names <- function(file_names) { 197 | proportion_L3FT <- mean(is_L3FT(file_names)) 198 | stopifnot(proportion_L3FT %in% c(0, 1)) 199 | if (proportion_L3FT == 1) { 200 | time_day <- c("AM", "PM") 201 | times_vector <- rep(time_day, length(file_names)) 202 | filename_vector <- rep(file_names, each = 2) 203 | layer_names <- paste(filename_vector, times_vector, sep = "_") 204 | } else if (proportion_L3FT == 0) { 205 | layer_names <- file_names 206 | } 207 | layer_names 208 | } 209 | -------------------------------------------------------------------------------- /R/find_smap.R: -------------------------------------------------------------------------------- 1 | #' Find SMAP data 2 | #' 3 | #' This function searches for SMAP data on a specific date, returning a 4 | #' \code{data.frame} describing available data. 5 | #' 6 | #' There are many SMAP data products that can be accessed with this function. 
7 | #' Currently, smapr supports level 3 and level 4 data products, each of which 8 | #' has an associated Data Set ID which is specified by the \code{id} argument, 9 | #' described at \url{https://nsidc.org/data/smap/smap-data.html} and summarized 10 | #' below: 11 | #' 12 | #' \describe{ 13 | #' \item{SPL2SMAP_S}{SMAP/Sentinel-1 Radiometer/Radar Soil Moisture} 14 | #' \item{SPL3FTA}{Radar Northern Hemisphere Daily Freeze/Thaw State} 15 | #' \item{SPL3SMA}{Radar Global Daily Soil Moisture} 16 | #' \item{SPL3SMP}{Radiometer Global Soil Moisture} 17 | #' \item{SPL3SMAP}{Radar/Radiometer Global Soil Moisture} 18 | #' \item{SPL4SMAU}{Surface/Rootzone Soil Moisture Analysis Update} 19 | #' \item{SPL4SMGP}{Surface/Rootzone Soil Moisture Geophysical Data} 20 | #' \item{SPL4SMLM}{Surface/Rootzone Soil Moisture Land Model Constants} 21 | #' \item{SPL4CMDL}{Carbon Net Ecosystem Exchange} 22 | #' } 23 | #' 24 | #' This function requires a username and password from NASA's Earthdata portal. 25 | #' If you have an Earthdata username and password, pass them in using the 26 | #' \code{\link[=set_smap_credentials]{set_smap_credentials()}} function. 27 | #' 28 | #' If you do not yet have a username and password, register for one here: 29 | #' \url{https://urs.earthdata.nasa.gov/} 30 | #' 31 | #' 32 | #' @param id A character string that refers to a specific SMAP dataset, e.g., 33 | #' \code{"SPL4SMGP"} for SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil 34 | #' Moisture Geophysical Data. See "Details" for a list of supported data types 35 | #' and their associated id codes. 36 | #' @param dates An object of class Date or a character string formatted as 37 | #' %Y-%m-%d (e.g., "2016-04-01") which specifies the date(s) to search. 38 | #' To search for one specific date, this can be a Date object of length one. To 39 | #' search over a time interval, it can be a multi-element object of class Date 40 | #' such as produced by \code{seq.Date}. 
41 | #' @param version Which data version would you like to search for? Version 42 | #' information for each data product can be found at 43 | #' \url{https://nsidc.org/data/smap/data_versions} 44 | #' @return A data.frame with the names of the data files, the remote directory, and 45 | #' the date. 46 | #' 47 | #' @examples 48 | #' \dontrun{ 49 | #' # looking for data on one day: 50 | #' find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 51 | #' 52 | #' # searching across a date range 53 | #' start_date <- as.Date("2015-03-31") 54 | #' end_date <- as.Date("2015-04-02") 55 | #' date_sequence <- seq(start_date, end_date, by = 1) 56 | #' find_smap(id = "SPL4SMGP", dates = date_sequence, version = 4) 57 | #' } 58 | #' 59 | #' @importFrom httr GET 60 | #' @importFrom methods is 61 | #' @export 62 | 63 | find_smap <- function(id, dates, version) { 64 | check_creds() 65 | if (!is(dates, "Date")) { 66 | dates <- try_make_date(dates) 67 | } 68 | ensure_dates_in_past(dates) 69 | res <- lapply(dates, find_for_date, id = id, version = version) 70 | do.call(rbind, res) 71 | } 72 | 73 | try_make_date <- function(date) { 74 | tryCatch(as.Date(date), 75 | error = function(c) { 76 | stop(paste("Couldn't coerce date(s) to a Date object.", 77 | "Try formatting date(s) as: %Y-%m-%d,", 78 | "or use Date objects for the date argument", 79 | "(see ?Date).")) 80 | } 81 | ) 82 | } 83 | 84 | ensure_dates_in_past <- function(dates) { 85 | todays_date <- format(Sys.time(), "%Y-%m-%d") 86 | if (any(dates > todays_date)) { 87 | stop("All search dates must be <= the current date") 88 | } 89 | } 90 | 91 | find_for_date <- function(date, id, version) { 92 | date <- format(date, "%Y.%m.%d") 93 | validate_request(id, date, version) 94 | is_date_valid <- validate_date(id, version, date) 95 | 96 | if (is_date_valid) { 97 | route <- route_to_data(id, date, version) 98 | available_files <- find_available_files(route, date) 99 | } else { 100 | # return a row in output with 101 | # NA for 
'name' and 'dir' so that users can track which
102 |     # data are missing
103 |     available_files <- data.frame(name = NA,
104 |                                   date = as.Date(date,
105 |                                                  format = "%Y.%m.%d"),
106 |                                   dir = NA,
107 |                                   stringsAsFactors = FALSE)
108 |     }
109 |     available_files
110 | }
111 | 
112 | # Verify that the requested id and version exist on the NSIDC server.
113 | validate_request <- function(id, date, version) {
114 |     folder_names <- get_dir_contents(path = https_prefix())
115 |     validate_dataset_id(folder_names, id)
116 |     validate_version(folder_names, id, version)
117 | }
118 | 
119 | # Stop with an informative error if `id` is not a dataset on the server.
120 | validate_dataset_id <- function(folder_names, id) {
121 |     names_no_versions <- gsub("\\..*", "", folder_names)
122 |     if (!(id %in% names_no_versions)) {
123 |         prefix <- "Invalid data id."
124 |         suffix <- paste(id, "does not exist at", https_prefix())
125 |         stop(paste(prefix, suffix))
126 |     }
127 | }
128 | 
129 | # Stop with an informative error if `id.00<version>` is not on the server.
130 | validate_version <- function(folder_names, id, version) {
131 |     expected_folder <- paste0(id, ".", "00", version)
132 |     if (!expected_folder %in% folder_names) {
133 |         prefix <- "Invalid data version."
134 |         suffix <- paste(expected_folder, "does not exist at", https_prefix())
135 |         stop(paste(prefix, suffix))
136 |     }
137 | }
138 | 
139 | # Returns TRUE when data exist for `date`, otherwise warns and returns FALSE.
140 | validate_date <- function(id, version, date) {
141 |     date_checking_route <- route_to_dates(id, version)
142 |     folder_names <- get_dir_contents(path = date_checking_route)
143 |     if (!date %in% folder_names) {
144 |         prefix <- "Data are not available for this date."
145 |         suffix <- paste(date, "does not exist at", date_checking_route)
146 |         does_date_exist <- FALSE
147 |         warning(paste(prefix, suffix))
148 |     } else {
149 |         does_date_exist <- TRUE
150 |     }
151 |     does_date_exist
152 | }
153 | 
154 | # Fetch an HTTPS directory listing and return the entry names
155 | # (excluding "Parent Directory", with trailing slashes stripped).
156 | get_dir_contents <- function(path) {
157 |     top_level_response <- GET(path, auth())
158 |     nodes <- rvest::html_nodes(xml2::read_html(top_level_response), "table")
159 |     df <- rvest::html_table(nodes)[[1]]
160 |     filenames <- df$Name
161 |     filenames <- filenames[filenames != "Parent Directory"]
162 |     gsub("/+$", "", filenames) # removes trailing slashes
163 | }
164 | 
165 | # Build the URL for one dataset/date directory, e.g.
166 | # https://.../SMAP/SPL3SMAP.003/2015.05.25/
167 | route_to_data <- function(id, date, version) {
168 |     data_version <- paste0("00", version)
169 |     long_id <- paste(id, data_version, sep = ".")
170 |     route <- paste0(https_prefix(), long_id, "/", date, "/")
171 |     route
172 | }
173 | 
174 | # Build the URL for a dataset's directory of date folders.
175 | route_to_dates <- function(id, version) {
176 |     data_version <- paste0("00", version)
177 |     long_id <- paste(id, data_version, sep = ".")
178 |     route <- paste0(https_prefix(), long_id, "/")
179 |     route
180 | }
181 | 
182 | # List a remote directory and bundle its files into a search-result
183 | # data.frame. Errors if the directory exists but is empty.
184 | find_available_files <- function(route, date) {
185 |     contents <- get_dir_contents(route)
186 |     # bug fix: previously called validate_contents(route), so the empty-
187 |     # directory check inspected the URL string (length 1) instead of the
188 |     # listing, and could never fire.
189 |     validate_contents(contents, route)
190 |     data_filenames <- extract_filenames(contents)
191 |     available_files <- bundle_search_results(data_filenames, route, date)
192 |     available_files
193 | }
194 | 
195 | validate_contents <- function(contents, route) {
196 |     # deal with error case where https directory exists, but is empty
197 |     is_dir_empty <- length(contents) == 0
198 |     if (is_dir_empty) {
199 |         error_message <- paste('https directory', route, 'exists, but is empty')
200 |         stop(error_message)
201 |     }
202 | }
203 | 
204 | # Strip file extensions and de-duplicate (each granule has .h5/.qa/.xml).
205 | extract_filenames <- function(contents) {
206 |     no_extensions <- gsub("\\..*", "", contents)
207 |     unique_files <- unique(no_extensions)
208 |     unique_files
209 | }
210 | 
211 | # Assemble the user-facing search result: file name, date, remote dir.
212 | bundle_search_results <- function(filenames, route, date) {
213 |     dir <- gsub(https_prefix(), "", route)
214 |     data.frame(name = filenames,
215 |                date = as.Date(date, format = "%Y.%m.%d"),
216 |                dir = dir,
217 |                stringsAsFactors = FALSE)
218 | }
219 | 
-------------------------------------------------------------------------------- /R/list_smap.R: --------------------------------------------------------------------------------
 1 | #' Lists the contents of SMAP data files
 2 | #'
 3 | #' This function returns a list of the contents of SMAP data files.
 4 | #'
 5 | #' @param files A \code{data.frame} produced by \code{download_smap()} that
 6 | #' specifies input data files.
 7 | #' @param all If TRUE a longer, more detailed list of information on each
 8 | #' entry is provided.
 9 | #' @return Returns a list of \code{data.frame} objects that list the contents
10 | #' of each data file in \code{files}.
11 | #' @examples
12 | #' \dontrun{
13 | #' files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4)
14 | #' files <- download_smap(files[1, ])
15 | #' list_smap(files)
16 | #' list_smap(files, all = TRUE)
17 | #' }
18 | #' @export
19 | list_smap <- function(files, all = TRUE) {
20 |     paths_with_filenames <- local_h5_paths(files)
21 |     contents <- lapply(paths_with_filenames, h5ls, all)
22 |     names(contents) <- files$name
23 |     contents
24 | }
25 | 
26 | # This function emulates rhdf5::h5ls using the functions in hdf5r
27 | h5ls <- function(file, all) {
28 |     # Load the h5 file
29 |     f <- hdf5r::H5File$new(file, mode="r")
30 |     # bug fix: the handle was opened but never closed (the comment below said
31 |     # to close on exit, but the call was missing), leaking HDF5 file handles.
32 |     on.exit(f$close_all())
33 |     datasets <- f$ls(recursive = all)
34 |     datasets$path <- datasets$name
35 |     datasets$group <- dirname(datasets$path)
36 |     datasets$name <- basename(datasets$path)
37 |     datasets$otype <- as.character(datasets$obj_type)
38 |     datasets$dclass <- as.character(datasets$dataset.type_class)
39 |     datasets$dim <- datasets$dataset.dims
40 |     datasets[, names(datasets) %in% c('group', 'name', 'otype', 'dclass', 'dim')]
41 | }
42 | 
-------------------------------------------------------------------------------- /R/set_smap_credentials.R:
--------------------------------------------------------------------------------
 1 | #' Set credentials for NASA's Earthdata portal
 2 | #'
 3 | #' To use smapr, users need to provide NASA Earthdata portal credentials.
 4 | #' This function allows users to interactively set these credentials via the
 5 | #' user's Earthdata username and password.
 6 | #'
 7 | #' If you do not yet have a username and password, register for one here:
 8 | #' https://urs.earthdata.nasa.gov/
 9 | #'
10 | #' A warning: do not commit your username and password to a public repository!
11 | #' This function is meant to be used interactively, and not embedded within a
12 | #' script that you would share.
13 | #'
14 | #' @param username A character string of your Earthdata portal username
15 | #' @param password A character string of your Earthdata portal password
16 | #' @param save Logical: whether to save your credentials to your
17 | #' .Renviron file (e.g., ~/.Renviron). Previous Earthdata credentials will not
18 | #' be overwritten unless \code{overwrite = TRUE}.
19 | #' @param overwrite Logical: whether to overwrite previous Earthdata credentials
20 | #' in your .Renviron file (only applies when \code{save = TRUE})
21 | #' @return Invisibly returns \code{NULL}. This function is called for its side
22 | #' effects: it sets the ed_un and ed_pw environment variables and, when
23 | #' \code{save = TRUE}, writes them to your .Renviron file.
24 | #' @examples
25 | #' \dontrun{
26 | #' set_smap_credentials('myusername', 'mypassword')
27 | #' }
28 | #'
29 | #' @export
30 | set_smap_credentials <- function(username, password,
31 |                                  save = TRUE, overwrite = FALSE) {
32 |     Sys.setenv(ed_un = username, ed_pw = password)
33 | 
34 |     if (save) {
35 |         renvironment_path <- file.path(Sys.getenv("HOME"), ".Renviron")
36 |         if (!file.exists(renvironment_path)) {
37 |             file.create(renvironment_path)
38 |         }
39 |         renvironment_contents <- readLines(renvironment_path)
40 | 
41 |         username_in_renv <- grepl("^ed_un[[:space:]]*=.*", renvironment_contents)
42 |         password_in_renv <- grepl("^ed_pw[[:space:]]*=.*", renvironment_contents)
43 |         credentials_already_exist <- any(username_in_renv | password_in_renv)
44 | 
45 |         if (credentials_already_exist) {
46 |             if (overwrite) {
47 |                 # drop the old credential lines (and blank lines), then ensure
48 |                 # the file ends with a newline so R reads it correctly
49 |                 to_remove <- username_in_renv | password_in_renv
50 |                 renvironment_contents <- renvironment_contents[!to_remove]
51 |                 blank_spaces <- renvironment_contents == ""
52 |                 stripped_contents <- renvironment_contents[!blank_spaces]
53 |                 contents_w_newline <- c(stripped_contents, "")
54 |                 writeLines(contents_w_newline, renvironment_path)
55 |             } else {
56 |                 stop(
57 |                     paste0(
58 |                         strwrap(
59 |                             c("Earthdata credentials already exist in your .Renviron file:",
60 |                               renvironment_path,
61 |                               "",
62 |                               "To resolve this issue, you can do one of the following: ",
63 |                               "",
64 |                               "1) Use the 'overwrite = TRUE' argument in",
65 |                               "set_smap_credentials() to overwrite the existing Earthdata",
66 |                               "credentials in your .Renviron file",
67 |                               "",
68 |                               "2) Manually edit the .Renviron file to update your Earthdata",
69 |                               "username and password.")
70 |                         ),
71 |                         collapse = "\n"
72 |                     )
73 |                 )
74 |             }
75 |         }
76 | 
77 |         set_env_cmd <- paste0("ed_un=", username, "\n",
78 |                               "ed_pw=", password, "\n")
79 |         write(set_env_cmd, renvironment_path, append = TRUE)
80 | 
81 |         message(
82 |             paste(
83 |                 strwrap(
84 |                     c("Your credentials have been updated.",
85 |                       "To avoid exposing your username and password, do not commit",
86 |                       "your call to set_smap_credentials(), your .Renviron, or your",
87 |                       ".Rhistory file to a public repository.")
88 |                 ),
89 |                 collapse = "\n"
90 |             )
91 |         )
92 |     }
93 | }
94 | 
-------------------------------------------------------------------------------- /R/smapr-package.R: --------------------------------------------------------------------------------
 1 | #' smapr: A package for acquisition and processing of NASA SMAP data.
 2 | #'
 3 | #' The smapr package provides a means to discover, acquire, and process
 4 | #' NASA Soil Moisture Active Passive (SMAP) data.
 5 | #'
 6 | #' @docType package
 7 | #' @name smapr-package
 8 | #' @author Max Joseph \email{maxwell.b.joseph@colorado.edu}
 9 | #' @keywords package
10 | NULL
11 | 
-------------------------------------------------------------------------------- /R/zzz.R: --------------------------------------------------------------------------------
 1 | # Base URL for all SMAP data on the NSIDC server.
 2 | https_prefix <- function() {
 3 |     "https://n5eil01u.ecs.nsidc.org/SMAP/"
 4 | }
 5 | 
 6 | # All file extensions associated with one SMAP granule.
 7 | extensions <- function() {
 8 |     c('.h5', '.qa', '.h5.iso.xml')
 9 | }
10 | 
11 | # Minimal set of extensions needed to use a granule.
12 | min_extensions <- function() {
13 |     c('.h5', '.h5.iso.xml')
14 | }
15 | 
16 | # Proj4 string for a SMAP file: L3FT products use a north-polar
17 | # Lambert azimuthal equal-area grid; all others use cylindrical equal-area.
18 | smap_crs <- function(file) {
19 |     if (is_L3FT(file)) {
20 |         crs <- "+proj=laea +lon_0=0 +lat_0=90 +datum=WGS84 +units=m"
21 |     } else {
22 |         crs <- "+proj=cea +lat_ts=30 +datum=WGS84 +units=m"
23 |     }
24 |     crs
25 | }
26 | 
27 | latlon_crs <- function() {
28 |     "+proj=longlat +lat_ts=30 +datum=WGS84 +units=m"
29 | }
30 | 
31 | # Local paths to the .h5 files described by a download_smap() data.frame.
32 | local_h5_paths <- function(files) {
33 |     stopifnot(is.data.frame(files))
34 |     filenames <- paste0(files$name, '.h5')
35 |     # fix: return the paths directly rather than ending on an assignment,
36 |     # which returned the value invisibly
37 |     file.path(files$local_dir, filenames)
38 | }
39 | 
40 | auth <- function() {
41 |     # authentication function for any GET requests
42 |     httr::authenticate(user = Sys.getenv("ed_un"),
43 |                        password = Sys.getenv("ed_pw"))
44 | }
45 | 
46 | # Verify that Earthdata credentials are set and accepted by the server.
47 | check_creds <- function() {
48 |     username_missing <- "" == Sys.getenv("ed_un")
49 |     password_missing <- "" ==
Sys.getenv("ed_pw")
50 |     # scalar flags: use short-circuit || (|| errors on length > 1 inputs in
51 |     # R >= 4.3, which makes an accidental vector here fail loudly)
52 |     if (username_missing || password_missing) {
53 |         stop(
54 |             paste0(
55 |                 strwrap(
56 |                     c("smapr expected ed_un and ed_pw to be environment variables!",
57 |                       "The smapr package requires a username and password from",
58 |                       "NASA's Earthdata portal.", "",
59 |                       "If you have a username and password please provide them with",
60 |                       "the set_smap_credentials() function, e.g.,",
61 |                       "set_smap_credentials('username', 'passwd')", "",
62 |                       "If you do not have a username and password, get one here:",
63 |                       "https://urs.earthdata.nasa.gov/")
64 |                 ),
65 |                 collapse = '\n'
66 |             )
67 |         )
68 |     }
69 | 
70 |     # if the username and password exist, check to see whether they are correct
71 |     response <- GET(https_prefix(), auth())
72 |     check_for_401(response)
73 | }
74 | 
75 | get_creds <- function(renvironment_path) {
76 |     # helper function to get username and password from .Renviron file
77 |     renvironment_contents <- readLines(renvironment_path)
78 |     username_in_renv <- grepl("^ed_un[[:space:]]*=.*", renvironment_contents)
79 |     password_in_renv <- grepl("^ed_pw[[:space:]]*=.*", renvironment_contents)
80 |     stopifnot(any(username_in_renv))
81 |     stopifnot(any(password_in_renv))
82 |     username <- trimws(gsub("^ed_un[[:space:]]*=", replacement = "",
83 |                             renvironment_contents[username_in_renv]))
84 |     passwd <- trimws(gsub("^ed_pw[[:space:]]*=", replacement = "",
85 |                           renvironment_contents[password_in_renv]))
86 |     c('username' = username, 'passwd' = passwd)
87 | }
88 | 
89 | # NOTE(review): package-level assignment, evaluated when the package is
90 | # built/loaded — presumably a default for get_creds(); confirm this is
91 | # intentional rather than a leftover, since get_creds() shadows it.
92 | renvironment_path <- file.path(Sys.getenv("HOME"), ".Renviron")
93 | 
94 | # Stop with a helpful message when the server rejects the credentials.
95 | check_for_401 <- function(response) {
96 |     if (response$status_code == 401) {
97 |         stop(
98 |             paste0(
99 |                 strwrap(
100 |                     c("401 unauthorized response from server.",
101 |                       "Are your NASA Earthdata username and password correct?",
102 |                       "Check with: Sys.getenv(c('ed_un', 'ed_pw'))",
103 |                       "",
104 |                       "To modify your credentials, you can use set_smap_credentials()",
105 |                       "e.g., set_smap_credentials('user', 'passwd', overwrite = TRUE)",
106
| "", 92 | "If you've forgotten your username or password, go to:", 93 | "https://urs.earthdata.nasa.gov/") 94 | ), 95 | collapse = "\n" 96 | ) 97 | ) 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "smapr" 3 | output: github_document 4 | --- 5 | 6 | [![codecov](https://codecov.io/gh/ropensci/smapr/branch/master/graph/badge.svg)](https://codecov.io/gh/ropensci/smapr) 7 | [![CRAN_Status_Badge](http://www.r-pkg.org/badges/version/smapr)](https://cran.r-project.org/package=smapr) 8 | [![lifecycle](https://img.shields.io/badge/lifecycle-maturing-blue.svg)](https://www.tidyverse.org/lifecycle/#maturing) 9 | [![](http://cranlogs.r-pkg.org/badges/grand-total/smapr)](http://cran.rstudio.com/web/packages/smapr/index.html) 10 | [![](https://badges.ropensci.org/231_status.svg)](https://github.com/ropensci/onboarding/issues/231) 11 | [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](http://www.repostatus.org/badges/latest/active.svg)](http://www.repostatus.org/#active) 12 | 13 | 14 | ```{r, echo = FALSE} 15 | knitr::opts_chunk$set( 16 | collapse = TRUE, 17 | comment = "#>", 18 | fig.path = "man/figures/" 19 | ) 20 | ``` 21 | 22 | 23 | An R package for acquisition and processing of [NASA (Soil Moisture Active-Passive) SMAP data](http://smap.jpl.nasa.gov/) 24 | 25 | ## Installation 26 | 27 | To install smapr from CRAN: 28 | 29 | ```{r cran-installation, eval = FALSE} 30 | install.packages("smapr") 31 | ``` 32 | 33 | To install the development version from GitHub: 34 | 35 | ```{r gh-installation, eval = FALSE} 36 | # install.packages("devtools") 37 | devtools::install_github("ropensci/smapr") 38 | ``` 39 | 40 | #### Docker instructions (alternative to a local installation) 41 | 42 | If a local installation is not possible for some reason, we have made a 
Docker 43 | image available with smapr and all its dependencies. 44 | 45 | ``` 46 | docker run -d -p 8787:8787 earthlab/smapr 47 | ``` 48 | 49 | In a web browser, navigate to localhost:8787 and log in with 50 | username: rstudio, password: rstudio. 51 | 52 | 53 | ## Authentication 54 | 55 | Access to the NASA SMAP data requires authentication through NASA's Earthdata 56 | portal. 57 | If you do not already have a username and password through Earthdata, you can 58 | register for an account here: https://urs.earthdata.nasa.gov/ 59 | You cannot use this package without an Earthdata account. 60 | 61 | Once you have an account, you need to pass your Earthdata username (`ed_un`) 62 | and password (`ed_pw`) as environmental variables that can be read from within 63 | your R session. 64 | There are a couple of ways to do this: 65 | 66 | ### Recommended approach 67 | 68 | Use `set_smap_credentials('yourusername', 'yourpasswd')`. 69 | This will save your credentials by default, overwriting existing credentials if 70 | `overwrite = TRUE`. 71 | 72 | #### Alternative approaches 73 | 74 | - Use `Sys.setenv()` interactively in your R session to set your username and 75 | password (not including the `<` and `>`): 76 | 77 | ```{r, eval = FALSE} 78 | Sys.setenv(ed_un = "", ed_pw = "") 79 | ``` 80 | 81 | - Create a text file `.Renviron` in your home directory, which contains your 82 | username and password. 83 | If you don't know what your home directory is, execute `normalizePath("~/")` in 84 | the R console and it will be printed. 85 | Be sure to include a new line at the end of the file or R will fail silently 86 | when loading it. 87 | 88 | Example `.Renviron file` (note the new line at the end!): 89 | 90 | ``` 91 | ed_un=slkdjfsldkjfs 92 | ed_pw=dlfkjDD124^ 93 | 94 | ``` 95 | 96 | Once this file is created, restart your R session and you should now be able to 97 | access these environment variables (e.g., via `Sys.getenv("ed_un")`). 
98 | 99 | 100 | 101 | # SMAP data products 102 | 103 | Multiple SMAP data products are provided by the NSIDC, and these products vary 104 | in the amount of processing. 105 | Currently, smapr primarily supports level 3 and level 4 data products, 106 | which represent global daily composite and global three hourly modeled data 107 | products, respectively. 108 | There are a wide variety of data layers available in SMAP products, including surface soil moisture, root zone soil moisture, freeze/thaw status, surface temperature, vegetation water content, vegetation opacity, net ecosystem carbon exchange, soil temperature, and evapotranspiration. 109 | NSIDC provides documentation for all SMAP data products on their 110 | [website](https://nsidc.org/data/smap/smap-data.html), and we provide a summary 111 | of data products supported by smapr below. 112 | 113 | | Dataset id | Description | Resolution | 114 | |------------|-----------------------------------------------------|------------| 115 | | SPL2SMAP_S | SMAP/Sentinel-1 Radiometer/Radar Soil Moisture | 3 km | 116 | | SPL3FTA | Radar Northern Hemisphere Daily Freeze/Thaw State | 3 km | 117 | | SPL3SMA | Radar Global Daily Soil Moisture | 3 km | 118 | | SPL3SMP | Radiometer Global Soil Moisture | 36 km | 119 | | SPL3SMAP | Radar/Radiometer Global Soil Moisture | 9 km | 120 | | SPL4SMAU | Surface/Rootzone Soil Moisture Analysis Update | 9 km | 121 | | SPL4SMGP | Surface/Rootzone Soil Moisture Geophysical Data | 9 km | 122 | | SPL4SMLM | Surface/Rootzone Soil Moisture Land Model Constants | 9 km | 123 | | SPL4CMDL | Carbon Net Ecosystem Exchange | 9 km | 124 | 125 | ## Typical workflow 126 | 127 | At a high level, most workflows follow these steps: 128 | 129 | 1. Find SMAP data with `find_smap()` 130 | 2. Download data with `download_smap()` 131 | 3. List data contents with `list_smap()` 132 | 4. 
Extract data with `extract_smap()` 133 | 134 | Each of these steps are outlined below: 135 | 136 | ### Finding SMAP data 137 | 138 | Data are hosted on a server by the National Snow and Ice Data Center. 139 | The `find_smap()` function searches for specific data products and returns a 140 | data frame of available data. 141 | As data mature and pass checks, versions advance. 142 | At any specific time, not all versions of all datasets for all dates may exist. 143 | For the most up to date overview of dataset versions, see the NSIDC SMAP data 144 | version [webpage](https://nsidc.org/data/smap/smap-data.html). 145 | 146 | ```{r find-data} 147 | library(smapr) 148 | library(terra) 149 | available_data <- find_smap(id = "SPL3SMAP", date = "2015-05-25", version = 3) 150 | str(available_data) 151 | ``` 152 | 153 | ### Downloading and inspecting SMAP data 154 | 155 | Given a data frame produced by `find_smap`, `download_smap` downloads the data 156 | onto the local file system. 157 | Unless a directory is specified as an argument, the data are stored in the 158 | user's cache. 159 | 160 | ```{r download-data} 161 | downloads <- download_smap(available_data) 162 | str(downloads) 163 | ``` 164 | 165 | The SMAP data are provided in HDF5 format, and in any one file there are 166 | actually multiple data sets, including metadata. 167 | The `list_smap` function allows users to inspect the contents of downloaded 168 | data at a high level (`all = FALSE`) or in depth (`all = TRUE`). 169 | 170 | ```{r list-data} 171 | list_smap(downloads, all = FALSE) 172 | ``` 173 | 174 | To see all of the data fields, set `all = TRUE`. 175 | 176 | ### Extracting gridded data products 177 | 178 | The `extract_smap` function extracts gridded data products 179 | (e.g., global soil moisture). 
180 | If more than one file has been downloaded and passed into the first argument, `extract_smap` extracts data for each file.
181 | 
182 | ```{r extract-data, fig.align='center', fig.width=8, fig.height=7}
183 | sm_raster <- extract_smap(downloads, "Soil_Moisture_Retrieval_Data/soil_moisture")
184 | plot(sm_raster, main = "Level 3 soil moisture: May 25, 2015")
185 | ```
186 | 
187 | The path "Soil_Moisture_Retrieval_Data/soil_moisture" was determined from the
188 | output of `list_smap(downloads, all = TRUE)`, which lists all of the data
189 | contained in SMAP data files.
190 | 
191 | ### Saving GeoTIFF output
192 | 
193 | The data can be saved as a GeoTIFF using the `writeRaster` function
194 | from the terra package.
195 | 
196 | ```{r}
197 | writeRaster(sm_raster, "sm_raster.tif")
198 | ```
199 | 
200 | ```{r, echo = FALSE, results='hide'}
201 | # cleanup
202 | file.remove("sm_raster.tif")
203 | ```
204 | 
205 | 
206 | ## Meta
207 | 
208 | * Please [report any issues or bugs](https://github.com/ropensci/smapr/issues),
209 | after reading our contribution [guidelines](CONTRIBUTING.md), and the
210 | [Contributor Code of Conduct](CONDUCT.md).
211 | * License: GPL-3
212 | * See `citation("smapr")` in R to cite this package in publications.
213 | 214 | [![ropensci_footer](https://ropensci.org/public_images/ropensci_footer.png)](https://ropensci.org) 215 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | smapr 2 | ================ 3 | 4 | [![codecov](https://codecov.io/gh/ropensci/smapr/branch/master/graph/badge.svg)](https://codecov.io/gh/ropensci/smapr) 5 | [![CRAN_Status_Badge](http://www.r-pkg.org/badges/version/smapr)](https://cran.r-project.org/package=smapr) 6 | [![lifecycle](https://img.shields.io/badge/lifecycle-maturing-blue.svg)](https://www.tidyverse.org/lifecycle/#maturing) 7 | [![](http://cranlogs.r-pkg.org/badges/grand-total/smapr)](http://cran.rstudio.com/web/packages/smapr/index.html) 8 | [![](https://badges.ropensci.org/231_status.svg)](https://github.com/ropensci/onboarding/issues/231) 9 | [![Project Status: Active – The project has reached a stable, usable 10 | state and is being actively 11 | developed.](http://www.repostatus.org/badges/latest/active.svg)](http://www.repostatus.org/#active) 12 | 13 | An R package for acquisition and processing of [NASA (Soil Moisture 14 | Active-Passive) SMAP data](http://smap.jpl.nasa.gov/) 15 | 16 | ## Installation 17 | 18 | To install smapr from CRAN: 19 | 20 | ``` r 21 | install.packages("smapr") 22 | ``` 23 | 24 | To install the development version from GitHub: 25 | 26 | ``` r 27 | # install.packages("devtools") 28 | devtools::install_github("ropensci/smapr") 29 | ``` 30 | 31 | #### Docker instructions (alternative to a local installation) 32 | 33 | If a local installation is not possible for some reason, we have made a 34 | Docker image available with smapr and all its dependencies. 35 | 36 | docker run -d -p 8787:8787 earthlab/smapr 37 | 38 | In a web browser, navigate to localhost:8787 and log in with username: 39 | rstudio, password: rstudio. 
40 | 41 | ## Authentication 42 | 43 | Access to the NASA SMAP data requires authentication through NASA’s 44 | Earthdata portal. If you do not already have a username and password 45 | through Earthdata, you can register for an account here: 46 | You cannot use this package without an 47 | Earthdata account. 48 | 49 | Once you have an account, you need to pass your Earthdata username 50 | (`ed_un`) and password (`ed_pw`) as environmental variables that can be 51 | read from within your R session. There are a couple of ways to do this: 52 | 53 | ### Recommended approach 54 | 55 | Use `set_smap_credentials('yourusername', 'yourpasswd')`. This will save 56 | your credentials by default, overwriting existing credentials if 57 | `overwrite = TRUE`. 58 | 59 | #### Alternative approaches 60 | 61 | - Use `Sys.setenv()` interactively in your R session to set your 62 | username and password (not including the `<` and `>`): 63 | 64 | ``` r 65 | Sys.setenv(ed_un = "", ed_pw = "") 66 | ``` 67 | 68 | - Create a text file `.Renviron` in your home directory, which contains 69 | your username and password. If you don’t know what your home directory 70 | is, execute `normalizePath("~/")` in the R console and it will be 71 | printed. Be sure to include a new line at the end of the file or R 72 | will fail silently when loading it. 73 | 74 | Example `.Renviron file` (note the new line at the end!): 75 | 76 | ed_un=slkdjfsldkjfs 77 | ed_pw=dlfkjDD124^ 78 | 79 | Once this file is created, restart your R session and you should now be 80 | able to access these environment variables (e.g., via 81 | `Sys.getenv("ed_un")`). 82 | 83 | # SMAP data products 84 | 85 | Multiple SMAP data products are provided by the NSIDC, and these 86 | products vary in the amount of processing. Currently, smapr primarily 87 | supports level 3 and level 4 data products, which represent global daily 88 | composite and global three hourly modeled data products, respectively. 
89 | There are a wide variety of data layers available in SMAP products, 90 | including surface soil moisture, root zone soil moisture, freeze/thaw 91 | status, surface temperature, vegetation water content, vegetation 92 | opacity, net ecosystem carbon exchange, soil temperature, and 93 | evapotranspiration. NSIDC provides documentation for all SMAP data 94 | products on their [website](https://nsidc.org/data/smap/smap-data.html), 95 | and we provide a summary of data products supported by smapr below. 96 | 97 | | Dataset id | Description | Resolution | 98 | |------------|-----------------------------------------------------|------------| 99 | | SPL2SMAP_S | SMAP/Sentinel-1 Radiometer/Radar Soil Moisture | 3 km | 100 | | SPL3FTA | Radar Northern Hemisphere Daily Freeze/Thaw State | 3 km | 101 | | SPL3SMA | Radar Global Daily Soil Moisture | 3 km | 102 | | SPL3SMP | Radiometer Global Soil Moisture | 36 km | 103 | | SPL3SMAP | Radar/Radiometer Global Soil Moisture | 9 km | 104 | | SPL4SMAU | Surface/Rootzone Soil Moisture Analysis Update | 9 km | 105 | | SPL4SMGP | Surface/Rootzone Soil Moisture Geophysical Data | 9 km | 106 | | SPL4SMLM | Surface/Rootzone Soil Moisture Land Model Constants | 9 km | 107 | | SPL4CMDL | Carbon Net Ecosystem Exchange | 9 km | 108 | 109 | ## Typical workflow 110 | 111 | At a high level, most workflows follow these steps: 112 | 113 | 1. Find SMAP data with `find_smap()` 114 | 2. Download data with `download_smap()` 115 | 3. List data contents with `list_smap()` 116 | 4. Extract data with `extract_smap()` 117 | 118 | Each of these steps are outlined below: 119 | 120 | ### Finding SMAP data 121 | 122 | Data are hosted on a server by the National Snow and Ice Data Center. 123 | The `find_smap()` function searches for specific data products and 124 | returns a data frame of available data. As data mature and pass checks, 125 | versions advance. At any specific time, not all versions of all datasets 126 | for all dates may exist. 
For the most up to date overview of dataset 127 | versions, see the NSIDC SMAP data version 128 | [webpage](https://nsidc.org/data/smap/smap-data.html). 129 | 130 | ``` r 131 | library(smapr) 132 | library(terra) 133 | #> terra 1.7.18 134 | available_data <- find_smap(id = "SPL3SMAP", date = "2015-05-25", version = 3) 135 | str(available_data) 136 | #> 'data.frame': 1 obs. of 3 variables: 137 | #> $ name: chr "SMAP_L3_SM_AP_20150525_R13080_001" 138 | #> $ date: Date, format: "2015-05-25" 139 | #> $ dir : chr "SPL3SMAP.003/2015.05.25/" 140 | ``` 141 | 142 | ### Downloading and inspecting SMAP data 143 | 144 | Given a data frame produced by `find_smap`, `download_smap` downloads 145 | the data onto the local file system. Unless a directory is specified as 146 | an argument, the data are stored in the user’s cache. 147 | 148 | ``` r 149 | downloads <- download_smap(available_data) 150 | #> Downloading https://n5eil01u.ecs.nsidc.org/SMAP/SPL3SMAP.003/2015.05.25/SMAP_L3_SM_AP_20150525_R13080_001.h5 151 | #> Downloading https://n5eil01u.ecs.nsidc.org/SMAP/SPL3SMAP.003/2015.05.25/SMAP_L3_SM_AP_20150525_R13080_001.qa 152 | #> Downloading https://n5eil01u.ecs.nsidc.org/SMAP/SPL3SMAP.003/2015.05.25/SMAP_L3_SM_AP_20150525_R13080_001.h5.iso.xml 153 | str(downloads) 154 | #> 'data.frame': 1 obs. of 4 variables: 155 | #> $ name : chr "SMAP_L3_SM_AP_20150525_R13080_001" 156 | #> $ date : Date, format: "2015-05-25" 157 | #> $ dir : chr "SPL3SMAP.003/2015.05.25/" 158 | #> $ local_dir: chr "~/.cache/smap" 159 | ``` 160 | 161 | The SMAP data are provided in HDF5 format, and in any one file there are 162 | actually multiple data sets, including metadata. The `list_smap` 163 | function allows users to inspect the contents of downloaded data at a 164 | high level (`all = FALSE`) or in depth (`all = TRUE`). 165 | 166 | ``` r 167 | list_smap(downloads, all = FALSE) 168 | #> $SMAP_L3_SM_AP_20150525_R13080_001 169 | #> name group otype dclass dim 170 | #> 1 Metadata . 
H5I_GROUP 171 | #> 2 Soil_Moisture_Retrieval_Data . H5I_GROUP 172 | ``` 173 | 174 | To see all of the data fields, set `all = TRUE`. 175 | 176 | ### Extracting gridded data products 177 | 178 | The `extract_smap` function extracts gridded data products (e.g., global 179 | soil moisture). If more than one file has been downloaded and passed 180 | into the first argument, `extract_smap` extracts data for each file 181 | 182 | ``` r 183 | sm_raster <- extract_smap(downloads, "Soil_Moisture_Retrieval_Data/soil_moisture") 184 | plot(sm_raster, main = "Level 3 soil moisture: May 25, 2015") 185 | ``` 186 | 187 | 188 | 189 | The path “Soil_Moisture_Retrieval_Data/soil_moisture” was determined 190 | from the output of `list_smap(downloads, all = TRUE)`, which lists all 191 | of the data contained in SMAP data files. 192 | 193 | ### Saving GeoTIFF output 194 | 195 | The data can be saved as a GeoTIFF using the `writeRaster` function from 196 | the terra pacakge. 197 | 198 | ``` r 199 | writeRaster(sm_raster, "sm_raster.tif") 200 | ``` 201 | 202 | ## Meta 203 | 204 | - Please [report any issues or 205 | bugs](https://github.com/ropensci/smapr/issues), after reading our 206 | contribution [guidelines](CONTRIBUTING.md), and the [Contributor Code 207 | of Conduct](CONDUCT.md). 208 | - License: GPL-3 209 | - See `citation("smapr")` in R to cite this package in publications. 
210 | 211 | [![ropensci_footer](https://ropensci.org/public_images/ropensci_footer.png)](https://ropensci.org) 212 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | 3 | coverage: 4 | status: 5 | project: 6 | default: 7 | target: auto 8 | threshold: 1% 9 | patch: 10 | default: 11 | target: auto 12 | threshold: 1% 13 | -------------------------------------------------------------------------------- /codemeta.json: -------------------------------------------------------------------------------- 1 | { 2 | "@context": "https://doi.org/10.5063/schema/codemeta-2.0", 3 | "@type": "SoftwareSourceCode", 4 | "identifier": "smapr", 5 | "description": " Facilitates programmatic access to NASA Soil Moisture Active Passive (SMAP) data with R. It includes functions to search for, acquire, and extract SMAP data.", 6 | "name": "smapr: Acquisition and Processing of NASA Soil Moisture Active-Passive (SMAP) Data", 7 | "relatedLink": ["https://docs.ropensci.org/smapr", "https://CRAN.R-project.org/package=smapr"], 8 | "codeRepository": "https://github.com/ropensci/smapr", 9 | "issueTracker": "https://github.com/ropensci/smapr/issues", 10 | "license": "https://spdx.org/licenses/GPL-3.0", 11 | "version": "0.2.1", 12 | "programmingLanguage": { 13 | "@type": "ComputerLanguage", 14 | "name": "R", 15 | "url": "https://r-project.org" 16 | }, 17 | "runtimePlatform": "R version 4.2.2 Patched (2022-11-10 r83330)", 18 | "provider": { 19 | "@id": "https://cran.r-project.org", 20 | "@type": "Organization", 21 | "name": "Comprehensive R Archive Network (CRAN)", 22 | "url": "https://cran.r-project.org" 23 | }, 24 | "author": [ 25 | { 26 | "@type": "Person", 27 | "givenName": "Maxwell", 28 | "familyName": "Joseph", 29 | "email": "maxwell.b.joseph@colorado.edu" 30 | }, 31 | { 32 | "@type": "Person", 33 | "givenName": "Matthew", 34 | "familyName": "Oakley", 
35 | "email": "matthew.oakley@colorado.edu" 36 | }, 37 | { 38 | "@type": "Person", 39 | "givenName": "Zachary", 40 | "familyName": "Schira", 41 | "email": "zasc3143@colorado.edu" 42 | } 43 | ], 44 | "maintainer": [ 45 | { 46 | "@type": "Person", 47 | "givenName": "Maxwell", 48 | "familyName": "Joseph", 49 | "email": "maxwell.b.joseph@colorado.edu" 50 | } 51 | ], 52 | "softwareSuggestions": [ 53 | { 54 | "@type": "SoftwareApplication", 55 | "identifier": "knitr", 56 | "name": "knitr", 57 | "provider": { 58 | "@id": "https://cran.r-project.org", 59 | "@type": "Organization", 60 | "name": "Comprehensive R Archive Network (CRAN)", 61 | "url": "https://cran.r-project.org" 62 | }, 63 | "sameAs": "https://CRAN.R-project.org/package=knitr" 64 | }, 65 | { 66 | "@type": "SoftwareApplication", 67 | "identifier": "rmarkdown", 68 | "name": "rmarkdown", 69 | "provider": { 70 | "@id": "https://cran.r-project.org", 71 | "@type": "Organization", 72 | "name": "Comprehensive R Archive Network (CRAN)", 73 | "url": "https://cran.r-project.org" 74 | }, 75 | "sameAs": "https://CRAN.R-project.org/package=rmarkdown" 76 | }, 77 | { 78 | "@type": "SoftwareApplication", 79 | "identifier": "roxygen2", 80 | "name": "roxygen2", 81 | "provider": { 82 | "@id": "https://cran.r-project.org", 83 | "@type": "Organization", 84 | "name": "Comprehensive R Archive Network (CRAN)", 85 | "url": "https://cran.r-project.org" 86 | }, 87 | "sameAs": "https://CRAN.R-project.org/package=roxygen2" 88 | }, 89 | { 90 | "@type": "SoftwareApplication", 91 | "identifier": "testthat", 92 | "name": "testthat", 93 | "provider": { 94 | "@id": "https://cran.r-project.org", 95 | "@type": "Organization", 96 | "name": "Comprehensive R Archive Network (CRAN)", 97 | "url": "https://cran.r-project.org" 98 | }, 99 | "sameAs": "https://CRAN.R-project.org/package=testthat" 100 | }, 101 | { 102 | "@type": "SoftwareApplication", 103 | "identifier": "utils", 104 | "name": "utils" 105 | }, 106 | { 107 | "@type": "SoftwareApplication", 
108 | "identifier": "covr", 109 | "name": "covr", 110 | "provider": { 111 | "@id": "https://cran.r-project.org", 112 | "@type": "Organization", 113 | "name": "Comprehensive R Archive Network (CRAN)", 114 | "url": "https://cran.r-project.org" 115 | }, 116 | "sameAs": "https://CRAN.R-project.org/package=covr" 117 | } 118 | ], 119 | "softwareRequirements": { 120 | "1": { 121 | "@type": "SoftwareApplication", 122 | "identifier": "R", 123 | "name": "R", 124 | "version": ">= 3.2.5" 125 | }, 126 | "2": { 127 | "@type": "SoftwareApplication", 128 | "identifier": "hdf5r", 129 | "name": "hdf5r", 130 | "provider": { 131 | "@id": "https://cran.r-project.org", 132 | "@type": "Organization", 133 | "name": "Comprehensive R Archive Network (CRAN)", 134 | "url": "https://cran.r-project.org" 135 | }, 136 | "sameAs": "https://CRAN.R-project.org/package=hdf5r" 137 | }, 138 | "3": { 139 | "@type": "SoftwareApplication", 140 | "identifier": "httr", 141 | "name": "httr", 142 | "version": ">= 1.1.0", 143 | "provider": { 144 | "@id": "https://cran.r-project.org", 145 | "@type": "Organization", 146 | "name": "Comprehensive R Archive Network (CRAN)", 147 | "url": "https://cran.r-project.org" 148 | }, 149 | "sameAs": "https://CRAN.R-project.org/package=httr" 150 | }, 151 | "4": { 152 | "@type": "SoftwareApplication", 153 | "identifier": "rappdirs", 154 | "name": "rappdirs", 155 | "version": ">= 0.3.1", 156 | "provider": { 157 | "@id": "https://cran.r-project.org", 158 | "@type": "Organization", 159 | "name": "Comprehensive R Archive Network (CRAN)", 160 | "url": "https://cran.r-project.org" 161 | }, 162 | "sameAs": "https://CRAN.R-project.org/package=rappdirs" 163 | }, 164 | "5": { 165 | "@type": "SoftwareApplication", 166 | "identifier": "rvest", 167 | "name": "rvest", 168 | "provider": { 169 | "@id": "https://cran.r-project.org", 170 | "@type": "Organization", 171 | "name": "Comprehensive R Archive Network (CRAN)", 172 | "url": "https://cran.r-project.org" 173 | }, 174 | "sameAs": 
"https://CRAN.R-project.org/package=rvest" 175 | }, 176 | "6": { 177 | "@type": "SoftwareApplication", 178 | "identifier": "terra", 179 | "name": "terra", 180 | "provider": { 181 | "@id": "https://cran.r-project.org", 182 | "@type": "Organization", 183 | "name": "Comprehensive R Archive Network (CRAN)", 184 | "url": "https://cran.r-project.org" 185 | }, 186 | "sameAs": "https://CRAN.R-project.org/package=terra" 187 | }, 188 | "7": { 189 | "@type": "SoftwareApplication", 190 | "identifier": "xml2", 191 | "name": "xml2", 192 | "provider": { 193 | "@id": "https://cran.r-project.org", 194 | "@type": "Organization", 195 | "name": "Comprehensive R Archive Network (CRAN)", 196 | "url": "https://cran.r-project.org" 197 | }, 198 | "sameAs": "https://CRAN.R-project.org/package=xml2" 199 | }, 200 | "SystemRequirements": null 201 | }, 202 | "fileSize": "473.659KB", 203 | "releaseNotes": "https://github.com/ropensci/smapr/blob/master/NEWS.md", 204 | "readme": "https://github.com/ropensci/smapr/blob/master/README.md", 205 | "contIntegration": "https://codecov.io/gh/ropensci/smapr", 206 | "developmentStatus": ["https://www.tidyverse.org/lifecycle/#maturing", "http://www.repostatus.org/#active"], 207 | "review": { 208 | "@type": "Review", 209 | "url": "https://github.com/ropensci/software-review/issues/231", 210 | "provider": "https://ropensci.org" 211 | }, 212 | "keywords": ["nasa", "smap-data", "raster", "extract-data", "acquisition", "soil-moisture", "soil-moisture-sensor", "soil-mapping", "peer-reviewed", "r", "r-package", "rstats"] 213 | } 214 | -------------------------------------------------------------------------------- /cran-comments.md: -------------------------------------------------------------------------------- 1 | ## Test environments 2 | * Ubuntu Linux 14.04 (on travis-ci), R-release, R-devel, R-oldrel 3 | * Windows Server Windows Server 2012 R2 x64 (on appveyor) 4 | * Debian Linux, R-release, GCC (on r-hub) 5 | * Ubuntu Linux 16.04 LTS, R-devel, GCC (on r-hub) 
6 | * Ubuntu Linux 16.04 LTS, R-release, GCC (on r-hub) 7 | 8 | ## R CMD check results 9 | There were no ERRORs or WARNINGs. 10 | 11 | Possibly mis-spelled words in DESCRIPTION: 12 | SMAP 13 | 14 | SMAP is not a misspelling, it's an acronym for Soil Moisture Active Passive. 15 | 16 | ## Downstream dependencies 17 | 18 | There are currently no downstream dependencies for this package. 19 | -------------------------------------------------------------------------------- /docs/CONDUCT.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Contributor Code of Conduct • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 116 | 117 |
118 | 119 |

As contributors and maintainers of this project, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities.

120 |

We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, or religion.

121 |

Examples of unacceptable behavior by participants include the use of sexual language or imagery, derogatory comments or personal attacks, trolling, public or private harassment, insults, or other unprofessional conduct.

122 |

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct. Project maintainers who do not follow the Code of Conduct may be removed from the project team.

123 |

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by opening an issue or contacting one or more of the project maintainers.

124 |

This Code of Conduct is adapted from the Contributor Covenant (http://contributor-covenant.org), version 1.0.0, available at http://contributor-covenant.org/version/1/0/0/

125 |
126 | 127 |
128 | 129 |
130 | 131 | 132 |
133 | 136 | 137 |
138 |

Site built with pkgdown 1.3.0.

139 |
140 |
141 |
142 | 143 | 144 | 145 | 146 | 147 | 148 | -------------------------------------------------------------------------------- /docs/CONTRIBUTING.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | CONTRIBUTING • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 116 | 117 |
118 | 119 |
120 |

121 | Please contribute!

122 |

We love collaboration.

123 |
124 |
125 |

126 | Found a Bug?

127 |
    128 |
  • Submit an issue on our Issues page here.
  • 129 |
130 |
131 |
132 |

133 | Code contributions?

134 |
    135 |
  • 136 | Fork this repo to your Github account.
  • 137 |
  • 138 | Clone your version on your account down to your machine from your account, e.g,. git clone https://github.com/<yourgithubusername>/smapr.git.
  • 139 |
  • Make sure to track upstream progress (i.e., on our version of smapr at earthlab/smapr) by doing git remote add upstream https://github.com/earthlab/smapr.git. Before making changes, make sure to pull changes in from upstream by doing either git fetch upstream and then merging later, or git pull upstream to fetch and merge in one step.
  • 140 |
  • Make your changes (bonus points for making changes on a new branch).
  • 141 |
  • 142 | Push up to your account.
  • 143 |
  • Submit a pull request to home base at earthlab/smapr.
  • 144 |
145 |

Please follow this styleguide for your contributions.

146 |
147 |
148 |

149 | Questions?

150 |

Get in touch: maxwell.b.joseph@colorado.edu

151 |
152 |
153 |

154 | Thanks for contributing!

155 |
156 |
157 | 158 |
159 | 160 |
161 | 162 | 163 |
164 | 167 | 168 |
169 |

Site built with pkgdown 1.3.0.

170 |
171 |
172 |
173 | 174 | 175 | 176 | 177 | 178 | 179 | -------------------------------------------------------------------------------- /docs/articles/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Articles • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 116 | 117 |
118 |

All vignettes

119 |

120 | 121 | 124 |
125 |
126 |
127 | 128 |
129 | 132 | 133 |
134 |

Site built with pkgdown 1.3.0.

135 |
136 |
137 |
138 | 139 | 140 | 141 | 142 | 143 | 144 | -------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/crop-raster-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/crop-raster-1.png -------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/get-mean-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/get-mean-1.png -------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/inverse-mask-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/inverse-mask-1.png -------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/mask-raster-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/mask-raster-1.png -------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/plot-raster-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/plot-raster-1.png 
-------------------------------------------------------------------------------- /docs/articles/smapr-intro_files/figure-html/surface-vs-rootzone-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/articles/smapr-intro_files/figure-html/surface-vs-rootzone-1.png -------------------------------------------------------------------------------- /docs/authors.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Authors • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 116 | 117 |
    118 |
  • 119 |

    Maxwell Joseph. Author, maintainer. 120 |

    121 |
  • 122 |
  • 123 |

    Matthew Oakley. Author. 124 |

    125 |
  • 126 |
  • 127 |

    Zachary Schira. Author. 128 |

    129 |
  • 130 |
131 | 132 |
133 | 134 |
135 | 136 | 137 |
138 | 141 | 142 |
143 |

Site built with pkgdown 1.3.0.

144 |
145 |
146 |
147 | 148 | 149 | 150 | 151 | 152 | 153 | -------------------------------------------------------------------------------- /docs/docsearch.css: -------------------------------------------------------------------------------- 1 | /* Docsearch -------------------------------------------------------------- */ 2 | /* 3 | Source: https://github.com/algolia/docsearch/ 4 | License: MIT 5 | */ 6 | 7 | .algolia-autocomplete { 8 | display: block; 9 | -webkit-box-flex: 1; 10 | -ms-flex: 1; 11 | flex: 1 12 | } 13 | 14 | .algolia-autocomplete .ds-dropdown-menu { 15 | width: 100%; 16 | min-width: none; 17 | max-width: none; 18 | padding: .75rem 0; 19 | background-color: #fff; 20 | background-clip: padding-box; 21 | border: 1px solid rgba(0, 0, 0, .1); 22 | box-shadow: 0 .5rem 1rem rgba(0, 0, 0, .175); 23 | } 24 | 25 | @media (min-width:768px) { 26 | .algolia-autocomplete .ds-dropdown-menu { 27 | width: 175% 28 | } 29 | } 30 | 31 | .algolia-autocomplete .ds-dropdown-menu::before { 32 | display: none 33 | } 34 | 35 | .algolia-autocomplete .ds-dropdown-menu [class^=ds-dataset-] { 36 | padding: 0; 37 | background-color: rgb(255,255,255); 38 | border: 0; 39 | max-height: 80vh; 40 | } 41 | 42 | .algolia-autocomplete .ds-dropdown-menu .ds-suggestions { 43 | margin-top: 0 44 | } 45 | 46 | .algolia-autocomplete .algolia-docsearch-suggestion { 47 | padding: 0; 48 | overflow: visible 49 | } 50 | 51 | .algolia-autocomplete .algolia-docsearch-suggestion--category-header { 52 | padding: .125rem 1rem; 53 | margin-top: 0; 54 | font-size: 1.3em; 55 | font-weight: 500; 56 | color: #00008B; 57 | border-bottom: 0 58 | } 59 | 60 | .algolia-autocomplete .algolia-docsearch-suggestion--wrapper { 61 | float: none; 62 | padding-top: 0 63 | } 64 | 65 | .algolia-autocomplete .algolia-docsearch-suggestion--subcategory-column { 66 | float: none; 67 | width: auto; 68 | padding: 0; 69 | text-align: left 70 | } 71 | 72 | .algolia-autocomplete .algolia-docsearch-suggestion--content { 73 | float: none; 74 | 
width: auto; 75 | padding: 0 76 | } 77 | 78 | .algolia-autocomplete .algolia-docsearch-suggestion--content::before { 79 | display: none 80 | } 81 | 82 | .algolia-autocomplete .ds-suggestion:not(:first-child) .algolia-docsearch-suggestion--category-header { 83 | padding-top: .75rem; 84 | margin-top: .75rem; 85 | border-top: 1px solid rgba(0, 0, 0, .1) 86 | } 87 | 88 | .algolia-autocomplete .ds-suggestion .algolia-docsearch-suggestion--subcategory-column { 89 | display: block; 90 | padding: .1rem 1rem; 91 | margin-bottom: 0.1; 92 | font-size: 1.0em; 93 | font-weight: 400 94 | /* display: none */ 95 | } 96 | 97 | .algolia-autocomplete .algolia-docsearch-suggestion--title { 98 | display: block; 99 | padding: .25rem 1rem; 100 | margin-bottom: 0; 101 | font-size: 0.9em; 102 | font-weight: 400 103 | } 104 | 105 | .algolia-autocomplete .algolia-docsearch-suggestion--text { 106 | padding: 0 1rem .5rem; 107 | margin-top: -.25rem; 108 | font-size: 0.8em; 109 | font-weight: 400; 110 | line-height: 1.25 111 | } 112 | 113 | .algolia-autocomplete .algolia-docsearch-footer { 114 | width: 110px; 115 | height: 20px; 116 | z-index: 3; 117 | margin-top: 10.66667px; 118 | float: right; 119 | font-size: 0; 120 | line-height: 0; 121 | } 122 | 123 | .algolia-autocomplete .algolia-docsearch-footer--logo { 124 | background-image: url("data:image/svg+xml;utf8,"); 125 | background-repeat: no-repeat; 126 | background-position: 50%; 127 | background-size: 100%; 128 | overflow: hidden; 129 | text-indent: -9000px; 130 | width: 100%; 131 | height: 100%; 132 | display: block; 133 | transform: translate(-8px); 134 | } 135 | 136 | .algolia-autocomplete .algolia-docsearch-suggestion--highlight { 137 | color: #FF8C00; 138 | background: rgba(232, 189, 54, 0.1) 139 | } 140 | 141 | 142 | .algolia-autocomplete .algolia-docsearch-suggestion--text .algolia-docsearch-suggestion--highlight { 143 | box-shadow: inset 0 -2px 0 0 rgba(105, 105, 105, .5) 144 | } 145 | 146 | .algolia-autocomplete 
.ds-suggestion.ds-cursor .algolia-docsearch-suggestion--content { 147 | background-color: rgba(192, 192, 192, .15) 148 | } 149 | -------------------------------------------------------------------------------- /docs/docsearch.js: -------------------------------------------------------------------------------- 1 | $(function() { 2 | 3 | // register a handler to move the focus to the search bar 4 | // upon pressing shift + "/" (i.e. "?") 5 | $(document).on('keydown', function(e) { 6 | if (e.shiftKey && e.keyCode == 191) { 7 | e.preventDefault(); 8 | $("#search-input").focus(); 9 | } 10 | }); 11 | 12 | $(document).ready(function() { 13 | // do keyword highlighting 14 | /* modified from https://jsfiddle.net/julmot/bL6bb5oo/ */ 15 | var mark = function() { 16 | 17 | var referrer = document.URL ; 18 | var paramKey = "q" ; 19 | 20 | if (referrer.indexOf("?") !== -1) { 21 | var qs = referrer.substr(referrer.indexOf('?') + 1); 22 | var qs_noanchor = qs.split('#')[0]; 23 | var qsa = qs_noanchor.split('&'); 24 | var keyword = ""; 25 | 26 | for (var i = 0; i < qsa.length; i++) { 27 | var currentParam = qsa[i].split('='); 28 | 29 | if (currentParam.length !== 2) { 30 | continue; 31 | } 32 | 33 | if (currentParam[0] == paramKey) { 34 | keyword = decodeURIComponent(currentParam[1].replace(/\+/g, "%20")); 35 | } 36 | } 37 | 38 | if (keyword !== "") { 39 | $(".contents").unmark({ 40 | done: function() { 41 | $(".contents").mark(keyword); 42 | } 43 | }); 44 | } 45 | } 46 | }; 47 | 48 | mark(); 49 | }); 50 | }); 51 | 52 | /* Search term highlighting ------------------------------*/ 53 | 54 | function matchedWords(hit) { 55 | var words = []; 56 | 57 | var hierarchy = hit._highlightResult.hierarchy; 58 | // loop to fetch from lvl0, lvl1, etc. 
59 | for (var idx in hierarchy) { 60 | words = words.concat(hierarchy[idx].matchedWords); 61 | } 62 | 63 | var content = hit._highlightResult.content; 64 | if (content) { 65 | words = words.concat(content.matchedWords); 66 | } 67 | 68 | // return unique words 69 | var words_uniq = [...new Set(words)]; 70 | return words_uniq; 71 | } 72 | 73 | function updateHitURL(hit) { 74 | 75 | var words = matchedWords(hit); 76 | var url = ""; 77 | 78 | if (hit.anchor) { 79 | url = hit.url_without_anchor + '?q=' + escape(words.join(" ")) + '#' + hit.anchor; 80 | } else { 81 | url = hit.url + '?q=' + escape(words.join(" ")); 82 | } 83 | 84 | return url; 85 | } 86 | -------------------------------------------------------------------------------- /docs/issue_template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Before posting • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 116 | 117 |
118 | 119 |

The GitHub issue tracker is intended for bug reports and feature requests. Do not post your NASA Earthdata username or password with your issue!

120 |

When you post, please include a minimal reproducible example of the problem and/or desired behavior, if applicable.

121 |
122 | 123 |
124 | 125 |
126 | 127 | 128 |
129 | 132 | 133 |
134 |

Site built with pkgdown 1.3.0.

135 |
136 |
137 |
138 | 139 | 140 | 141 | 142 | 143 | 144 | -------------------------------------------------------------------------------- /docs/link.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 8 | 12 | 13 | -------------------------------------------------------------------------------- /docs/news/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Changelog • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
51 |
52 | 107 | 108 | 109 |
110 | 111 |
112 |
113 | 117 | 118 |
119 |

120 | smapr 0.2.1 Unreleased 121 |

122 |
    123 |
  • patch to skip test on cran that requires internet
  • 124 |
  • updated SMAP data versions
  • 125 |
126 |
127 |
128 |

129 | smapr 0.2.0 2018-09-24 130 |

131 |
    132 |
  • added set_smap_credentials() function for NASA EarthData portal
  • 133 |
  • expanded vignettes to include cropping, masking, etc.
  • 134 |
  • added verbose argument to download_smap
  • 135 |
  • performance improvements to extract_smap()
  • 136 |
137 |
138 |
139 |

140 | smapr 0.1.2 2018-06-19 141 |

142 |
    143 |
  • added support for SMAP/sentinel hybrid soil moisture product
  • 144 |
  • added a code of conduct
  • 145 |
  • added a vignette to show a complete workflow
  • 146 |
147 |
148 |
149 |

150 | smapr 0.1.1 2017-10-20 151 |

152 |
    153 |
  • added patch for searching date ranges containing missing collections
  • 154 |
  • added unit tests for user specified download directories
  • 155 |
  • updates to examples for new data versions
  • 156 |
  • adding a CONTRIBUTING.md file
  • 157 |
158 |
159 |
160 |

161 | smapr 0.1.0 2017-02-21 162 |

163 |
    164 |
  • updating remote data location (previous ftp server was removed)
  • 165 |
  • using NASA Earthdata authentication
  • 166 |
167 |
168 |
169 |

170 | smapr 0.0.1 2016-10-05 171 |

172 |
    173 |
  • first submission to CRAN
  • 174 |
175 |
176 |
177 | 178 | 191 | 192 |
193 | 194 |
195 | 198 | 199 |
200 |

Site built with pkgdown 1.3.0.

201 |
202 |
203 |
204 | 205 | 206 | 207 | 208 | 209 | 210 | -------------------------------------------------------------------------------- /docs/pkgdown.css: -------------------------------------------------------------------------------- 1 | /* Sticky footer */ 2 | 3 | /** 4 | * Basic idea: https://philipwalton.github.io/solved-by-flexbox/demos/sticky-footer/ 5 | * Details: https://github.com/philipwalton/solved-by-flexbox/blob/master/assets/css/components/site.css 6 | * 7 | * .Site -> body > .container 8 | * .Site-content -> body > .container .row 9 | * .footer -> footer 10 | * 11 | * Key idea seems to be to ensure that .container and __all its parents__ 12 | * have height set to 100% 13 | * 14 | */ 15 | 16 | html, body { 17 | height: 100%; 18 | } 19 | 20 | body > .container { 21 | display: flex; 22 | height: 100%; 23 | flex-direction: column; 24 | 25 | padding-top: 60px; 26 | } 27 | 28 | body > .container .row { 29 | flex: 1 0 auto; 30 | } 31 | 32 | footer { 33 | margin-top: 45px; 34 | padding: 35px 0 36px; 35 | border-top: 1px solid #e5e5e5; 36 | color: #666; 37 | display: flex; 38 | flex-shrink: 0; 39 | } 40 | footer p { 41 | margin-bottom: 0; 42 | } 43 | footer div { 44 | flex: 1; 45 | } 46 | footer .pkgdown { 47 | text-align: right; 48 | } 49 | footer p { 50 | margin-bottom: 0; 51 | } 52 | 53 | img.icon { 54 | float: right; 55 | } 56 | 57 | img { 58 | max-width: 100%; 59 | } 60 | 61 | /* Fix bug in bootstrap (only seen in firefox) */ 62 | summary { 63 | display: list-item; 64 | } 65 | 66 | /* Typographic tweaking ---------------------------------*/ 67 | 68 | .contents .page-header { 69 | margin-top: calc(-60px + 1em); 70 | } 71 | 72 | /* Section anchors ---------------------------------*/ 73 | 74 | a.anchor { 75 | margin-left: -30px; 76 | display:inline-block; 77 | width: 30px; 78 | height: 30px; 79 | visibility: hidden; 80 | 81 | background-image: url(./link.svg); 82 | background-repeat: no-repeat; 83 | background-size: 20px 20px; 84 | background-position: center center; 
85 | } 86 | 87 | .hasAnchor:hover a.anchor { 88 | visibility: visible; 89 | } 90 | 91 | @media (max-width: 767px) { 92 | .hasAnchor:hover a.anchor { 93 | visibility: hidden; 94 | } 95 | } 96 | 97 | 98 | /* Fixes for fixed navbar --------------------------*/ 99 | 100 | .contents h1, .contents h2, .contents h3, .contents h4 { 101 | padding-top: 60px; 102 | margin-top: -40px; 103 | } 104 | 105 | /* Static header placement on mobile devices */ 106 | @media (max-width: 767px) { 107 | .navbar-fixed-top { 108 | position: absolute; 109 | } 110 | .navbar { 111 | padding: 0; 112 | } 113 | } 114 | 115 | 116 | /* Sidebar --------------------------*/ 117 | 118 | #sidebar { 119 | margin-top: 30px; 120 | } 121 | #sidebar h2 { 122 | font-size: 1.5em; 123 | margin-top: 1em; 124 | } 125 | 126 | #sidebar h2:first-child { 127 | margin-top: 0; 128 | } 129 | 130 | #sidebar .list-unstyled li { 131 | margin-bottom: 0.5em; 132 | } 133 | 134 | .orcid { 135 | height: 16px; 136 | vertical-align: middle; 137 | } 138 | 139 | /* Reference index & topics ----------------------------------------------- */ 140 | 141 | .ref-index th {font-weight: normal;} 142 | 143 | .ref-index td {vertical-align: top;} 144 | .ref-index .icon {width: 40px;} 145 | .ref-index .alias {width: 40%;} 146 | .ref-index-icons .alias {width: calc(40% - 40px);} 147 | .ref-index .title {width: 60%;} 148 | 149 | .ref-arguments th {text-align: right; padding-right: 10px;} 150 | .ref-arguments th, .ref-arguments td {vertical-align: top;} 151 | .ref-arguments .name {width: 20%;} 152 | .ref-arguments .desc {width: 80%;} 153 | 154 | /* Nice scrolling for wide elements --------------------------------------- */ 155 | 156 | table { 157 | display: block; 158 | overflow: auto; 159 | } 160 | 161 | /* Syntax highlighting ---------------------------------------------------- */ 162 | 163 | pre { 164 | word-wrap: normal; 165 | word-break: normal; 166 | border: 1px solid #eee; 167 | } 168 | 169 | pre, code { 170 | background-color: #f8f8f8; 
171 | color: #333; 172 | } 173 | 174 | pre code { 175 | overflow: auto; 176 | word-wrap: normal; 177 | white-space: pre; 178 | } 179 | 180 | pre .img { 181 | margin: 5px 0; 182 | } 183 | 184 | pre .img img { 185 | background-color: #fff; 186 | display: block; 187 | height: auto; 188 | } 189 | 190 | code a, pre a { 191 | color: #375f84; 192 | } 193 | 194 | a.sourceLine:hover { 195 | text-decoration: none; 196 | } 197 | 198 | .fl {color: #1514b5;} 199 | .fu {color: #000000;} /* function */ 200 | .ch,.st {color: #036a07;} /* string */ 201 | .kw {color: #264D66;} /* keyword */ 202 | .co {color: #888888;} /* comment */ 203 | 204 | .message { color: black; font-weight: bolder;} 205 | .error { color: orange; font-weight: bolder;} 206 | .warning { color: #6A0366; font-weight: bolder;} 207 | 208 | /* Clipboard --------------------------*/ 209 | 210 | .hasCopyButton { 211 | position: relative; 212 | } 213 | 214 | .btn-copy-ex { 215 | position: absolute; 216 | right: 0; 217 | top: 0; 218 | visibility: hidden; 219 | } 220 | 221 | .hasCopyButton:hover button.btn-copy-ex { 222 | visibility: visible; 223 | } 224 | 225 | /* mark.js ----------------------------*/ 226 | 227 | mark { 228 | background-color: rgba(255, 255, 51, 0.5); 229 | border-bottom: 2px solid rgba(255, 153, 51, 0.3); 230 | padding: 1px; 231 | } 232 | 233 | /* vertical spacing after htmlwidgets */ 234 | .html-widget { 235 | margin-bottom: 10px; 236 | } 237 | -------------------------------------------------------------------------------- /docs/pkgdown.js: -------------------------------------------------------------------------------- 1 | /* http://gregfranko.com/blog/jquery-best-practices/ */ 2 | (function($) { 3 | $(function() { 4 | 5 | $("#sidebar") 6 | .stick_in_parent({offset_top: 40}) 7 | .on('sticky_kit:bottom', function(e) { 8 | $(this).parent().css('position', 'static'); 9 | }) 10 | .on('sticky_kit:unbottom', function(e) { 11 | $(this).parent().css('position', 'relative'); 12 | }); 13 | 14 | 
$('body').scrollspy({ 15 | target: '#sidebar', 16 | offset: 60 17 | }); 18 | 19 | $('[data-toggle="tooltip"]').tooltip(); 20 | 21 | var cur_path = paths(location.pathname); 22 | var links = $("#navbar ul li a"); 23 | var max_length = -1; 24 | var pos = -1; 25 | for (var i = 0; i < links.length; i++) { 26 | if (links[i].getAttribute("href") === "#") 27 | continue; 28 | // Ignore external links 29 | if (links[i].host !== location.host) 30 | continue; 31 | 32 | var nav_path = paths(links[i].pathname); 33 | 34 | var length = prefix_length(nav_path, cur_path); 35 | if (length > max_length) { 36 | max_length = length; 37 | pos = i; 38 | } 39 | } 40 | 41 | // Add class to parent
  • , and enclosing
  • if in dropdown 42 | if (pos >= 0) { 43 | var menu_anchor = $(links[pos]); 44 | menu_anchor.parent().addClass("active"); 45 | menu_anchor.closest("li.dropdown").addClass("active"); 46 | } 47 | }); 48 | 49 | function paths(pathname) { 50 | var pieces = pathname.split("/"); 51 | pieces.shift(); // always starts with / 52 | 53 | var end = pieces[pieces.length - 1]; 54 | if (end === "index.html" || end === "") 55 | pieces.pop(); 56 | return(pieces); 57 | } 58 | 59 | // Returns -1 if not found 60 | function prefix_length(needle, haystack) { 61 | if (needle.length > haystack.length) 62 | return(-1); 63 | 64 | // Special case for length-0 haystack, since for loop won't run 65 | if (haystack.length === 0) { 66 | return(needle.length === 0 ? 0 : -1); 67 | } 68 | 69 | for (var i = 0; i < haystack.length; i++) { 70 | if (needle[i] != haystack[i]) 71 | return(i); 72 | } 73 | 74 | return(haystack.length); 75 | } 76 | 77 | /* Clipboard --------------------------*/ 78 | 79 | function changeTooltipMessage(element, msg) { 80 | var tooltipOriginalTitle=element.getAttribute('data-original-title'); 81 | element.setAttribute('data-original-title', msg); 82 | $(element).tooltip('show'); 83 | element.setAttribute('data-original-title', tooltipOriginalTitle); 84 | } 85 | 86 | if(ClipboardJS.isSupported()) { 87 | $(document).ready(function() { 88 | var copyButton = ""; 89 | 90 | $(".examples, div.sourceCode").addClass("hasCopyButton"); 91 | 92 | // Insert copy buttons: 93 | $(copyButton).prependTo(".hasCopyButton"); 94 | 95 | // Initialize tooltips: 96 | $('.btn-copy-ex').tooltip({container: 'body'}); 97 | 98 | // Initialize clipboard: 99 | var clipboardBtnCopies = new ClipboardJS('[data-clipboard-copy]', { 100 | text: function(trigger) { 101 | return trigger.parentNode.textContent; 102 | } 103 | }); 104 | 105 | clipboardBtnCopies.on('success', function(e) { 106 | changeTooltipMessage(e.trigger, 'Copied!'); 107 | e.clearSelection(); 108 | }); 109 | 110 | clipboardBtnCopies.on('error', 
function() { 111 | changeTooltipMessage(e.trigger,'Press Ctrl+C or Command+C to copy'); 112 | }); 113 | }); 114 | } 115 | })(window.jQuery || window.$) 116 | -------------------------------------------------------------------------------- /docs/pkgdown.yml: -------------------------------------------------------------------------------- 1 | pandoc: '2.5' 2 | pkgdown: 1.3.0 3 | pkgdown_sha: ~ 4 | articles: 5 | smapr-intro: smapr-intro.html 6 | 7 | -------------------------------------------------------------------------------- /docs/reference/download_smap.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Download SMAP data — download_smap • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 48 | 49 | 50 | 51 | 52 | 53 |
    54 |
    55 | 110 | 111 | 112 |
    113 | 114 |
    115 |
    116 | 121 | 122 |
    123 | 124 |

    This function downloads SMAP data in HDF5 format.

    125 | 126 |
    127 | 128 |
    download_smap(files, directory = NULL, overwrite = TRUE,
    129 |   verbose = TRUE)
    130 | 131 |

    Arguments

    132 | 133 | 134 | 135 | 136 | 138 | 139 | 140 | 141 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 153 | 154 |
    files

    A data.frame produced by find_smap() 137 | that specifies data files to download.

    directory

    A local directory path in which to save data, specified as a 142 | character string. If left as NULL, data are stored in a user's cache 143 | directory.

    overwrite

    TRUE or FALSE: should existing data files be overwritten?

    verbose

    TRUE or FALSE: should messages be printed to indicate that 152 | files are being downloaded?

    155 | 156 |

    Value

    157 | 158 |

    Returns a data.frame that appends a column called 159 | local_dir to the input data frame, which consists of a character 160 | vector specifying the local directory containing the downloaded files.

    161 | 162 |

    Details

    163 | 164 |

    This function requires a username and password from NASA's Earthdata portal. 165 | If you have an Earthdata username and password, pass them in using the 166 | set_smap_credentials() function.

    167 |

    If you do not yet have a username and password, register for one here: 168 | https://urs.earthdata.nasa.gov/

    169 | 170 | 171 |

    Examples

    172 |
    # NOT RUN {
    173 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4)
    174 | # files[1, ] refers to the first available data file
    175 | downloads <- download_smap(files[1, ])
    176 | # }
    177 |
    178 | 191 |
    192 | 193 |
    194 | 197 | 198 |
    199 |

    Site built with pkgdown 1.3.0.

    200 |
    201 |
    202 |
    203 | 204 | 205 | 206 | 207 | 208 | 209 | -------------------------------------------------------------------------------- /docs/reference/extract_smap.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Extracts contents of SMAP data — extract_smap • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 48 | 49 | 50 | 51 | 52 | 53 |
    54 |
    55 | 110 | 111 | 112 |
    113 | 114 |
    115 |
    116 | 121 | 122 |
    123 | 124 |

    Extracts datasets from SMAP data files.

    125 | 126 |
    127 | 128 |
    extract_smap(data, name, in_memory = FALSE)
    129 | 130 |

    Arguments

    131 | 132 | 133 | 134 | 135 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 147 | 148 |
    data

    A data frame produced by download_smap() that specifies 136 | input files from which to extract data.

    name

    The path in the HDF5 file pointing to data to extract.

    in_memory

    Logical. Should the result be stored in memory? If not, then 145 | raster objects are stored on disk in the cache directory. By default 146 | the result is stored on disk.

    149 | 150 |

    Value

    151 | 152 |

    Returns a RasterStack object.

    153 | 154 |

    Details

    155 | 156 |

    The name argument must refer specifically to the 157 | group and name within group for the input file, such as can be obtained with 158 | list_smap(). This function will extract that particular dataset, 159 | returning a Raster object.

    160 | 161 | 162 |

    Examples

    163 |
    # NOT RUN {
    164 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4)
    165 | downloads <- download_smap(files[1, ])
    166 | sm_raster <- extract_smap(downloads, name = '/Geophysical_Data/sm_surface')
    167 | # }
    168 |
    169 | 182 |
    183 | 184 |
    185 | 188 | 189 |
    190 |

    Site built with pkgdown 1.3.0.

    191 |
    192 |
    193 |
    194 | 195 | 196 | 197 | 198 | 199 | 200 | -------------------------------------------------------------------------------- /docs/reference/figures/extract-data-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/docs/reference/figures/extract-data-1.png -------------------------------------------------------------------------------- /docs/reference/find_smap.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Find SMAP data — find_smap • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 49 | 50 | 51 | 52 | 53 | 54 |
    55 |
    56 | 111 | 112 | 113 |
    114 | 115 |
    116 |
    117 | 122 | 123 |
    124 | 125 |

    This function searches for SMAP data on a specific date, returning a 126 | data.frame describing available data.

    127 | 128 |
    129 | 130 |
    find_smap(id, dates, version)
    131 | 132 |

    Arguments

    133 | 134 | 135 | 136 | 137 | 141 | 142 | 143 | 144 | 149 | 150 | 151 | 152 | 155 | 156 |
    id

    A character string that refers to a specific SMAP dataset, e.g., 138 | "SPL4SMGP" for SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil 139 | Moisture Geophysical Data. See "Details" for a list of supported data types 140 | and their associated id codes.

    dates

    An object of class Date or a character string formatted as %Y-%m-%d (e.g., "2016-04-01") which specifies the date(s) to search. 145 | 146 | To search for one specific date, this can be a Date object of length one. To 147 | search over a time interval, it can be a multi-element object of class Date 148 | such as produced by seq.Date.

    version

    Which data version would you like to search for? Version 153 | information for each data product can be found at 154 | https://nsidc.org/data/smap/data_versions

    157 | 158 |

    Value

    159 | 160 |

    A data.frame with the names of the data files, the remote directory, and 161 | the date.

    162 | 163 |

    Details

    164 | 165 |

    There are many SMAP data products that can be accessed with this function. 166 | Currently, smapr supports level 3 and level 4 data products, each of which 167 | has an associated Data Set ID which is specified by the id argument, 168 | described at https://nsidc.org/data/smap/smap-data.html and summarized 169 | below:

    170 |
    171 |
    SPL2SMAP_S

    SMAP/Sentinel-1 Radiometer/Radar Soil Moisture

    172 |
    SPL3FTA

    Radar Northern Hemisphere Daily Freeze/Thaw State

    173 |
    SPL3SMA

    Radar Global Daily Soil Moisture

    174 |
    SPL3SMP

    Radiometer Global Soil Moisture

    175 |
    SPL3SMAP

    Radar/Radiometer Global Soil Moisture

    176 |
    SPL4SMAU

    Surface/Rootzone Soil Moisture Analysis Update

    177 |
    SPL4SMGP

    Surface/Rootzone Soil Moisture Geophysical Data

    178 |
    SPL4SMLM

    Surface/Rootzone Soil Moisture Land Model Constants

    179 |
    SPL4CMDL

    Carbon Net Ecosystem Exchange

    180 |
    181 |

    This function requires a username and password from NASA's Earthdata portal. 182 | If you have an Earthdata username and password, pass them in using the 183 | set_smap_credentials() function.

    184 |

    If you do not yet have a username and password, register for one here: 185 | https://urs.earthdata.nasa.gov/

    186 | 187 | 188 |

    Examples

    189 |
    # NOT RUN {
    190 | # looking for data on one day:
    191 | find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4)
    192 | 
    193 | # searching across a date range
    194 | start_date <- as.Date("2015-03-31")
    195 | end_date <- as.Date("2015-04-02")
    196 | date_sequence <- seq(start_date, end_date, by = 1)
    197 | find_smap(id = "SPL4SMGP", dates = date_sequence, version = 4)
    198 | # }
    199 |
    200 |
    201 | 214 |
    215 | 216 |
    217 | 220 | 221 |
    222 |

    Site built with pkgdown 1.3.0.

    223 |
    224 |
    225 |
    226 | 227 | 228 | 229 | 230 | 231 | 232 | -------------------------------------------------------------------------------- /docs/reference/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Function reference • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 45 | 46 | 47 | 48 | 49 | 50 |
    51 |
    52 | 107 | 108 | 109 |
    110 | 111 |
    112 |
    113 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 131 | 132 | 133 | 134 | 137 | 138 | 139 | 140 | 143 | 144 | 145 | 146 | 149 | 150 | 151 | 152 | 155 | 156 | 157 | 158 | 161 | 162 | 163 | 164 | 167 | 168 | 169 | 170 |
    128 |

    All functions

    129 |

    130 |
    135 |

    download_smap()

    136 |

    Download SMAP data

    141 |

    extract_smap()

    142 |

    Extracts contents of SMAP data

    147 |

    find_smap()

    148 |

    Find SMAP data

    153 |

    list_smap()

    154 |

    Lists the contents of SMAP data files

    159 |

    set_smap_credentials()

    160 |

    Set credentials for NASA's Earthdata portal

    165 |

    smapr-package

    166 |

    smapr: A package for acquisition and processing of NASA SMAP data.

    171 |
    172 | 173 | 179 |
    180 | 181 |
    182 | 185 | 186 |
    187 |

    Site built with pkgdown 1.3.0.

    188 |
    189 |
    190 |
    191 | 192 | 193 | 194 | 195 | 196 | 197 | -------------------------------------------------------------------------------- /docs/reference/list_smap.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Lists the contents of SMAP data files — list_smap • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 48 | 49 | 50 | 51 | 52 | 53 |
    54 |
    55 | 110 | 111 | 112 |
    113 | 114 |
    115 |
    116 | 121 | 122 |
    123 | 124 |

    This function returns a list of the contents of SMAP data files.

    125 | 126 |
    127 | 128 |
    list_smap(files, all = FALSE)
    129 | 130 |

    Arguments

    131 | 132 | 133 | 134 | 135 | 137 | 138 | 139 | 140 | 142 | 143 |
    files

    A data.frame produced by download_smap() that 136 | specifies input data files.

    all

    If TRUE a longer, more detailed list of information on each 141 | entry is provided.

    144 | 145 |

    Value

    146 | 147 |

    Returns a list of data.frame objects that list the contents 148 | of each data file in files.

    149 | 150 | 151 |

    Examples

    152 |
    # NOT RUN {
    153 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4)
    154 | files <- download_smap(files[1, ])
    155 | list_smap(files)
    156 | list_smap(files, all = TRUE)
    157 | # }
    158 |
    159 | 170 |
    171 | 172 |
    173 | 176 | 177 |
    178 |

    Site built with pkgdown 1.3.0.

    179 |
    180 |
    181 |
    182 | 183 | 184 | 185 | 186 | 187 | 188 | -------------------------------------------------------------------------------- /docs/reference/set_smap_credentials.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | Set credentials for NASA's Earthdata portal — set_smap_credentials • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 50 | 51 | 52 | 53 | 54 | 55 |
    56 |
    57 | 112 | 113 | 114 |
    115 | 116 |
    117 |
    118 | 123 | 124 |
    125 | 126 |

    To use smapr, users need to provide NASA Earthdata portal credentials. 127 | This function allows users to interactively set these credentials via the 128 | user's Earthdata username and password.

    129 | 130 |
    131 | 132 |
    set_smap_credentials(username, password, save = TRUE,
    133 |   overwrite = FALSE)
    134 | 135 |

    Arguments

    136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 151 | 152 | 153 | 154 | 156 | 157 |
    username

    A character string of your Earthdata portal username

    password

    A character string of your Earthdata portal password

    save

    Logical: whether to save your credentials to your 149 | .Renviron file (e.g., ~/.Renviron). Previous Earthdata credentials will not 150 | be overwritten unless overwrite = TRUE.

    overwrite

    Logical: whether to overwrite previous Earthdata credentials 155 | in your .Renviron file (only applies when save = TRUE)

    158 | 159 |

    Value

    160 | 161 |

    A data.frame with the names of the data files, the remote directory, and 162 | the date.

    163 | 164 |

    Details

    165 | 166 |

    If you do not yet have a username and password, register for one here: 167 | https://urs.earthdata.nasa.gov/

    168 |

    A warning: do not commit your username and password to a public repository! 169 | This function is meant to be used interactively, and not embedded within a 170 | script that you would share.

    171 | 172 | 173 |

    Examples

    174 |
    # NOT RUN {
    175 | set_smap_credentials('myusername', 'mypassword')
    176 | # }
    177 |
    178 |
    179 | 192 |
    193 | 194 |
    195 | 198 | 199 |
    200 |

    Site built with pkgdown 1.3.0.

    201 |
    202 |
    203 |
    204 | 205 | 206 | 207 | 208 | 209 | 210 | -------------------------------------------------------------------------------- /docs/reference/smapr-package.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | smapr: A package for acquisition and processing of NASA SMAP data. — smapr-package • smapr 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 49 | 50 | 51 | 52 | 53 | 54 |
    55 |
    56 | 111 | 112 | 113 |
    114 | 115 |
    116 |
    117 | 122 | 123 |
    124 | 125 |

    The smapr package provides a means to discover, acquire, and process 126 | NASA Soil Moisture Active Passive (SMAP) data.

    127 | 128 |
    129 | 130 | 131 | 132 |
    133 | 141 |
    142 | 143 | 152 |
    153 | 154 | 155 | 156 | 157 | 158 | 159 | -------------------------------------------------------------------------------- /man/download_smap.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/download_smap.R 3 | \name{download_smap} 4 | \alias{download_smap} 5 | \title{Download SMAP data} 6 | \usage{ 7 | download_smap(files, directory = NULL, overwrite = TRUE, verbose = TRUE) 8 | } 9 | \arguments{ 10 | \item{files}{A \code{data.frame} produced by \code{find_smap()} 11 | that specifies data files to download.} 12 | 13 | \item{directory}{A local directory path in which to save data, specified as a 14 | character string. If left as \code{NULL}, data are stored in a user's cache 15 | directory.} 16 | 17 | \item{overwrite}{TRUE or FALSE: should existing data files be overwritten?} 18 | 19 | \item{verbose}{TRUE or FALSE: should messages be printed to indicate that 20 | files are being downloaded?} 21 | } 22 | \value{ 23 | Returns a \code{data.frame} that appends a column called 24 | \code{local_dir} to the input data frame, which consists of a character 25 | vector specifying the local directory containing the downloaded files. 26 | } 27 | \description{ 28 | This function downloads SMAP data in HDF5 format. 29 | } 30 | \details{ 31 | This function requires a username and password from NASA's Earthdata portal. 32 | If you have an Earthdata username and password, pass them in using the 33 | \code{\link[=set_smap_credentials]{set_smap_credentials()}} function. 
34 | 35 | If you do not yet have a username and password, register for one here: 36 | \url{https://urs.earthdata.nasa.gov/} 37 | } 38 | \examples{ 39 | \dontrun{ 40 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 41 | # files[1, ] refers to the first available data file 42 | downloads <- download_smap(files[1, ]) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /man/extract_smap.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/extract_smap.R 3 | \name{extract_smap} 4 | \alias{extract_smap} 5 | \title{Extracts contents of SMAP data} 6 | \usage{ 7 | extract_smap(data, name) 8 | } 9 | \arguments{ 10 | \item{data}{A data frame produced by \code{download_smap()} that specifies 11 | input files from which to extract data.} 12 | 13 | \item{name}{The path in the HDF5 file pointing to data to extract.} 14 | } 15 | \value{ 16 | Returns a SpatRaster object. 17 | } 18 | \description{ 19 | Extracts datasets from SMAP data files. 20 | } 21 | \details{ 22 | The arguments \code{group} and \code{dataset} must refer specifically the 23 | group and name within group for the input file, such as can be obtained with 24 | \code{list_smap()}. This function will extract that particular dataset, 25 | returning a Raster object. 
26 | } 27 | \examples{ 28 | \dontrun{ 29 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 30 | downloads <- download_smap(files[1, ]) 31 | sm_raster <- extract_smap(downloads, name = '/Geophysical_Data/sm_surface') 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /man/figures/extract-data-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/man/figures/extract-data-1.png -------------------------------------------------------------------------------- /man/find_smap.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/find_smap.R 3 | \name{find_smap} 4 | \alias{find_smap} 5 | \title{Find SMAP data} 6 | \usage{ 7 | find_smap(id, dates, version) 8 | } 9 | \arguments{ 10 | \item{id}{A character string that refers to a specific SMAP dataset, e.g., 11 | \code{"SPL4SMGP"} for SMAP L4 Global 3-hourly 9 km Surface and Rootzone Soil 12 | Moisture Geophysical Data. See "Details" for a list of supported data types 13 | and their associated id codes.} 14 | 15 | \item{dates}{An object of class Date or a character string formatted as 16 | %Y-%m-%d (e.g., "2016-04-01") which specifies the date(s) to search. 17 | To search for one specific date, this can be a Date object of length one. To 18 | search over a time interval, it can be a multi-element object of class Date 19 | such as produced by \code{seq.Date}.} 20 | 21 | \item{version}{Which data version would you like to search for? Version 22 | information for each data product can be found at 23 | \url{https://nsidc.org/data/smap/data_versions}} 24 | } 25 | \value{ 26 | A data.frame with the names of the data files, the remote directory, and 27 | the date. 
28 | } 29 | \description{ 30 | This function searches for SMAP data on a specific date, returning a 31 | \code{data.frame} describing available data. 32 | } 33 | \details{ 34 | There are many SMAP data products that can be accessed with this function. 35 | Currently, smapr supports level 3 and level 4 data products, each of which 36 | has an associated Data Set ID which is specified by the \code{id} argument, 37 | described at \url{https://nsidc.org/data/smap/smap-data.html} and summarized 38 | below: 39 | 40 | \describe{ 41 | \item{SPL2SMAP_S}{SMAP/Sentinel-1 Radiometer/Radar Soil Moisture} 42 | \item{SPL3FTA}{Radar Northern Hemisphere Daily Freeze/Thaw State} 43 | \item{SPL3SMA}{Radar Global Daily Soil Moisture} 44 | \item{SPL3SMP}{Radiometer Global Soil Moisture} 45 | \item{SPL3SMAP}{Radar/Radiometer Global Soil Moisture} 46 | \item{SPL4SMAU}{Surface/Rootzone Soil Moisture Analysis Update} 47 | \item{SPL4SMGP}{Surface/Rootzone Soil Moisture Geophysical Data} 48 | \item{SPL4SMLM}{Surface/Rootzone Soil Moisture Land Model Constants} 49 | \item{SPL4CMDL}{Carbon Net Ecosystem Exchange} 50 | } 51 | 52 | This function requires a username and password from NASA's Earthdata portal. 53 | If you have an Earthdata username and password, pass them in using the 54 | \code{\link[=set_smap_credentials]{set_smap_credentials()}} function. 
55 | 56 | If you do not yet have a username and password, register for one here: 57 | \url{https://urs.earthdata.nasa.gov/} 58 | } 59 | \examples{ 60 | \dontrun{ 61 | # looking for data on one day: 62 | find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 63 | 64 | # searching across a date range 65 | start_date <- as.Date("2015-03-31") 66 | end_date <- as.Date("2015-04-02") 67 | date_sequence <- seq(start_date, end_date, by = 1) 68 | find_smap(id = "SPL4SMGP", dates = date_sequence, version = 4) 69 | } 70 | 71 | } 72 | -------------------------------------------------------------------------------- /man/list_smap.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/list_smap.R 3 | \name{list_smap} 4 | \alias{list_smap} 5 | \title{Lists the contents of SMAP data files} 6 | \usage{ 7 | list_smap(files, all = TRUE) 8 | } 9 | \arguments{ 10 | \item{files}{A \code{data.frame} produced by \code{download_smap()} that 11 | specifies input data files.} 12 | 13 | \item{all}{If TRUE a longer, more detailed list of information on each 14 | entry is provided.} 15 | } 16 | \value{ 17 | Returns a list of \code{data.frame} objects that list the contents 18 | of each data file in \code{files}. 19 | } 20 | \description{ 21 | This function returns a list of the contents of SMAP data files. 
22 | } 23 | \examples{ 24 | \dontrun{ 25 | files <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 4) 26 | files <- download_smap(files[1, ]) 27 | list_smap(files) 28 | list_smap(files, all = TRUE) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /man/set_smap_credentials.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/set_smap_credentials.R 3 | \name{set_smap_credentials} 4 | \alias{set_smap_credentials} 5 | \title{Set credentials for NASA's Earthdata portal} 6 | \usage{ 7 | set_smap_credentials(username, password, save = TRUE, overwrite = FALSE) 8 | } 9 | \arguments{ 10 | \item{username}{A character string of your Earthdata portal username} 11 | 12 | \item{password}{A character string of your Earthdata portal password} 13 | 14 | \item{save}{Logical: whether to save your credentials to your 15 | .Renviron file (e.g., ~/.Renviron). Previous Earthdata credentials will not 16 | be overwritten unless \code{overwrite = TRUE}.} 17 | 18 | \item{overwrite}{Logical: whether to overwrite previous Earthdata credentials 19 | in your .Renviron file (only applies when \code{save = TRUE})} 20 | } 21 | \value{ 22 | A data.frame with the names of the data files, the remote directory, and 23 | the date. 24 | } 25 | \description{ 26 | To use smapr, users need to provide NASA Earthdata portal credentials. 27 | This function allows users to interactively set these credentials via the 28 | user's Earthdata username and password. 29 | } 30 | \details{ 31 | If you do not yet have a username and password, register for one here: 32 | https://urs.earthdata.nasa.gov/ 33 | 34 | A warning: do not commit your username and password to a public repository! 35 | This function is meant to be used interactively, and not embedded within a 36 | script that you would share. 
37 | } 38 | \examples{ 39 | \dontrun{ 40 | set_smap_credentials('myusername', 'mypassword') 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /man/smapr-package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/smapr-package.R 3 | \docType{package} 4 | \name{smapr-package} 5 | \alias{smapr-package} 6 | \title{smapr: A package for acquisition and processing of NASA SMAP data.} 7 | \description{ 8 | The smapr package provides a means to discover, acquire, and process 9 | NASA Soil Moisture Active Passive (SMAP) data. 10 | } 11 | \author{ 12 | Max Joseph \email{maxwell.b.joseph@colorado.edu} 13 | } 14 | \keyword{package} 15 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(smapr) 3 | 4 | test_check("smapr") 5 | -------------------------------------------------------------------------------- /tests/testthat/test-download_smap.R: -------------------------------------------------------------------------------- 1 | context("download_smap") 2 | 3 | test_that("invalid output directories cause errors", { 4 | skip_on_cran() 5 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 6 | expect_error(download_smap(files[1, ], dir = 1234)) 7 | }) 8 | 9 | test_that("non-existent directories are created", { 10 | skip_on_cran() 11 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 12 | dir_name <- "silly_nonexistent_directory" 13 | downloads <- download_smap(files, directory = dir_name) 14 | expect_true(dir.exists(dir_name)) 15 | # cleanup by removing directory 16 | unlink(dir_name, recursive = TRUE) 17 | }) 18 | 19 | test_that("valid user-specified directories contain downloads", { 20 | skip_on_cran() 21 | 
available_data <- find_smap(id = "SPL3SMP", 22 | date = "2015-10-01", 23 | version = 7) 24 | user_specified_path <- file.path('data', 'SMAP') 25 | downloads <- download_smap(available_data, 26 | directory = user_specified_path) 27 | files_in_path <- list.files(user_specified_path) 28 | extensions <- sort(unique(tools::file_ext(files_in_path))) 29 | expect_identical(c("h5", "qa", "xml"), extensions) 30 | 31 | # clean up 32 | unlink('data', recursive = TRUE, force = TRUE) 33 | }) 34 | 35 | test_that("the downloaded data is of the data frame class", { 36 | skip_on_cran() 37 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 38 | downloads <- download_smap(files[1, ]) 39 | expect_that(downloads, is_a("data.frame")) 40 | }) 41 | 42 | test_that("Two SPL4CMDL data files are downloaded (h5 and xml)", { 43 | skip_on_cran() 44 | files <- find_smap(id = "SPL4CMDL", dates = "2015-05-01", version = 5) 45 | downloads <- download_smap(files[1, ]) 46 | file_prefix <- downloads$name 47 | downloaded_files <- list.files(downloads$local_dir) 48 | relevant_files <- grepl(file_prefix, downloaded_files) 49 | 50 | number_of_downloaded_files <- sum(relevant_files) 51 | expect_equal(2, number_of_downloaded_files) 52 | 53 | relevant_filenames <- downloaded_files[relevant_files] 54 | extensions <- gsub(".*\\.", "", relevant_filenames) 55 | expect_equal(extensions, c('h5', 'xml')) 56 | }) 57 | 58 | test_that("setting overwrite = FALSE prevents data from being overwritten", { 59 | skip_on_cran() 60 | get_last_modified <- function(downloads) { 61 | path <- file.path(downloads$local_dir, paste0(downloads$name, '.h5')) 62 | time <- file.info(path)$mtime 63 | as.numeric(time) 64 | } 65 | 66 | files <- find_smap(id = "SPL3SMP", date = "2015-03-31", version = 7) 67 | 68 | downloads <- download_smap(files) 69 | modified1 <- get_last_modified(downloads) 70 | 71 | # wait one second then download again 72 | Sys.sleep(1) 73 | downloads <- download_smap(files, overwrite = FALSE) 74 | 
modified2 <- get_last_modified(downloads) 75 | 76 | expect_equal(modified1, modified2) 77 | }) 78 | 79 | 80 | test_that("setting overwrite = TRUE ensures data overwrite", { 81 | skip_on_cran() 82 | get_last_modified <- function(downloads) { 83 | path <- file.path(downloads$local_dir, paste0(downloads$name, '.h5')) 84 | time <- file.info(path)$mtime 85 | as.numeric(time) 86 | } 87 | 88 | files <- find_smap(id = "SPL3SMP", date = "2015-03-31", version = 7) 89 | 90 | downloads <- download_smap(files, overwrite = TRUE) 91 | modified1 <- get_last_modified(downloads) 92 | 93 | # wait one second then download again 94 | Sys.sleep(1) 95 | downloads <- download_smap(files, overwrite = TRUE) 96 | modified2 <- get_last_modified(downloads) 97 | 98 | expect_gt(modified2[1], modified1[1]) 99 | }) 100 | 101 | 102 | test_that('input data.frames with NA values raise errors', { 103 | skip_on_cran() 104 | expect_warning(df_w_na <- find_smap(id = "SPL2SMP_E", 105 | dates = '2015-05-13', 106 | version = 4)) 107 | expect_error(download_smap(df_w_na)) 108 | }) 109 | 110 | test_that('verbose = TRUE prints output', { 111 | skip_on_cran() 112 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 113 | downloads <- expect_message(download_smap(files[1, ], verbose = TRUE)) 114 | }) 115 | 116 | test_that('verbose = FALSE suppresses output', { 117 | skip_on_cran() 118 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 119 | downloads <- expect_silent(download_smap(files[1, ], verbose = FALSE)) 120 | }) 121 | -------------------------------------------------------------------------------- /tests/testthat/test-extract_smap.R: -------------------------------------------------------------------------------- 1 | context("extract_smap") 2 | 3 | test_that("invalid datasets cause errors", { 4 | skip_on_cran() 5 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 6 | downloads <- download_smap(files[1, ], overwrite = FALSE) 7 | expect_error( 8 | 
extract_smap(downloads, 9 | name = 'Soil_Moisture_Retrieval_Data_AM/soil_flavor') 10 | ) 11 | }) 12 | 13 | test_that("extract_smap produces a SpatRaster", { 14 | skip_on_cran() 15 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 16 | downloads <- download_smap(files[1, ], overwrite = FALSE) 17 | r <- extract_smap(downloads, 18 | name = 'Soil_Moisture_Retrieval_Data_AM/soil_moisture') 19 | expect_that(r, is_a("SpatRaster")) 20 | }) 21 | 22 | test_that("-9999 is used fill value when a _FillValue doesn't exist", { 23 | skip_on_cran() 24 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 25 | downloads <- download_smap(files, overwrite = FALSE) 26 | r <- extract_smap(downloads, 27 | name = "Soil_Moisture_Retrieval_Data_PM/latitude_pm") 28 | # the fill value in the file is -9999, but there is no fill value attribute 29 | # therefore, if this function works, the minimum should be >= -90 30 | # (the latitude at the south pole) 31 | min_max <- terra::minmax(r) 32 | min_value <- min_max["min", 1] 33 | expect_gte(min_value, -90) 34 | }) 35 | 36 | test_that("layer names for SPL3FT include file name + am/pm suffix", { 37 | skip_on_cran() 38 | files <- find_smap(id = "SPL3FTA", dates = "2015-04-14", version = 3) 39 | downloads <- download_smap(files, overwrite = FALSE) 40 | r <- extract_smap(downloads, 41 | name = "Freeze_Thaw_Retrieval_Data/freeze_thaw") 42 | expect_that(r, is_a("SpatRaster")) 43 | expected_names <- paste(downloads$name, c("AM", "PM"), sep = "_") 44 | expect_equal(names(r), expected_names) 45 | }) 46 | 47 | test_that("layer names for SPL3SMP include file name", { 48 | skip_on_cran() 49 | files <- find_smap(id = "SPL3SMP", dates = "2015-03-31", version = 7) 50 | downloads <- download_smap(files, overwrite = FALSE) 51 | r <- extract_smap(downloads, 52 | name = "Soil_Moisture_Retrieval_Data_AM/latitude") 53 | expected_names <- paste(downloads$name) 54 | expect_equal(names(r), expected_names) 55 | }) 56 | 57 | 
# --- tests/testthat/test-download_smap.R (continued) ---

test_that("extraction still works with user specified directories", {
  skip_on_cran()
  # use the real parameter name `dates`; the original `date =` relied on
  # partial argument matching
  available_data <- find_smap(id = "SPL3SMP",
                              dates = "2015-10-01",
                              version = 7)
  user_specified_path <- file.path("data", "SMAP")
  downloads <- download_smap(available_data,
                             directory = user_specified_path,
                             overwrite = FALSE)
  r <- extract_smap(downloads,
                    name = "Soil_Moisture_Retrieval_Data_AM/latitude")
  # terra SpatRaster objects are S4; expect_that(..., is_a(...)) is deprecated
  expect_s4_class(r, "SpatRaster")

  # clean up the user-specified download directory
  unlink("data", recursive = TRUE, force = TRUE)
})

test_that("Sentinel/SMAP integrated products can read properly", {
  skip_on_cran()

  files <- find_smap("SPL2SMAP_S", "2016-06-08", 3)

  n_to_use <- 2L  # don't use all files, use this many instead
  downloads <- download_smap(files[1:n_to_use, ])
  r <- extract_smap(downloads,
                    "/Soil_Moisture_Retrieval_Data_3km/soil_moisture_3km")

  expect_s4_class(r, "SpatRaster")
  # extraction should produce one raster layer per downloaded file
  n_layers <- dim(r)[3]
  expect_equal(n_layers, n_to_use)
})


test_that("Sentinel/SMAP cannot be extracted with other data types", {
  skip_on_cran()

  files <- find_smap("SPL2SMAP_S", "2016-06-08", 3)
  other_files <- find_smap(id = "SPL3SMP",
                           dates = "2015-10-01",
                           version = 7)
  mixed_files <- rbind(files[1, ], other_files)
  downloads <- download_smap(mixed_files)

  # extracting two different kinds of files should raise an error
  to_extract <- "/Soil_Moisture_Retrieval_Data_3km/soil_moisture_3km"
  expect_error(extract_smap(downloads, to_extract))
})

# --- tests/testthat/test-find_smap.R ---

context("find_smap")

test_that("searching for invalid ids causes an error", {
  skip_on_cran()
  expect_error(find_smap(id = "invalid", dates = "2015-03-31", version = 1))
})

test_that("searching for invalid versions causes an error", {
  skip_on_cran()
  expect_error(find_smap(id = "SPL4SMGP", dates = "2015-03-31",
                         version = 999))
})

test_that("searching for future dates causes an error", {
  skip_on_cran()
  expect_error(find_smap(id = "SPL4SMGP", dates = "3015-03-31", version = 5))
})

test_that("searching for missing dates raises a warning", {
  skip_on_cran()
  expect_warning(find_smap(id = "SPL2SMP_E", dates = "2015-05-13",
                           version = 4))
})

test_that("searching for missing dates with extant dates returns both", {
  skip_on_cran()
  seq_dates <- seq(as.Date("2015-05-12"), as.Date("2015-05-13"), by = 1)
  expect_warning(available_data <- find_smap(id = "SPL2SMP_E",
                                             dates = seq_dates,
                                             version = 4))
  # rows for missing dates should have NA in the name and dir columns only
  num_na_vals_by_column <- apply(available_data, 2, FUN = function(x) {
    sum(is.na(x))
  })
  expect_identical(num_na_vals_by_column,
                   c(name = 1L, date = 0L, dir = 1L))
  expect_identical(dim(available_data), c(12L, 3L))
})

test_that("find_smap produces a data frame with the proper dimensions", {
  skip_on_cran()
  data <- find_smap(id = "SPL4SMGP", dates = "2015-03-31", version = 5)
  expect_match(colnames(data[1]), "name")
  expect_match(colnames(data[2]), "date")
  expect_match(colnames(data[3]), "dir")
  num_rows <- nrow(data)
  row_vector <- row.names(data)
  expect_match(row_vector[num_rows], toString(num_rows))
})


test_that("date sequences retrieve data for each day", {
  skip_on_cran()
  start_date <- as.Date("2015-03-31")
  end_date <- as.Date("2015-04-02")
  date_sequence <- seq(start_date, end_date, by = 1)
  data <- find_smap(id = "SPL4SMGP",
                    dates = date_sequence,
                    version = 5)
  dates_in_data <- unique(data$date)
  expect_equal(date_sequence, dates_in_data)
})

test_that("invalid date formats raise errors", {
  expect_error(try_make_date("2016-3.04"))
})

test_that("valid date formats do not raise errors", {
  # expect_is() is deprecated in testthat 3e; Date is an S3 class
  expect_s3_class(try_make_date("2016-3-4"), "Date")
  expect_s3_class(try_make_date(ISOdate(2010, 04, 13, 12)), "Date")
})

# --- tests/testthat/test-list_smap.R ---

context("list_smap")

test_that("vector input causes errors", {
  skip_on_cran()
  files <- find_smap(id = "SPL3SMP", dates = "2015-05-01", version = 7)
  downloads <- download_smap(files[1, ])
  # list_smap() expects the downloads data frame, not a character vector
  expect_error(list_smap(downloads$local_file))
})

test_that("list_smap returns a list of dfs", {
  skip_on_cran()
  files <- find_smap(id = "SPL3SMP", dates = "2015-05-01", version = 7)
  downloads <- download_smap(files[1, ])
  contents <- list_smap(downloads)
  expect_type(contents, "list")
  expect_s3_class(contents[[1]], "data.frame")
})

# --- tests/testthat/test-set_smap_credentials.R ---

context("set_smap_credentials")

# ensure an .Renviron file exists so readLines() below cannot fail
if (!file.exists(renvironment_path)) file.create(renvironment_path)

test_that(".Renviron file is not modified when save = FALSE", {
  skip_on_cran()
  skip_on_ci()
  renvironment_contents <- readLines(renvironment_path)
  creds <- get_creds(renvironment_path)

  set_smap_credentials("fakeuser", "fakepass", save = FALSE)

  # verify that the .Renviron file has not been modified
  final_renvironment_contents <- readLines(renvironment_path)
  expect_identical(renvironment_contents, final_renvironment_contents)

  # clean up by restoring original credentials
  set_smap_credentials(creds["username"],
                       creds["passwd"],
                       save = FALSE)
})

test_that("Existing credentials raise an error when overwrite = FALSE", {
  skip_on_cran()
  skip_on_ci()
  expect_error(set_smap_credentials("dummy_user",
                                    "dummy_password",
                                    save = TRUE,
                                    overwrite = FALSE),
               "Earthdata credentials already exist")
})

test_that("Existing credentials are overwritten when overwrite = TRUE", {
  skip_on_cran()
  skip_on_ci()
  original_creds <- get_creds(renvironment_path)
  # restore the user's real credentials even if an expectation fails
  on.exit(set_smap_credentials(original_creds["username"],
                               original_creds["passwd"],
                               overwrite = TRUE),
          add = TRUE)

  set_smap_credentials("user",
                       "password",
                       save = TRUE,
                       overwrite = TRUE)

  new_creds <- get_creds(renvironment_path)
  expect_equal(new_creds[["username"]], "user")
  expect_equal(new_creds[["passwd"]], "password")
})

# --- tests/testthat/test-zzz.R ---

context("zzz")

test_that("get_creds() returns a vector with username and passwd", {
  skip_on_cran()
  skip_on_ci()
  creds <- get_creds(file.path(Sys.getenv("HOME"), ".Renviron"))
  expect_length(creds, 2)
  expect_named(creds, c("username", "passwd"))
})

test_that("Correct credentials do not raise a 401 error", {
  skip_on_cran()
  resp <- httr::GET(https_prefix(),
                    config = auth())
  expect_equal(resp$status_code, 200)
  expect_null(check_for_401(resp))
})

test_that("Incorrect credentials cause a 401 error", {
  skip_on_cran()
  # temporary handle is necessary here, otherwise previous
  # authentication (with correct credentials) is used
  # solution from: https://github.com/r-lib/httr/issues/122
  tmp_handle <- httr::handle("https://n5eil01u.ecs.nsidc.org/SMAP/")
  resp <- httr::GET(handle = tmp_handle,
                    config = httr::authenticate("fakeuser", "fakepass"))
  expect_equal(resp$status_code, 401)
  expect_error(check_for_401(resp), "401 unauthorized")
  rm(tmp_handle)
})

test_that("Missing credentials cause an error", {
  username <- Sys.getenv("ed_un")
  password <- Sys.getenv("ed_pw")
  # restore the real credentials even if the expectation fails
  on.exit(Sys.setenv(ed_un = username, ed_pw = password), add = TRUE)

  Sys.setenv(ed_un = "", ed_pw = "")
  expect_error(check_creds(),
               "smapr expected ed_un and ed_pw to be environment variables!")
})

# --- tic.R ---

# installs dependencies, runs R CMD check, runs covr::codecov()
do_package_checks()

if (ci_on_ghactions() && ci_has_env("BUILD_PKGDOWN")) {
  # creates pkgdown site and pushes to gh-pages branch
  # only for the runner with the "BUILD_PKGDOWN" env var set
  do_pkgdown()
}

--------------------------------------------------------------------------------
/vignettes/smapr-intro-crop-raster-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-crop-raster-1.png
--------------------------------------------------------------------------------
/vignettes/smapr-intro-get-mean-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-get-mean-1.png
--------------------------------------------------------------------------------
/vignettes/smapr-intro-inverse-mask-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-inverse-mask-1.png
-------------------------------------------------------------------------------- /vignettes/smapr-intro-mask-raster-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-mask-raster-1.png -------------------------------------------------------------------------------- /vignettes/smapr-intro-plot-raster-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-plot-raster-1.png -------------------------------------------------------------------------------- /vignettes/smapr-intro-surface-vs-rootzone-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ropensci/smapr/6274022ddcfa4e109679df5b71c32de104ae06e3/vignettes/smapr-intro-surface-vs-rootzone-1.png -------------------------------------------------------------------------------- /vignettes/smapr-intro.Rmd.orig: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Introduction to the smapr package" 3 | author: "Maxwell B. Joseph" 4 | date: "`r Sys.Date()`" 5 | output: rmarkdown::html_vignette 6 | vignette: > 7 | %\VignetteIndexEntry{Introduction to the smapr package} 8 | %\VignetteEngine{knitr::rmarkdown} 9 | %\VignetteEncoding{UTF-8} 10 | --- 11 | 12 | ```{r setup, include = FALSE} 13 | knitr::opts_chunk$set( 14 | collapse = TRUE, 15 | comment = "#>", 16 | fig.width=8, 17 | fig.height=6, 18 | fig.path = "vignettes/smapr-intro-" 19 | ) 20 | ``` 21 | 22 | ```{r load-smapr} 23 | library(smapr) 24 | library(terra) 25 | ``` 26 | 27 | 28 | This vignette outlines a basic use scenario for smapr. 
29 | We will acquire and process 30 | [NASA (Soil Moisture Active-Passive) SMAP data](http://smap.jpl.nasa.gov/), 31 | and generate some simple visualizations. 32 | 33 | 34 | ## SMAP data products 35 | 36 | Multiple SMAP data products are provided by the NSIDC, and these products vary 37 | in the amount of processing. 38 | Currently, smapr primarily supports level 3 and level 4 data products, 39 | which represent global daily composite and global three hourly modeled data 40 | products, respectively. 41 | NSIDC provides documentation for all SMAP data products on their 42 | [website](https://nsidc.org/data/smap/smap-data.html), and we provide a summary 43 | of data products supported by smapr below. 44 | 45 | | Dataset id | Description | Resolution | 46 | |------------|-----------------------------------------------------|------------| 47 | | SPL2SMAP_S | SMAP/Sentinel-1 Radiometer/Radar Soil Moisture | 3 km | 48 | | SPL3FTA | Radar Northern Hemisphere Daily Freeze/Thaw State | 3 km | 49 | | SPL3SMA | Radar Global Daily Soil Moisture | 3 km | 50 | | SPL3SMP | Radiometer Global Soil Moisture | 36 km | 51 | | SPL3SMAP | Radar/Radiometer Global Soil Moisture | 9 km | 52 | | SPL4SMAU | Surface/Rootzone Soil Moisture Analysis Update | 9 km | 53 | | SPL4SMGP | Surface/Rootzone Soil Moisture Geophysical Data | 9 km | 54 | | SPL4SMLM | Surface/Rootzone Soil Moisture Land Model Constants | 9 km | 55 | | SPL4CMDL | Carbon Net Ecosystem Exchange | 9 km | 56 | 57 | 58 | This vignette uses the level 4 [SPL4SMAU](https://nsidc.org/data/SPL4SMAU) 59 | (Surface/Rootzone Soil Moisture Analysis Update) data product. 60 | 61 | ## Preparing to access SMAP data 62 | 63 | NASA requires a username and password from their Earthdata portal to access 64 | SMAP data. 
65 | You can get these credentials here: https://earthdata.nasa.gov/ 66 | 67 | Once you have your credentials, you can use the `set_smap_credentials` 68 | function to set them for use by the smapr package: 69 | 70 | ```{r set-creds, eval = FALSE} 71 | set_smap_credentials("myusername", "mypassword") 72 | ``` 73 | 74 | This function saves your credentials for later use unless you use the argument 75 | `save = FALSE`. 76 | 77 | ## Finding data 78 | 79 | To find out which SMAP data are available, we'll use the `find_smap` function, 80 | which takes a data set ID, date(s) to search, and a dataset version. 81 | 82 | ```{r find-data} 83 | available_data <- find_smap(id = 'SPL4SMAU', dates = '2018-06-01', version = 5) 84 | ``` 85 | 86 | This returns a data frame, where every row is one data file that is available 87 | on NASA's servers. 88 | 89 | ```{r head-data} 90 | str(available_data) 91 | ``` 92 | 93 | ## Downloading data 94 | 95 | To download the data, we can use `download_smap`. Note that this may take a 96 | while, depending on the number of files being downloaded, and the speed of your 97 | internet connection. 98 | Because we're downloading multiple files, we will use the 99 | `verbose = FALSE` argument to avoid printing excessive output to the console. 100 | 101 | ```{r download-data} 102 | local_files <- download_smap(available_data, overwrite = FALSE, verbose = FALSE) 103 | ``` 104 | 105 | Each file corresponds to different 106 | times as indicated by the file names: 107 | 108 | ```{r print-filenames} 109 | local_files$name[1:2] 110 | ``` 111 | 112 | ## Exploring data 113 | 114 | Each file that we downloaded is an HDF5 file with multiple datasets bundled 115 | together. 116 | To list all of the data in a file we can use `list_smap`. 117 | By default, if we give `list_smap` a data frame of local files, it will 118 | return a list of data frames. 
119 | Because all of these data files are of the same data product, using `list_smap`
120 | on one file (e.g., the first) will tell us what's available in all of the files:
121 | 
122 | ```{r list-smap}
123 | list_smap(local_files[1, ])
124 | ```
125 | 
126 | To dig deeper, we can use the `all` argument to `list_smap`:
127 | 
128 | ```{r list-more-smap}
129 | list_smap(local_files[1, ], all = TRUE)
130 | ```
131 | 
132 | Looking at this output, we can conclude that the file contains multiple arrays
133 | (notice the `dim` column).
134 | These arrays correspond to things like estimated root zone soil moisture
135 | (`/Analysis_Data/sm_rootzone_analysis`), estimated surface soil moisture
136 | (`/Analysis_Data/sm_surface_analysis`), and estimated surface temperature
137 | (`/Analysis_Data/surface_temp_analysis`).
138 | See https://nsidc.org/data/smap/spl4sm/data-fields#sm_surface_analysis for more
139 | detailed information on what these datasets represent and how they were
140 | generated.
141 | 
142 | ## Extracting data
143 | 
144 | The datasets that we are interested in are spatial grids.
145 | The `smapr` package can extract these data into terra `SpatRaster` objects with
146 | the `extract_smap` function, which takes a dataset name as an argument.
147 | These names are paths that can be generated from the output of `list_smap`.
148 | For example, if we want to get rootzone soil moisture, we can see a dataset
149 | with name `sm_rootzone_analysis` in group `/Analysis_Data`, so that the path
150 | to the dataset is `/Analysis_Data/sm_rootzone_analysis`:
151 | 
152 | ```{r extract-data}
153 | sm_raster <- extract_smap(local_files, '/Analysis_Data/sm_rootzone_analysis')
154 | ```
155 | 
156 | This will extract all of the data in the data frame `local_files`, generating
157 | a terra SpatRaster object with one layer per file:
158 | 
159 | ```{r print-raster}
160 | sm_raster
161 | ```
162 | 
163 | We can visualize each layer:
164 | 
165 | ```{r plot-raster}
166 | plot(sm_raster)
167 | ```
168 | 
169 | Cropping, masking, and summarization can then proceed using the terra R package.
170 | 
171 | For example, to get mean soil moisture values across layers, use `terra::app()`:
172 | 
173 | ```{r get-mean}
174 | mean_sm <- app(sm_raster, fun = mean)
175 | plot(mean_sm, main = 'Mean soil moisture')
176 | ```
177 | 
178 | ### Comparing surface and rootzone soil moisture
179 | 
180 | Our SPL4SMAU data have estimated surface and rootzone soil moisture layers.
181 | If we want to compare these values, we can load the surface soil moisture data,
182 | compute the mean value over layers as we did for the rootzone soil moisture
183 | raster, and generate a scatterplot.
184 | 
185 | ```{r surface-vs-rootzone}
186 | surface_raster <- extract_smap(local_files, 
187 |                                name = '/Analysis_Data/sm_surface_analysis')
188 | 
189 | mean_surface_sm <- app(surface_raster, fun = mean)
190 | 
191 | # compare values
192 | plot(values(mean_sm), values(mean_surface_sm), col = 'dodgerblue', cex = .1, 
193 |      xlab = 'Rootzone soil moisture', ylab = 'Surface soil moisture', bty = 'n')
194 | abline(0, 1, lty = 2)
195 | ```
196 | 
--------------------------------------------------------------------------------