├── .Rbuildignore ├── .gitignore ├── DESCRIPTION ├── NAMESPACE ├── NEWS ├── R ├── download.r ├── downloader-package.r ├── sha_url.r └── source_url.r ├── README.md ├── downloader.Rproj ├── man ├── download.Rd ├── downloader.Rd ├── sha_url.Rd └── source_url.Rd └── tests ├── testthat.R └── testthat ├── test-download.R └── test-sha.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | .gitignore 2 | downloader.sublime-project 3 | downloader.sublime-workspace 4 | ^.*\.Rproj$ 5 | ^\.Rproj\.user$ 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.sublime-project 2 | *.sublime-workspace 3 | .Rhistory 4 | .Rproj.user 5 | *.Rproj 6 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: downloader 2 | Authors@R: person(given = "Winston", 3 | family = "Chang", 4 | role = c("aut", "cre"), 5 | email = "winston@stdout.org") 6 | Version: 0.4.1 7 | License: GPL-2 8 | Title: Download Files over HTTP and HTTPS 9 | Description: Provides a wrapper for the download.file function, 10 | making it possible to download files over HTTPS on Windows, Mac OS X, and 11 | other Unix-like platforms. The 'RCurl' package provides this functionality 12 | (and much more) but can be difficult to install because it must be compiled 13 | with external dependencies. This package has no external dependencies, so 14 | it is much easier to install. 
15 | URL: https://github.com/wch/downloader 16 | Imports: 17 | utils, 18 | digest 19 | Suggests: 20 | testthat 21 | BugReports: https://github.com/wch/downloader/issues 22 | RoxygenNote: 7.3.2 23 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | export(download) 4 | export(sha_url) 5 | export(source_url) 6 | importFrom(digest,digest) 7 | importFrom(utils,download.file) 8 | -------------------------------------------------------------------------------- /NEWS: -------------------------------------------------------------------------------- 1 | Version 0.4 2 | -------------------------------------------------------------------------------- 3 | 4 | * Use new R 3.2 download methods ("wininet" and "libcurl") when available. 5 | 6 | Version 0.3 7 | -------------------------------------------------------------------------------- 8 | 9 | * `source_url()` function now checks the SHA-1 hash the downloaded file. 10 | 11 | * Add `sha_url()` function, for finding the SHA-1 hash of a remote file. 12 | 13 | Version 0.2.2 14 | -------------------------------------------------------------------------------- 15 | 16 | * Disable all network tests when running on CRAN, because the connection to the 17 | remote test website may not be reliable. 18 | 19 | Version 0.2.1 20 | -------------------------------------------------------------------------------- 21 | 22 | * Change https redirection test to not run on CRAN because their Windows build 23 | machine has more stringent security settings. 24 | 25 | Version 0.2 26 | -------------------------------------------------------------------------------- 27 | 28 | * Switched to using `Sys.which` to find external programs. 29 | 30 | * Added tests. 31 | 32 | * When using curl, follow redirects with http. (It already worked with https.) 
33 | 34 | * Add `source_url` function. 35 | -------------------------------------------------------------------------------- /R/download.r: -------------------------------------------------------------------------------- 1 | #' Download a file, using http, https, or ftp 2 | #' 3 | #' This is a wrapper for \code{\link{download.file}} and takes all the same 4 | #' arguments. The only difference is that, if the protocol is https, it changes 5 | #' some settings to make it work. How exactly the settings are changed differs 6 | #' among platforms. 7 | #' 8 | #' This function also should follow http redirects on all platforms, which is 9 | #' something that does not happen by default when \code{curl} is used, as on Mac 10 | #' OS X. 11 | #' 12 | #' With Windows, it either uses the \code{"wininet"} method (for R 3.2) or uses 13 | #' the \code{"internal"} method after first ensuring that \code{setInternet2}, 14 | #' is active (which tells R to use the \code{internet2.dll}). 15 | #' 16 | #' On other platforms, it will try to use \code{libcurl}, \code{wget}, then 17 | #' \code{curl}, and then \code{lynx} to download the file. R 3.2 will typically 18 | #' have the \code{libcurl} method and for previous versions of R Linux platforms 19 | #' will have \code{wget} installed, and Mac OS X will have \code{curl}. 20 | #' 21 | #' Note that for many (perhaps most) types of files, you will want to use 22 | #' \code{mode="wb"} so that the file is downloaded in binary mode. 23 | #' 24 | #' @param url The URL to download. 25 | #' @param ... Other arguments that are passed to \code{\link{download.file}}. 26 | #' 27 | #' @seealso \code{\link{download.file}} for more information on the arguments 28 | #' that can be used with this function. 
#'
#' @export
#' @examples
#' \dontrun{
#' # Download the downloader source, in binary mode
#' download("https://github.com/wch/downloader/zipball/master",
#'          "downloader.zip", mode = "wb")
#' }
#'
#' @importFrom utils download.file
download <- function(url, ...) {
  # Anything that is not http/https (ftp, file, ...) needs no special
  # treatment; hand it straight to download.file().
  if (!grepl("^https?://", url)) {
    return(download.file(url, ...))
  }

  # R >= 3.2 ships download methods ("wininet", "libcurl") that handle
  # https natively.
  r_has_new_methods <- getRversion() >= "3.2"

  if (.Platform$OS.type == "windows") {

    if (r_has_new_methods) {
      dl_method <- "wininet"
    } else {
      # Referring to setInternet2 directly makes R CMD check complain on
      # Mac/Linux, so look the function up at run time instead.
      set_internet2 <- `::`(utils, "setInternet2")

      # Query the current internet2 state without changing it.
      was_internet2 <- set_internet2(NA)

      if (!was_internet2) {
        # Turn internet2 on for this download only, restoring the
        # original setting when we leave.
        on.exit(suppressWarnings(set_internet2(was_internet2)))

        # A warning can be raised if setInternet2(FALSE) was already run
        # and internet routines were used, but it is harmless; silence it.
        suppressWarnings(set_internet2(TRUE))
      }

      dl_method <- "internal"
    }

    # download.file() can emit a spurious warning that compares the
    # downloaded length against the HTTP status code, e.g.
    #   downloaded length 19457 != reported length 200
    # so suppress warnings for this call.
    return(suppressWarnings(download.file(url, method = dl_method, ...)))
  }

  # Non-Windows: pick the first available of libcurl, wget, curl, lynx.
  if (r_has_new_methods && capabilities("libcurl")) {
    dl_method <- "libcurl"
  } else if (nzchar(Sys.which("wget")[1])) {
    dl_method <- "wget"
  } else if (nzchar(Sys.which("curl")[1])) {
    dl_method <- "curl"

    # curl needs the -L option to follow redirects. Save the original
    # extra options and restore them when we exit.
    saved_extra <- getOption("download.file.extra")
    on.exit(options(download.file.extra = saved_extra))

    options(download.file.extra = paste("-L", saved_extra))

  } else if (nzchar(Sys.which("lynx")[1])) {
    dl_method <- "lynx"
  } else {
    stop("no download method found")
  }

  download.file(url, method = dl_method, ...)
}

# --- file: R/downloader-package.r ---
#' downloader: a package for making it easier to download files over https
#'
#' This package provides a wrapper for the download.file function,
#' making it possible to download files over https on Windows, Mac OS X, and
#' other Unix-like platforms. The RCurl package provides this functionality
#' (and much more) but can be difficult to install because it must be compiled
#' with external dependencies. This package has no external dependencies, so
#' it is much easier to install.
#'
#' @name downloader
"_PACKAGE"

# --- file: R/sha_url.r ---
#' Download a file from a URL and find a SHA-1 hash of it
#'
#' This will download a file and find a SHA-1 hash of it, using
#' \code{\link[digest]{digest}()}.
The primary purpose of this function is to provide
#' an easy way to find the value of \code{sha} which can be passed to
#' \code{\link{source_url}()}.
#'
#' @param url The URL of the file to find a hash of.
#' @param cmd If \code{TRUE} (the default), print out a command for sourcing the
#'   URL with \code{\link{source_url}()}, including the hash.
#'
#' @return A string containing the SHA-1 hash of the file at \code{url}.
#'
#' @export
#' @examples
#' \dontrun{
#' # Get the SHA hash of a file. It will print the text below and return
#' # the hash as a string. This is a very long URL; break it up so it can be
#' # seen more easily in the examples.
#' test_url <- paste0("https://gist.github.com/wch/dae7c106ee99fe1fdfe7",
#'                    "/raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r")
#' sha_url(test_url)
#' # Command for sourcing the URL:
#' # downloader::source_url("https://gist.github.com/wch/dae7c106ee99fe1fdfe7
#' #  /raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r",
#' #  sha="9b8ff5213e32a871d6cb95cce0bed35c53307f61")
#' # [1] "9b8ff5213e32a871d6cb95cce0bed35c53307f61"
#' }
#'
#'
#' @importFrom digest digest
sha_url <- function(url, cmd = TRUE) {
  temp_file <- tempfile()
  # Register cleanup *before* downloading, so the temporary file is also
  # removed when download() fails partway through. (Previously on.exit()
  # was registered after download(), leaking the file on error.)
  on.exit(unlink(temp_file))

  download(url, temp_file)

  sha <- digest(file = temp_file, algo = 'sha1')

  if (cmd) {
    message('Command for sourcing the URL:\n',
            ' downloader::source_url("', url, '", sha="', sha, '")')
  }

  sha
}

# --- file: R/source_url.r ---
#' Download an R file from a URL and source it
#'
#' This will download a file and source it. Because it uses the
#' \code{\link{download}()} function, it can handle https URLs.
#'
#' By default, \code{source_url()} checks the SHA-1 hash of the file.
If it 7 | #' differs from the expected value, it will throw an error. The default 8 | #' expectation is that a hash is provided; if not, \code{source_url()} will 9 | #' prompt the user, asking if they are sure they want to continue, unless 10 | #' \code{prompt=FALSE} is used. In other words, if you use \code{prompt=FALSE}, 11 | #' it will run the remote code without checking the hash, and without asking 12 | #' the user. 13 | #' 14 | #' The purpose of checking the hash is to ensure that the file has not changed. 15 | #' If a \code{source_url} command with a hash is posted in a public forum, then 16 | #' others who source the URL (with the hash) are guaranteed to run the same 17 | #' code every time. This means that the author doesn't need to worry about the 18 | #' security of the server hosting the file. It also means that the users don't 19 | #' have to worry about the file being replaced with a damaged or 20 | #' maliciously-modified version. 21 | #' 22 | #' To find the hash of a local file, use \code{\link[digest]{digest}()}. For a simple 23 | #' way to find the hash of a remote file, use \code{\link{sha_url}()}. 24 | #' 25 | #' @param url The URL to download. 26 | #' @param sha A SHA-1 hash of the file at the URL. 27 | #' @param prompt Prompt the user if no value for \code{sha} is provided. 28 | #' @param quiet If \code{FALSE} (the default), print out status messages about 29 | #' checking SHA. 30 | #' @param ... Other arguments that are passed to \code{\link{source}()}. 31 | #' 32 | #' @seealso \code{\link{source}()} for more information on the arguments 33 | #' that can be used with this function. The \code{\link{sha_url}()} function 34 | #' can be used to find the SHA-1 hash of a remote file. 35 | #' 36 | #' @export 37 | #' @examples 38 | #' \dontrun{ 39 | #' # Source the a sample file 40 | #' 41 | #' # This is a very long URL; break it up so it can be seen more easily in the 42 | #' # examples. 
#' test_url <- paste0("https://gist.github.com/wch/dae7c106ee99fe1fdfe7",
#'                    "/raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r")
#' downloader::source_url(test_url,
#'                        sha = "9b8ff5213e32a871d6cb95cce0bed35c53307f61")
#'
#' # Find the hash of a file
#' downloader::sha_url(test_url)
#' }
#'
#'
#' @importFrom digest digest
source_url <- function(url, sha = NULL, ..., prompt = TRUE, quiet = FALSE) {

  if (prompt && (is.null(sha) || sha == '')) {
    # No hash was supplied: warn the user and ask before running remote code.
    resp <- readline(prompt = paste(sep = '',
      ' No SHA-1 hash specified for the file. The hash is needed to ensure that\n',
      ' the file at the URL has not changed. See ?source_url for information on\n',
      ' why this is useful. Are you sure you want to continue? [y/n] '))

    sha <- NULL  # Set to NULL for simpler check later on

    if (tolower(resp) != "y") {
      message("Quitting")
      return(invisible())
    }
  }

  temp_file <- tempfile()
  # Register cleanup before downloading so the temporary file is removed
  # even when download() or source() fails.
  on.exit(unlink(temp_file))

  download(url, temp_file)

  if (!is.null(sha)) {
    url_sha <- digest(file = temp_file, algo = 'sha1')

    # Compare case-insensitively: digest() always emits lowercase hex, but
    # callers may paste a hash in uppercase.
    if (identical(tolower(url_sha), tolower(sha))) {
      if (!quiet) {
        message('Hash ', url_sha, ' matches expected value.')
      }
    } else {
      stop('Hash ', url_sha, ' does not match expected value!')
    }

  } else {
    if (!quiet) {
      message('Not checking SHA-1 of downloaded file.')
    }
  }

  source(temp_file, ...)
}

# --- file: README.md ---
downloader
==========

This package provides a wrapper for the download.file function, making it
possible to download files over https on Windows, Mac OS X, and other
Unix-like platforms.
5 | The RCurl package provides this functionality (and much more) but can be difficult to install because it must be compiled with external dependencies. 6 | This package has no external dependencies, so it is much easier to install. 7 | 8 | Example usage 9 | ============= 10 | 11 | This will download the source code for the downloader package: 12 | 13 | ```R 14 | # First install downloader from CRAN 15 | install.packages("downloader") 16 | 17 | library(downloader) 18 | download("https://github.com/wch/downloader/zipball/master", 19 | "downloader.zip", mode = "wb") 20 | ``` 21 | -------------------------------------------------------------------------------- /downloader.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: Default 4 | SaveWorkspace: Default 5 | AlwaysSaveHistory: Default 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: Sweave 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Package 16 | PackageInstallArgs: --no-multiarch 17 | PackageRoxygenize: rd,collate,namespace,vignette 18 | -------------------------------------------------------------------------------- /man/download.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/download.r 3 | \name{download} 4 | \alias{download} 5 | \title{Download a file, using http, https, or ftp} 6 | \usage{ 7 | download(url, ...) 8 | } 9 | \arguments{ 10 | \item{url}{The URL to download.} 11 | 12 | \item{...}{Other arguments that are passed to \code{\link{download.file}}.} 13 | } 14 | \description{ 15 | This is a wrapper for \code{\link{download.file}} and takes all the same 16 | arguments. The only difference is that, if the protocol is https, it changes 17 | some settings to make it work. How exactly the settings are changed differs 18 | among platforms. 
19 | } 20 | \details{ 21 | This function also should follow http redirects on all platforms, which is 22 | something that does not happen by default when \code{curl} is used, as on Mac 23 | OS X. 24 | 25 | With Windows, it either uses the \code{"wininet"} method (for R 3.2) or uses 26 | the \code{"internal"} method after first ensuring that \code{setInternet2}, 27 | is active (which tells R to use the \code{internet2.dll}). 28 | 29 | On other platforms, it will try to use \code{libcurl}, \code{wget}, then 30 | \code{curl}, and then \code{lynx} to download the file. R 3.2 will typically 31 | have the \code{libcurl} method and for previous versions of R Linux platforms 32 | will have \code{wget} installed, and Mac OS X will have \code{curl}. 33 | 34 | Note that for many (perhaps most) types of files, you will want to use 35 | \code{mode="wb"} so that the file is downloaded in binary mode. 36 | } 37 | \examples{ 38 | \dontrun{ 39 | # Download the downloader source, in binary mode 40 | download("https://github.com/wch/downloader/zipball/master", 41 | "downloader.zip", mode = "wb") 42 | } 43 | 44 | } 45 | \seealso{ 46 | \code{\link{download.file}} for more information on the arguments 47 | that can be used with this function. 48 | } 49 | -------------------------------------------------------------------------------- /man/downloader.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/downloader-package.r 3 | \docType{package} 4 | \name{downloader} 5 | \alias{downloader-package} 6 | \alias{downloader} 7 | \title{downloader: a package for making it easier to download files over https} 8 | \description{ 9 | This package provides a wrapper for the download.file function, 10 | making it possible to download files over https on Windows, Mac OS X, and 11 | other Unix-like platforms. 
The RCurl package provides this functionality 12 | (and much more) but can be difficult to install because it must be compiled 13 | with external dependencies. This package has no external dependencies, so 14 | it is much easier to install. 15 | } 16 | \seealso{ 17 | Useful links: 18 | \itemize{ 19 | \item \url{https://github.com/wch/downloader} 20 | \item Report bugs at \url{https://github.com/wch/downloader/issues} 21 | } 22 | 23 | } 24 | \author{ 25 | \strong{Maintainer}: Winston Chang \email{winston@stdout.org} 26 | 27 | } 28 | -------------------------------------------------------------------------------- /man/sha_url.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/sha_url.r 3 | \name{sha_url} 4 | \alias{sha_url} 5 | \title{Download a file from a URL and find a SHA-1 hash of it} 6 | \usage{ 7 | sha_url(url, cmd = TRUE) 8 | } 9 | \arguments{ 10 | \item{url}{The URL of the file to find a hash of.} 11 | 12 | \item{cmd}{If \code{TRUE} (the default), print out a command for sourcing the 13 | URL with \code{\link{source_url}()}, including the hash.} 14 | } 15 | \description{ 16 | This will download a file and find a SHA-1 hash of it, using 17 | \code{\link[digest]{digest}()}. The primary purpose of this function is to provide 18 | an easy way to find the value of \code{sha} which can be passed to 19 | \code{\link{source_url}()}. 20 | } 21 | \examples{ 22 | \dontrun{ 23 | # Get the SHA hash of a file. It will print the text below and return 24 | # the hash as a string. This is a very long URL; break it up so it can be 25 | # seen more easily in the examples. 
26 | test_url <- paste0("https://gist.github.com/wch/dae7c106ee99fe1fdfe7", 27 | "/raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r") 28 | sha_url(test_url) 29 | # Command for sourcing the URL: 30 | # downloader::source_url("https://gist.github.com/wch/dae7c106ee99fe1fdfe7 31 | # /raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r", 32 | # sha="9b8ff5213e32a871d6cb95cce0bed35c53307f61") 33 | # [1] "9b8ff5213e32a871d6cb95cce0bed35c53307f61" 34 | } 35 | 36 | 37 | } 38 | -------------------------------------------------------------------------------- /man/source_url.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/source_url.r 3 | \name{source_url} 4 | \alias{source_url} 5 | \title{Download an R file from a URL and source it} 6 | \usage{ 7 | source_url(url, sha = NULL, ..., prompt = TRUE, quiet = FALSE) 8 | } 9 | \arguments{ 10 | \item{url}{The URL to download.} 11 | 12 | \item{sha}{A SHA-1 hash of the file at the URL.} 13 | 14 | \item{...}{Other arguments that are passed to \code{\link{source}()}.} 15 | 16 | \item{prompt}{Prompt the user if no value for \code{sha} is provided.} 17 | 18 | \item{quiet}{If \code{FALSE} (the default), print out status messages about 19 | checking SHA.} 20 | } 21 | \description{ 22 | This will download a file and source it. Because it uses the 23 | \code{\link{download}()} function, it can handle https URLs. 24 | } 25 | \details{ 26 | By default, \code{source_url()} checks the SHA-1 hash of the file. If it 27 | differs from the expected value, it will throw an error. The default 28 | expectation is that a hash is provided; if not, \code{source_url()} will 29 | prompt the user, asking if they are sure they want to continue, unless 30 | \code{prompt=FALSE} is used. In other words, if you use \code{prompt=FALSE}, 31 | it will run the remote code without checking the hash, and without asking 32 | the user. 
33 | 34 | The purpose of checking the hash is to ensure that the file has not changed. 35 | If a \code{source_url} command with a hash is posted in a public forum, then 36 | others who source the URL (with the hash) are guaranteed to run the same 37 | code every time. This means that the author doesn't need to worry about the 38 | security of the server hosting the file. It also means that the users don't 39 | have to worry about the file being replaced with a damaged or 40 | maliciously-modified version. 41 | 42 | To find the hash of a local file, use \code{\link[digest]{digest}()}. For a simple 43 | way to find the hash of a remote file, use \code{\link{sha_url}()}. 44 | } 45 | \examples{ 46 | \dontrun{ 47 | # Source the a sample file 48 | 49 | # This is a very long URL; break it up so it can be seen more easily in the 50 | # examples. 51 | test_url <- paste0("https://gist.github.com/wch/dae7c106ee99fe1fdfe7", 52 | "/raw/db0c9bfe0de85d15c60b0b9bf22403c0f5e1fb15/test.r") 53 | downloader::source_url(test_url, 54 | sha = "9b8ff5213e32a871d6cb95cce0bed35c53307f61") 55 | 56 | # Find the hash of a file 57 | downloader::sha_url(test_url) 58 | } 59 | 60 | 61 | } 62 | \seealso{ 63 | \code{\link{source}()} for more information on the arguments 64 | that can be used with this function. The \code{\link{sha_url}()} function 65 | can be used to find the SHA-1 hash of a remote file. 
}

# --- file: tests/testthat.R ---
library(testthat)
library(downloader)

test_check("downloader")

# --- file: tests/testthat/test-download.R ---
context("download")

# Download from a url, and return the contents of the file as a string
download_result <- function(url) {
  tfile <- tempfile()
  # Remove the temp file even if download() or readLines() fails below.
  # (Previously unlink() was only reached on success.)
  on.exit(unlink(tfile))

  download(url, tfile, mode = "wb")

  # readLines() accepts a path directly and opens/closes the connection
  # itself; the old explicit file()/close() pair leaked the connection
  # whenever readLines() threw an error.
  paste(readLines(tfile, warn = FALSE), collapse = "\n")
}

# CRAN has intermittent problems with these tests, since they rely on a
# particular website being accessible.
# This makes it run with devtools::test() but not on CRAN
if (Sys.getenv("NOT_CRAN") == "true") {

  test_that("downloading http and https works properly", {
    # Fetch over plain http; httpbin's /ip payload always contains "origin".
    res <- download_result("http://httpbin.org/ip")
    expect_true(grepl("origin", res))

    # Same check over https.
    res <- download_result("https://httpbin.org/ip")
    expect_true(grepl("origin", res))
  })

  test_that("follows redirects", {
    # A chain of three http redirects should still land on the /ip payload.
    res <- download_result("http://httpbin.org/redirect/3")
    expect_true(grepl("origin", res))

    # And the same over https.
    res <- download_result("https://httpbin.org/redirect/3")
    expect_true(grepl("origin", res))
  })

}

# --- file: tests/testthat/test-sha.R ---
context("sha")

test_that("sha_url", {
  # Write a scrap of R code to a temp file and serve it via a file:// URL.
  tf <- tempfile()
  code <- "a <<- a + 1"
  writeLines(code, sep = "", con = tf)
  file_url <- paste0("file://", tf)

  # sha_url() must agree with hashing the same string directly.
  expect_equal(sha_url(file_url),
               digest(code, algo = "sha1", serialize = FALSE))
})


test_that("Check SHA hash with source_url", {

  # Temp file containing code that bumps a global counter when sourced.
  tf <- tempfile()
  writeLines("a <<- a + 1", con = tf)
  file_url <- paste0("file://", tf)

  # The correct hash, and a deliberately wrong one.
  right_sha <- sha_url(file_url)
  wrong_sha <- "0000000000000000000000000000000000000000"

  # Counter - incremented by the code in the URL, which is a <<- a + 1
  .GlobalEnv$a <- 0

  # There are a total of 2x3x2=12 conditions, but we don't need them all.

  # prompt=TRUE, right SHA, quiet=FALSE: sources, with a match message.
  expect_message(source_url(file_url, sha = right_sha), "matches expected")
  expect_equal(a, 1)

  # prompt=TRUE, wrong SHA, quiet=FALSE: errors; counter untouched.
  expect_error(source_url(file_url, sha = wrong_sha))
  expect_equal(a, 1)

  # prompt=TRUE, no SHA, quiet=FALSE: should prompt and respond to y/n
  # (no way to automatically test this)
  # source_url(file_url)

  # prompt=FALSE, no SHA, quiet=FALSE: sources, with a not-checking message.
  expect_message(source_url(file_url, prompt = FALSE), "Not checking")
  expect_equal(a, 2)

  # prompt=FALSE, right SHA, quiet=FALSE: sources, with a match message.
  expect_message(source_url(file_url, sha = right_sha, prompt = FALSE),
                 "matches expected")
  expect_equal(a, 3)

  # prompt=FALSE, wrong SHA, quiet=FALSE: errors.
  expect_error(source_url(file_url, sha = wrong_sha, prompt = FALSE))
  expect_equal(a, 3)

  # prompt=FALSE, no SHA, quiet=TRUE: sources silently.
  source_url(file_url, prompt = FALSE, quiet = TRUE)
  expect_equal(a, 4)

  # prompt=FALSE, right SHA, quiet=TRUE: sources silently.
  source_url(file_url, sha = right_sha, prompt = FALSE, quiet = TRUE)
  expect_equal(a, 5)
})