├── .Rbuildignore ├── .gitignore ├── DESCRIPTION ├── NAMESPACE ├── R ├── BatchDownload.R ├── ConvertToDD.R ├── EndCoordinates.R ├── ExtractTile.R ├── FindID.R ├── GetBands.R ├── GetDates.R ├── GetProducts.R ├── GetSubset.R ├── LandCover.R ├── MODISGrid.R ├── MODISSubsets.R ├── MODISSummaries.R ├── MODISTimeSeries.R ├── MODISTransects.R ├── QualityCheck.R ├── UpdateSubsets.R ├── WritePRJ.R └── daacmodis.R ├── README.md ├── data ├── ConvertExample.txt ├── EndCoordinatesExample.txt ├── FindIDExample.txt ├── QualityCheckExample.txt ├── SubsetExample.txt └── TransectExample.txt ├── inst └── CITATION ├── man ├── BatchDownload.Rd ├── ConvertExample.Rd ├── ConvertToDD.Rd ├── EndCoordinates.Rd ├── EndCoordinatesExample.Rd ├── ExtractTile.Rd ├── FindID.Rd ├── FindIDExample.rd ├── GetBands.Rd ├── GetDates.Rd ├── GetProducts.Rd ├── GetSubset.Rd ├── LandCover.Rd ├── MODISGrid.Rd ├── MODISSubsets.Rd ├── MODISSummaries.Rd ├── MODISTimeSeries.Rd ├── MODISTools-package.Rd ├── MODISTransects.Rd ├── QualityCheck.Rd ├── QualityCheckExample.Rd ├── SubsetExample.Rd ├── TransectExample.Rd ├── UpdateSubsets.Rd ├── WritePRJ.Rd └── daacmodis.Rd ├── tests ├── MODISTools_FullTestingScript.R ├── MODISTools_FullTestingScript.Rout └── Test.R └── vignettes ├── LandCover └── LandCoverSummary.csv ├── MODISSubsetsMCD12Q1 ├── 1_MCD12Q1.asc ├── 2_MCD12Q1.asc ├── 3_MCD12Q1.asc ├── 4_MCD12Q1.asc ├── 5_MCD12Q1.asc ├── 6_MCD12Q1.asc ├── 7_MCD12Q1.asc └── 8_MCD12Q1.asc ├── MODISSubsetsMOD13Q1 ├── 1___MOD13Q1.asc ├── 2___MOD13Q1.asc ├── 3___MOD13Q1.asc ├── 4___MOD13Q1.asc ├── 5___MOD13Q1.asc ├── 6___MOD13Q1.asc ├── 7___MOD13Q1.asc └── 8___MOD13Q1.asc ├── MODISSummaries ├── Data.csv └── Summary.csv ├── MODISTransects └── Transect.csv └── UsingMODISTools.Rnw /.Rbuildignore: -------------------------------------------------------------------------------- 1 | .gitignore 2 | .git 3 | .DS_Store 4 | README.md 5 | tests/MODISTools_FullTestingScript.R 6 | tests/MODISTools_FullTestingScript.Rout 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .DS_Store 5 | README.html 6 | *~ 7 | man/.Rapp.history 8 | inst/doc/UsingMODISTools-concordance.tex 9 | inst/doc/UsingMODISTools.log 10 | inst/doc/UsingMODISTools.pdf 11 | inst/doc/UsingMODISTools.synctex.gz 12 | inst/doc/UsingMODISTools.tex 13 | inst/doc/UsingMODISTools.toc 14 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: MODISTools 2 | Type: Package 3 | Title: MODIS Subsetting Tools 4 | Version: 0.95.1 5 | Date: 2017-02-15 6 | Author: Sean Tuck, Helen Phillips 7 | Imports: RCurl, XML, grDevices, graphics, stats, utils 8 | Maintainer: Sean Tuck 9 | Description: Provides several functions for downloading, storing and processing 10 | subsets of MODIS Land Processes data as a batch process. 
11 | License: GPL-3 12 | LazyLoad: yes 13 | LazyData: yes 14 | URL: https://github.com/seantuck12/MODISTools/ 15 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | import(RCurl) 2 | importFrom(XML, xmlRoot, xmlTreeParse, xmlSApply, xmlValue) 3 | 4 | importFrom("grDevices", "dev.off", "pdf") 5 | importFrom("graphics", "abline", "lines", "plot") 6 | importFrom("stats", "approx", "median", "sd") 7 | importFrom("utils", "read.csv", "read.delim", "write.csv", "write.table") 8 | 9 | exportPattern("^[^\\.]") -------------------------------------------------------------------------------- /R/BatchDownload.R: -------------------------------------------------------------------------------- 1 | BatchDownload <- 2 | function(lat.long, start.date, end.date, MODIS.start, MODIS.end, Bands, Products, Size, StartDate, Transect, SaveDir) 3 | { 4 | # DEFINE 5 | NCOL_SERVER_RES <- 10 6 | 7 | # Split band names into sets for different products. 8 | which.bands <- lapply(Products, function(x) which(Bands %in% GetBands(x))) 9 | 10 | # Loop set up to make request and write a subset file for each location. 11 | for(i in 1:nrow(lat.long)) 12 | { 13 | # Retrieve the list of date codes to be requested and organise them in batches of time series's of length 10. 14 | dates <- lapply(Products, function(x) GetDates(lat.long$lat[i], lat.long$long[i], x)) 15 | 16 | # Check that time-series fall within date range of MODIS data. 
17 | if(any((start.date$year + 1900) < 2000 & (end.date$year + 1900) < 2000)){ 18 | stop("Time-series found that falls entirely outside the range of available MODIS dates.") 19 | } 20 | if(any((start.date$year + 1900) > max(unlist(dates)) & (end.date$year + 1900) > max(unlist(dates)))){ 21 | stop("Time-series found that falls entirely outside the range of available MODIS dates.") 22 | } 23 | if(any((end.date$year + 1900) < 2000) | any((end.date$year + 1900) > max(unlist(dates)))){ 24 | stop("Some dates have been found that are beyond the range of MODIS observations available for download.") 25 | } 26 | if(any((start.date$year + 1900) < 2000) | any((start.date$year + 1900) > max(unlist(dates)))){ 27 | warning("Dates found beyond range of MODIS observations. Downloading from earliest date.", immediate. = TRUE) 28 | } 29 | 30 | ##### Initialise objects that will store downloaded data. 31 | # Find the start date and end date specific for each subset. 32 | start.dates <- lapply(dates, function(x) which(x >= MODIS.start[i])) 33 | end.dates <- lapply(dates, function(x) which(x >= MODIS.end[i])) 34 | # Extract the string of time-steps by snipping end.dates off the end. 35 | date.res <- mapply(function(x, y) x[which(!x %in% y)], x = start.dates, y = end.dates, SIMPLIFY = FALSE) 36 | allProblemDates <- c() # will store any empty dates that come up, so they can be returned to the user 37 | 38 | subsets <- mapply(function(x, y) rep(NA, length = (length(x) * length(y))), x = which.bands, y = date.res, SIMPLIFY = FALSE) 39 | subsets.length <- length(unlist(subsets)) 40 | ##### 41 | 42 | cat("Getting subset for location ", i, " of ", nrow(lat.long), "...\n", sep = "") 43 | 44 | for(prod in 1:length(Products)){ 45 | 46 | # Organise relevant MODIS dates into batches of 10. Web service getsubset function will only take 10 at a time. 47 | # Fill up any remaining rows in the final column to avoid data recycling. 
48 | ifelse((length(date.res[[prod]]) %% NCOL_SERVER_RES) == 0, 49 | date.list <- matrix(dates[[prod]][date.res[[prod]]], nrow = NCOL_SERVER_RES), 50 | date.list <- matrix(c(dates[[prod]][date.res[[prod]]], rep(NA, NCOL_SERVER_RES - (length(date.res[[prod]]) %% NCOL_SERVER_RES))), 51 | nrow = NCOL_SERVER_RES)) 52 | 53 | # Set bands for this product. 54 | bands <- Bands[which.bands[[prod]]] 55 | 56 | # Loop subset request for each band specified, storing each run into subsets object. 57 | for(n in 1:length(bands)){ 58 | 59 | if(ncol(date.list) > 1){ 60 | # Above statement stops (ncol(date.list)-1)=0 occurring in the loop (i.e. ask for the 0th column of dates). 61 | for(x in 1:(ncol(date.list) - 1)){ 62 | 63 | # getsubset function return object of ModisData class, with a subset slot that only allows 10 elements 64 | # (i.e. 10 dates), looped until all requested dates have been retrieved. 65 | # Retrieve the batch of MODIS data and store in result 66 | result <- try(GetSubset(lat.long$lat[i], lat.long$long[i], Products[prod], bands[n], 67 | date.list[1,x], date.list[NCOL_SERVER_RES,x], Size[1], Size[2])) 68 | 69 | if(!is.list(result)) stop("Downloading from the web service is currently not working. Please try again later.") 70 | 71 | if(length(strsplit(as.character(result$subset[[1]][1]), ",")[[1]]) == 5){ 72 | stop("Downloading from the web service is currently not working. Please try again later.") 73 | } 74 | 75 | busy <- FALSE 76 | if(class(result) != "try-error"){ 77 | busy <- grepl("Server is busy handling other requests", result$subset[1]) 78 | if(busy) cat("The server is busy handling other requests...\n") 79 | } 80 | 81 | # Check data downloaded. If not, wait 30 secs and try again until successful or function times out. 
82 | if(class(result) == "try-error" || is.na(result) || busy){ 83 | timer <- 1 84 | while(timer <= 10){ 85 | cat("Connection to the MODIS Web Service failed: trying again in 30secs...attempt", timer, "\n") 86 | Sys.sleep(30) 87 | 88 | result <- try(GetSubset(lat.long$lat[i], lat.long$long[i], Products[prod], bands[n], 89 | date.list[1,x], date.list[NCOL_SERVER_RES,x], Size[1], Size[2])) 90 | 91 | if(!is.list(result)) stop("Downloading from the web service is currently not working. Please try again later.") 92 | 93 | if(length(strsplit(as.character(result$subset[[1]][1]), ",")[[1]]) == 5){ 94 | stop("Downloading from the web service is currently not working. Please try again later.") 95 | } 96 | 97 | timer <- timer + 1 98 | ifelse(class(result) == "try-error" || is.na(result) || busy, next, break) 99 | } 100 | ifelse(class(result) == "try-error" || is.na(result) || busy, 101 | cat("Connection to the MODIS Web Service failed: timed out after 10 failed attempts...stopping download.\n"), 102 | break) 103 | stop(result) 104 | } 105 | 106 | # Store retrieved data in subsets. If more than 10 time-steps are requested, this runs until the final 107 | # column, which is downloaded after this loop. 108 | result <- with(result, paste(nrow, ncol, xll, yll, pixelsize, subset[[1]], sep = ',')) 109 | 110 | # Check whether result contains the expected number of dates. If not, find missing dates, add NA placemark, and print warning. 
111 | if(length(result) < sum(!is.na(date.list[ ,x]))){ 112 | resultDates <- sapply(result, function(x) strsplit(x, ',')[[1]][8], USE.NAMES=FALSE) 113 | whichProblemDates <- which(!(date.list[ ,x] %in% resultDates)) 114 | problemDates <- date.list[whichProblemDates,x] 115 | allProblemDates <- c(allProblemDates,problemDates) 116 | result <- replace(rep(NA,sum(!is.na(date.list[ ,x]))), date.list[ ,x] %in% resultDates, result) 117 | 118 | warning("There is no data for some requested dates:\n", 119 | "Latitude = ",lat.long$lat[i],"\n", 120 | "Longitude = ",lat.long$long[i],"\n", 121 | "Product = ",Products[prod],"\n", 122 | "Band = ",Bands[n],"\n", 123 | "Dates = ",problemDates,"\n", 124 | call.=FALSE, immediate.=TRUE) 125 | } 126 | 127 | subsets[[prod]][(((n - 1) * length(date.res[[prod]])) + ((x * NCOL_SERVER_RES) - (NCOL_SERVER_RES - 1))): 128 | (((n - 1) * length(date.res[[prod]])) + (x * NCOL_SERVER_RES))] <- result 129 | 130 | } # End of loop that reiterates for multiple batches of time-steps if the time-series is > 10 time-steps long. 131 | } 132 | 133 | ##### 134 | # This will download the last column of dates left (either final column or only column if < 10 dates). 135 | result <- try(GetSubset(lat.long$lat[i], lat.long$long[i], Products[prod], bands[n], date.list[1,ncol(date.list)], 136 | date.list[max(which(!is.na(date.list[ ,ncol(date.list)]))),ncol(date.list)], Size[1], Size[2])) 137 | 138 | if(!is.list(result)) stop("Downloading from the web service is currently not working. Please try again later.") 139 | 140 | if(length(strsplit(as.character(result$subset[[1]][1]), ",")[[1]]) == 5){ 141 | stop("Downloading from the web service is currently not working. 
Please try again later.") 142 | } 143 | 144 | busy <- FALSE 145 | if(class(result) != "try-error"){ 146 | busy <- grepl("Server is busy handling other requests", result$subset[1]) 147 | if(busy) cat("The server is busy handling other requests...\n") 148 | } 149 | 150 | if(class(result) == "try-error" || is.na(result) || busy){ 151 | timer <- 1 152 | while(timer <= 10){ 153 | cat("Connection to the MODIS Web Service failed: trying again in 30secs...attempt", timer, "\n") 154 | Sys.sleep(30) 155 | 156 | result <- try(GetSubset(lat.long$lat[i], lat.long$long[i], Products[prod], bands[n], date.list[1,ncol(date.list)], 157 | date.list[max(which(!is.na(date.list[ ,ncol(date.list)]))),ncol(date.list)], Size[1], Size[2])) 158 | 159 | if(!is.list(result)) stop("Downloading from the web service is currently not working. Please try again later.") 160 | 161 | if(length(strsplit(as.character(result$subset[[1]][1]), ",")[[1]]) == 5){ 162 | stop("Downloading from the web service is currently not working. Please try again later.") 163 | } 164 | 165 | timer <- timer + 1 166 | ifelse(class(result) == "try-error" || is.na(result) || busy, next, break) 167 | } 168 | 169 | ifelse(class(result) == "try-error" || is.na(result) || busy, 170 | cat("Connection to the MODIS Web Service failed: timed out after 10 failed attempts...stopping download.\n"), 171 | break) 172 | stop(result) 173 | } 174 | 175 | 176 | # Check downloaded subset request contains data: if it contains the following message instead, abort function. 177 | if(regexpr("Server is busy handling other requests in queue", result$subset[[1]][1]) != -1){ 178 | stop("Server is busy handling other requests in queue. Please try your subset order later.") 179 | } 180 | 181 | # All MODIS data for a given product band now retrieved and stored in subsets. 182 | result <- with(result, paste(nrow, ncol, xll, yll, pixelsize, subset[[1]], sep = ',')) 183 | 184 | # Check whether result contains the expected number of dates. 
If not, find missing dates, add NA placemark, and print warning. 185 | if(length(result) < sum(!is.na(date.list[ ,ncol(date.list)]))){ 186 | resultDates <- sapply(result, function(x) strsplit(x, ',')[[1]][8], USE.NAMES=FALSE) 187 | whichProblemDates <- which(!(date.list[ ,ncol(date.list)] %in% resultDates)) 188 | problemDates <- date.list[whichProblemDates,ncol(date.list)] 189 | allProblemDates <- c(allProblemDates,problemDates) 190 | result <- replace(rep(NA,sum(!is.na(date.list[ ,ncol(date.list)]))), date.list[ ,ncol(date.list)] %in% resultDates, result) 191 | 192 | warning("There is no data for some requested dates:\n", 193 | "Latitude = ",lat.long$lat[i],"\n", 194 | "Longitude = ",lat.long$long[i],"\n", 195 | "Product = ",Products[prod],"\n", 196 | "Band = ",Bands[n],"\n", 197 | "Dates = ",problemDates,"\n", 198 | call.=FALSE, immediate.=TRUE) 199 | } 200 | 201 | subsets[[prod]][(((n - 1) * length(date.res[[prod]])) + (((ncol(date.list) - 1) * NCOL_SERVER_RES) + 1)): 202 | (((n - 1) * length(date.res[[prod]])) + length(date.res[[prod]]))] <- result 203 | 204 | # Check whether any dates in subset are empty and store their subset info for future use. 205 | whichBandN <- (((n-1)*length(date.res[[prod]]))+1) : (n*length(date.res[[prod]])) 206 | if(any(emptySubsets <- sapply(subsets[[prod]][whichBandN], function(x) grepl("character(0)",x,fixed=TRUE)))){ 207 | problemDates <- dates[[prod]][date.res[[prod]][which(emptySubsets)]] 208 | allProblemDates <- c(allProblemDates,problemDates) 209 | warning("There is no data for some requested dates:\n", 210 | "Latitude = ",lat.long$lat[i],"\n", 211 | "Longitude = ",lat.long$long[i],"\n", 212 | "Product = ",Products[prod],"\n", 213 | "Band = ",Bands[n],"\n", 214 | "Dates = ",problemDates,"\n", 215 | call.=FALSE, immediate.=TRUE) 216 | } 217 | 218 | } # End of loop for each band. 219 | } # End of loop for each product. 
220 | 221 | subsets <- do.call("c", subsets) 222 | 223 | ##### Check that there is no missing data in the download & log download status accordingly. 224 | if(length(subsets) != subsets.length | any(is.na(subsets))){ 225 | lat.long$Status[i] <- paste("Some dates were missing:", paste(unique(allProblemDates),collapse="; ")) 226 | subsets <- subsets[!is.na(subsets)] 227 | } else { 228 | lat.long$Status[i] <- "Successful download" 229 | } 230 | 231 | if("," %in% substr(subsets, nchar(subsets), nchar(subsets))){ 232 | lat.long$Status[i] <- "Missing data in subset: try downloading again" 233 | cat("Missing information for time-series ", lat.long$SubsetID[i], ". See subset download file.\n", sep = "") 234 | } else { 235 | lat.long$Status[i] <- "Successful download" 236 | } 237 | ##### 238 | 239 | # Remove any empty subsets 240 | if(any(problemDates <- grep("character(0)", subsets, fixed=TRUE))){ 241 | allProblemDates <- c(allProblemDates,problemDates) 242 | subsets <- subsets[-problemDates] 243 | lat.long$Status[i] <- paste("Some dates were missing:", paste(unique(allProblemDates),collapse="; ")) 244 | } 245 | 246 | # Write an ascii file with all dates for each band at a given location into the working directory. 247 | prods <- paste(Products, collapse = "_") 248 | 249 | if(!Transect) write(subsets, file = file.path(SaveDir, paste(lat.long$SubsetID[i], "___", prods, ".asc", sep = "")), sep = "") 250 | if(Transect){ 251 | if(i == 1) write(subsets, file = file.path(SaveDir, paste(lat.long$SubsetID[i], "___", prods, ".asc", sep = "")), sep = "") 252 | if(i != 1) write(subsets, file = file.path(SaveDir, paste(lat.long$SubsetID[i], "___", prods, ".asc", sep = "")), sep = "", append = TRUE) 253 | } 254 | 255 | if(i == nrow(lat.long)) cat("Full subset download complete. 
Writing the subset download file...\n") 256 | } 257 | return(lat.long) 258 | } -------------------------------------------------------------------------------- /R/ConvertToDD.R: -------------------------------------------------------------------------------- 1 | ConvertToDD <- 2 | function(XY, FileSep = NULL, LatColName, LongColName) 3 | { 4 | if(!is.object(XY) & !is.character(XY)) stop("XY must be an object in R or a file path character string.") 5 | if(is.object(XY)) XY<- data.frame(XY) 6 | if(is.character(XY)){ 7 | if(!file.exists(XY)) stop("Character string input for XY argument does not resemble an existing file path.") 8 | if(is.null(FileSep)) stop("To load a file as input, you must also specify its delimiter (FileSep).") 9 | XY<- read.delim(XY, sep = FileSep) 10 | } 11 | 12 | DMS.lat <- as.character(XY[ ,which(names(XY) == LatColName)]) 13 | DMS.long <- as.character(XY[ ,which(names(XY) == LongColName)]) 14 | 15 | which.format.lat <- gregexpr("([^0-9.][0-9])", DMS.lat) 16 | which.format.long <- gregexpr("([^0-9.][0-9])", DMS.long) 17 | DM.or.DMS.lat <- rep(NA, nrow(XY)) 18 | DM.or.DMS.long <- rep(NA, nrow(XY)) 19 | 20 | for(i in 1:nrow(XY)){ 21 | DM.or.DMS.lat[i] <- length(which.format.lat[[i]]) 22 | DM.or.DMS.long[i] <- length(which.format.long[[i]]) 23 | } 24 | if(any(DM.or.DMS.lat != DM.or.DMS.long)){ 25 | stop("A coordinate has been recognised with inconsistent formatting between lat and long. 26 | Check for erroneous non-numeric characters. See the help page for advice on correct formats.") 27 | } 28 | if(any(DM.or.DMS.lat != 1 & DM.or.DMS.lat != 2)){ 29 | stop("A coordinate has been found that does not match the required format for degrees minutes seconds or degrees minutes. 30 | Check for erroneous non-numeric characters. 
See the help page for advice on correct formats.") 31 | } 32 | 33 | DD.lat <- rep(NA, nrow(XY)) 34 | DD.long <- rep(NA, nrow(XY)) 35 | D.lat <- rep(NA, nrow(XY)) 36 | D.long <- rep(NA, nrow(XY)) 37 | M.lat <- rep(NA, nrow(XY)) 38 | M.long <- rep(NA, nrow(XY)) 39 | S.lat <- rep(NA, nrow(XY)) 40 | S.long <- rep(NA, nrow(XY)) 41 | D.point.lat <- regexpr("[^0-9][0-9]{1,2}[^0-9]", DMS.lat) 42 | D.point.long <- regexpr("[^0-9][0-9]{1,2}[^0-9]", DMS.long) 43 | 44 | for(i in 1:nrow(XY)){ 45 | # For degrees minutes seconds coordinates. 46 | if(DM.or.DMS.lat[i] == 2){ 47 | # Latitude 48 | D.lat[i] <- as.numeric(substr(DMS.lat[i], 1, D.point.lat[i]-1)) 49 | M.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+1, D.point.lat[i]+attr(D.point.lat, "match.length")[i]-2)) 50 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'N'){ 51 | S.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+attr(D.point.lat, "match.length")[i], nchar(DMS.lat[i])-2)) 52 | } else { 53 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'S'){ 54 | D.lat[i] <- -D.lat[i] 55 | S.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+attr(D.point.lat, "match.length")[i], nchar(DMS.lat[i])-2)) 56 | } else { 57 | S.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+attr(D.point.lat, "match.length")[i], nchar(DMS.lat[i])-1)) 58 | } 59 | } 60 | # Calculate latitude decimal degrees. 
61 | if(D.lat[i] >= 0){ 62 | DD.lat[i] <- D.lat[i] + (M.lat[i] / 60) + (S.lat[i] / 3600) 63 | } else { 64 | DD.lat[i] <- -(S.lat[i] / 3600) - (M.lat[i] / 60) + D.lat[i] 65 | } 66 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'S' & D.lat[i] == 0){ 67 | DD.lat[i] <- -DD.lat[i] 68 | } 69 | 70 | # Longitude 71 | D.long[i] <- as.numeric(substr(DMS.long[i], 1, D.point.long[i]-1)) 72 | M.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+1, D.point.long[i]+attr(D.point.long, "match.length")[i]-2)) 73 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'E'){ 74 | S.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+attr(D.point.long, "match.length")[i], nchar(DMS.long[i])-2)) 75 | } else { 76 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'W'){ 77 | S.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+attr(D.point.long, "match.length")[i], nchar(DMS.long[i])-2)) 78 | D.long[i] <- -D.long[i] 79 | } else { 80 | S.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+attr(D.point.long, "match.length")[i], nchar(DMS.long[i])-1)) 81 | } 82 | } 83 | # Calculate longitude decimal degrees. 84 | if(D.long[i] >= 0){ 85 | DD.long[i] <- D.long[i] + (M.long[i] / 60) + (S.long[i] / 3600) 86 | } else { 87 | DD.long[i] <- -(S.long[i] / 3600) - (M.long[i] / 60) + D.long[i] 88 | } 89 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'W' & D.long[i] == 0){ 90 | DD.long[i] <- -DD.long[i] 91 | } 92 | } 93 | 94 | # For degrees minutes coordinates. 
95 | if(DM.or.DMS.lat[i] == 1){ 96 | # Latitude 97 | D.lat[i] <- as.numeric(substr(DMS.lat[i], 1, D.point.lat[i]-1)) 98 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'N'){ 99 | M.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+1, nchar(DMS.lat[i])-2)) 100 | } else { 101 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'S'){ 102 | M.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+1, nchar(DMS.lat[i])-2)) 103 | D.lat[i] <- -D.lat[i] 104 | } else { 105 | M.lat[i] <- as.numeric(substr(DMS.lat[i], D.point.lat[i]+1, nchar(DMS.lat[i])-1)) 106 | } 107 | } 108 | # Calculate latitude decimal degrees. 109 | if(D.lat[i] >= 0){ 110 | DD.lat[i] <- D.lat[i] + (M.lat[i] /60) 111 | } else { 112 | DD.lat[i] <- -(M.lat[i] / 60) + D.lat[i] 113 | } 114 | if(substr(DMS.lat[i], nchar(DMS.lat[i]), nchar(DMS.lat[i])) == 'S' & D.lat[i] == 0){ 115 | DD.lat[i]<- -DD.lat[i] 116 | } 117 | 118 | # Longitude 119 | D.long[i] <- as.numeric(substr(DMS.long[i], 1, D.point.long[i]-1)) 120 | 121 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'E'){ 122 | M.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+1, nchar(DMS.long[i])-2)) 123 | } else { 124 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'W'){ 125 | M.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+1, nchar(DMS.long[i])-2)) 126 | D.long[i] <- -D.long[i] 127 | } else { 128 | M.long[i] <- as.numeric(substr(DMS.long[i], D.point.long[i]+1, nchar(DMS.long[i])-1)) 129 | } 130 | } 131 | # Calculate longitude decimal degrees. 132 | if(D.long[i] >= 0){ 133 | DD.long[i] <- (D.long[i]) + ((M.long[i])/60) 134 | } else { 135 | DD.long[i] <- -((M.long[i])/60) + (D.long[i]) 136 | } 137 | if(substr(DMS.long[i], nchar(DMS.long[i]), nchar(DMS.long[i])) == 'W' & D.long[i] == 0){ 138 | DD.long[i] <- -DD.long[i] 139 | } 140 | } 141 | } 142 | 143 | # Checks that lat answers are going to be sensible before returning result. 
144 | if(any(abs(D.lat) > 90)){ 145 | cat("Invalid degrees of latitude entries:", "\n", XY[which(abs(D.lat) > 90), ], "\n") 146 | stop("Range of valid degrees is from -90 to 90.") 147 | } 148 | if(any(M.lat < 0 & M.lat > 60)){ 149 | cat("Invalid minutes entries:", "\n", XY[which(M.lat > 0 & M.lat < 60), ], "\n") 150 | stop("Range of valid minutes is from 0 to 60.") 151 | } 152 | if(any(DM.or.DMS.lat == 2)){ 153 | if(any(S.lat[which(DM.or.DMS.lat == 2)] < 0 & S.lat[which(DM.or.DMS.lat == 2)] > 60)){ 154 | cat("Invalid seconds entries:", "\n", 155 | XY[which(S.lat[which(DM.or.DMS.lat == 2)] > 0 & S.lat[which(DM.or.DMS.lat == 2)] < 60), ], "\n") 156 | stop("Range of valid seconds is from 0 to 60.") 157 | } 158 | } 159 | # Checks that long answers are going to be sensible before returning result. 160 | if(any(abs(D.long) > 180)){ 161 | cat("Invalid degrees of longitude entries:", "\n", XY[which(abs(D.long) > 180), ], "\n") 162 | stop("Range of valid degrees longitude is from -180 to 180.") 163 | } 164 | if(any(M.long < 0 & M.long > 60)){ 165 | cat("Invalid minutes entries:", "\n", XY[which(M.long > 0 & M.long < 60), ], "\n") 166 | stop("Range of valid minutes is from 0 to 60.") 167 | } 168 | if(any(DM.or.DMS.lat == 2)){ 169 | if(any(S.long[which(DM.or.DMS.lat == 2)] < 0 & S.long[which(DM.or.DMS.lat == 2)] > 60)){ 170 | cat("Invalid seconds entries:", "\n", 171 | XY[which(S.long[which(DM.or.DMS.lat == 2)] > 0 & S.long[which(DM.or.DMS.lat == 2)] < 60), ], "\n") 172 | stop("Range of valid seconds is from 0 to 60.") 173 | } 174 | } 175 | 176 | # Final checks that -90 <= decimal lat <= 90 and -180 <= decimal long <= 180, and then return the result. 177 | lat.res.check <- all(abs(DD.lat) <= 90) 178 | long.res.check <- all(abs(DD.long) <= 180) 179 | if(!lat.res.check & !long.res.check){ 180 | stop("It appears an invalid answer has been calculated. 
Check for values just beyond the valid ranges of lat and long.") 181 | } else { 182 | return(cbind(DD.lat, DD.long)) 183 | } 184 | } -------------------------------------------------------------------------------- /R/EndCoordinates.R: -------------------------------------------------------------------------------- 1 | EndCoordinates <- 2 | function(LoadDat, FileSep = NULL, Distance = 1000, Angle = 90, AngleUnits = 'radians'|'degrees', Dir = ".", 3 | FileName = "Transect Coordinates") 4 | { 5 | if(!is.object(LoadDat) & !is.character(LoadDat)) stop("Data must be the name of an object in R, or a file path.") 6 | if(is.object(LoadDat)) x <- data.frame(LoadDat) 7 | 8 | if(is.character(LoadDat)){ 9 | if(!file.exists(LoadDat)) stop("Character string input for LoadDat argument does not resemble an existing file path.") 10 | if(is.null(FileSep)) stop("Data is a file path. Must also specify its delimiter (FileSep).") 11 | x <- read.delim(LoadDat, sep = FileSep) 12 | } 13 | 14 | if(!file.exists(Dir)) stop("Character string input for Dir argument does not resemble an existing file path.") 15 | 16 | if(AngleUnits == 'radians' & Angle > (2 * pi)) stop('Not sensible radian values. Did you mean degrees?') 17 | if(AngleUnits == 'degrees' & Angle > 360) stop('Not sensible degrees values. 
Check input.') 18 | 19 | if(AngleUnits == 'radians') angle.rad <- Angle 20 | if(AngleUnits == 'degrees') angle.rad <- Angle / (180 / pi) 21 | 22 | lat.rad <- x$start.lat / (180 / pi) 23 | delta.lat.metres <- round(Distance * cos(angle.rad)) 24 | delta.long.metres <- round(Distance * sin(angle.rad)) 25 | delta.lat.degrees <- delta.lat.metres / (111.2 * 1000) 26 | delta.long.degrees <- delta.long.metres / ((111.2 * 1000) * cos(lat.rad)) 27 | end.lat <- x$start.lat + delta.lat.degrees 28 | end.long <- x$start.long + delta.long.degrees 29 | x <- cbind(x, end.lat, end.long) 30 | 31 | write.csv(x, file = paste(Dir, "/", FileName, Distance, "m", Angle, AngleUnits, Sys.Date(), ".csv", sep = ""), 32 | row.names = FALSE) 33 | } -------------------------------------------------------------------------------- /R/ExtractTile.R: -------------------------------------------------------------------------------- 1 | ExtractTile <- 2 | function(Data, Rows, Cols, Grid = FALSE) 3 | { 4 | if(!is.object(Data)) stop("Data input should be an R object - numeric vector, matrix, or data frame.") 5 | 6 | if(!is.vector(Data) & !is.matrix(Data) & !is.data.frame(Data)){ 7 | stop("Data should be a vector (one tile), or a matrix/data.frame (multiple tiles).") 8 | } 9 | 10 | if(is.vector(Data)) Data <- matrix(Data, nrow = 1, ncol = length(Data)) 11 | if(is.data.frame(Data)) Data <- as.matrix(Data) 12 | 13 | if(!is.numeric(Data)) stop("Data is not numeric class: should be MODIS data only to extract a nested subset.") 14 | 15 | if(ncol(Data) <= 1) stop("Not enough pixels (columns) found to extract a subset.") 16 | 17 | if(!is.numeric(Rows) | !is.numeric(Cols)) stop("Rows and Cols should be both be numeric class - two integers.") 18 | if(length(Rows) != 2 | length(Cols) != 2) stop("Rows and Cols input must both be a vector of integers, with two elements.") 19 | if(abs(Rows[1] - round(Rows[1])) > .Machine$double.eps^0.5 | 20 | abs(Rows[2] - round(Rows[2])) > .Machine$double.eps^0.5 | 21 | abs(Cols[2] - 
round(Cols[2])) > .Machine$double.eps^0.5 | 22 | abs(Cols[2] - round(Cols[2])) > .Machine$double.eps^0.5){ 23 | stop("Size input must be integers.") 24 | } 25 | 26 | if((Rows[1] %% 2) != 1 | (Cols[1] %% 2) != 1) stop("The dimensions from any tile downloaded should be odd numbered") 27 | 28 | # Check Rows & Cols [1] == ncol Data, i.e. the length of data in a tile fits a matrix of dim Rows[1] & Cols[1] 29 | if(ncol(Data) != length(matrix(nrow=Rows[1], ncol=Cols[1]))) stop("Tile size of Data does not match Rows and Cols input.") 30 | 31 | if(((Rows[2] * 2) + 1) >= Rows[1] & ((Cols[2] * 2) + 1) >= Cols[1]) stop("Tile size requested is not smaller than Data.") 32 | 33 | if(!is.logical(Grid)) stop("Grid should be logical, to specify the format of the output.") 34 | ##### 35 | 36 | # Get Data into a workable format and work out the subscripts of the nested subset. 37 | full.tile <- apply(Data, 1, function(x) list(matrix(x, nrow = Rows[1], ncol = Cols[1], byrow = TRUE))) 38 | centre <- c(ceiling(nrow(full.tile[[1]][[1]]) / 2), ceiling(ncol(full.tile[[1]][[1]]) / 2)) 39 | row.range <- (centre[1] - Rows[2]):(centre[1] + Rows[2]) 40 | col.range <- (centre[2] - Cols[2]):(centre[2] + Cols[2]) 41 | 42 | # Put output in either array or matrix format. 
43 | if(Grid){ 44 | res <- array(dim = c( ((Rows[2] * 2) + 1), ((Cols[2] * 2) + 1), nrow(Data))) 45 | for(i in 1:nrow(Data)) res[ , ,i] <- full.tile[[i]][[1]][row.range,col.range] 46 | } else if(!Grid){ 47 | res <- matrix(nrow = nrow(Data), ncol = length(matrix(nrow=((Rows[2] * 2) + 1), ncol = ((Cols[2] * 2) + 1)))) 48 | for(i in 1:nrow(Data)) res[i, ] <- as.vector(full.tile[[i]][[1]][row.range,col.range]) 49 | } 50 | 51 | return(res) 52 | } -------------------------------------------------------------------------------- /R/FindID.R: -------------------------------------------------------------------------------- 1 | FindID <- 2 | function(ID, Data) 3 | { 4 | if(!is.object(ID) | !is.object(Data)) stop("ID and Data inputs must both be objects currently in your R workspace.") 5 | 6 | if(!all(names(ID) %in% names(Data))) stop("ID is not a subset of Data. All names of ID must match with rows in Data.") 7 | 8 | match.set <- Data[ ,match(names(ID), names(Data))] 9 | 10 | row.matches <- apply(match.set, 1, match, ID) 11 | 12 | ifelse(length(which(!is.na(apply(row.matches, 2, sum)))) == 0, 13 | return(cat("No matches found.\n")), 14 | return(which(!is.na(apply(row.matches, 2, sum))))) 15 | } -------------------------------------------------------------------------------- /R/GetBands.R: -------------------------------------------------------------------------------- 1 | GetBands <- 2 | function(Product) 3 | { 4 | if(!any(Product == GetProducts())) stop("Product entered does not match any available products; see ?GetProducts.") 5 | 6 | getbands.xml <- paste(' 7 | 9 | 10 | 11 | 12 | ', Product, ' 13 | 14 | 15 | ', 16 | sep = "") 17 | 18 | header.fields <- c(Accept = "text/xml", 19 | Accept = "multipart/*", 20 | 'Content-Type' = "text/xml; charset=utf-8", 21 | SOAPAction = "") 22 | 23 | reader <- basicTextGatherer() 24 | header <- basicTextGatherer() 25 | 26 | curlPerform(url = paste0(daacmodis, wsdl_loc), 27 | httpheader = header.fields, 28 | postfields = getbands.xml, 29 | 
writefunction = reader$update, 30 | verbose = FALSE) 31 | 32 | # Check the server is not down by insepcting the XML response for internal server error message. 33 | if(grepl("Internal Server Error", reader$value())){ 34 | stop("Web service failure: the ORNL DAAC server seems to be down, please try again later. 35 | The online subsetting tool may indicate when the server is up and running again.") 36 | } 37 | 38 | xmlres <- xmlRoot(xmlTreeParse(reader$value())) 39 | bandsres <- xmlSApply(xmlres[[1]], 40 | function(x) xmlSApply(x, 41 | function(x) xmlSApply(x,xmlValue))) 42 | 43 | if(colnames(bandsres) == "Fault"){ 44 | if(length(bandsres['faultstring.text', ][[1]]) == 0){ 45 | stop("Downloading from the web service is currently not working. Please try again later.") 46 | } 47 | stop(bandsres['faultstring.text', ]) 48 | } else{ 49 | return(as.vector(bandsres)) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /R/GetDates.R: -------------------------------------------------------------------------------- 1 | GetDates <- 2 | function(Lat, Long, Product) 3 | { 4 | if(!any(Product == GetProducts())) stop("Product entered does not match any available products; see ?GetProducts.") 5 | 6 | if(!is.numeric(Lat) | !is.numeric(Long)) stop("Lat and Long inputs must be numeric.") 7 | 8 | if(length(Lat) != 1 | length(Long) != 1) stop("Incorrect number of Lats and Longs supplied (only 1 coordinate allowed).") 9 | 10 | if(abs(Lat) > 90 | abs(Long) > 180) stop("Detected a lat or long beyond the range of valid coordinates.") 11 | 12 | getdates.xml <- paste(' 13 | 15 | 16 | 17 | 18 | ', Lat, ' 19 | ', Long, ' 20 | ', Product, ' 21 | 22 | 23 | ', 24 | sep = "") 25 | 26 | header.fields <- c(Accept = "text/xml", 27 | Accept = "multipart/*", 28 | 'Content-Type' = "text/xml; charset=utf-8", 29 | SOAPAction = "") 30 | 31 | reader <- basicTextGatherer() 32 | header <- basicTextGatherer() 33 | 34 | curlPerform(url = paste0(daacmodis, 
"/cgi-bin/MODIS/GLBVIZ_1_Glb_subset/MODIS_webservice.pl"), 35 | httpheader = header.fields, 36 | postfields = getdates.xml, 37 | writefunction = reader$update, 38 | verbose = FALSE) 39 | 40 | # Check the server is not down by insepcting the XML response for internal server error message. 41 | if(grepl("Internal Server Error", reader$value())){ 42 | stop("Web service failure: the ORNL DAAC server seems to be down, please try again later. 43 | The online subsetting tool (https://daac.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl) 44 | will indicate when the server is up and running again.") 45 | } 46 | 47 | xmlres <- xmlRoot(xmlTreeParse(reader$value())) 48 | datesres <- xmlSApply(xmlres[[1]], 49 | function(x) xmlSApply(x, 50 | function(x) xmlSApply(x, 51 | function(x) xmlSApply(x,xmlValue)))) 52 | 53 | if(colnames(datesres) == "Fault"){ 54 | if(length(datesres['faultstring.text', ][[1]]) == 0){ 55 | stop("Downloading from the web service is currently not working. Please try again later.") 56 | } 57 | stop(datesres['faultstring.text', ]) 58 | } else{ 59 | return(as.vector(datesres)) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /R/GetProducts.R: -------------------------------------------------------------------------------- 1 | GetProducts <- 2 | function() 3 | { 4 | getproducts.xml <- paste(' 5 | 7 | 8 | 9 | 10 | 11 | ', 12 | sep = "") 13 | 14 | header.fields <- c(Accept = "text/xml", 15 | Accept = "multipart/*", 16 | 'Content-Type' = "text/xml; charset=utf-8", 17 | SOAPAction = "") 18 | 19 | reader <- basicTextGatherer() 20 | header <- basicTextGatherer() 21 | 22 | curlPerform(url = paste0(daacmodis, wsdl_loc), 23 | httpheader = header.fields, 24 | postfields = getproducts.xml, 25 | writefunction = reader$update, 26 | verbose = FALSE) 27 | 28 | # Check the server is not down by insepcting the XML response for internal server error message. 
29 | if(grepl("Internal Server Error", reader$value())){ 30 | stop("Web service failure: the ORNL DAAC server seems to be down, please try again later. 31 | The online subsetting tool (https://daac.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl) 32 | will indicate when the server is up and running again.") 33 | } 34 | 35 | xmlres <- xmlRoot(xmlTreeParse(reader$value())) 36 | productsres <- xmlSApply(xmlres[[1]], 37 | function(x) xmlSApply(x, 38 | function(x) xmlSApply(x,xmlValue))) 39 | 40 | if(colnames(productsres) == "Fault"){ 41 | if(length(productsres['faultstring.text', ][[1]]) == 0){ 42 | stop("Downloading from the web service is currently not working. Please try again later.") 43 | } 44 | stop(productsres['faultstring.text', ]) 45 | } else{ 46 | return(as.vector(productsres)) 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /R/GetSubset.R: -------------------------------------------------------------------------------- 1 | GetSubset <- 2 | function(Lat, Long, Product, Band, StartDate, EndDate, KmAboveBelow, KmLeftRight) 3 | { 4 | if(length(Product) != 1) stop("Incorrect length of Product input. Give only one data product at a time.") 5 | 6 | if(length(Band) != 1) stop("Incorrect length of Band input. 
Give only one data band at a time.") 7 | 8 | if(!is.numeric(Lat) | !is.numeric(Long)) stop("Lat and Long inputs must be numeric.") 9 | 10 | if(length(Lat) != 1 | length(Long) != 1) stop("Incorrect number of Lats and Longs supplied (only 1 coordinate allowed).") 11 | 12 | if(abs(Lat) > 90 | abs(Long) > 180) stop("Detected a lat or long beyond the range of valid coordinates.") 13 | 14 | getsubset.xml <- paste(' 15 | 17 | 18 | 19 | 20 | ', Lat, ' 21 | ', Long, ' 22 | ', Product, ' 23 | ', Band, ' 24 | ', StartDate, ' 25 | ', EndDate, ' 26 | ', KmAboveBelow, ' 27 | ', KmLeftRight, ' 28 | 29 | 30 | ', 31 | sep = "") 32 | 33 | header.fields <- c(Accept = "text/xml", 34 | Accept = "multipart/*", 35 | 'Content-Type' = "text/xml; charset=utf-8", 36 | SOAPAction = "") 37 | 38 | reader <- basicTextGatherer() 39 | header <- basicTextGatherer() 40 | 41 | curlPerform(url = paste0(daacmodis, wsdl_loc), 42 | httpheader = header.fields, 43 | postfields = getsubset.xml, 44 | writefunction = reader$update, 45 | verbose = FALSE) 46 | 47 | # Check the server is not down by insepcting the XML response for internal server error message. 48 | if(grepl("Internal Server Error", reader$value())){ 49 | stop("Web service failure: the ORNL DAAC server seems to be down, please try again later. 50 | The online subsetting tool (https://daac.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl) 51 | will indicate when the server is up and running again.") 52 | } 53 | 54 | xmlres <- xmlRoot(xmlTreeParse(reader$value())) 55 | modisres <- xmlSApply(xmlres[[1]], 56 | function(x) xmlSApply(x, 57 | function(x) xmlSApply(x, 58 | function(x) xmlSApply(x,xmlValue)))) 59 | 60 | if(colnames(modisres) == "Fault"){ 61 | if(length(modisres['faultstring.text', ][[1]]) == 0){ 62 | stop("Downloading from the web service is currently not working. 
LandCover <-
function(Dir = ".", Band)
{
  # Summarise MODIS land cover (MCD12Q1) ASCII subsets downloaded by MODISSubsets:
  # for each tile/time-step, compute the modal land cover class, class richness,
  # Simpson's D, Simpson's evenness, and the proportion of no-data-fill pixels,
  # writing one combined CSV summary to Dir.
  #
  # Args:
  #   Dir:  Directory containing MCD12Q1 .asc files (and where output is written).
  #   Band: Land cover band name, e.g. "Land_Cover_Type_1"; must be a valid
  #         MCD12Q1 band (checked against GetBands).

  ########## Define land cover classes for each lc band.
  LC_CLASS <- list(
    Land_Cover_Type_1 = c("Water" = 0, "Evergreen Needleleaf forest" = 1, "Evergreen Broadleaf forest" = 2,
                          "Deciduous Needleleaf forest" = 3, "Deciduous Broadleaf forest" = 4, "Mixed forest" = 5,
                          "Closed shrublands" = 6, "Open shrublands" = 7, "Woody savannas" = 8, "Savannas" = 9,
                          "Grasslands" = 10, "Permanent wetlands" = 11, "Croplands" = 12, "Urban & built-up" = 13,
                          "Cropland/Natural vegetation mosaic" = 14, "Snow & ice" = 15, "Barren/Sparsely vegetated" = 16,
                          "Unclassified" = 254, "NoDataFill" = 255),

    Land_Cover_Type_2 = c("Water" = 0, "Evergreen Needleleaf forest" = 1, "Evergreen Broadleaf forest" = 2,
                          "Deciduous Needleleaf forest" = 3, "Deciduous Broadleaf forest" = 4, "Mixed forest" = 5,
                          "Closed shrublands" = 6, "Open shrublands" = 7, "Woody savannas" = 8, "Savannas" = 9,
                          "Grasslands" = 10, "Croplands" = 12, "Urban & built-up" = 13, "Barren/Sparsely vegetated" = 16,
                          "Unclassified" = 254, "NoDataFill" = 255),

    Land_Cover_Type_3 = c("Water" = 0, "Grasses/Cereal crops" = 1, "Shrubs" = 2, "Broadleaf crops" = 3, "Savanna" = 4,
                          "Evergreen Broadleaf forest" = 5, "Deciduous Broadleaf forest" = 6,
                          "Evergreen Needleleaf forest" = 7, "Deciduous Needleleaf forest" = 8, "Non-vegetated" = 9,
                          "Urban" = 10, "Unclassified" = 254, "NoDataFill" = 255),

    Land_Cover_Type_4 = c("Water" = 0, "Evergreen Needleleaf forest" = 1, "Evergreen Broadleaf forest" = 2,
                          "Deciduous Needleleaf forest" = 3, "Deciduous Broadleaf forest" = 4,
                          "Annual Broadleaf vegetation" = 5, "Annual grass vegetation" = 6, "Non-vegetated land" = 7,
                          "Urban" = 8, "Unclassified" = 254, "NoDataFill" = 255),

    Land_Cover_Type_5 = c("Water" = 0, "Evergreen Needleleaf forest" = 1, "Evergreen Broadleaf forest" = 2,
                          "Deciduous Needleleaf forest" = 3, "Deciduous Broadleaf forest" = 4, "Shrub" = 5, "Grass" = 6,
                          "Cereal crop" = 7, "Broadleaf crop" = 8, "Urban & built-up" = 9, "Snow & ice" = 10,
                          "Barren/Sparsely vegetated" = 11, "Unclassified" = 254, "NoDataFill" = 255)
  )
  NUM_METADATA_COLS <- 10
  ##########

  if(!file.exists(Dir)) stop("Character string input for Dir argument does not resemble an existing file path.")

  file.list <- list.files(path = Dir, pattern = "MCD12Q1.*asc$")

  if(length(file.list) == 0) stop("Found no MODIS Land Cover ASCII files in Dir.")

  if(!any(GetBands("MCD12Q1") == Band)) stop("LandCover is for land cover data.
        Band specified is not for this product.")

  lc.type.set <- LC_CLASS[[which(names(LC_CLASS) == Band)]]
  NoDataFill <- unname(lc.type.set["NoDataFill"])
  ValidRange <- unname(lc.type.set)

  # Preallocate one list slot per input file.
  lc.summary <- vector(mode = "list", length = length(file.list))

  for(i in 1:length(file.list)){

    cat("Processing file ", i, " of ", length(file.list), "...\n", sep="")

    lc.subset <- read.csv(paste(Dir, "/", file.list[i], sep = ""), header = FALSE, as.is = TRUE)
    names(lc.subset) <- c("nrow", "ncol", "xll", "yll", "pixelsize", "row.id", "land.product.code",
                          "MODIS.acq.date", "where", "MODIS.proc.date", 1:(ncol(lc.subset) - NUM_METADATA_COLS))

    # Parse lat/long out of the "where" metadata string (format "Lat..Lon..Samp..").
    where.long <- regexpr("Lon", lc.subset$where[1])
    where.samp <- regexpr("Samp", lc.subset$where[1])
    where.land <- regexpr("Land", lc.subset$row.id)
    lat <- as.numeric(substr(lc.subset$where[1], 4, where.long - 1))
    long <- as.numeric(substr(lc.subset$where[1], where.long + 3, where.samp - 1))
    band.codes <- substr(lc.subset$row.id, where.land, nchar(lc.subset$row.id))

    ifelse(any(grepl(Band, lc.subset$row.id)),
           which.are.band <- which(band.codes == Band),
           stop("Cannot find which rows in LoadDat are band data. Make sure the only ascii files in the directory are
                those downloaded from MODISSubsets."))

    lc.tiles <- as.matrix(lc.subset[which.are.band,(NUM_METADATA_COLS+1):ncol(lc.subset)],
                          nrow = length(which.are.band), ncol = length((NUM_METADATA_COLS+1):ncol(lc.subset)))

    if(!all(lc.tiles %in% ValidRange)) stop("Some values fall outside the valid range for the data band specified.")

    # Keep an unscreened copy so no-data-fill pixels can be counted from the
    # band rows themselves (the old code indexed lc.subset[x, ], which is wrong
    # whenever the file contains rows for more than one band).
    lc.tiles.raw <- lc.tiles

    # Screen pixels in lc.tiles: pixels = NoDataFill become NA.
    lc.tiles <- matrix(ifelse(lc.tiles != NoDataFill, lc.tiles, NA), nrow = length(which.are.band))

    # Extract year and day from the metadata and make POSIXlt dates (YYYY-MM-DD), ready for time-series analysis.
    year <- as.numeric(substr(lc.subset$MODIS.acq.date, 2, 5))
    day <- as.numeric(substr(lc.subset$MODIS.acq.date, 6, 8))
    lc.subset$date <- strptime(paste(year, "-", day, sep = ""), "%Y-%j")

    # Initialise objects to store landscape summaries.
    lc.mode.class <- rep(NA, nrow(lc.tiles))
    lc.richness <- rep(NA, nrow(lc.tiles))
    simp.even <- rep(NA, nrow(lc.tiles))
    simp.d <- rep(NA, nrow(lc.tiles))
    no.fill <- rep(NA, nrow(lc.tiles))

    for(x in 1:nrow(lc.tiles)){

      # Frequency table of observed land cover classes for this tile/time-step.
      lc.freq.table <- table(lc.tiles[x, ])

      # Simpson's D diversity index: 1 / sum(p_i^2). Proportions are taken over
      # all pixels (ncol), as in the original implementation.
      lc.prop <- lc.freq.table / ncol(lc.tiles)
      simp.d[x] <- 1 / sum(lc.prop^2)

      # Modal class: which.max() returns the *position* in the table, so map it
      # back to the class code via the table's names before looking up the label.
      # (The old code compared the position itself against the class codes.)
      mode.code <- as.numeric(names(which.max(lc.freq.table)))
      lc.mode.class[x] <- names(which(lc.type.set == mode.code))

      # Landscape richness: number of distinct classes present.
      lc.richness[x] <- length(lc.freq.table)

      # Simpson's measure of evenness.
      simp.even[x] <- simp.d[x] / lc.richness[x]

      # Proportion of no-data-fill pixels, counted from the unscreened band rows.
      n.fill <- sum(lc.tiles.raw[x, ] == NoDataFill)
      no.fill[x] <- paste(round((n.fill / ncol(lc.tiles.raw)) * 100, 2),
                          "% (", n.fill, "/", ncol(lc.tiles.raw), ")",
                          sep = "")

    } # End of loop that summaries tiles at each time-step, for the ith ASCII file.

    # Compile summaries into a table.
    lc.summary[[i]] <- data.frame(lat = lat, long = long, date = lc.subset$date[which(band.codes == Band)],
                                  modis.band = Band, most.common = lc.mode.class, richness = lc.richness,
                                  simpsons.d = simp.d, simpsons.evenness = simp.even, no.data.fill = no.fill)

  } # End of loop that reiterates for each ascii file.

  # Write output summary file by appending summary data from all files, producing one file of summary output.
  lc.summary <- do.call("rbind", lc.summary)
  write.table(lc.summary, file = paste(Dir, "/", "MODIS_Land_Cover_Summary ", Sys.Date(), ".csv", sep = ""),
              sep = ",", row.names = FALSE)

  cat("Done! Check the 'MODIS Land Cover Summary' output file.\n")
}
29 | band.set <- unique(as.vector(sapply( 30 | lapply(file.path(Dir, file.list), function(x) read.csv(x, header = FALSE, as.is = TRUE)[ ,6]), 31 | function(x) unique(substr(x, (gregexpr(".", x, fixed = TRUE)[[1]][5] + 1), nchar(x))) 32 | ))) 33 | if(!all(band.set %in% names(do.call(c, unname(NoDataValues))))){ 34 | stop("Mismatch between NoDataValues and data bands found in files.") 35 | } 36 | 37 | for(i in 1:length(file.list)) 38 | { 39 | cat("Creating new GIS ASCII files from MODIS data file", i, "out of", length(file.list), "\n") 40 | 41 | data.file <- read.csv(file.path(Dir, file.list[i]), header = FALSE, as.is = TRUE) 42 | names(data.file) <- c("nrow", "ncol", "xll", "yll", "pixelsize", "row.id", "product.code", "MODIS.acq.date", 43 | "where", "MODIS.proc.date", 1:(ncol(data.file) - NUM_METADATA_COLS)) 44 | 45 | ## Create directory for this data file if SubDir = TRUE. 46 | sub.dir <- substr(file.list[i], 1, regexpr(".asc$", file.list[i])-1) 47 | if(SubDir & !file.exists(file.path(Dir, DirName, sub.dir))) dir.create(path = file.path(Dir, DirName, sub.dir)) 48 | 49 | for(n in 1:nrow(data.file)) 50 | { 51 | data.band <- substr(data.file$row.id[n], 52 | gregexpr(".", data.file$row.id[n], fixed = TRUE)[[1]][5] + 1, 53 | nchar(data.file$row.id[n])) 54 | data.date <- data.file$MODIS.acq.date[n] 55 | 56 | path <- ifelse(SubDir, 57 | file.path(Dir, DirName, sub.dir, 58 | paste0("GRID_", sub.dir, "_", data.band, "_", data.date)), 59 | file.path(Dir, DirName, 60 | paste0("GRID_", sub.dir, "_", data.band, "_", data.date))) 61 | 62 | write(c(sprintf("ncols\t\t %i", data.file$ncol[n]), 63 | sprintf("nrows\t\t %i", data.file$nrow[n]), 64 | sprintf("xllcorner\t %.2f", data.file$xll[n]), 65 | sprintf("yllcorner\t %.2f", data.file$yll[n]), 66 | sprintf("cellsize\t %s", as.character(data.file$pixelsize[n])), 67 | sprintf("NODATA_value\t %s", as.character(NoDataValues[[data.file$product.code[n]]][data.band]))), 68 | file = file.path(paste0(path,".asc"))) 69 | 70 | WritePRJ(Path 
= file.path(paste0(path,".prj"))) 71 | 72 | grid.data <- matrix(data.file[n,(NUM_METADATA_COLS+1):ncol(data.file)], 73 | nrow = data.file$nrow[n], ncol = data.file$ncol[n], byrow = TRUE) 74 | write.table(grid.data, file = file.path(paste0(path,".asc")), append = TRUE, col.names = FALSE, row.names = FALSE) 75 | } 76 | } 77 | } -------------------------------------------------------------------------------- /R/MODISSubsets.R: -------------------------------------------------------------------------------- 1 | MODISSubsets <- 2 | function(LoadDat, FileSep = NULL, Products, Bands, Size, SaveDir = ".", StartDate = FALSE, TimeSeriesLength = 0, Transect = FALSE) 3 | { 4 | if(SaveDir == '.') cat('Files downloaded will be written to ', getwd(), '.\n', sep = '') 5 | if(SaveDir != '.') cat('Files downloaded will be written to ', SaveDir, '.\n', sep = '') 6 | 7 | # Load data of locations; external data file, or an R object. 8 | if(!is.object(LoadDat) & !is.character(LoadDat)) stop("LoadDat must be an object in R or a file path character string.") 9 | if(is.object(LoadDat)) dat <- data.frame(LoadDat) 10 | if(is.character(LoadDat)){ 11 | if(!file.exists(LoadDat)) stop("Character string input for LoadDat does not resemble an existing file path.") 12 | if(is.null(FileSep)) stop("To load a file as input, you must also specify its delimiter (FileSep).") 13 | dat <- read.delim(LoadDat, sep = FileSep) 14 | } 15 | 16 | ##### 17 | # Check lat and long data frame columns are named "lat" and "long" as necessary. 18 | if(!any(names(dat) == "lat") | !any(names(dat) == "long")){ 19 | stop("Could not find columns for latitude and/or longitude in your data set. Must be named 'lat' and 'long'.") 20 | } 21 | 22 | # Check lats and longs are valid. 
23 | if(abs(dat$lat) > 90 || abs(dat$long) > 180) stop("Detected some lats or longs beyond the range of valid coordinates.") 24 | 25 | # Check for missing lat/long data 26 | if(any(is.na(dat$lat) != is.na(dat$long))) stop("There are locations with incomplete coordinates.") 27 | 28 | # Check to see if IDs have been given in data frame. 29 | ID <- ifelse(any(names(dat) == "ID"), TRUE, FALSE) 30 | 31 | # Check that the input data set contains dates, named end.date. 32 | if(!any(names(dat) == "end.date")) stop("Dates for time series must be included and named 'end.date'.") 33 | 34 | # Now that incomplete coordinates have been checked for, check also that each coordinate has date information. 35 | if(any(is.na(dat$lat) != is.na(dat$end.date))) stop("Not all coordinates have a corresponding date.") 36 | 37 | # Check SaveDir matches an existing directory. 38 | if(!file.exists(SaveDir)) stop("Input for SaveDir does not resemble an existing file path.") 39 | 40 | # Check StartDate is logial. 41 | if(!is.logical(StartDate)) stop("StartDate must be logical.") 42 | 43 | # Set of stop-if-nots to run if StartDate == TRUE. 44 | if(StartDate){ 45 | # Check that the input data set contains start dates, named start.date. 46 | if(!any(names(dat) == "start.date")) stop("StartDate = TRUE, but 'start.date' not found in the data set.") 47 | # Check that each coordinate has start date information. 48 | if(any(is.na(dat$lat) != is.na(dat$start.date))) stop("Not all coordinates have a corresponding start date.") 49 | } 50 | 51 | if(!StartDate){ 52 | # Check TimeSeriesLength is correctly inputted. 
53 | if(!is.numeric(TimeSeriesLength)) stop("TimeSeriesLength should be numeric class.") 54 | 55 | if(length(TimeSeriesLength) != 1) stop("TimeSeriesLength must be one numeric element.") 56 | 57 | if(abs(TimeSeriesLength[1] - round(TimeSeriesLength[1])) > .Machine$double.eps^0.5){ 58 | stop("TimeSeriesLength must be a positive integer.") 59 | } 60 | if(TimeSeriesLength < 0) stop("TimeSeriesLength must be a positive integer.") 61 | } 62 | ##### 63 | 64 | # Remove any incomplete time series. 65 | if(StartDate) dat <- dat[!is.na(dat$lat) | !is.na(dat$long) | !is.na(dat$end.date) | !is.na(dat$start.date), ] 66 | if(!StartDate) dat <- dat[!is.na(dat$lat) | !is.na(dat$long) | !is.na(dat$end.date), ] 67 | 68 | # Find all unique time-series wanted, for each unique location. 69 | if(StartDate) lat.long <- dat[!duplicated(data.frame(dat$lat, dat$long, dat$end.date, dat$start.date)), ] 70 | if(!StartDate) lat.long <- dat[!duplicated(data.frame(dat$lat, dat$long, dat$end.date)), ] 71 | 72 | cat("Found", nrow(lat.long), "unique time-series to download.\n") 73 | 74 | ##### Year or posixt date format? 75 | Year <- FALSE 76 | POSIXt <- FALSE 77 | 78 | posix.compatible <- try(as.POSIXlt(lat.long$end.date), silent = TRUE) 79 | 80 | if(any(class(lat.long$end.date) == "POSIXt") | all(class(posix.compatible) != "try-error")) POSIXt <- TRUE 81 | if(all(is.numeric(lat.long$end.date) & nchar(lat.long$end.date) == 4) & 82 | any(class(posix.compatible) == "try-error")) Year <- TRUE 83 | 84 | if(!Year & !POSIXt) stop("Date information in LoadDat is not recognised as years or as POSIXt format.") 85 | if(Year & POSIXt) stop("Date information in LoadDat is recognised as both year and POSIXt formats.") 86 | 87 | # Take date information for each time-series, in 'year' or 'posixt', and turn them into MODIS date codes (Julian). 
88 | if(Year){ 89 | if(StartDate){ 90 | start.year.fail <- any(!is.numeric(lat.long$start.date) | nchar(lat.long$start.date) != 4) 91 | if(start.year.fail) stop("end.date identified as year dates, but start.date does not match.") 92 | 93 | start.date <- strptime(paste(lat.long$start.date, "-01-01", sep = ""), "%Y-%m-%d") 94 | } 95 | if(!StartDate) start.date <- strptime(paste(lat.long$end.date - TimeSeriesLength, "-01-01", sep = ""), "%Y-%m-%d") 96 | 97 | # Put start and end dates in POSIXlt format. 98 | end.date <- strptime(paste(lat.long$end.date, "-12-31", sep = ""), "%Y-%m-%d") 99 | start.day <- start.date$yday 100 | start.day[nchar(start.day) == 2] <- paste(0, start.day[nchar(start.day) == 2], sep = "") 101 | start.day[nchar(start.day) == 1] <- paste(0, 0, start.day[nchar(start.day) == 1], sep = "") 102 | end.day <- end.date$yday 103 | end.day[nchar(end.day) == 2] <- paste(0, end.day[nchar(end.day) == 2], sep = "") 104 | end.day[nchar(end.day) == 1] <- paste(0, 0, end.day[nchar(end.day) == 1], sep = "") 105 | 106 | # Write dates into format compatible with MODIS date IDs (Julian format: YYYYDDD). 
107 | MODIS.start <- paste("A", substr(start.date, 1, 4), start.day, sep = "") 108 | MODIS.end <- paste("A", substr(end.date, 1, 4), end.day, sep = "") 109 | } 110 | 111 | if(POSIXt){ 112 | end.date <- strptime(lat.long$end.date, "%Y-%m-%d") 113 | 114 | if(StartDate){ 115 | start.posix.fail <- any(class(try(as.POSIXlt(lat.long$end.date), silent = TRUE)) == "try-error") 116 | if(start.posix.fail) stop("end.date identified as POSIXt dates, but start.date does not match.") 117 | 118 | start.date <- strptime(lat.long$start.date, "%Y-%m-%d") 119 | } 120 | if(!StartDate) start.date <- strptime(paste((end.date$year + 1900) - TimeSeriesLength, "-01-01", sep = ""), "%Y-%m-%d") 121 | 122 | start.day <- start.date$yday 123 | start.day[nchar(start.day) == 2] <- paste(0, start.day[nchar(start.day) == 2], sep = "") 124 | start.day[nchar(start.day) == 1] <- paste(0, 0, start.day[nchar(start.day) == 1], sep = "") 125 | end.day <- end.date$yday 126 | end.day[nchar(end.day) == 2] <- paste(0, end.day[nchar(end.day) == 2], sep = "") 127 | end.day[nchar(end.day) == 1] <- paste(0, 0, end.day[nchar(end.day) == 1], sep = "") 128 | 129 | MODIS.start <- paste("A", substr(start.date, 1, 4), start.day, sep = "") 130 | MODIS.end <- paste("A", substr(end.date, 1, 4), end.day, sep = "") 131 | } 132 | ##### 133 | 134 | # Create IDs for each time series. 135 | fmt <- '%.5f' 136 | if(ID){ 137 | ## Check that all author-given IDs will be unique for each unique time-series, and check that they won't cause issues with product information 138 | n.unique <- length(unique(lat.long$ID)) == nrow(lat.long) 139 | if(n.unique){ 140 | if(any(grepl("___", lat.long$ID))) stop("IDs can not contain '___'") 141 | names(lat.long)[names(lat.long) == "ID"] <- "SubsetID" 142 | lat.long <- data.frame(lat.long, Status = rep(NA, nrow(lat.long))) 143 | } else { 144 | cat("Number of unique IDs does not match number of unique time series. 
Creating new ID field.") 145 | ID <- paste("Lat", sprintf(fmt, lat.long$lat), "Lon", sprintf(fmt, lat.long$long), "Start", start.date, "End", end.date, sep = "") 146 | lat.long <- data.frame(SubsetID = ID, lat.long, Status = rep(NA, nrow(lat.long))) 147 | } 148 | } else { 149 | ID <- paste("Lat", sprintf(fmt, lat.long$lat), "Lon", sprintf(fmt, lat.long$long), "Start", start.date, "End", end.date, sep = "") 150 | lat.long <- data.frame(SubsetID = ID, lat.long, Status = rep(NA, nrow(lat.long))) 151 | } 152 | 153 | ##### 154 | # If the Products input does not match any product codes in the list output from GetProducts(), stop with error. 155 | if(!all(Products %in% GetProducts())) stop("Not every Products input matches available products (?GetProducts).") 156 | 157 | # If the Bands input does not match with the Products input, stop with error. 158 | avail.bands <- unlist(lapply(Products, function(x) GetBands(x))) 159 | band.test <- any(lapply(Bands, function(x) any(x %in% avail.bands)) == FALSE) 160 | if(band.test) stop("At least one Bands input does not match the product names entered (?GetBands).") 161 | 162 | # If Size is not two dimensions or not integers, stop with error. 163 | if(!is.numeric(Size)) stop("Size should be numeric class. Two integers.") 164 | if(length(Size) != 2) stop("Size input must be a vector of integers, with two elements.") 165 | if(abs(Size[1] - round(Size[1])) > .Machine$double.eps^0.5 | abs(Size[2] - round(Size[2])) > .Machine$double.eps^0.5){ 166 | stop("Size input must be integers.") 167 | } 168 | ##### 169 | 170 | ##### Retrieve data subsets for each time-series of a set of product bands, saving data for each time series into ASCII files. 
171 | lat.long <- BatchDownload(lat.long = lat.long, start.date = start.date, end.date = end.date, MODIS.start = MODIS.start, MODIS.end = MODIS.end, 172 | Bands = Bands, Products = Products, Size = Size, StartDate = StartDate, Transect = Transect, SaveDir = SaveDir) 173 | 174 | # Run a second round of downloads for any time-series that incompletely downloaded, and overwrite originals. 175 | success.check <- lat.long$Status != "Successful download" 176 | if(any(success.check)){ 177 | cat("Some subsets that were downloaded were incomplete. Retrying download again for these time-series...\n") 178 | 179 | lat.long[success.check, ] <- BatchDownload(lat.long = lat.long[success.check, ], start.date = start.date, end.date = end.date, 180 | MODIS.start = MODIS.start, MODIS.end = MODIS.end, Bands = Bands, Products = Products, 181 | Size = Size, StartDate = StartDate, Transect = Transect, SaveDir = SaveDir) 182 | 183 | success.check <- lat.long$Status != "Successful download" 184 | if(any(success.check)) cat("Incomplete downloads were re-tried but incomplete downloads remain. See subset download file.\n") 185 | } 186 | ##### 187 | 188 | ##### Write a summary file with IDs and unique time-series information. 
189 | date <- as.POSIXlt(Sys.time()) 190 | file.date <- paste(as.Date(date), 191 | paste(paste0("h", date$hour), paste0("m", date$min), paste0("s", round(date$sec, digits=0)), sep = "-"), 192 | sep = "_") 193 | if(!Transect){ 194 | write.table(lat.long, file = file.path(SaveDir, paste0("SubsetDownload_", file.date, ".csv")), 195 | col.names = TRUE, row.names = FALSE, sep = ",") 196 | } 197 | if(Transect){ 198 | DirList <- list.files(path = SaveDir) 199 | w.transect <- regexpr("Point", dat$ID[1]) 200 | transect.id <- substr(dat$ID[1], 1, w.transect - 1) 201 | 202 | if(!any(DirList == file.path(SaveDir, paste0(transect.id, "_SubsetDownload_", file.date, ".csv")))){ 203 | write.table(lat.long, file = file.path(SaveDir, paste0(transect.id, "_SubsetDownload_", file.date, ".csv")), 204 | col.names = TRUE, row.names = FALSE, sep = ",") 205 | } else { 206 | write.table(lat.long, file = file.path(SaveDir, paste0(transect.id, "_SubsetDownload_", file.date, ".csv")), 207 | col.names = FALSE, row.names = FALSE, sep = ",", append = TRUE) 208 | } 209 | } 210 | ##### 211 | 212 | # Print message to confirm downloads are complete and to remind the user to check summary file for any missing data. 213 | if(!Transect) cat("Done! 
MODISTimeSeries <-
function(Dir, Band, Simplify = FALSE)
{
  # Collect all downloaded subsets for one data band, from all .asc files in
  # Dir, into time-series matrices (rows = acquisition dates, cols = pixels).
  #
  # Args:
  #   Dir:      Directory containing MODISSubsets .asc output files.
  #   Band:     Data band name to extract (matched against each file's row IDs).
  #   Simplify: If TRUE and all tiles have the same number of rows, bind the
  #             per-file matrices into one wide matrix; otherwise return a list
  #             of matrices, one per file.

  # DEFINE
  NUM_METADATA_COLS <- 10
  WHICH_ID <- 6  # column holding the row ID string, which contains the band name

  if(!file.exists(Dir)) stop("Character string input for Dir argument does not resemble an existing file path.")

  # Anchor the pattern so only files ending in ".asc" match, consistent with
  # MODISGrid/LandCover (the old unanchored ".asc" also matched e.g. "Xasc1").
  file.set <- list.files(path = Dir, pattern = ".asc$")

  # Keep only files that contain rows for the requested band.
  file.ids <- sapply(file.path(Dir, file.set), function(x)
    any(grepl(pattern = Band, x = read.csv(file = x, header = FALSE, as.is = TRUE)[ ,WHICH_ID]))
  )
  file.set <- file.set[file.ids]

  if(length(file.set) < 1) stop("No downloaded files found in the requested directory for the requested data band.")

  data.collector <- vector(mode = "list", length = length(file.set))
  ts.row.names <- vector(mode = "list", length = length(file.set))
  ts.col.names <- vector(mode = "list", length = length(file.set))
  nrow.recorder <- ncol.recorder <- numeric(length = length(file.set))

  for(i in 1:length(file.set)){
    data.file <- read.csv(file.path(Dir, file.set[i]), header = FALSE, as.is = TRUE)
    names(data.file) <- c("nrow", "ncol", "xll", "yll", "pixelsize", "row.id", "product.code", "MODIS.acq.date",
                          "where", "MODIS.proc.date", 1:(ncol(data.file) - NUM_METADATA_COLS))
    data.file <- data.file[grepl(pattern = Band, x = data.file$row.id), ]

    # Pixel data only (drop the metadata columns).
    data.collector[[i]] <- as.matrix(data.file[ ,(NUM_METADATA_COLS+1):ncol(data.file)])

    nrow.recorder[i] <- nrow(data.collector[[i]])
    ncol.recorder[i] <- ncol(data.collector[[i]])

    # Label columns by location+pixel and rows by acquisition date.
    ts.col.names[[i]] <- paste(unique(data.file$where), "_pixel", 1:ncol.recorder[i], sep = "")
    ts.row.names[[i]] <- data.file$MODIS.acq.date
    colnames(data.collector[[i]]) <- ts.col.names[[i]]
    rownames(data.collector[[i]]) <- ts.row.names[[i]]
  }

  if(!Simplify) return(data.collector)

  # Compute the per-file row counts once (the old code called sapply twice).
  row.counts <- sapply(data.collector, nrow)
  if(!all(row.counts == row.counts[1])){
    # Message corrected: the fallback return value is a list, not an array.
    cat('Simplify == TRUE, but not all tiles have the same number of rows so cannot be\n',
        'simplified into one matrix. Returning data as a list of matrices instead.\n', sep = '')
    return(data.collector)
  } else {
    res <- matrix(nrow = max(nrow.recorder), ncol = sum(ncol.recorder))
    rownames(res) <- ts.row.names[[which(nrow.recorder == max(nrow.recorder))[1]]]
    colnames(res) <- unlist(ts.col.names)
    for(j in 1:length(data.collector)){
      # Columns for file j occupy the slice just after files 1..j-1.
      res[1:nrow.recorder[j],(sum(1, ncol.recorder[1:j]) - ncol.recorder[j]):sum(ncol.recorder[1:j])] <-
        as.matrix(data.collector[[j]])
    }
    return(res)
  }

}
MODISTransects <-
function(LoadData, FileSep = NULL, Product, Bands, Size, SaveDir = ".", StartDate = FALSE, TimeSeriesLength = 0)
{
  # Downloads MODIS time-series subsets for pixels spaced along straight-line
  # transects: for each transect in LoadData, pixel-width-spaced coordinates are
  # interpolated between the start and end points, checked for gaps, then passed
  # to MODISSubsets for download.
  #
  # Args:
  #   LoadData:  data frame (or delimited file path) with transect, start.lat,
  #              start.long, end.lat, end.long, end.date columns (plus start.date
  #              when StartDate = TRUE).
  #   FileSep:   field delimiter; required when LoadData is a file path.
  #   Product:   a MODIS product code (see GetProducts()).
  #   Bands:     data bands within Product (see GetBands()).
  #   Size:      two integers: km above/below and left/right around each pixel.
  #   SaveDir:   directory where subsets are written.
  #   StartDate: whether LoadData supplies explicit start dates.
  #   TimeSeriesLength: passed through to MODISSubsets.
  #
  # Data are gridded in equal-area tiles in a sinusoidal projection. Each tile
  # consists of a 1200x1200 km data array of pixels at a finer resolution
  # (see http://modis-land.gsfc.nasa.gov/MODLAND_grid.html).
  LONG_EQTR_M <- 111.2 * 1000  # Metres per degree of longitude at the equator.

  if(!is.object(LoadData) & !is.character(LoadData)){
    stop("Data is incorrectly specified. Must either be the name of an object in R, or a file path character string.")
  }
  # Load data of locations; external data file, or an R object.
  if(is.object(LoadData)) dat <- data.frame(LoadData)
  if(is.character(LoadData)){
    if(!file.exists(LoadData)) stop("Character string input for LoadData does not resemble an existing file path.")
    if(is.null(FileSep)) stop("Data is a file path, the files delimiter (FileSep) also needed.")

    dat <- read.delim(LoadData, sep = FileSep)
  }

  # Check input dataset has variables named as necessary.
  if(!any(names(dat) == "transect") | !any(names(dat) == "start.lat") |
     !any(names(dat) == "end.lat") | !any(names(dat) == "start.long") |
     !any(names(dat) == "end.long") | !any(names(dat) == "end.date")){
    stop("Could not find some information that is necessary. May either be missing or incorrectly named.
    See ?MODISTransects for help on data requirements. If data file is loaded, make sure FileSep is sensible.")
  }

  # Check SaveDir matches an existing directory.
  if(!file.exists(SaveDir)) stop("Character string input for SaveDir argument does not resemble an existing file path.")

  # If the Product input does not match any product codes in the list output from GetProducts(), stop with error.
  if(!any(Product == GetProducts())) stop("Product entered does not match any available products; see ?GetProducts.")

  # If the Bands input does not match with the Product input, stop with error.
  band.test <- vapply(Bands, function(x) !any(x == GetBands(Product)), logical(1))
  if(any(band.test)) stop("A Band does not match Product; see ?GetBands for bands within each product.")

  # Size must be two whole numbers (tolerating floating-point representation error).
  if(!is.numeric(Size)) stop("Size should be numeric class. Two integers.")
  if(length(Size) != 2) stop("Size input must be a vector of integers, with two elements.")
  if(abs(Size[1] - round(Size[1])) > .Machine$double.eps^0.5 | abs(Size[2] - round(Size[2])) > .Machine$double.eps^0.5){
    stop("Size input must be integers.")
  }

  if(!is.logical(StartDate)) stop("StartDate must be logical.")

  # Year or posixt date format?
  Year <- FALSE
  POSIXt <- FALSE

  # NOTE(review): as.character() always returns a character vector, so the first
  # clause below can never be TRUE; confirm this detection logic against the
  # equivalent code in MODISSubsets before changing it. Behaviour preserved.
  char.compatible <- as.character(dat$end.date)
  if(!is.character(char.compatible) | all(is.na(char.compatible)) & any(nchar(char.compatible) != 4)) POSIXt <- TRUE

  posix.compatible <- try(as.POSIXlt(dat$end.date), silent=TRUE)
  # Bug fix: POSIXlt objects carry a length-two class vector, so the original
  # class(posix.compatible) == "try-error" produced an invalid length-two
  # condition inside if(); inherits() tests class membership safely.
  if(inherits(posix.compatible, "try-error")) Year <- TRUE

  if(!Year & !POSIXt) stop("Dates in LoadDat not recognised as years or as POSIXt format.")
  if(Year & POSIXt) stop("Dates in LoadDat recognised as both year and POSIXt formats.")

  # Check the start dates are valid.
  if(StartDate){
    if(Year){
      char.compatible <- as.character(dat$start.date)
      if(!is.character(char.compatible) | all(is.na(char.compatible))){
        stop("Year date format detected, but start.date are not compatible with numeric class.")
      }
      if(any(nchar(dat$start.date) != 4)) stop("start.date is not matching year format.")
    } else {
      posix.compatible <- try(as.POSIXlt(dat$start.date), silent = TRUE)
      if(inherits(posix.compatible, "try-error")) stop("start.date are not all in standard POSIXt format. See ?POSIXt.")
    }
  }

  # Check for missing lat/long data: every location needs complete coordinates and an end date.
  if(any(is.na(dat$start.lat) != is.na(dat$start.long) | is.na(dat$start.lat) != is.na(dat$end.lat) |
         is.na(dat$start.lat) != is.na(dat$end.long) | is.na(dat$start.lat) != is.na(dat$end.date))) {
    stop("Not equal amount of lats, longs, and dates: there must be locations with incomplete time-series information.")
  }
  # Bug fix: the original used || on vectors, which only inspected the first row
  # (and is an error in R >= 4.3); any() checks every transect.
  if(any(abs(dat$start.lat) > 90, na.rm = TRUE) || any(abs(dat$start.long) > 180, na.rm = TRUE) ||
     any(abs(dat$end.lat) > 90, na.rm = TRUE) || any(abs(dat$end.long) > 180, na.rm = TRUE)){
    stop("Detected some lats or longs beyond the range of valid coordinates.")
  }

  if(StartDate){
    if(!any(names(dat) == "start.date")) stop("StartDate == TRUE, but start.date not detected. See ?MODISTransects.")
    if(any(is.na(dat$start.lat) != is.na(dat$start.date))) stop("Not all coordinates have a corresponding start date.")
  }

  # Work out actual width of each pixel in the MODIS projection.
  # Bug fix: query with the loaded data frame (dat) -- LoadData may be a file path
  # string, in which case LoadData$start.lat is NULL and the request fails.
  cell.size.dates <- GetDates(Lat = dat$start.lat[1], Long = dat$start.long[1], Product = Product)[1:2]
  cell.size <- as.numeric(unname(
    GetSubset(Lat = dat$start.lat[1], Long = dat$start.long[1], Product = Product, Band = Bands[1],
              StartDate = cell.size.dates[1], EndDate = cell.size.dates[2], KmAboveBelow = 0, KmLeftRight = 0)$pixelsize[[1]]
  ))

  # Find all unique transects to download pixels for.
  t.dat <- dat[!duplicated(dat$transect), ]
  cat("Found", nrow(t.dat), "transects. Downloading time-series sets for each transect...\n")

  # Loop that reiterates download for each transect.
  for(i in seq_len(nrow(t.dat))){

    # Find the distance, in decimal degrees, between the start and end of the transect.
    delta.lat <- t.dat$end.lat[i] - t.dat$start.lat[i]
    delta.long <- round(t.dat$end.long[i] - t.dat$start.long[i], digits = 5)
    # Latitudinal distance in metres between start and end locations.
    lat.metres <- delta.lat * LONG_EQTR_M
    # Curvature angle at the transect's median latitude, to scale one degree of longitude.
    lat.rad <- median(c(t.dat$start.lat[i], t.dat$end.lat[i])) * (pi / 180)
    # Longitudinal distance in metres between start and end locations.
    long.metres <- delta.long * (LONG_EQTR_M * cos(lat.rad))
    # The actual length of the transect.
    transect <- sqrt((lat.metres^2) + (long.metres^2))

    # How many pixel-spaced points fit between the start and end coordinates.
    num.points <- transect / cell.size
    # Lat and long spacing of those equally spaced points.
    lat.increment <- round(delta.lat / num.points, digits = 5)
    long.increment <- round(delta.long / num.points, digits = 5)
    lat <- t.dat$start.lat[i]
    long <- round(t.dat$start.long[i], digits = 5)

    # Interpolate new lat/long pairs from the start point, stepping by the
    # increments until the end point is reached. The four branches handle the
    # sign combinations of the two increments.
    if(lat.increment > 0){

      if(long.increment > 0){
        while(lat[length(lat)] <= (t.dat$end.lat[i] - lat.increment) & long[length(long)] <=
              (round(t.dat$end.long[i], digits=5) - long.increment)){
          lat <- c(lat, round(lat[length(lat)] + lat.increment, digits = 5))
          long <- c(long, round(long[length(long)] + long.increment, digits = 5))
        }
      } else {
        while(lat[length(lat)] <= (t.dat$end.lat[i] - lat.increment) & long[length(long)] >=
              (round(t.dat$end.long[i], digits=5) - long.increment)){
          lat <- c(lat, round(lat[length(lat)] + lat.increment, digits = 5))
          long <- c(long, round(long[length(long)] + long.increment, digits = 5))
        }
      }

    } else {

      if(long.increment > 0){
        while(lat[length(lat)] >= (t.dat$end.lat[i] - lat.increment) & long[length(long)] <=
              (round(t.dat$end.long[i], digits=5) - long.increment)){
          lat <- c(lat, round(lat[length(lat)] + lat.increment, digits = 5))
          long <- c(long, round(long[length(long)] + long.increment, digits = 5))
        }
      } else {
        while(lat[length(lat)] >= (t.dat$end.lat[i] - lat.increment) & long[length(long)] >=
              (round(t.dat$end.long[i], digits=5) - long.increment)){
          lat <- c(lat, round(lat[length(lat)] + lat.increment, digits = 5))
          long <- c(long, round(long[length(long)] + long.increment, digits = 5))
        }
      }

    } # End of if statements that correctly interpolate points along transect line.

    # End dates and IDs for each transect point, for time-series input to MODISSubsets.
    end.date <- rep(t.dat$end.date[i], length(lat))
    ID <- paste("Transect", t.dat$transect[i], "Point", seq_along(lat), sep = "")

    # Organise time-series information with by-transect IDs, optionally with start dates.
    if(StartDate){
      start.date <- rep(t.dat$start.date[i], length(lat))
      t.subset <- data.frame(ID, lat, long, start.date, end.date)
    } else {
      t.subset <- data.frame(ID, lat, long, end.date)
    }

    # Error-check the interpolated pixels (no duplicates, no gaps) before the
    # MODISSubsets download request.
    xll <- vector(mode = "numeric", length = nrow(t.subset))
    yll <- vector(mode = "numeric", length = nrow(t.subset))

    date.for.xy <- GetDates(t.subset$lat[1], t.subset$long[1], Product)[1]

    for(n in seq_len(nrow(t.subset))){
      t.point <- GetSubset(t.subset$lat[n], t.subset$long[n], Product,
                           Bands[1], date.for.xy, date.for.xy, 0, 0)
      xll[n] <- as.numeric(as.character(t.point$xll))
      yll[n] <- as.numeric(as.character(t.point$yll))
    }

    # Which pixels share an x or y coordinate with the previous pixel.
    check.equal.x <- signif(xll[-length(xll)], digits = 6) == signif(xll[-1], digits = 6)

    check.equal.y <- signif(yll[-length(yll)], digits = 5) == signif(yll[-1], digits = 5)

    # From remaining pixels, check if they are +/- 1 pixel width (i.e. adjacent pixel) away.
    check.new.x <- ifelse(xll[which(!check.equal.x)] < xll[which(!check.equal.x) + 1],
                          round(xll[which(!check.equal.x)]) == round(xll[which(!check.equal.x) + 1] - cell.size),
                          round(xll[which(!check.equal.x)]) == round(xll[which(!check.equal.x) + 1] + cell.size))

    check.new.y <- ifelse(yll[which(!check.equal.y)] < yll[which(!check.equal.y) + 1],
                          round(yll[which(!check.equal.y)]) == round(yll[which(!check.equal.y) + 1] - cell.size),
                          round(yll[which(!check.equal.y)]) == round(yll[which(!check.equal.y) + 1] + cell.size))

    if(!all(check.new.x) | !all(check.new.y)){
      # Check if unaccounted-for distance between pixels can be attributed to
      # rounding error or MODIS projection uncertainty.
      check.error.x <- ifelse(xll[which(!check.equal.x)] < xll[which(!check.equal.x) + 1],
        signif(xll[which(!check.equal.x) + 1] - xll[which(!check.equal.x)], digits = 3) == signif(cell.size, digits = 3),
        signif(xll[which(!check.equal.x) + 1] - xll[which(!check.equal.x)], digits = 3) == -signif(cell.size, digits = 3))

      check.error.y <- ifelse(yll[which(!check.equal.y)] < yll[which(!check.equal.y) + 1],
        signif(yll[which(!check.equal.y) + 1] - yll[which(!check.equal.y)], digits = 3) == signif(cell.size, digits = 3),
        signif(yll[which(!check.equal.y) + 1] - yll[which(!check.equal.y)], digits = 3) == -signif(cell.size, digits = 3))

      # If differences between pixels exceed what the checks allow, abort: gaps in transect.
      if(any(!check.error.x) | any(!check.error.y)) stop("Error: Gap in transect pixels")
    }

    # Run MODISSubsets to retrieve subset for this transect of pixels.
    MODISSubsets(LoadDat = t.subset, Products = Product, Bands = Bands, Size = Size, SaveDir = SaveDir,
                 StartDate = StartDate, TimeSeriesLength = TimeSeriesLength, Transect = TRUE)

  } # End of loop that reiterates download for each transect.

}
QualityCheck <-
function(Data, Product, Band, NoDataFill, QualityBand, QualityScores, QualityThreshold)
{
  # Screens MODIS Data using its accompanying QualityScores: values equal to
  # NoDataFill, or whose quality score is worse than QualityThreshold, are set
  # to NA. Returns Data (vector or matrix, matching the input) after screening.
  #
  # Args:
  #   Data:             numeric vector/matrix/data frame of band values.
  #   Product:          MODIS product code; determines how QC bits are interpreted.
  #   Band:             data band name (used to locate the QC bits for MCD43A4).
  #   NoDataFill:       the band's missing-data fill value (one integer).
  #   QualityBand:      name of the QC band (not used in the calculation itself).
  #   QualityScores:    QC values, same shape as Data.
  #   QualityThreshold: worst acceptable quality score (one integer).

  ##### Define what valid ranges for quality bands should be for different products:
  # 1=lower range 2=upper range 3=no fill value
  QA_RANGE <- data.frame(
    MOD09A1 = c(0,4294966531,NA),         # Surface reflectance bands 0-1 bits
    MYD09A1 = c(0,4294966531,NA),         # Surface reflectance bands 0-1 bits
    MOD11A2 = c(0,255,NA),                # Land surface temperature and emissivity 0-1 bits
    MYD11A2 = c(0,255,NA),                # Land surface temperature and emissivity 0-1 bits
    MCD12Q1 = c(0,254,255),               # Land cover types 0-1 bits
    MOD13Q1 = c(0,3,-1),                  # Vegetation indices 0-1 bits
    MYD13Q1 = c(0,3,-1),                  # Vegetation indices 0-1 bits
    MOD15A2 = c(0,254,255),               # LAI - FPAR 0 bit
    MYD15A2 = c(0,254,255),               # LAI - FPAR 0 bit
    MOD17A2 = c(0,254,255),               # GPP 0 bit
    MOD17A3 = c(0,100,NA),                # GPP funny one - evaluate separately
    MCD43A4 = c(0,4294967294,4294967295), # BRDF albedo band quality, taken from MCD43A2, for reflectance data
    MOD16A2 = c(0,254,255)                # 8-day, monthly ET/LE/PET/PLE, annual LE/PLE. Same data as MOD15A2 QC
  )
  row.names(QA_RANGE) <- c("min","max","noData")
  # Land cover dynamics products are available for download but not for quality checking with this function.

  # Check the product input corresponds to one with useable quality information.
  if(!any(names(QA_RANGE) == Product)) stop("QualityCheck cannot be used for ",Product," product.")

  # If data frames, coerce to matrices.
  if(is.data.frame(Data)) Data <- as.matrix(Data)
  if(is.data.frame(QualityScores)) QualityScores <- as.matrix(QualityScores)

  # Check that Data and QualityScores have matching length and, if matrices, dimensions.
  if(is.matrix(Data) | is.matrix(QualityScores)){
    if(!(is.matrix(Data) & is.matrix(QualityScores))) stop("Data and QualityScores do not have matching dimensions.")
    if(!all(dim(Data) == dim(QualityScores))) stop("Data and QualityScores do not have matching dimensions.")
  } else {
    if(length(Data) != length(QualityScores)) stop("Data and QualityScores must have matching lengths.")
  }

  # Check the QualityScores input are within the correct range for the product requested.
  if(is.na(QA_RANGE["noData",Product])){
    if(any(QualityScores < QA_RANGE["min",Product] | QualityScores > QA_RANGE["max",Product]))
      stop("QualityScores not all in range of ",Product,"'s QC: ",QA_RANGE["min",Product],"-",QA_RANGE["max",Product])
  } else {
    qualityScoresInvalid <- ifelse(QualityScores != QA_RANGE["noData",Product],
                                   QualityScores < QA_RANGE["min",Product] | QualityScores > QA_RANGE["max",Product],
                                   FALSE)
    if(any(qualityScoresInvalid))
      stop("QualityScores not all in range of ",Product,"'s QC: ",QA_RANGE["min",Product],"-",QA_RANGE["max",Product])
  }

  # Quality Threshold should be one integer.
  if(length(QualityThreshold) != 1) stop("QualityThreshold input must be one integer.")
  if(!is.numeric(QualityThreshold)) stop("QualityThreshold should be numeric class. One integer.")
  if(abs(QualityThreshold - round(QualityThreshold)) > .Machine$double.eps^0.5){
    stop("QualityThreshold input must be one integer.")
  }

  # NoDataFill should be one integer.
  if(length(NoDataFill) != 1) stop("NoDataFill input must be one integer.")
  if(!is.numeric(NoDataFill)) stop("NoDataFill should be numeric class. One integer.")
  if(abs(NoDataFill - round(NoDataFill)) > .Machine$double.eps^0.5) stop("NoDataFill input must be one integer.")

  # Check QualityThreshold is within 0-3 for all except LAI, ET and GPP bands, which must be
  # within 0-1, and MOD17A3 (0-100).
  lai.gpp.prods <- c("MOD15A2", "MYD15A2", "MOD16A2", "MOD17A2")
  if(any(lai.gpp.prods == Product)){
    if(QualityThreshold != 0 & QualityThreshold != 1){
      stop("QualityThreshold should be either 0 or 1 for this product; 0 is good quality, 1 is other.")
    }
  } else if(Product != "MOD17A3"){
    if(QualityThreshold < 0 | QualityThreshold > 3){
      stop("QualityThreshold should be between 0-3 for this product;
      0 = high quality, 1 = good but marginal quality, 2 = cloudy/poor quality, 3 = poor quality for other reasons.")
    }
  }

  # If every QualityScore is the no-data fill, abort screening and just remove NoDataFill.
  # Bug fix: for products whose QC band has no fill value (noData == NA),
  # all(QualityScores == NA) evaluated to NA and crashed if(); guard with !is.na().
  # Bug fix: for vector input nrow(Data) is NULL, so matrix() failed; only reshape
  # when the input really is a matrix.
  qc.no.data <- QA_RANGE["noData", Product]
  if(!is.na(qc.no.data) && all(QualityScores == qc.no.data)){
    warning("Quality checking aborted for this subset because all QualityScores were missing data.",
            call.=FALSE, immediate.=TRUE)
    screened <- ifelse(Data != NoDataFill, Data, NA)
    if(is.matrix(Data)) screened <- matrix(screened, nrow = nrow(Data))
    return(screened)
  }
  #####

  # MOD17A3 is an exception, so deal with this first, and then the rest.
  if(Product == "MOD17A3"){
    if(QualityThreshold < 0 | QualityThreshold > 100) stop("QualityThreshold should be between 0-100 for this product.")

    NOFILLRANGE <- c(250, 255)
    # NOTE(review): this keeps only values strictly inside (250, 255) -- which are
    # the fill codes -- and discards everything else; the condition looks inverted.
    # Behaviour preserved pending confirmation of MOD17A3's valid data range.
    Data <- ifelse(Data > NOFILLRANGE[1] & Data < NOFILLRANGE[2], Data, NA)
    Data <- ifelse(QualityScores <= QualityThreshold, Data, NA)
  } else {
    # Convert decimal QualityScores values into binary.
    decimal.set <- QualityScores

    if(max(QualityScores) == 0) num.binary.digits <- 1
    if(max(QualityScores) != 0) num.binary.digits <- floor(log(max(QualityScores), base = 2)) + 1

    binary.set <- matrix(nrow = length(QualityScores), ncol = num.binary.digits)

    # Repeated division by two fills binary.set from the least significant bit.
    for(n in 1:num.binary.digits){
      binary.set[ ,(num.binary.digits - n) + 1] <- decimal.set %% 2
      decimal.set <- decimal.set %/% 2
    }

    quality.binary <- apply(binary.set, 1, function(x) paste(x, collapse = ""))

    # Deal with the quality data in binary, according to the product input.
    if(any(lai.gpp.prods == Product)){
      # Only the final (least significant) bit matters for LAI/GPP/ET products.
      qa.binary <- as.numeric(substr(quality.binary, nchar(quality.binary), nchar(quality.binary)))
      Data <- ifelse(Data != NoDataFill & qa.binary <= QualityThreshold, Data, NA)
    } else {
      if(Product == "MCD43A4"){
        # For MCD43A4, snip the three QC bits belonging to the requested reflectance band.
        band.num <- as.numeric(substr(Band, nchar(Band), nchar(Band)))
        if(any(is.na(band.num))) stop("Band input is not one of the reflectance bands (1-7) from MCD43A4.")
        # Bug fix: the original test (1 < band.num & band.num > 7) could never
        # flag band numbers below 1.
        if(any(band.num < 1 | band.num > 7)) stop("Band input is not one of the reflectance bands (1-7) from MCD43A4.")

        # Select the section of binary code relevant to Band.
        qa.binary <- substr(quality.binary, (nchar(quality.binary) - (((band.num - 1) * 2) + 2)),
                            (nchar(quality.binary) - ((band.num - 1) * 2)))

        qa.int <- numeric(length(qa.binary))
        qa.int[qa.binary == "000"] <- 0
        qa.int[qa.binary == "001"] <- 1
        qa.int[qa.binary == "010"] <- 2
        qa.int[qa.binary == "011"] <- 3
        qa.int[qa.binary == "100"] <- 4

        Data <- ifelse(Data != NoDataFill & qa.int <= QualityThreshold, Data, NA)
      } else {
        # All remaining products keep their QC in the last two bits.
        qa.binary <- substr(quality.binary, nchar(quality.binary) - 1, nchar(quality.binary))

        qa.int <- numeric(length(qa.binary))
        qa.int[qa.binary == "00"] <- 0
        qa.int[qa.binary == "01"] <- 1
        qa.int[qa.binary == "10"] <- 2
        qa.int[qa.binary == "11"] <- 3

        Data <- ifelse(Data != NoDataFill & qa.int <= QualityThreshold, Data, NA)
      }
    }
  }
  return(Data)
}
UpdateSubsets <-
function(LoadDat, StartDate = FALSE, Dir = ".")
{
  # Compares the unique time-series requested in LoadDat against the subsets
  # already downloaded into Dir (as .asc files), and returns the rows of LoadDat
  # whose subsets still need to be downloaded.
  #
  # Args:
  #   LoadDat:   data frame with lat, long, end.date (and start.date if StartDate).
  #   StartDate: whether start.date should be used when identifying unique series.
  #   Dir:       directory in which to look for previously downloaded subsets.
  #
  # Returns: the subset of unique LoadDat rows not yet downloaded.

  if(StartDate) details <- LoadDat[!duplicated(data.frame(LoadDat$lat, LoadDat$long, LoadDat$end.date, LoadDat$start.date)), ]
  if(!StartDate) details <- LoadDat[!duplicated(data.frame(LoadDat$lat, LoadDat$long, LoadDat$end.date)), ]
  cat("Found", nrow(details), "unique time-series in original file\n")

  # Year or posixt date format? inherits() replaces class() == "try-error"
  # comparisons, which are fragile against multi-class objects.
  Year <- FALSE
  POSIXt <- FALSE
  posix.compatible <- try(as.POSIXlt(details$end.date), silent = TRUE)
  if(any(class(details$end.date) == "POSIXt") | !inherits(posix.compatible, "try-error")) POSIXt <- TRUE
  if(all(is.numeric(details$end.date) & nchar(details$end.date) == 4) &
     inherits(posix.compatible, "try-error")) Year <- TRUE
  if(!Year & !POSIXt) stop("Date information in LoadDat is not recognised as years or as POSIXt format.")
  if(Year & POSIXt) stop("Date information in LoadDat is recognised as both year and POSIXt formats.")

  if(Year) endyear <- details$end.date
  if(POSIXt) endyear <- as.numeric(format(details$end.date, "%Y"))
  if(StartDate){
    if(Year) startyear <- details$start.date
    if(POSIXt) startyear <- as.numeric(format(details$start.date, "%Y"))
  }
  # (A long commented-out block that regenerated unique IDs for each time-series
  # was removed here for clarity; see the package's version control history.)

  # Bug fix: anchor the pattern so only files with a true ".asc" extension match;
  # the previous pattern ".asc" treated "." as a regex wildcard.
  filelist <- list.files(path = Dir, pattern = "\\.asc$")
  cat("Found", length(filelist), "subsets previously downloaded\n")
  whichSubsetsDownloaded <- c()

  for(file in filelist)
  {
    dataFile <- read.csv(file.path(Dir, file), as.is = TRUE, header = FALSE)

    # The 9th metadata field encodes "Lat...Lon...Samp..."; extract the coordinates.
    dataLat <- substr(dataFile[1,9],
                      regexpr("Lat", dataFile[1,9])+3,
                      regexpr("Lon", dataFile[1,9])-1)
    dataLong <- substr(dataFile[1,9],
                       regexpr("Lon", dataFile[1,9])+3,
                       regexpr("Samp", dataFile[1,9])-1)

    startModisDate <- dataFile[1,8]
    endModisDate <- dataFile[nrow(dataFile),8]

    # MODIS dates are "AYYYYDDD": convert year + day-of-year into calendar dates.
    startYears <- substr(startModisDate, 2, 5)
    startDays <- substr(startModisDate, 6, 8)
    startPosixDate <- as.Date(paste0(startYears, "-01-01")) + (as.numeric(startDays) - 1)

    endYears <- substr(endModisDate, 2, 5)
    endDays <- substr(endModisDate, 6, 8)
    endPosixDate <- as.Date(paste0(endYears, "-01-01")) + (as.numeric(endDays) - 1)

    if(Year)
    {
      subsetMetadata <- data.frame(lat = as.numeric(dataLat),
                                   long = as.numeric(dataLong),
                                   start.date = startYears,
                                   end.date = endYears)

      # NOTE(review): details$start.date is NULL when StartDate = FALSE, which makes
      # this comparison zero-length so no subset can ever match -- confirm intended
      # behaviour when no start dates are supplied.
      whichSubsetsDownloaded <- c(whichSubsetsDownloaded,
                                  with(subsetMetadata,
                                       which(sprintf("%.5f", lat) == sprintf("%.5f", details$lat) &
                                             sprintf("%.5f", long) == sprintf("%.5f", details$long) &
                                             start.date == details$start.date &
                                             end.date == details$end.date)))
    }
    if(POSIXt)
    {
      subsetMetadata <- data.frame(lat = as.numeric(dataLat),
                                   long = as.numeric(dataLong),
                                   start.date = startPosixDate,
                                   end.date = endPosixDate)

      ## Find the interval length for the downloaded data band, to set the
      ## flexibility allowed when matching subset dates.
      secondDate <- dataFile[2,8]
      secondYears <- substr(secondDate, 2, 5)
      secondDays <- substr(secondDate, 6, 8)
      secondDate <- as.Date(paste0(secondYears, "-01-01")) + (as.numeric(secondDays) - 1)
      intervalLength <- as.numeric(secondDate - startPosixDate)

      # NOTE(review): the final clause compares against details$start.date; it looks
      # as though details$end.date was intended -- behaviour preserved pending review.
      whichSubsetsDownloaded <- c(whichSubsetsDownloaded,
                                  with(subsetMetadata,
                                       which(sprintf("%.5f", lat) == sprintf("%.5f", details$lat) &
                                             sprintf("%.5f", long) == sprintf("%.5f", details$long) &
                                             (as.Date(details$start.date) <= start.date & start.date < as.Date(details$start.date)+intervalLength) &
                                             (end.date <= as.Date(details$end.date) & as.Date(details$start.date) < end.date+intervalLength))))
    }
  }

  # Bug fix: when nothing has been downloaded yet, whichSubsetsDownloaded is empty
  # and details[-integer(0), ] would drop EVERY row; return all series instead.
  if(length(whichSubsetsDownloaded) == 0) return(details)
  return(details[-unique(whichSubsetsDownloaded), ])
}
-------------------------------------------------------------------------------- 1 | MODISTools: retrieve MODIS data from NASA LPDAAC 2 | ========= 3 | This repository is archived, download the new version at https://github.com/khufkens/MODISTools/ 4 | --------- 5 | 6 | `MODISTools` is now maintained by [Koen Hufkens](https://github.com/khufkens). It was originally written by [Sean Tuck](https://github.com/seantuck12) and [Helen Phillips](https://github.com/helenphillips). 7 | 8 | To cite MODISTools in publications, you can continue to use: 9 | 10 | Tuck, S.L., Phillips, H.R.P., Hintzen, R.E., Scharlemann, J.P.W., Purvis, A. and Hudson, L.N. (2014) MODISTools -- downloading and processing MODIS remotely sensed data in R. Ecology and Evolution, 4 (24), 4658--4668. DOI: 10.1002/ece3.1273. 11 | 12 | ORNL DAAC FAQ 13 | --------- 14 | The Oak Ridge National Laboratory Distributed Active Archive Center (ORNL DAAC) has an FAQ [here](https://daac.ornl.gov/faq/faq.shtml). 15 | -------------------------------------------------------------------------------- /data/ConvertExample.txt: -------------------------------------------------------------------------------- 1 | lat long 2 | 51d24.106'N 0d38.018'W 3 | 51d24.922'N 0d38.772'W 4 | 51d24.106'N 0d38.664'W 5 | 51d24.772'N 0d38.043'W 6 | 51d24m51.106sN 0d38m56.018sW 7 | 51d24m37.922sN 0d38m31.772sW 8 | 51d24m42.106sN 0d38m17.664sW 9 | 51d24m47.772sN 0d38m42.043sW 10 | -------------------------------------------------------------------------------- /data/EndCoordinatesExample.txt: -------------------------------------------------------------------------------- 1 | start.lat start.long start.date end.date ID 2 | 51.41363 -0.64875 2002 2004 1 3 | 51.40896 -0.62967 2002 2004 2 4 | 51.40988 -0.63342 2003 2005 3 5 | 51.41021 -0.64091 2004 2006 4 6 | -------------------------------------------------------------------------------- /data/FindIDExample.txt: -------------------------------------------------------------------------------- 
1 | lat long start.date end.date ID 2 | 51.41363 -0.64875 2002 2004 1 3 | 51.40896 -0.62967 2002 2004 2 4 | 51.40988 -0.63342 2003 2005 3 5 | 51.41021 -0.64091 2004 2006 4 6 | -------------------------------------------------------------------------------- /data/QualityCheckExample.txt: -------------------------------------------------------------------------------- 1 | "pixel.1" "pixel.2" "pixel.3" "pixel.4" "pixel.5" 2 | 4410 4410 4835 5307 4774 3 | 4024 4194 4035 3124 4670 4 | 3928 3982 4580 5176 4908 5 | 3892 3890 4251 3248 3248 6 | 4649 4038 3988 3986 4184 7 | 0 0 0 0 0 8 | 0 0 0 0 0 9 | 3 3 3 3 3 10 | 0 0 0 0 0 11 | 0 0 0 0 0 12 | -------------------------------------------------------------------------------- /data/SubsetExample.txt: -------------------------------------------------------------------------------- 1 | lat long start.date end.date ID 2 | 51.41363 -0.64875 2002 2004 1 3 | -------------------------------------------------------------------------------- /data/TransectExample.txt: -------------------------------------------------------------------------------- 1 | transect start.lat start.long end.lat end.long start.date end.date 2 | 1 51.414196 -0.648894 51.414211 -0.641607 2002 2004 3 | -------------------------------------------------------------------------------- /inst/CITATION: -------------------------------------------------------------------------------- 1 | citHeader("To cite package 'MODISTools' in publications use:") 2 | 3 | ## R >= 2.8.0 passes package metadata to citation(). 
4 | if(!exists("meta") || is.null(meta)) meta <- packageDescription("MODISTools") 5 | year <- sub("-.*", "", meta$Date) 6 | note <- sprintf("R package version %s", meta$Version) 7 | 8 | bibentry(bibtype = "Article", 9 | title = "MODISTools -- downloading and processing MODIS remotely sensed data in R", 10 | year = "2014", 11 | author = c(person(c("Sean", "L."), "Tuck"), 12 | person(c("Helen", "R.P."), "Phillips"), 13 | person(c("Rogier", "E."), "Hintzen"), 14 | person(c("J{\"o}rn", "P.W."), "Scharlemann"), 15 | person("Andy", "Purvis"), 16 | person(c("Lawrence", "N."), "Hudson") 17 | ), 18 | journal = "Ecology and Evolution", 19 | volume = "4", 20 | number = "24", 21 | pages = "4658--4668", 22 | doi = "10.1002/ece3.1273", 23 | keywords = "Conservation biology, earth observation, global change, 24 | land processes, macroecology, PREDICTS, remote-sensing, satellite imagery", 25 | url = "http://onlinelibrary.wiley.com/doi/10.1002/ece3.1273/full" 26 | ) 27 | 28 | bibentry(bibtype = "Manual", 29 | title = "MODISTools: MODIS Subsetting Tools", 30 | author = c(person("Sean", "Tuck"), person("Helen", "Phillips")), 31 | year = year, 32 | note = note, 33 | url = "https://cran.r-project.org/package=MODISTools" 34 | ) -------------------------------------------------------------------------------- /man/BatchDownload.Rd: -------------------------------------------------------------------------------- 1 | \name{BatchDownload} 2 | \alias{BatchDownload} 3 | \title{ 4 | Internal function. 5 | } 6 | \description{ 7 | An internal function, used by MODISSubsets, that is not intended for use independently. Use MODISSubsets for download of subsets of MODIS data. 
8 | } -------------------------------------------------------------------------------- /man/ConvertExample.Rd: -------------------------------------------------------------------------------- 1 | \name{ConvertExample} 2 | \alias{ConvertExample} 3 | \docType{data} 4 | \title{Example dataset for ConvertToDD} 5 | \description{A dataset consisting of a set of arbitrary locations in degrees minutes seconds (DMS) format that ConvertToDD can convert to decimal degrees format.} 6 | \usage{data(ConvertExample)} 7 | \format{ 8 | A data frame with 4 observations on the following 2 variables. 9 | \describe{ 10 | \item{\code{lat}}{A dataframe column containing latitude degrees minutes seconds (DDdMMmSS.SSSsN. ConvertToDD can take other formats and layouts in general.} 11 | \item{\code{long}}{A dataframe column containing longitude degrees minutes seconds (DDdMMmSS.SSSsW. ConvertToDD can take other formats and layouts in general.} 12 | } 13 | } 14 | \details{The various compatible layouts for each format are described at ?ConvertToDD (see also). If hemispheres are denoted using letters (N/S; E/W) then the same transformation (+ive -ive coordinates) will 15 | be carried out on all points. 
Therefore, in this case, if points within a dataset are in different hemispheres, they will all be placed into one.}
For coordinates in degrees minutes to be converted, just two numbers are required - degrees and minutes (e.g. 51d24.106'N 0d38.018'W).
argument to specify the subdirectory where the output file should be saved. Default Dir='.' writes file to the working directory.} 14 | \item{FileName}{Character string; an optional argument to specify the name given to the output file. Default is "Transect Coordinates".} 15 | } 16 | \details{Input dataset, LoadDat, should contain separate columns for latitudes and longitudes, named 'start.lat' and 'start.long' respectively. Coordinates should be in decimal degrees.} 17 | \value{A new csv file containing the new end coordinate data appended to the original data.} 18 | \author{Helen Phillips} 19 | \seealso{\code{\link[MODISTools:MODISTransects]{MODISTransects}} \code{\link[MODISTools:ConvertToDD]{ConvertToDD}}} 20 | \examples{ 21 | data(EndCoordinatesExample) 22 | 23 | EndCoordinates(LoadDat=EndCoordinatesExample, 24 | Distance = 2000, Angle = 90, AngleUnits = 'degrees') 25 | } -------------------------------------------------------------------------------- /man/EndCoordinatesExample.Rd: -------------------------------------------------------------------------------- 1 | \name{EndCoordinatesExample} 2 | \alias{EndCoordinatesExample} 3 | \docType{data} 4 | \title{Example dataset for EndCoordinates()} 5 | \description{A dataset consisting of a set of arbitrary locations with a corresponding set of arbitrary start and end time-series dates and IDs. 6 | This file can be used in the example for the MODIS EndCoordinates function.} 7 | \usage{data(EndCoordinatesExample)} 8 | \format{ 9 | A data frame with 4 observation on the following 5 variables. 10 | \describe{ 11 | \item{start.lat}{A numeric vector; decimal degrees latitudes in WGS-1984 coordinate system.} 12 | \item{start.long}{A numeric vector; decimal degrees longitudes in WGS-1984 coordinate system.} 13 | \item{start.date}{A numeric vector; listing the date to begin the time-series for each corresponding locations. 
Used during MODISTransects.} 14 | \item{end.date}{A numeric vector; listing the date to end the time-series for each corresponding location.
See value. 16 | } 17 | } 18 | \details{Data should only include MODIS data and not any other metadata for the input tiles (see example). Rows[2] and Cols[2] should equate to a nested subset of Rows[1] and Cols[1]. Rows[1] and Cols[1] specify the dimensions of the tiles, laid out in rows in matrix, and therefore should equate to the dimensions of a matrix that is filled by ncols(Data) data points. 19 | } 20 | \value{The output may have two possible structures, optionally chosen with the Grid argument. If Grid=FALSE, the output will be the same structure as the input - a nxm matrix, where n is the number of tiles and m is the number of pixels in each tile - with the same number of rows but only the number of pixels in the smaller tile(s) for each row. If Grid=TRUE, the output will be an array, with the tiles laid out with pixels in spatial order - a matrix of the subset of pixels, for each row in Data. 21 | } 22 | \author{Sean Tuck} 23 | \examples{ 24 | \dontrun{ # dontrun() used because running the example requires internet access. 
25 | data(SubsetExample) 26 | MODISSubsets(LoadDat = SubsetExample, Products = "MOD13Q1", 27 | Bands = c("250m_16_days_EVI", "250m_16_days_pixel_reliability"), Size = c(1,1), 28 | StartDate = FALSE, TimeSeriesLength = 1) 29 | MODISSummaries(LoadDat = SubsetExample, Product = "MOD13Q1", Bands = "250m_16_days_EVI", 30 | ValidRange = c(-2000,10000), NoDataFill = -3000, ScaleFactor = 0.0001, 31 | StartDate = FALSE, QualityScreen = TRUE, QualityThreshold = 0, 32 | QualityBand = "250m_16_days_pixel_reliability") 33 | 34 | if(sum(grepl("MODIS_Data", list.files())) != 1){ 35 | print("Could not identify 'MODIS_Data' csv output file from MODISSummaries") 36 | } else { 37 | TileExample <- read.csv(list.files(pattern = "MODIS_Data")) 38 | TileExample <- TileExample[1,which(grepl("pixel", names(TileExample)))] 39 | 40 | dim(TileExample) 41 | dim(ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = FALSE)) 42 | ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = FALSE) 43 | 44 | matrix(TileExample, nrow = 9, ncol = 9, byrow = TRUE) 45 | ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = TRUE) 46 | } 47 | } 48 | } -------------------------------------------------------------------------------- /man/FindID.Rd: -------------------------------------------------------------------------------- 1 | \name{FindID} 2 | \alias{FindID} 3 | \title{Time-series matching to IDs} 4 | \description{A function that takes a unique time-series definition and looks inside a large dataset to find all the IDs that correspond to records with matching time-series definitions. 
A useful 5 | function for maintaining transparency in the data collection and processing chain.} 6 | \usage{FindID(ID, Data)} 7 | \arguments{ 8 | \item{ID}{A data frame with one or a set of rows containing a time-series definition, in terms of lat, long, and dates information.} 9 | \item{Data}{A larger data frame consisting of at least the same information as ID, which is to be looked in to find matches.} 10 | } 11 | \details{Rows in ID do not need to be the same order as in Data, but the variables in dat to be looked in must have the same names as those in ID. Date information can include one date, or optionally a start and end date. Any information in ID must be included in Data.} 12 | \value{A set of character strings, listing all the IDs that correspond to the given unique time-series.} 13 | \author{Sean Tuck} 14 | \seealso{\code{\link[MODISTools:MODISSummaries]{MODISSummaries}}} 15 | \examples{ 16 | data(SubsetExample, FindIDExample) 17 | FindID(ID=SubsetExample, Data=FindIDExample) 18 | } 19 | -------------------------------------------------------------------------------- /man/FindIDExample.rd: -------------------------------------------------------------------------------- 1 | \name{FindIDExample} 2 | \alias{FindIDExample} 3 | \docType{data} 4 | \title{Example dataset for FindIDSubsets()} 5 | \description{A dataset consisting of a set of arbitrary locations with a corresponding set of arbitrary start and end time-series dates and IDs. 6 | This file can be used in the example for the function that takes a set of time-series and finds all IDs for records in a larger data frame that match the time-series definition.} 7 | \usage{data(FindIDExample)} 8 | \format{ 9 | A data frame with 4 observations on the following 5 variables. 
10 | \describe{ 11 | \item{lat}{A numeric vector; decimal degrees latitudes in WGS-1984 coordinate system.} 12 | \item{long}{A numeric vector; decimal degrees longitudes in WGS-1984 coordinate system.} 13 | \item{start.date}{A numeric vector; listing the date to begin the time-series for each corresponding location.} 14 | \item{end.date}{A numeric vector; listing the date to end the time-series for each corresponding location.} 15 | \item{ID}{A numeric vector; a unique ID code for each unique time-series (either unique in time or in space).} 16 | } 17 | } 18 | \details{Rows in ID do not need to be the same order as in Data, but the variables in Data to be looked in must have the same names as those in ID. Date information can include one date, or optionally a start and end date. Any information in ID must be included in Data.} 19 | \source{Locations were arbitrarily drawn from the grounds of Silwood Park Campus, Imperial College London.} 20 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 21 | \keyword{datasets} -------------------------------------------------------------------------------- /man/GetBands.Rd: -------------------------------------------------------------------------------- 1 | \name{GetBands} 2 | \alias{GetBands} 3 | \title{Get MODIS Product Data Band Names} 4 | \description{Internal function that uses the MODIS SOAP Web Service to retrieve a list of all of the codes to identify the data bands within a specified MODIS product.} 5 | \usage{GetBands(Product)} 6 | \arguments{ 7 | \item{Product}{Character string; the unique code for a given MODIS product. Will only accept one product code. 
See references for the product list.} 8 | } 9 | \value{A character vector; containing the codes for each data band.} 10 | \references{ 11 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 12 | } 13 | \author{Sean Tuck} 14 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 15 | \examples{ 16 | \dontrun{ # dontrun() used because running the example requires internet access. 17 | GetBands("MOD13Q1")} 18 | } 19 | -------------------------------------------------------------------------------- /man/GetDates.Rd: -------------------------------------------------------------------------------- 1 | \name{GetDates} 2 | \alias{GetDates} 3 | \title{Get list of available dates for requested time-series} 4 | \description{Internal function that uses the MODIS SOAP Web Service to retrieve a list of all dates when the requested data, specified by MODIS product and a location, are available for download.} 5 | \usage{GetDates(Lat, Long, Product)} 6 | \arguments{ 7 | \item{Lat}{Numeric; a decimal degrees latitude in WGS-1984 coordinate system.} 8 | \item{Long}{Numeric; a decimal degrees longitude in WGS-1984 coordinate system.} 9 | \item{Product}{Character string; a product code to request subsets from. The MODIS product table shows all available products and their respective product titles (see references).} 10 | } 11 | \value{A character vector; each element representing a time-step (in MODIS date format) where data is available.} 12 | \references{ 13 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 14 | } 15 | \author{Sean Tuck} 16 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 17 | \examples{ 18 | \dontrun{ # dontrun() used because running the example requires internet access. 
19 | GetDates(Lat=51.41363, Long=-0.64875, Product="MOD13Q1")} 20 | } 21 | -------------------------------------------------------------------------------- /man/GetProducts.Rd: -------------------------------------------------------------------------------- 1 | \name{GetProducts} 2 | \alias{GetProducts} 3 | \title{Get MODIS Product List} 4 | \description{Internal function that uses the MODIS SOAP Web Service to retrieve a list of all of the codes for available MODIS products.} 5 | \usage{GetProducts()} 6 | \arguments{No input.} 7 | \value{A character vector; containing the codes for each product.} 8 | \references{ 9 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 10 | } 11 | \author{Sean Tuck} 12 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 13 | \examples{ 14 | \dontrun{ # dontrun() used because running the example requires internet access. 15 | GetProducts()} 16 | } 17 | -------------------------------------------------------------------------------- /man/GetSubset.Rd: -------------------------------------------------------------------------------- 1 | \name{GetSubset} 2 | \alias{GetSubset} 3 | \title{Subset download using MODIS web service method.} 4 | \description{Internal function that uses the MODIS SOAP Web Service (see references) getsubset method to download a requested time-series subset of a MODIS product for a given area surrounding many locations. This function is used by the MODISSubsets function, which is the main subset download tool for the user.} 5 | \usage{GetSubset(Lat, Long, Product, Band, StartDate, EndDate, KmAboveBelow, KmLeftRight)} 6 | \arguments{ 7 | \item{Lat}{Numeric; a decimal degrees latitude in WGS-1984 coordinate system.} 8 | \item{Long}{Numeric; a decimal degrees longitude in WGS-1984 coordinate system.} 9 | \item{Product}{A character string; a product code to request the subset from. 
The MODIS product table shows all available products and their respective product titles (see references).} 10 | \item{Band}{A character string; a code or vector of codes to retrieve the desired data band(s) within the specified product to be requested. To get a list of the available bands in a product of interest use GetBands().} 11 | \item{StartDate}{Character; in MODIS date format, listing the date to begin the time-series for each corresponding location.} 12 | \item{EndDate}{Character; in MODIS date format, listing the date to end the time-series for each corresponding location.} 13 | \item{KmAboveBelow}{An integer; the distance, in kilometres, from the centre of the tile of pixels to the top and bottom of the tile. So, if KmAboveBelow=1, the total distance from top to bottom of the tile will be 2km.} 14 | \item{KmLeftRight}{An integer; the distance, in kilometres, from the centre of the tile of pixels to the left and right sides of the tile. So, if KmLeftRight=1, the total distance from side to side of the tile will be 2km.} 15 | } 16 | \value{A data frame containing: 17 | xll=The x coordinate, in the MODIS coordinate system, of the lower left corner of the pixel in which the location data falls. 18 | yll=The y coordinate, in the MODIS coordinate system, of the lower left corner of the pixel in which the location data falls. 19 | pixelsize=The actual calculated width of the pixels of interest, in metres. 20 | nrow=Corresponds to KmAboveBelow argument. 21 | ncol=Corresponds to KmLeftRight argument. 22 | band=Corresponds to band argument. 23 | scale=The number by which raw downloaded data values can be scaled by. Specific to each band. 24 | lat=Corresponds to the Lat argument. 25 | long=Corresponds to the Long argument. 26 | subset=The downloaded data, as a vector of character strings, with length number of time-steps requested, that include data attached to metadata. 
27 | 28 | This data frame is then passed to MODISSubsets.} 29 | \references{ 30 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 31 | } 32 | \author{Sean Tuck} 33 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 34 | \examples{ 35 | \dontrun{ 36 | # dontrun() used because running the example requires internet access, 37 | # and takes over a minute to run. 38 | data(SubsetExample) 39 | GetSubset(Lat=SubsetExample[ ,1], Long=SubsetExample[ ,2], Product="MOD13Q1", 40 | Band="250m_16_days_EVI", StartDate="A2001001", EndDate="A2001025", 41 | KmAboveBelow=0, KmLeftRight=0)} 42 | } 43 | -------------------------------------------------------------------------------- /man/LandCover.Rd: -------------------------------------------------------------------------------- 1 | \name{LandCover} 2 | \alias{LandCover} 3 | \title{Summaries on tiles of land cover data.} 4 | \description{The MODIS product MCD12Q1 contains data bands for land cover types. This function summarises that data, reporting: the most common land cover type, the richness of land cover types, and landscape diversity and heterogeneity using Simpson's D and Simpson's measure of evenness. 5 | } 6 | \usage{ 7 | LandCover(Dir=".", Band) 8 | } 9 | \arguments{ 10 | \item{Dir}{Character string; an optional argument to specify the subdirectory where downloaded ascii files to be processed are located and the ouput is written: default Dir="." extracts files from the working directory. 11 | } 12 | \item{Band}{Character string; the shortname code that identifies what band type of land cover data from the MCD12Q1 product is being processed. 13 | } 14 | } 15 | \details{No data fill values are removed from the tiles before summaries are reported. Additional to the no data fill values, the land cover bands include an 'Unclassified' land cover class. This would indicate poor quality, but is not removed. 
16 | } 17 | \value{One csv file is written as an output to the function. It contains all of the summaries for each tile input, from all ascii files read in. Each row corresponds to a different tile of pixels - from a different time-step in the time-series, or from a different time-series (ascii file) altogether. Columns are: latitude, longitude, date, land cover band type, most common land cover class, landscape richness, Simpson's D, Simpson's evenness, proportion of data that are no data fill values. 18 | } 19 | \references{ 20 | \url{https://lpdaac.usgs.gov/products/modis_products_table/mcd12q1} 21 | } 22 | \author{Sean Tuck} 23 | \seealso{ \code{\link[MODISTools:MODISSubsets]{MODISSubsets}} 24 | \code{\link[MODISTools:MODISSummaries]{MODISSummaries}} 25 | } 26 | \examples{ 27 | \dontrun{ # dontrun() used because running the example requires internet access. 28 | data(SubsetExample) 29 | # Extend the years to download for 30 | SubsetExample$start.date <- 2001 31 | SubsetExample$end.date <- 2009 32 | MODISSubsets(LoadDat=SubsetExample, Product="MCD12Q1", Bands=c("Land_Cover_Type_1"), 33 | Size=c(3,3), StartDate=TRUE) 34 | 35 | LandCover(Band="Land_Cover_Type_1") 36 | } 37 | } -------------------------------------------------------------------------------- /man/MODISGrid.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISGrid} 2 | \alias{MODISGrid} 3 | \title{Create GIS ASCII files from downloaded MODIS data files 4 | } 5 | \description{Looks for ASCII files of downloaded MODIS data in a directory and creates new ASCII raster grid files that allow data downloaded using MODISTools to connect to a GIS environment. The new raster grid files can be converted into raster images and loaded into a GIS environment where further data processing or spatial analysis can take place. 
6 | } 7 | \usage{MODISGrid(Dir = ".", DirName = "MODIS_GRID", SubDir = TRUE, NoDataValues) 8 | } 9 | \arguments{ 10 | \item{Dir}{Character; file path to the directory where downloaded ASCII files to be processed are located and the output is written: default \code{Dir = "."} extracts files from the working directory. 11 | } 12 | \item{DirName}{Character; the name of the directory (a subdirectory found in directory specified as \code{Dir}) where the new ASCII raster grids will be written. If the directory does not already exist, it will be written. 13 | } 14 | \item{SubDir}{Logical; if TRUE, all ASCII raster grids created from one MODIS data file will be collected under a subdirectory, within \code{DirName}, that has the same name as the MODIS data file from which it was created. If FALSE all ASCII raster grids will simply be written to \code{DirName}, with no nested file structure. 15 | } 16 | \item{NoDataValues}{Named list; the no data values for all the data bands contained in the files. Should be specified as a list of vectors: the names for the list elements should be the product codes and each element should contain a named vector of no data values for bands within that product. For guidance, see the example below. All data bands found in the MODIS data files should be listed in this argument. These values are neccessary to identify pixels with missing data. The correct no data values for the data bands of interest can be found at on the ORNL LPDAAC website. 17 | } 18 | } 19 | \details{A raster grid file is created for each grid found in a MODIS data file (i.e. a new file is written for each date that occurs in a time series). In other words, where the MODIS data file may follow a tile of pixels over time, the raster grids split up each tile to create a new file for that tile at each new date. Therefore, multiple raster grids may be created from one MODIS data file. 
These files can be stored in one directory or nested in subdirectories for each MODIS data file, using the optional argument \code{SubDir}. By default, the subdirectories are created to retain the spatial structure. 20 | 21 | The data are in the MODIS projection system. To combine a raster grid with a projection, the two files must be stored in the same directory with the same file name. Every raster grid needs a respective PRJ file. The projection (PRJ) file for each ASCII raster grid is included in the function output. 22 | } 23 | \value{Multiple ASCII raster grid files, with PRJ files, containing a tile of MODIS pixels at a given date, either all written to one directory or separated into subdirectories for each MODIS data file found. The file names for each raster grid created are composed of the following: "GRID_", the file name of the relevant MODIS data file, the data band that the grid pertains to, and finally the MODIS acquisition date for the grid (in Julian date format). 24 | } 25 | \author{Sean Tuck 26 | } 27 | \seealso{\code{\link{MODISSubsets}} 28 | \code{\link{MODISSummaries}} 29 | \code{\link{ExtractTile}} 30 | } 31 | \examples{ 32 | \dontrun{ 33 | # dontrun() used because running the example requires internet access. 
34 | 35 | dat <- EndCoordinatesExample 36 | names(dat) <- c("lat", "long", "start.date", "end.date", "ID") 37 | 38 | MODISSubsets(dat, Products = "MOD13Q1", Size = c(1,1), StartDate = TRUE, 39 | Bands = c("250m_16_days_EVI", "250m_16_days_NDVI", "250m_16_days_pixel_reliability")) 40 | 41 | MODISGrid(NoDataValues = list("MOD13Q1" = c("250m_16_days_EVI" = -3000, 42 | "250m_16_days_NDVI" = -3000, 43 | "250m_16_days_pixel_reliability" = -1))) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /man/MODISSubsets.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISSubsets} 2 | \alias{MODISSubsets} 3 | \title{MODIS Subset Tool} 4 | \description{Uses the MODIS SOAP Web Service (see references) as a batch process to remotely extract and locally download time-series subsets of a MODIS product for a given area surrounding many locations. 5 | } 6 | \usage{MODISSubsets(LoadDat, FileSep = NULL, Products, Bands, Size, 7 | SaveDir = ".", StartDate = FALSE, TimeSeriesLength = 0, Transect = FALSE) 8 | } 9 | \arguments{ 10 | \item{LoadDat}{Input dataset: either the name of an object already in the workspace, or a file to be read in by specifying its file path as a character string, that has location data, dates (end date, and optionally start date) and study ID for each location. For required data organisation, see Details. 11 | } 12 | \item{FileSep}{If LoadDat is a character string that corresponds to a file path, choose the delimiter character for that file (e.g. "," for comma separated). 13 | } 14 | \item{Products}{Character string; a product code to request subsets from. The MODIS product table shows all available products and their respective product titles (see references). Data from multiple Products can be downloaded. 15 | } 16 | \item{Bands}{A character vector; each string a code to retrieve the desired data bands within the specified product to be requested. 
To get a list of the available bands in a product of interest, use GetBands(). Multiple data bands across multiple products can be specified and downloaded at the same time.
27 | } 28 | } 29 | \details{The data should be organised accordingly: "lat" and "long" columns showing WGS-1984 decimal degrees latitudes and longitudes respectively; "end.date" for study end date (and optionally "start.date"), that can be in years or in POSIXlt date format; "ID" containing a unique ID for each unique time-series (in time or space). Column names should be exactly as shown. 30 | } 31 | \value{An ASCII file written for each unique time-series (unique location, or unique time-series at a duplicated location), containing all data requested, for each band, for every pixel in the specified tile size. A subset download CSV file will also be written, that contains all the unique time-series information, and a status report for each time-series telling the user if the respective time-series was downloaded successfully or identifying any problems with that particular download. 32 | } 33 | \references{ 34 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 35 | } 36 | \author{Sean Tuck} 37 | \seealso{\code{\link{MODISSummaries}} 38 | \code{\link{MODISTransects}} 39 | \code{\link{MODISGrid}} 40 | \code{\link{MODISTimeSeries}} 41 | } 42 | \examples{ 43 | \dontrun{ 44 | # dontrun() used because running the example requires internet access. 
45 | 46 | data(SubsetExample) 47 | 48 | MODISSubsets(LoadDat = SubsetExample, Products = "MOD13Q1", Bands = c("250m_16_days_EVI", 49 | "250m_16_days_pixel_reliability"), Size = c(0,0), StartDate = TRUE) 50 | 51 | MODISSummaries(LoadDat = SubsetExample, Product = "MOD13Q1", Bands = "250m_16_days_EVI", 52 | ValidRange = c(-2000,10000), NoDataFill = -3000, ScaleFactor = 0.0001, 53 | StartDate = TRUE, QualityScreen = TRUE, QualityThreshold = 0, 54 | QualityBand = "250m_16_days_pixel_reliability") 55 | 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /man/MODISSummaries.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISSummaries} 2 | \alias{MODISSummaries} 3 | \title{MODIS subset processing & organisation tool 4 | } 5 | \description{A function to run time-series analysis and compute summary statistics for a downloaded MODIS subset, writing a summary file and another file with processed MODIS data tagged onto the original file inputted to MODISSubsets. This function allows the user to easily explore the characteristics of the downloaded data, and then process them into a form that is ready for use in modelling. 6 | } 7 | \usage{MODISSummaries(LoadDat, FileSep = NULL, Dir = ".", Product, Bands, ValidRange, NoDataFill, 8 | ScaleFactor, StartDate = FALSE, QualityScreen = FALSE, QualityBand = NULL, 9 | QualityThreshold = NULL, Mean = TRUE, SD = TRUE, Min = TRUE, Max = TRUE, Yield = FALSE, 10 | Interpolate = FALSE, InterpolateN = NULL, DiagnosticPlot = FALSE) 11 | } 12 | \arguments{ 13 | \item{LoadDat}{Input dataset: either the name of an object already in the workspace, or a file to be read in by specifying its file path as a character string, that has location data, dates (end date, and optionally start date) and study ID for each location. If IDs are found in LoadDat that provide a primary key for unique time series then these IDs will be used. 
Otherwise a set of unique IDs will be generated and used to identify, and file name, each time series. 14 | } 15 | \item{FileSep}{If LoadDat is a character string that corresponds to a file path, choose the delimiter character for that file (e.g. "," for comma separated). 16 | } 17 | \item{Dir}{Character string; an optional argument to specify a file path to the subdirectory where downloaded ASCII files to be processed are located and the output is written: default Dir = "." extracts files from the working directory. 18 | } 19 | \item{Product}{Character; The product shortname code, that the data band input belongs to. The MODIS product table shows all available products and their respective product shortname codes (see references). 20 | } 21 | \item{Bands}{Character; the code that identifies from which band types are the data to be processed. Multiple bands can be specified as a character vector, including the quality control data bands, providing they all come from the same product. With the exception of BRDF Reflectance data products (MCD43A4) that have quality information stored as a separate product (MCD43A2). 22 | } 23 | \item{ValidRange}{Numeric vector of two elements; states the lower (ValidRange[1]) and upper (ValidRange[2]) bounds within which the data to be processed should be found. 24 | } 25 | \item{NoDataFill}{Numeric; the missing data fill value that is used for Bands. 26 | } 27 | \item{ScaleFactor}{Numeric; The specified scaling for the given band type, which the data is to be multiplied by. If a scale factor does not exist for the data band, ScaleFactor should be set to 1. 28 | } 29 | \item{StartDate}{Logical; indicate whether the input dataset contains information on the time-series start date. If StartDate = TRUE, start dates will be taken from the input data and will expect the data frame to have a column named start.date. Default is StartDate = FALSE, whereby the input data is assumed to have only time-series end date. 
This should be the same as that used in the relevant call to MODISSubsets. 30 | } 31 | \item{QualityScreen}{Logical; optional argument for screening the band data for unreliable pixels. If QualityScreen = TRUE, band data must be downloaded from MODISSubsets with the quality control data corresponding to the same product included. Therefore, both band data and reliability data will be in the same ASCII files for each time-series downloaded. Quality screening is completed by the QualityCheck function, and the arguments for this function need to be included in a MODISSummaries call, if QualityScreen = TRUE. The default is QualityScreen = FALSE, meaning the function will omit data equal to NoDataFill, but will not omit poor quality data. 32 | } 33 | \item{QualityBand}{Character; if QualityScreen = TRUE, the shortname code for the quality data band that you are using to screen Band for poor quality data. 34 | } 35 | \item{QualityThreshold}{Numeric integer; if QualityScreen = TRUE, set the threshold between acceptable and unacceptable quality. Any pixels of lower quality than the class set by QualityThreshold will be removed, and those equal to or of higher quality will be kept. QualityThreshold should be a number within the range of possible QualityScores for the given Product QA data. 36 | } 37 | \item{Mean, 38 | SD, 39 | Min, 40 | Max, 41 | Yield}{Logical; optional arguments that allow selecting which summaries will be included in the summary file that gets written - see value. Selecting Yield requires Interpolate to also be set as TRUE. 42 | } 43 | \item{Interpolate}{Logical; determines whether, after poor quality data is removed, to linearly interpolate between high quality data before calculating the summary statistics. Must be TRUE if Yield = TRUE. The interpolation function used is stats::approx. See ?stats::approx for more details. 
44 | } 45 | \item{InterpolateN}{Numeric; if Interpolate = TRUE, optionally set the number of interpolated data points to be requested from the time-series interpolation. The default is set to a daily interpolation of the data. 46 | } 47 | \item{DiagnosticPlot}{Logical; if TRUE will produce an additional folder in the specified directory to which plots of the time series data for each site will be saved. Will add the interpolation line, mean, min and max values if specified in the function call. 48 | } 49 | } 50 | \details{If QualityScreen = TRUE, subsets to be processed should include a pixel reliability layer, so the data can be screened for poor quality data, removing them and using linear interpolation to refill data between high quality values. 51 | } 52 | \value{Two CSV files: 53 | One file (MODIS_Summary...) contains summary statistics and computed values for each band of data. The information this file contains is partly defined by the optional arguments settings: Mean is arithmetic mean; SD is standard deviation; Min and Max are minimum and maximum band values; Yield is the average annual yield (designed for vegetation indices, may not be sensible for all band types); NoFill and PoorQuality show the percentage of values in each time-series that were NoDataFill and omitted by QualityCheck (if QualityScreen = TRUE) respectively. All summary statistics, except yield, are included by default. 54 | 55 | The second file (MODIS_Data...) has the information from the original file inputted (which should have been used in MODISSubsets too) with computed means of the MODIS data tagged on, coupling the input with the output in one form ready for use, such as modelling. In the second file, each nth column of MODIS data, if more than one, will be for each pixel within the whole tile of n pixels collected for the time-series on that row.
56 | } 57 | \references{ 58 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 59 | } 60 | \author{Sean Tuck} 61 | \seealso{ \code{\link[MODISTools:MODISSubsets]{MODISSubsets}} 62 | \code{\link[MODISTools:QualityCheck]{QualityCheck}} 63 | } 64 | \examples{ 65 | \dontrun{ 66 | # dontrun() used because running the example requires internet access, 67 | # and takes over a minute to run. 68 | 69 | data(SubsetExample) 70 | 71 | MODISSubsets(LoadDat = SubsetExample, Products = "MOD13Q1", 72 | Bands = c("250m_16_days_EVI", "250m_16_days_NDVI", "250m_16_days_pixel_reliability"), 73 | Size = c(0,0), StartDate = TRUE) 74 | 75 | # Without quality checking 76 | MODISSummaries(LoadDat = SubsetExample, Product = "MOD13Q1", Bands = "250m_16_days_EVI", 77 | ValidRange = c(-2000,10000), NoDataFill = -3000, ScaleFactor = 0.0001, 78 | StartDate = TRUE) 79 | 80 | # With quality checking 81 | MODISSummaries(LoadDat = SubsetExample, Product = "MOD13Q1", Bands = "250m_16_days_EVI", 82 | ValidRange = c(-2000,10000), NoDataFill = -3000, ScaleFactor = 0.0001, 83 | StartDate = TRUE, QualityScreen = TRUE, QualityThreshold = 0, 84 | QualityBand = "250m_16_days_pixel_reliability") 85 | 86 | # For both EVI and NDVI 87 | MODISSummaries(LoadDat = SubsetExample, Product = "MOD13Q1", 88 | Bands = c("250m_16_days_EVI","250m_16_days_NDVI"), 89 | ValidRange = c(-2000,10000), NoDataFill = -3000, ScaleFactor = 0.0001, 90 | StartDate = TRUE, QualityScreen = TRUE, QualityThreshold = 0, 91 | QualityBand = "250m_16_days_pixel_reliability") 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /man/MODISTimeSeries.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISTimeSeries} 2 | \alias{MODISTimeSeries} 3 | \title{Create one matrix of data from many downloaded timeseries. 
4 | } 5 | \description{Take many downloaded timeseries of a MODIS data band and simplify them into one list or matrix of data, with a row for each date and a column for each unique timeseries (i.e., each ASCII file). 6 | } 7 | \usage{ 8 | MODISTimeSeries(Dir, Band, Simplify = FALSE) 9 | } 10 | \arguments{ 11 | \item{Dir}{Character; the directory in which to search for downloaded ASCII files of MODIS data. 12 | } 13 | \item{Band}{Character; the MODIS data band shortname for the downloaded data. 14 | } 15 | \item{Simplify}{Logical; specifies whether the data output should be presented as a list (by default) or simplified to one matrix. See details for more information.} 16 | } 17 | \details{Dir can contain files other than the ASCII files containing data for Band, including other MODIS ASCII files. But, only the data for Band will be included in the output dataset. The names of the downloaded ASCII files must be their original names, containing their correct data product code. The timeseries length of different subsets can be different (the output dataset will contain NAs in the final rows of any timeseries that are shorter than the maximum). 18 | 19 | If Simplify = FALSE, each element in the list represents a unique time series tile as a matrix, with columns for each pixel in that tile and rows for each date in the time series. If Simplify = TRUE, the matrices for each time series tile will be appended together (columnwise) to form one large matrix with the same number of rows (for each date) but many columns (for all pixels of interest). If, however, the number of dates varies in different timeseries, the number of rows in each tile matrix will not be equal and these data cannot be simplified into a matrix. In this case, the data will always be returned as a list. 20 | } 21 | \value{A list with elements for each time series tile (i.e., each MODIS data file), and a matrix with columns for pixels and rows for dates in each element. 
Or, one large matrix with rows for each date and columns for each unique pixel across all MODIS data of interest (found in Dir). 22 | } 23 | \author{Sean Tuck} 24 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}} 25 | \code{\link[MODISTools:MODISSummaries]{MODISSummaries}} 26 | } 27 | \examples{ 28 | \dontrun{ 29 | # dontrun() used because running the example requires internet access, 30 | # and takes over a minute to run. 31 | time.series <- data.frame(lat = c(51.41363, 51.41421), 32 | long = c(-0.64875, -0.641607), 33 | start.date = c(2002, 2002), 34 | end.date = c(2004, 2004), 35 | ID = c(1, 2)) 36 | 37 | MODISSubsets(LoadDat = time.series, Product = "MOD13Q1", 38 | Bands = c("250m_16_days_EVI"), Size = c(0,0), StartDate = TRUE) 39 | 40 | ts <- MODISTimeSeries(Dir = ".", Band = "250m_16_days_EVI", Simplify = FALSE) 41 | class(ts) 42 | dim(ts[[1]]) 43 | dim(ts[[2]]) 44 | 45 | ts <- MODISTimeSeries(Dir = ".", Band = "250m_16_days_EVI", Simplify = TRUE) 46 | class(ts) 47 | dim(ts) 48 | 49 | time.series$end.date[2] <- 2005 50 | MODISSubsets(LoadDat = time.series, Product = "MOD13Q1", 51 | Bands = c("250m_16_days_EVI"), Size = c(1,1), StartDate = TRUE) 52 | 53 | ts <- MODISTimeSeries(Dir = ".", Band = "250m_16_days_EVI", Simplify = TRUE) 54 | class(ts) 55 | } 56 | } -------------------------------------------------------------------------------- /man/MODISTools-package.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISTools} 2 | \alias{MODISTools-package} 3 | \docType{package} 4 | \title{MODIS subsetting tools} 5 | \description{Provides an automated batch method for retrieving subsets of MODIS Land Processes data through the MODIS Web Service and processing them to a format ready for user friendly application in R such as statistical modelling. 
Datasets currently available - from terra, aqua, and combined platforms - through the web service: surface reflectance; land cover/land cover change; land surface temperature and emissivity; vegetation indices; leaf area index and FPAR; evapotranspiration; net photosynthesis and primary productivity.} 6 | \details{Imports RCurl and XML. The main purpose of this package is to employ the MODIS SOAP Web Service to remotely interrogate the MODIS archive and locally download the requested datasets. The most important function is MODISSubsets, which allows the user to request subsets from a given MODIS product for multiple time-series. Each time-series is defined by a coordinate location (WGS-1984), a specified surrounding extent of pixels, and a start and end date. The relevant MODIS product data is then extracted for these location/time combinations. Automating this as a batch process greatly reduces time, effort, and human error. Alternatively, MODISTransects expands upon MODISSubsets by extracting MODIS data along a transect, and its surrounding neighbourhood. Downloaded subsets are saved in ascii files, which are then accessed by MODISSummaries for computing summary statistics. 
It also organises downloaded data back with the original input data into a csv file that can be easily used for modelling; this provides efficient storage of data and a transparent process from data collection, to processing, to a form that is ready for final use.} 7 | \author{Sean Tuck} 8 | \references{ 9 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 10 | 11 | \url{https://cran.r-project.org/package=RCurl} 12 | 13 | \url{https://cran.r-project.org/package=XML} 14 | } 15 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 16 | -------------------------------------------------------------------------------- /man/MODISTransects.Rd: -------------------------------------------------------------------------------- 1 | \name{MODISTransects} 2 | \alias{MODISTransects} 3 | \title{MODIS Transect Subset Tool} 4 | \description{A function that downloads batches of MODIS data transects. 5 | } 6 | \usage{MODISTransects(LoadData, FileSep = NULL, Product, Bands, Size, 7 | SaveDir = ".", StartDate = FALSE, TimeSeriesLength = 0) 8 | } 9 | \arguments{ 10 | \item{LoadData}{Input data: either the name of an object already in the workspace, or a file to be read in by specifying its file path as a character string. For data input requirements, see details. All arguments to be passed to MODISSubsets(); see ?MODISSubsets for more information. 11 | } 12 | \item{FileSep}{If LoadData is a character string that corresponds to a file path, choose the delimiter character for that file (e.g. "," for comma separated). 13 | } 14 | \item{Product}{Character string; code denoting which MODIS product to be requested. 15 | } 16 | \item{Bands}{Character vector; which bands of data within specified product are of interest. 17 | } 18 | \item{Size}{Numeric vector of two non-negative integers defining the dimensions of tile requested at each location. 
The first element identifies the distance from the centre to the bottom/top (in both directions), and the second element to the left/right (in both directions) in km. For example, Size = c(0,0) identifies the centre pixel only, whereas Size = c(1,1) identifies a tile of 2kmsq. 19 | } 20 | \item{SaveDir}{Character string; an optional argument to specify the subdirectory where downloaded ascii files should be saved: default SaveDir = "." saves the files to the working directory. 21 | } 22 | \item{StartDate}{Logical; indicate whether the input dataset contains information on the time-series start date. If StartDate = TRUE, start dates will be taken from the input data and will expect the data frame to have a column named start.date. Default is StartDate = FALSE, whereby the input data is assumed to have only time-series end date and the length of time-series recorded prior to that date is determined by another optional argument, TimeSeriesLength. 23 | } 24 | \item{TimeSeriesLength}{Non-negative integer; if StartDate = FALSE, specifies the length of time-series. Default TimeSeriesLength = 0, giving a time-series length of 1 year, taken from the beginning to the end of the year. 25 | } 26 | } 27 | \details{File input requirements are a transect ID, a start lat and long, and an end lat and long for each transect, and an end date (start date optional) for the requested time-series for each transect. These fields must be labelled with headers: "transect"; "start.lat"; "start.long"; "end.lat"; "end.long"; "start.date", and; "end.date" respectively. 28 | 29 | All data types requested must be of the same resolution (pixel size). See ?MODISSubsets for more details. 30 | 31 | The transect lengths requested must be longer than the requested pixel size. Therefore, each transect must contain more than 1 pixel. 32 | } 33 | \value{ASCII file written for each location, containing the specified time-series for each band of data requested.
File name states the transect ID that the location is within, and a unique ID for that specific location. 34 | } 35 | \references{ 36 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 37 | } 38 | \author{Sean Tuck} 39 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 40 | \examples{ 41 | \dontrun{ 42 | # dontrun() used because running the example requires internet access, 43 | # and takes over a minute to run. 44 | data(TransectExample) 45 | 46 | MODISTransects(LoadData = TransectExample, Product = "MOD13Q1", 47 | Bands = c("250m_16_days_EVI", "250m_16_days_pixel_reliability"), 48 | Size = c(0,0), StartDate = TRUE) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /man/QualityCheck.Rd: -------------------------------------------------------------------------------- 1 | \name{QualityCheck} 2 | \alias{QualityCheck} 3 | \title{Remove unreliable data and no fill values from MODIS data} 4 | \description{Designed as an internal function for MODISSummaries, but can also be used independently. MODIS data of a given product band are screened for poor quality pixels, using the reliability data available for that product. The missing data values are also filtered out. The output is the same MODIS data, with screened poor quality and missing data converted to NAs. 5 | } 6 | \usage{ 7 | QualityCheck(Data, Product, Band, NoDataFill, QualityBand, QualityScores, 8 | QualityThreshold) 9 | } 10 | \arguments{ 11 | \item{Data}{Numeric vector or matrix; the input MODIS data, for QualityCheck to screen for poor quality and missing data. 12 | } 13 | \item{Product}{Character; the product code for Data (e.g. "MOD15A2" if you want to screen the quality of a downloaded data band from the leaf area index product). 14 | } 15 | \item{Band}{Character; the shortname code for the data band that you are quality screening. 
16 | } 17 | \item{NoDataFill}{Numeric; the missing data fill value for the data band (Data) that you are screening. 18 | } 19 | \item{QualityBand}{Character; the shortname code for the quality data band that you are using to screen Band for poor quality data. 20 | } 21 | \item{QualityScores}{Numeric integer vector or matrix; the quality control data that matches your Data input. QualityScores should be the same length/dimensions as Data. 22 | } 23 | \item{QualityThreshold}{Numeric integer; set the threshold between acceptable and unacceptable quality. Any pixels of lower quality than the class set by QualityThreshold will be removed, and those equal to or of higher quality will be kept. QualityThreshold should be a number within the range of possible QualityScores for the given Product QA data. 24 | } 25 | } 26 | \details{Although there are several data bands within each MODIS product, only one band type of data should be passed to QualityCheck at a given time, as different bands within a product may have different NoDataFill values, and may even have different data bands that contain their relevant quality information. 27 | } 28 | \value{Data, as it was, but with poor quality data and no data fill values replaced with NAs. 29 | } 30 | \references{ 31 | \url{https://daacmodis.ornl.gov/cgi-bin/MODIS/GLBVIZ_1_Glb/modis_subset_order_global_col5.pl} 32 | } 33 | \author{Sean Tuck} 34 | \seealso{\code{\link[MODISTools:MODISSummaries]{MODISSummaries}}} 35 | \examples{ 36 | \dontrun{ 37 | # dontrun() used because running the example requires internet access.
38 | data(QualityCheckExample) 39 | EVIdata <- QualityCheckExample[1:5, ] 40 | QAdata <- QualityCheckExample[6:10, ] 41 | 42 | QualityCheck(Data=EVIdata, Product="MOD13Q1", Band="250m_16_days_EVI", 43 | NoDataFill=-3000, QualityBand="250m_16_days_pixel_reliability", 44 | QualityScores=QAdata, QualityThreshold=0) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /man/QualityCheckExample.Rd: -------------------------------------------------------------------------------- 1 | \name{QualityCheckExample} 2 | \alias{QualityCheckExample} 3 | \docType{data} 4 | \title{Example dataset for QualityCheck} 5 | \description{A dataset containing the MODIS band data, and its quality control data for the same pixels. The first half rows are band data (EVI data from the MOD13Q1 vegetation indices product), and the second half are pixel reliability data. There are 5 rows for each data type. Each row contains 5 pixels (columns) at that time-step.} 6 | \usage{data(QualityCheckExample)} 7 | \format{ 8 | A data frame with 10 observations on the following 5 variables. 9 | \describe{ 10 | \item{\code{pixel.1}}{Pixel 1.} 11 | \item{\code{pixel.2}}{Pixel 2.} 12 | \item{\code{pixel.3}}{Pixel 3.} 13 | \item{\code{pixel.4}}{Pixel 4.} 14 | \item{\code{pixel.5}}{Pixel 5.} 15 | } 16 | } 17 | \details{The data input to QualityCheck can be a vector or a matrix. It will return an object with the same dimensions. QualityCheck was designed for internal use within MODISSummaries, but can be used separately.
When called via MODISSummaries, the data input and its quality information is expected to be contained in the same ascii files, downloaded using MODISSubsets.} 18 | \source{Arbitrary values within the valid range of EVI data and its pixel reliability information.} 19 | \keyword{datasets} 20 | -------------------------------------------------------------------------------- /man/SubsetExample.Rd: -------------------------------------------------------------------------------- 1 | \name{SubsetExample} 2 | \alias{SubsetExample} 3 | \docType{data} 4 | \title{Example dataset for MODISSubsets()} 5 | \description{A dataset consisting of a set of arbitrary locations with a corresponding set of arbitrary start and end time-series dates and IDs. 6 | This file can be used in the example for the MODIS subset function.} 7 | \usage{data(SubsetExample)} 8 | \format{ 9 | A data frame with 1 observation on the following 5 variables. 10 | \describe{ 11 | \item{lat}{A numeric vector; decimal degrees latitudes in WGS-1984 coordinate system.} 12 | \item{long}{A numeric vector; decimal degrees longitudes in WGS-1984 coordinate system.} 13 | \item{start.date}{A numeric vector; listing the date to begin the time-series for each corresponding location.} 14 | \item{end.date}{A numeric vector; listing the date to end the time-series for each corresponding location.} 15 | \item{ID}{A numeric vector; a unique ID code for each unique time-series (either unique in time or in space).} 16 | } 17 | } 18 | \details{Each time-series that has a unique combination of location/time-series dates should ideally have a unique ID.
If IDs are not unique, then an arbitrary set of IDs will be assigned 19 | and the optional summary file should be written to maintain transparency in the data collection process.} 20 | \source{Locations were arbitrarily drawn from the grounds of Silwood Park Campus, Imperial College London.} 21 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 22 | \keyword{datasets} 23 | -------------------------------------------------------------------------------- /man/TransectExample.Rd: -------------------------------------------------------------------------------- 1 | \name{TransectExample} 2 | \alias{TransectExample} 3 | \docType{data} 4 | \title{Example dataset for MODISTransects()} 5 | \description{A dataset consisting of a set of arbitrary transect start/end locations with a corresponding set of arbitrary start and end time-series dates and transect IDs. 6 | This file can be used in the example for the MODIS subset function.} 7 | \usage{data(TransectExample)} 8 | \format{ 9 | A data frame with 1 observation on the following 7 variables. 
10 | \describe{ 11 | \item{transect}{A numeric vector; a unique ID code for each transect.} 12 | \item{start.lat}{A numeric vector; decimal degrees latitudes in WGS-1984 coordinate system for the transect start point.} 13 | \item{start.long}{A numeric vector; decimal degrees longitudes in WGS-1984 coordinate system for the transect start point.} 14 | \item{end.lat}{A numeric vector; decimal degrees latitudes in WGS-1984 coordinate system for the transect end point.} 15 | \item{end.long}{A numeric vector; decimal degrees longitudes in WGS-1984 coordinate system for the transect end point.} 16 | \item{start.date}{A numeric vector; listing the year to begin the time-series for each corresponding location.} 17 | \item{end.date}{A numeric vector; listing the year to end the time-series for each corresponding location.} 18 | } 19 | } 20 | \source{Locations were arbitrarily drawn from the grounds of Silwood Park Campus, Imperial College London.} 21 | \seealso{\code{\link[MODISTools:MODISTransects]{MODISTransects}}} 22 | \keyword{datasets} -------------------------------------------------------------------------------- /man/UpdateSubsets.Rd: -------------------------------------------------------------------------------- 1 | \name{UpdateSubsets} 2 | \alias{UpdateSubsets} 3 | \title{Updating Subset List} 4 | \description{A function that identifies, from a given dataset, which subsets have already been downloaded and returns a dataframe of those yet to be downloaded. A useful function if the list of locations for subsets is continually increasing or if download was halted previously. 5 | } 6 | \usage{UpdateSubsets(LoadDat, StartDate=FALSE, Dir=".")} 7 | \arguments{ 8 | \item{LoadDat}{Input dataset: either the name of an object already in the workspace, or a file to be read in by specifying its file path as a character string, that has location data, end date (end.date) and study ID for each location.
9 | } 10 | \item{StartDate}{Logical: Specifying whether StartDate was specified in the original MODISSubset download.} 11 | \item{Dir}{String: Directory where previously downloaded subsets are saved. Default is current working directory.} 12 | } 13 | \details{The input dataset should be organised accordingly: "lat" and "long" columns showing WGS-1984 decimal degrees latitudes and longitudes respectively; "end.date" for study end date.} 14 | \value{A dataframe containing any data for which a subset has not been downloaded. This can then be used with MODISSubsets.} 15 | \author{Helen Phillips and Sean Tuck} 16 | \seealso{\code{\link[MODISTools:MODISSubsets]{MODISSubsets}}} 17 | \examples{ 18 | \dontrun{ 19 | # dontrun() used because running the example requires internet access. 20 | 21 | data(SubsetExample, ConvertExample) 22 | 23 | modis.subset <- ConvertToDD(XY = ConvertExample, LatColName = "lat", LongColName = "long") 24 | modis.subset <- data.frame(lat = c(SubsetExample$lat, modis.subset[ ,1]), 25 | long = c(SubsetExample$long, modis.subset[ ,2]), 26 | start.date = rep(SubsetExample$start.date, 9), 27 | end.date = rep(SubsetExample$end.date, 9)) 28 | 29 | MODISSubsets(LoadDat = SubsetExample, Product = "MOD13Q1", Bands = c("250m_16_days_EVI", 30 | "250m_16_days_pixel_reliability"), Size = c(0,0), StartDate = TRUE) 31 | 32 | Updated.modis.subset <- UpdateSubsets(LoadDat = modis.subset, StartDate = TRUE) 33 | 34 | MODISSubsets(LoadDat = Updated.modis.subset, Product = "MOD13Q1", Bands = c("250m_16_days_EVI", 35 | "250m_16_days_pixel_reliability"), Size = c(0,0), StartDate = TRUE) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /man/WritePRJ.Rd: -------------------------------------------------------------------------------- 1 | \name{WritePRJ} 2 | \alias{WritePRJ} 3 | \title{Internal function to write projection files for MODISGrid 4 | } 5 | \description{The function is used internally by MODISGrid to write PRJ 
files for the MODIS projection system. A file is written for each ASCII raster grid. 6 | } 7 | \usage{ 8 | WritePRJ(Path) 9 | } 10 | \arguments{ 11 | \item{Path}{The file path for the MODIS PRJ file that will be written. 12 | } 13 | } 14 | \value{A PRJ (plain text) file containing the definitions for the MODIS projection system. 15 | } 16 | \author{Sean Tuck 17 | } 18 | \seealso{\code{\link{MODISGrid}} 19 | } 20 | -------------------------------------------------------------------------------- /man/daacmodis.Rd: -------------------------------------------------------------------------------- 1 | \name{daacmodis} 2 | \alias{daac} 3 | \alias{daacmodis} 4 | \alias{wsdl_loc} 5 | \docType{data} 6 | \title{ 7 | Base URLs for MODIS downloads and the web service location 8 | } 9 | \description{ 10 | These give the base URLs for MODIS downloads. Originally they were \url{http://daac.ornl.gov} 11 | and \url{http://daacmodis.ornl.gov}; recently they 12 | moved to \url{https://daac.ornl.gov} 13 | and \url{https://daacmodis.ornl.gov}. 14 | 15 | These variables 16 | will be initialized to values of \code{getOption("MODIStools.daac", default = "https://daac.ornl.gov")} 17 | and \code{getOption("MODIStools.daacmodis", default = "https://daacmodis.ornl.gov")} in case there is 18 | a future change. 19 | } 20 | \usage{ 21 | daac 22 | daacmodis 23 | wsdl_loc 24 | } 25 | \format{ 26 | The format is: 27 | chr "1" 28 | } 29 | \examples{ 30 | daacmodis 31 | } 32 | 33 | -------------------------------------------------------------------------------- /tests/MODISTools_FullTestingScript.R: -------------------------------------------------------------------------------- 1 | # Full length testing script for MODISTools R package. 2 | # ---------------------------------------------------- 3 | # The CRAN testing farms has time constraints, creating 4 | # a limit to the amount of testing possible. 
This full 5 | # length script complements a smaller version in the R 6 | # package itself, which is to be run locally before 7 | # releasing any updates. 8 | # ----------------------------------------------------- 9 | # This testing checks for internet connectivity, the 10 | # connection to the MODIS SOAP WSDL Server and it's Web 11 | # Service Description Language, for the XML response from 12 | # the Web Service method, and for the functions of MODISTools. 13 | # =========================================================== 14 | 15 | # Load data to be used for testing. 16 | rm(list = ls()) 17 | library(MODISTools) 18 | data(SubsetExample, FindIDExample, QualityCheckExample, TransectExample, 19 | EndCoordinatesExample, ConvertExample) 20 | library(RCurl) # Will use some RCurl and XML functions explicitly in testing. 21 | library(XML) 22 | 23 | options(warn = 2) 24 | 25 | ### Following lines of code testing for internet connectivity and server access, are 26 | ### from R testing: .../tests/internet.R 27 | # Check for internet capability. 28 | if(!capabilities("http/ftp")) q() 29 | 30 | # Check for internet connectivity. 31 | if(.Platform$OS.type == "unix" && is.null(nsl("cran.r-project.org"))) q() 32 | 33 | # Check we can reach the server for lpdaac modis web service. 34 | if(.Platform$OS.type == "unix" && is.null(nsl("daac.ornl.gov"))) q() 35 | 36 | # Check the web service is currently responsive. 37 | if(class(try(GetProducts(), silent = TRUE)) == "try-error") q() 38 | 39 | # Check MODIS subset uses this output to produce correctly downloaded files. 40 | if(grepl("Server is busy handling other requests", 41 | GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MCD12Q1", 42 | Band = "Land_Cover_Type_1", StartDate = "A2005001", EndDate = "A2005001", 43 | KmAboveBelow = 0, KmLeftRight = 0)$subset[1])){ 44 | q() 45 | } 46 | ### 47 | 48 | ### Check the XML response is as expected. 
49 | getsubset.xml <- paste(' 50 | 52 | 53 | 54 | 55 | ', 51.41363, ' 56 | ', -0.64875, ' 57 | ', "MOD13Q1", ' 58 | ', "250m_16_days_EVI", ' 59 | ', "A2001001", ' 60 | ', "A2001025", ' 61 | ', 0, ' 62 | ', 0, ' 63 | 64 | 65 | ', 66 | sep = "") 67 | 68 | header.fields <- c(Accept = "text/xml", 69 | Accept = "multipart/*", 70 | 'Content-Type' = "text/xml; charset=utf-8", 71 | SOAPAction = "") 72 | 73 | reader <- basicTextGatherer() 74 | header <- basicTextGatherer() 75 | 76 | curlPerform(url = paste0(daacmodis, wsdl_doc), 77 | httpheader = header.fields, 78 | postfields = getsubset.xml, 79 | writefunction = reader$update, 80 | verbose = FALSE) 81 | 82 | # Check the server is not down by insepcting the XML response for internal server error message. 83 | if(grepl("Internal Server Error", reader$value())) q() 84 | 85 | xmlRoot(xmlTreeParse(reader$value())) 86 | ### 87 | 88 | ### Check some internal functions 89 | # Check FindID example 90 | FindID(ID = SubsetExample, Data = FindIDExample) 91 | 92 | # Check QualityCheck example 93 | EVIdata <- QualityCheckExample[1:5, ] 94 | QAdata <- QualityCheckExample[6:10, ] 95 | 96 | QualityCheck(Data = EVIdata, Product = "MOD13Q1", Band = "250m_16_days_EVI", 97 | NoDataFill = -3000, QualityBand = "250m_16_days_pixel_reliability", 98 | QualityScores = QAdata, QualityThreshold = 0) 99 | 100 | # Check the web service get methods 101 | GetProducts() 102 | GetBands('MOD13Q1') 103 | GetDates(SubsetExample$lat, SubsetExample$long, 'MOD13Q1') 104 | ### 105 | 106 | 107 | ### Check the main functions: MODISSubsets, MODISSummaries, and MODISTransects 108 | 109 | # Check we can still reach the server for lpdaac modis web service before running functions that request. 110 | if(.Platform$OS.type == "unix" && is.null(nsl("daac.ornl.gov"))) q() 111 | # Check MODIS subset uses this output to produce correctly downloaded files. 
112 | if(grepl("Server is busy handling other requests", 113 | GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MCD12Q1", Band = "Land_Cover_Type_1", 114 | StartDate = "A2005001", EndDate = "A2005001", KmAboveBelow = 0, KmLeftRight = 0)$subset[1])){ 115 | q() 116 | } else { 117 | # Check GetSubset is producing the correct output. 118 | Dates <- GetDates(FindIDExample$lat[1], FindIDExample$long[1], Product = 'MOD13Q1')[1:10] 119 | 120 | gsub.try <- try(GetSubset(Lat = FindIDExample$lat[1], Long = FindIDExample$long[1], 121 | Product = 'MOD13Q1', Band = '250m_16_days_EVI', StartDate = Dates[1], 122 | EndDate = Dates[5], KmAboveBelow = 0, KmLeftRight = 0)) 123 | if(class(gsub.try) == "try-error") q() 124 | gsub.try 125 | 126 | ### 127 | # MODISSubsets check using multiple bands and multiple-pixel tiles over multiple time-steps. 128 | MODISSubsets(LoadDat = FindIDExample, Product = "MOD13Q1", 129 | Bands = c("250m_16_days_EVI","250m_16_days_NDVI","250m_16_days_pixel_reliability"), 130 | Size = c(1,1), StartDate = TRUE) 131 | ### 132 | } 133 | 134 | # Check we can still reach the server for lpdaac modis web service before running functions that request. 135 | if(.Platform$OS.type == "unix" && is.null(nsl("daac.ornl.gov"))) q() 136 | # Check example run of MODISSummaries. 
137 | if(grepl("Server is busy handling other requests", 138 | GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MOD13Q1", Band = "250m_16_days_EVI", 139 | StartDate = "A2000049", EndDate = "A2000049", KmAboveBelow = 0, KmLeftRight = 0)$subset[1])){ 140 | q() 141 | } else { 142 | 143 | ### 144 | # MODISSummaries check without QualityCheck 145 | MODISSummaries(LoadDat = FindIDExample, Product = "MOD13Q1", 146 | Bands = c("250m_16_days_EVI","250m_16_days_NDVI"), ValidRange = c(-2000,10000), 147 | NoDataFill = -3000, ScaleFactor = 0.0001, StartDate = TRUE) 148 | 149 | # MODISSummaries check with QualityCheck 150 | MODISSummaries(LoadDat = FindIDExample, Product = "MOD13Q1", 151 | Bands = c("250m_16_days_EVI","250m_16_days_NDVI"), ValidRange = c(-2000,10000), 152 | NoDataFill = -3000, ScaleFactor = 0.0001, StartDate = TRUE, QualityScreen = TRUE, 153 | QualityBand = "250m_16_days_pixel_reliability", QualityThreshold = 0) 154 | ### 155 | } 156 | 157 | ### 158 | # Check the MODISSummaries file outputs are consistent. 159 | SummaryFile <- read.csv(list.files(pattern = "MODIS_Summary")[1]) 160 | DataFile <- read.csv(list.files(pattern = "MODIS_Data")[1]) 161 | file.check <- all(SummaryFile$mean.band == DataFile[1,which(grepl("pixel", names(DataFile)))]) 162 | if(is.na(file.check)){ 163 | warning("The two output files from MODISSummaries are not consistent.") 164 | } 165 | if(!file.check){ 166 | warning("The two output files from MODISSummaries are not consistent.") 167 | } 168 | ### 169 | 170 | # Check again that the web service is responsive. 171 | if(class(try(GetProducts(), silent = TRUE)) == "try-error") q() 172 | 173 | # Check we can still reach the server for lpdaac modis web service before running functions that request. 
174 | if(.Platform$OS.type == "unix" && is.null(nsl("daacmodis.ornl.gov"))) q() 175 | # Check example of MODISTransects 176 | if(grepl("Server is busy handling other requests", 177 | GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MOD13Q1", Band = "250m_16_days_EVI", 178 | StartDate = "A2000049", EndDate = "A2000049", KmAboveBelow = 0, KmLeftRight = 0)$subset[1])){ 179 | q() 180 | } else { 181 | 182 | ### 183 | MODISTransects(LoadData = TransectExample, Product = "MCD12Q1", Bands = c("Land_Cover_Type_1"), 184 | Size = c(0,0), StartDate = TRUE) 185 | ### 186 | } 187 | 188 | ### End of checking for main functions 189 | 190 | ### Checking for secondary functions 191 | # Check EndCoordinates example 192 | EndCoordinates(LoadDat = EndCoordinatesExample, Distance = 2000, Angle = 90, AngleUnits = "degrees") 193 | 194 | # Check ConvertToDD example 195 | convert.deg1 <- ConvertToDD(XY = ConvertExample, LatColName = "lat", LongColName = "long") 196 | convert.eg2 <- data.frame(lat = c("51d24.106'S","51d24.922'S","51d24.106'S","51d24.772'S", 197 | "51d24m51.106sS","51d24m37.922sS","51d24m42.106sS","51d24m47.772sS"), 198 | long = c("0d38.018'E","0d38.772'E","0d38.664'E","0d38.043'E","0d38m56.018sE", 199 | "0d38m31.772sE","0d38m17.664sE","0d38m42.043sE")) 200 | convert.deg2 <- ConvertToDD(XY = convert.eg2, LatColName = "lat", LongColName = "long") 201 | 202 | if(!all.equal(convert.deg1, (convert.deg2 * -1))) stop('ConvertToDD function not translating hemispheres correctly.') 203 | 204 | # Check ExtractTile example 205 | TileExample <- read.csv(list.files(pattern = "MODIS_Data")[1]) 206 | TileExample <- TileExample[1,which(grepl("pixel", names(TileExample)))[1:81]] 207 | 208 | dim(TileExample) 209 | matrix(TileExample, nrow = 9, ncol = 9, byrow = TRUE) 210 | 211 | dim(ExtractTile(Data = TileExample, Rows = c(9,1), Cols = c(9,1), Grid = FALSE)) 212 | ExtractTile(Data = TileExample, Rows = c(9,1), Cols = c(9,1), Grid = FALSE) 213 | 214 | ExtractTile(Data = 
TileExample, Rows = c(9,1), Cols = c(9,1), Grid = TRUE) 215 | 216 | # Check LandCover on previously downloaded data from MODISSubsets 217 | LandCover(Band = "Land_Cover_Type_1") 218 | 219 | # Check MODISTimeSeries example 220 | MODISTimeSeries(Dir = ".", Band = "250m_16_days_EVI") 221 | 222 | # Check UpdateSubsets 223 | rm(list = ls()) 224 | dir.create('./UpdateSubsetsEx') 225 | setwd('./UpdateSubsetsEx') 226 | data(SubsetExample, FindIDExample) 227 | 228 | MODISSubsets(LoadDat = SubsetExample, Product = "MOD13Q1", 229 | Bands = c("250m_16_days_EVI","250m_16_days_pixel_reliability"), 230 | Size = c(0,0), StartDate = TRUE) 231 | list.files() 232 | 233 | reduced.subset <- UpdateSubsets(LoadDat = FindIDExample) 234 | MODISSubsets(LoadDat = reduced.subset, Product = "MOD13Q1", 235 | Bands = c("250m_16_days_EVI","250m_16_days_pixel_reliability"), 236 | Size = c(0,0), StartDate = TRUE) 237 | list.files() 238 | ### 239 | 240 | rm(list = ls()) 241 | options(warn = 0) 242 | 243 | ### END OF TESTING SCRIPT 244 | -------------------------------------------------------------------------------- /tests/Test.R: -------------------------------------------------------------------------------- 1 | # Testing for internet connectivity, the connection to the MODIS SOAP WSDL Server and it's Web Service 2 | # Description Language, for the XML response from the Web Service method, and for the functions of 3 | # MODISTools. 4 | 5 | # Load data to be used for testing. 6 | rm(list = ls()) 7 | library(MODISTools) 8 | data(SubsetExample, FindIDExample, QualityCheckExample, TransectExample, EndCoordinatesExample, ConvertExample) 9 | library(RCurl) # Will use some RCurl and XML functions explicitly in testing. 10 | library(XML) 11 | 12 | options(warn = 2) 13 | 14 | ## Following lines of code testing for internet connectivity and server access, are from 15 | ## R testing: .../tests/internet.R 16 | # Check for internet capability. 
17 | if(!capabilities("http/ftp")) q() 18 | 19 | # Check for internet connectivity. 20 | if(.Platform$OS.type == "unix" && is.null(nsl("cran.r-project.org"))) q() 21 | 22 | # Check we can reach the server for lpdaac modis web service. 23 | if(.Platform$OS.type == "unix" && is.null(nsl("daacmodis.ornl.gov"))) q() 24 | 25 | # Check the web service is currently responsive. 26 | if(class(try(GetProducts(), silent = TRUE)) == "try-error") q() 27 | ## 28 | 29 | # Check the XML response is as expected. 30 | getsubset.xml <- paste(' 31 | 33 | 34 | 35 | 36 | ', 51.41363, ' 37 | ', -0.64875, ' 38 | ', "MOD13Q1", ' 39 | ', "250m_16_days_EVI", ' 40 | ', "A2001001", ' 41 | ', "A2001025", ' 42 | ', 0, ' 43 | ', 0, ' 44 | 45 | 46 | ', 47 | sep = "") 48 | 49 | header.fields <- c(Accept = "text/xml", 50 | Accept = "multipart/*", 51 | 'Content-Type' = "text/xml; charset=utf-8", 52 | SOAPAction = "") 53 | 54 | reader <- basicTextGatherer() 55 | header <- basicTextGatherer() 56 | 57 | curlPerform(url = paste0(daacmodis, wsdl_loc), 58 | httpheader = header.fields, 59 | postfields = getsubset.xml, 60 | writefunction = reader$update, 61 | verbose = FALSE) 62 | 63 | # Check the server is not down by insepcting the XML response for internal server error message. 64 | if(grepl("Internal Server Error", reader$value())) q() 65 | 66 | xmlRoot(xmlTreeParse(reader$value())) 67 | ### 68 | 69 | # Check FindID example 70 | FindID(ID = SubsetExample, Data = FindIDExample) 71 | 72 | # Check QualityCheck example 73 | EVIdata <- QualityCheckExample[1:5, ] 74 | QAdata <- QualityCheckExample[6:10, ] 75 | 76 | QualityCheck(Data = EVIdata, Product = "MOD13Q1", Band = "250m_16_days_EVI", NoDataFill = -3000, 77 | QualityBand = "250m_16_days_pixel_reliability", QualityScores = QAdata, QualityThreshold = 0) 78 | ### 79 | 80 | # Check MODIS subset uses this output to produce correctly downloaded files. 
81 | request <- GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MCD12Q1", Band = "Land_Cover_Type_1", 82 | StartDate = "A2005001", EndDate = "A2005001", KmAboveBelow = 0, KmLeftRight = 0)$subset[1] 83 | if(grepl("Server is busy handling other requests", request) | grepl("System overloaded", request) | 84 | grepl("Downloading from the web service is currently not working", request)){ 85 | q() 86 | } else { 87 | # Check GetSubset is producing the correct output. 88 | # Use GetProducts, GetBands, and GetDates, to specify the GetSubset request. 89 | Product <- GetProducts()[1] 90 | Band <- GetBands(Product)[1] 91 | Dates <- GetDates(SubsetExample$lat, SubsetExample$long, Product)[1:2] 92 | 93 | GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = Product, Band = Band, 94 | StartDate = Dates[1], EndDate = Dates[1], KmAboveBelow = 0, KmLeftRight = 0) 95 | 96 | MODISSubsets(LoadDat = SubsetExample, Product = "MCD12Q1", Bands = c("Land_Cover_Type_1"), 97 | Size = c(1,1), StartDate = TRUE) 98 | 99 | MODISSummaries(LoadDat = SubsetExample, Product = "MCD12Q1", Band = "Land_Cover_Type_1", 100 | ValidRange = c(0,254), NoDataFill = 255, ScaleFactor = 1, StartDate = TRUE) 101 | } 102 | 103 | # Check the MODISSummaries file outputs are consistent. 
104 | SummaryFile <- read.csv(list.files(pattern = "MODIS_Summary")) 105 | DataFile <- read.csv(list.files(pattern = "MODIS_Data")) 106 | file.check <- all(SummaryFile$mean.band == DataFile[1,which(grepl("pixel", names(DataFile)))]) 107 | if(is.na(file.check)){ 108 | warning("The two output files from MODISSummaries are not consistent.") 109 | } 110 | if(!file.check){ 111 | warning("The two output files from MODISSummaries are not consistent.") 112 | } 113 | 114 | # Check example of MODISTransects 115 | request <- GetSubset(Lat = SubsetExample$lat, Long = SubsetExample$long, Product = "MOD13Q1", Band = "250m_16_days_EVI", 116 | StartDate = "A2000049", EndDate = "A2000049", KmAboveBelow = 0, KmLeftRight = 0)$subset[1] 117 | if(grepl("Server is busy handling other requests", request) | grepl("System overloaded", request) | 118 | grepl("Downloading from the web service is currently not working", request)){ 119 | q() 120 | } else { 121 | MODISTransects(LoadData = TransectExample, Product = "MCD12Q1", Bands = c("Land_Cover_Type_1"), 122 | Size = c(0,0), StartDate = TRUE) 123 | } 124 | 125 | # Check EndCoordinates example 126 | EndCoordinates(LoadDat = EndCoordinatesExample, Distance = 2000, Angle = 90, AngleUnits = "degrees") 127 | 128 | # Check ConvertToDD example 129 | ConvertToDD(XY = ConvertExample, LatColName = "lat", LongColName = "long") 130 | 131 | # Check ExtractTile example 132 | TileExample <- read.csv(list.files(pattern = "MODIS_Data")) 133 | TileExample <- TileExample[ ,which(grepl("pixel", names(TileExample)))] 134 | 135 | dim(TileExample) 136 | dim(ExtractTile(Data = TileExample, Rows = c(5,1), Cols = c(5,1), Grid = FALSE)) 137 | ExtractTile(Data = TileExample, Rows = c(5,1), Cols = c(5,1), Grid = FALSE) 138 | 139 | matrix(TileExample, nrow = 5, ncol = 5, byrow = TRUE) 140 | ExtractTile(Data = TileExample, Rows = c(5,1), Cols = c(5,1), Grid = TRUE) 141 | 142 | # Check LandCover on previously downloaded data from MODISSubsets 143 | LandCover(Band = 
"Land_Cover_Type_1") 144 | 145 | rm(list = ls()) 146 | options(warn = 0) 147 | -------------------------------------------------------------------------------- /vignettes/LandCover/LandCoverSummary.csv: -------------------------------------------------------------------------------- 1 | "lat","long","date","modis.band","most.common","richness","simpsons.d","simpsons.evenness","no.data.fill" 2 | 51.4017666666667,-0.633633333333333,2006-01-01,"Land_Cover_Type_1","Deciduous Needleleaf forest",3,2.88018433179723,0.960061443932412,"0% (0/25)" 3 | 51.4017666666667,-0.6444,2006-01-01,"Land_Cover_Type_1","Deciduous Needleleaf forest",4,2.65957446808511,0.664893617021277,"0% (0/25)" 4 | 51.4105338888889,-0.642158888888889,2006-01-01,"Land_Cover_Type_1","Evergreen Broadleaf forest",4,2.85388127853881,0.713470319634703,"0% (0/25)" 5 | 51.4116961111111,-0.63824,2006-01-01,"Land_Cover_Type_1","Evergreen Broadleaf forest",4,2.45098039215686,0.612745098039216,"0% (0/25)" 6 | 51.4128666666667,-0.63405,2006-01-01,"Land_Cover_Type_1","Evergreen Needleleaf forest",3,1.84365781710914,0.614552605703048,"0% (0/25)" 7 | 51.41327,-0.645011944444444,2006-01-01,"Land_Cover_Type_1","Evergreen Broadleaf forest",4,2.30627306273063,0.576568265682657,"0% (0/25)" 8 | 51.4141961111111,-0.648893888888889,2006-01-01,"Land_Cover_Type_1","Evergreen Broadleaf forest",5,2.75330396475771,0.550660792951542,"0% (0/25)" 9 | 51.4153666666667,-0.6462,2006-01-01,"Land_Cover_Type_1","Evergreen Broadleaf forest",4,2.30627306273063,0.576568265682657,"0% (0/25)" 10 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/1_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.41327Lon-0.645011944444444Samp5Line5,2011062212900,14,14,14,14,14,5,5,14,5,5,5,5,5,5,5,5,14,14,14,5,5,5,5,13,14 2 | 
MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.41327Lon-0.645011944444444Samp5Line5,2011085002541,14,14,14,14,14,14,5,14,5,5,5,5,5,5,5,14,14,14,5,14,5,1,5,13,14 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.41327Lon-0.645011944444444Samp5Line5,2011230183526,5,14,14,5,14,14,5,5,5,5,14,5,5,5,5,14,14,14,5,5,14,1,5,13,14 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/2_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4017666666667Lon-0.6444Samp5Line5,2011062212900,5,14,14,14,5,5,5,5,13,14,13,13,14,13,14,13,13,13,13,14,13,13,13,13,5 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4017666666667Lon-0.6444Samp5Line5,2011085002541,14,14,14,5,14,5,1,5,13,14,13,13,14,13,14,13,13,13,13,14,13,13,13,13,5 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4017666666667Lon-0.6444Samp5Line5,2011230183526,14,14,14,5,5,14,1,5,13,14,13,13,14,13,14,13,13,13,13,14,13,13,13,13,14 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/3_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4017666666667Lon-0.633633333333333Samp5Line5,2011062212900,14,14,5,5,5,5,13,14,14,5,14,13,14,14,14,13,13,14,14,5,13,13,5,5,5 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4017666666667Lon-0.633633333333333Samp5Line5,2011085002541,14,5,14,5,5,5,13,14,14,5,14,13,14,14,5,13,13,14,14,5,13,13,5,5,5 3 | 
MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4017666666667Lon-0.633633333333333Samp5Line5,2011230183526,14,5,5,5,5,5,13,14,14,5,14,13,14,14,5,13,13,14,14,5,13,13,14,14,5 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/4_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4105338888889Lon-0.642158888888889Samp5Line5,2011062212900,5,5,14,5,5,5,5,5,5,5,5,14,14,14,5,5,5,5,13,14,13,13,14,13,14 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4105338888889Lon-0.642158888888889Samp5Line5,2011085002541,14,5,14,5,5,5,5,5,5,5,14,14,14,5,14,5,1,5,13,14,13,13,14,13,14 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4105338888889Lon-0.642158888888889Samp5Line5,2011230183526,14,5,5,5,5,14,5,5,5,5,14,14,14,5,5,14,1,5,13,14,13,13,14,13,14 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/5_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4116961111111Lon-0.63824Samp5Line5,2011062212900,5,14,5,5,5,5,5,5,5,5,14,14,14,5,5,5,5,13,14,14,13,14,13,14,14 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4116961111111Lon-0.63824Samp5Line5,2011085002541,5,14,5,5,5,5,5,5,5,5,14,14,5,14,5,1,5,13,14,14,13,14,13,14,14 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4116961111111Lon-0.63824Samp5Line5,2011230183526,5,5,5,5,5,5,5,5,5,5,14,14,5,5,5,1,5,13,14,14,13,14,13,14,14 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/6_MCD12Q1.asc: 
-------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4128666666667Lon-0.63405Samp5Line5,2011062212900,14,14,14,14,14,14,5,5,5,5,5,5,5,5,5,14,14,5,5,5,5,13,14,14,5 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4128666666667Lon-0.63405Samp5Line5,2011085002541,14,14,14,14,14,14,5,5,5,5,5,5,5,5,5,14,5,14,5,5,5,13,14,14,5 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4128666666667Lon-0.63405Samp5Line5,2011230183526,14,5,14,14,14,5,5,5,5,5,5,5,5,5,5,14,5,5,5,5,5,13,14,14,5 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/7_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4141961111111Lon-0.648893888888889Samp5Line5,2011062212900,14,14,14,14,14,14,5,5,14,5,14,5,5,5,5,14,5,14,14,14,4,5,5,5,13 2 | MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4141961111111Lon-0.648893888888889Samp5Line5,2011085002541,14,14,14,14,14,14,14,5,14,5,14,5,5,5,5,14,14,14,14,5,4,5,1,5,13 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4141961111111Lon-0.648893888888889Samp5Line5,2011230183526,14,5,14,14,5,14,14,5,5,5,8,14,5,5,5,8,14,14,14,5,5,14,1,5,13 4 | -------------------------------------------------------------------------------- /vignettes/MODISSubsetsMCD12Q1/8_MCD12Q1.asc: -------------------------------------------------------------------------------- 1 | MCD12Q1.A2004001.h17v03.005.2011062212900.Land_Cover_Type_1,MCD12Q1,A2004001,Lat51.4153666666667Lon-0.6462Samp5Line5,2011062212900,14,14,14,14,14,5,5,14,5,5,5,5,5,5,5,5,14,14,14,5,5,5,5,13,14 2 | 
MCD12Q1.A2005001.h17v03.005.2011085002541.Land_Cover_Type_1,MCD12Q1,A2005001,Lat51.4153666666667Lon-0.6462Samp5Line5,2011085002541,14,14,14,14,14,14,5,14,5,5,5,5,5,5,5,14,14,14,5,14,5,1,5,13,14 3 | MCD12Q1.A2006001.h17v03.005.2011230183526.Land_Cover_Type_1,MCD12Q1,A2006001,Lat51.4153666666667Lon-0.6462Samp5Line5,2011230183526,5,14,14,5,14,14,5,5,5,5,14,5,5,5,5,14,14,14,5,5,14,1,5,13,14 4 | -------------------------------------------------------------------------------- /vignettes/MODISSummaries/Data.csv: -------------------------------------------------------------------------------- 1 | "lat","long","start.date","end.date","SubsetID","250m_16_days_EVI_pixel1","250m_16_days_EVI_pixel2","250m_16_days_EVI_pixel3","250m_16_days_EVI_pixel4","250m_16_days_EVI_pixel5","250m_16_days_EVI_pixel6","250m_16_days_EVI_pixel7","250m_16_days_EVI_pixel8","250m_16_days_EVI_pixel9","250m_16_days_EVI_pixel10","250m_16_days_EVI_pixel11","250m_16_days_EVI_pixel12","250m_16_days_EVI_pixel13","250m_16_days_EVI_pixel14","250m_16_days_EVI_pixel15","250m_16_days_EVI_pixel16","250m_16_days_EVI_pixel17","250m_16_days_EVI_pixel18","250m_16_days_EVI_pixel19","250m_16_days_EVI_pixel20","250m_16_days_EVI_pixel21","250m_16_days_EVI_pixel22","250m_16_days_EVI_pixel23","250m_16_days_EVI_pixel24","250m_16_days_EVI_pixel25","250m_16_days_EVI_pixel26","250m_16_days_EVI_pixel27","250m_16_days_EVI_pixel28","250m_16_days_EVI_pixel29","250m_16_days_EVI_pixel30","250m_16_days_EVI_pixel31","250m_16_days_EVI_pixel32","250m_16_days_EVI_pixel33","250m_16_days_EVI_pixel34","250m_16_days_EVI_pixel35","250m_16_days_EVI_pixel36","250m_16_days_EVI_pixel37","250m_16_days_EVI_pixel38","250m_16_days_EVI_pixel39","250m_16_days_EVI_pixel40","250m_16_days_EVI_pixel41","250m_16_days_EVI_pixel42","250m_16_days_EVI_pixel43","250m_16_days_EVI_pixel44","250m_16_days_EVI_pixel45","250m_16_days_EVI_pixel46","250m_16_days_EVI_pixel47","250m_16_days_EVI_pixel48","250m_16_days_EVI_pixel49","250m_16_days_EVI_pixel50","250m_16_days_E
VI_pixel51","250m_16_days_EVI_pixel52","250m_16_days_EVI_pixel53","250m_16_days_EVI_pixel54","250m_16_days_EVI_pixel55","250m_16_days_EVI_pixel56","250m_16_days_EVI_pixel57","250m_16_days_EVI_pixel58","250m_16_days_EVI_pixel59","250m_16_days_EVI_pixel60","250m_16_days_EVI_pixel61","250m_16_days_EVI_pixel62","250m_16_days_EVI_pixel63","250m_16_days_EVI_pixel64","250m_16_days_EVI_pixel65","250m_16_days_EVI_pixel66","250m_16_days_EVI_pixel67","250m_16_days_EVI_pixel68","250m_16_days_EVI_pixel69","250m_16_days_EVI_pixel70","250m_16_days_EVI_pixel71","250m_16_days_EVI_pixel72","250m_16_days_EVI_pixel73","250m_16_days_EVI_pixel74","250m_16_days_EVI_pixel75","250m_16_days_EVI_pixel76","250m_16_days_EVI_pixel77","250m_16_days_EVI_pixel78","250m_16_days_EVI_pixel79","250m_16_days_EVI_pixel80","250m_16_days_EVI_pixel81" 2 | 51.4017666666667,-0.633633333333333,2003,2006,"Lat51.40177Lon-0.63363Start2006-01-01End2006-12-31","0.39936","0.37724","0.34072","0.37131875","0.404575","0.40685625","0.38736875","0.35590625","0.346973333333333","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.38135","0.41319375","0.51429375","0.449713333333333","0.34075","0.35045","0.34808","0.3784125","0.3966875","0.39108125","0.49211875","0.54566875","0.469913333333333","0.335464285714286","0.336871428571429","0.360128571428571","0.39626","0.4037375","0.41949375","0.48775625","0.5037","0.410933333333333","0.327442857142857","0.346464285714286","0.379886666666667","0.372106666666667","0.333366666666667","0.35502","0.40006","0.435833333333333","0.42526","0.327207142857143","0.318207142857143","0.34886","0.35926","0.328873333333333","0.357706666666667","0.400946666666667","0.379807142857143","0.364478571428571","0.364561538461538","0.315038461538462","0.315707692307692","0.368476923076923","0.399521428571429","0.422985714285714","0.426171428571429","0.3797","0.359014285714286","0.3977","0.346161538461538","0.353953846153846","0.400684615384615","0.386884615384615","0.37
8684615384615","0.356323076923077","0.341353846153846","0.345085714285714","0.357742857142857","0.358885714285714","0.325657142857143","0.314078571428571","0.315038461538462","0.287546153846154","0.283938461538462","0.307707692307692","0.353928571428571" 3 | 51.4153666666667,-0.6462,2003,2006,"Lat51.41537Lon-0.64620Start2006-01-01End2006-12-31","0.35470625","0.36103125","0.43336875","0.48204375","0.52995625","0.48905625","0.42155","0.41335625","0.39858125","0.40330625","0.31438125","0.384775","0.48506875","0.47388125","0.46478125","0.457025","0.4319","0.401125","0.41651875","0.34694375","0.36665","0.412575","0.40041875","0.4102875","0.41368125","0.39599375","0.3773875","0.390393333333333","0.35818","0.36965625","0.35319375","0.32791875","0.38174375","0.4338375","0.42308125","0.3961375","0.406826666666667","0.385633333333333","0.384646666666667","0.36898","0.375466666666667","0.39111875","0.4130875","0.40366875","0.394975","0.37638","0.36788","0.39262","0.379526666666667","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.3698","0.358966666666667","0.389713333333333","0.38906","0.380573333333333","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.3778375","0.377378571428571","0.41585","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.404575","0.39275","0.383292857142857","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125" 4 | 
51.4017666666667,-0.6444,2003,2006,"Lat51.40177Lon-0.64440Start2006-01-01End2006-12-31","0.41585","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.404575","0.40685625","0.383292857142857","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.38135","0.31125","0.296778571428571","0.310171428571429","0.34075","0.35045","0.34808","0.3784125","0.3966875","0.39108125","0.263971428571429","0.271714285714286","0.328271428571429","0.335464285714286","0.336871428571429","0.360128571428571","0.39626","0.4037375","0.41949375","0.221714285714286","0.250735714285714","0.336842857142857","0.327442857142857","0.346464285714286","0.379886666666667","0.372106666666667","0.333366666666667","0.35502","0.248928571428571","0.268830769230769","0.3213","0.327207142857143","0.318207142857143","0.34886","0.35926","0.328873333333333","0.357706666666667","0.294192857142857","0.303384615384615","0.3316","0.364561538461538","0.315038461538462","0.315707692307692","0.368476923076923","0.399521428571429","0.422985714285714","0.37974","0.355307142857143","0.404828571428571","0.3977","0.346161538461538","0.353953846153846","0.400684615384615","0.386884615384615","0.378684615384615","0.39594","0.357342857142857","0.336071428571429","0.357742857142857","0.358885714285714","0.325657142857143","0.314078571428571","0.315038461538462","0.287546153846154" 5 | 
51.4128666666667,-0.63405,2003,2006,"Lat51.41287Lon-0.63405Start2006-01-01End2006-12-31","0.47388125","0.46478125","0.457025","0.4319","0.401125","0.35405","0.3657125","0.4067125","0.41826875","0.40041875","0.4102875","0.41368125","0.39599375","0.3773875","0.3632625","0.3737125","0.40519375","0.4392125","0.32791875","0.38174375","0.4338375","0.42308125","0.3961375","0.38186875","0.3695125","0.37293125","0.38539375","0.375466666666667","0.39111875","0.4130875","0.40366875","0.394975","0.3786125","0.3835125","0.3858125","0.39775625","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.3698","0.35873125","0.3475625","0.34495625","0.37538","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.3778375","0.37433125","0.3781","0.320725","0.26802","0.39936","0.37724","0.34072","0.37131875","0.404575","0.40685625","0.38736875","0.35590625","0.346973333333333","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.38135","0.41319375","0.51429375","0.449713333333333","0.34075","0.35045","0.34808","0.3784125","0.3966875","0.39108125","0.49211875","0.54566875","0.469913333333333" 6 | 
51.4141961111111,-0.648893888888889,2003,2006,"Lat51.41420Lon-0.64889Start2006-01-01End2006-12-31","0.4803875","0.40330625","0.31438125","0.384775","0.48506875","0.47388125","0.46478125","0.457025","0.4319","0.48875","0.41651875","0.34694375","0.36665","0.412575","0.40041875","0.4102875","0.41368125","0.39599375","0.479133333333333","0.390393333333333","0.35818","0.36965625","0.35319375","0.32791875","0.38174375","0.4338375","0.42308125","0.437013333333333","0.406826666666667","0.385633333333333","0.384646666666667","0.36898","0.375466666666667","0.39111875","0.4130875","0.40366875","0.380766666666667","0.37638","0.36788","0.39262","0.379526666666667","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.346326666666667","0.358966666666667","0.389713333333333","0.38906","0.380573333333333","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.363714285714286","0.377378571428571","0.41585","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.395785714285714","0.39275","0.383292857142857","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.392057142857143","0.360628571428571","0.31125","0.296778571428571","0.310171428571429","0.34075","0.35045","0.34808","0.3784125" 7 | 
51.4105338888889,-0.642158888888889,2003,2006,"Lat51.41053Lon-0.64216Start2006-01-01End2006-12-31","0.34694375","0.36665","0.412575","0.40041875","0.4102875","0.41368125","0.39599375","0.3773875","0.3632625","0.35818","0.36965625","0.35319375","0.32791875","0.38174375","0.4338375","0.42308125","0.3961375","0.38186875","0.385633333333333","0.384646666666667","0.36898","0.375466666666667","0.39111875","0.4130875","0.40366875","0.394975","0.3786125","0.36788","0.39262","0.379526666666667","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.3698","0.35873125","0.389713333333333","0.38906","0.380573333333333","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.3778375","0.37433125","0.41585","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.404575","0.40685625","0.383292857142857","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.38135","0.31125","0.296778571428571","0.310171428571429","0.34075","0.35045","0.34808","0.3784125","0.3966875","0.39108125","0.263971428571429","0.271714285714286","0.328271428571429","0.335464285714286","0.336871428571429","0.360128571428571","0.39626","0.4037375","0.41949375" 8 | 
51.4116961111111,-0.63824,2003,2006,"Lat51.41170Lon-0.63824Start2006-01-01End2006-12-31","0.36665","0.412575","0.40041875","0.4102875","0.41368125","0.39599375","0.3773875","0.3632625","0.3737125","0.36965625","0.35319375","0.32791875","0.38174375","0.4338375","0.42308125","0.3961375","0.38186875","0.3695125","0.384646666666667","0.36898","0.375466666666667","0.39111875","0.4130875","0.40366875","0.394975","0.3786125","0.3835125","0.39262","0.379526666666667","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.3698","0.35873125","0.3475625","0.38906","0.380573333333333","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.3778375","0.37433125","0.3781","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.404575","0.40685625","0.38736875","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.38135","0.41319375","0.296778571428571","0.310171428571429","0.34075","0.35045","0.34808","0.3784125","0.3966875","0.39108125","0.49211875","0.271714285714286","0.328271428571429","0.335464285714286","0.336871428571429","0.360128571428571","0.39626","0.4037375","0.41949375","0.48775625" 9 | 
51.41327,-0.645011944444444,2003,2006,"Lat51.41327Lon-0.64501Start2006-01-01End2006-12-31","0.40330625","0.31438125","0.384775","0.48506875","0.47388125","0.46478125","0.457025","0.4319","0.401125","0.41651875","0.34694375","0.36665","0.412575","0.40041875","0.4102875","0.41368125","0.39599375","0.3773875","0.390393333333333","0.35818","0.36965625","0.35319375","0.32791875","0.38174375","0.4338375","0.42308125","0.3961375","0.406826666666667","0.385633333333333","0.384646666666667","0.36898","0.375466666666667","0.39111875","0.4130875","0.40366875","0.394975","0.37638","0.36788","0.39262","0.379526666666667","0.384146666666667","0.398853333333333","0.370206666666667","0.37826875","0.3698","0.358966666666667","0.389713333333333","0.38906","0.380573333333333","0.384506666666667","0.367206666666667","0.360226666666667","0.390875","0.3778375","0.377378571428571","0.41585","0.384542857142857","0.375542857142857","0.39936","0.37724","0.34072","0.37131875","0.404575","0.39275","0.383292857142857","0.370285714285714","0.349592857142857","0.369485714285714","0.366107142857143","0.329966666666667","0.34665","0.38898125","0.360628571428571","0.31125","0.296778571428571","0.310171428571429","0.34075","0.35045","0.34808","0.3784125","0.3966875" 10 | -------------------------------------------------------------------------------- /vignettes/MODISTransects/Transect.csv: -------------------------------------------------------------------------------- 1 | "start.lat","start.long","start.date","end.date","end.lat","end.long" 2 | 51.4017666666667,-0.633633333333333,2003,2006,51.4062630695444,-0.621150042253927 3 | 51.4153666666667,-0.6462,2003,2006,51.4198630695444,-0.633712995426601 4 | 51.4017666666667,-0.6444,2003,2006,51.4062630695444,-0.631916708920594 5 | 51.4128666666667,-0.63405,2003,2006,51.4173630695444,-0.621563678272679 6 | 51.4141961111111,-0.648893888888889,2003,2006,51.4186925139888,-0.636407204051455 7 | 
51.4105338888889,-0.642158888888889,2003,2006,51.4150302917666,-0.62967320424404 8 | 51.4116961111111,-0.63824,2003,2006,51.4161925139888,-0.625753997962547 9 | 51.41327,-0.645011944444444,2003,2006,51.4177664028777,-0.63252551255795 10 | -------------------------------------------------------------------------------- /vignettes/UsingMODISTools.Rnw: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | \usepackage[top=2cm, bottom=3cm, left=2cm, right=2cm]{geometry} 3 | \usepackage[utf8]{inputenc} 4 | \usepackage{amsmath} % /eqref 5 | \usepackage{url} 6 | \usepackage{hyperref} 7 | \usepackage[none]{hyphenat} % No hyphens 8 | \usepackage[noae]{Sweave} 9 | %\VignetteIndexEntry{Using MODISTools} 10 | \newcommand{\code}[1]{\texttt{#1}} 11 | 12 | \begin{document} 13 | 14 | \sloppy % Prevent hyphenated words running into margins 15 | \SweaveOpts{concordance=TRUE, width=6, height=6} 16 | \setkeys{Gin}{width=0.5\textwidth} 17 | \title{Using MODISTools 18 | (\Sexpr{packageDescription('MODISTools', fields='Version')})} 19 | \author{Sean Tuck} 20 | \date{\Sexpr{packageDescription('MODISTools', fields='Date')}} 21 | 22 | \maketitle 23 | \tableofcontents 24 | 25 | <>= 26 | library(MODISTools) 27 | 28 | # Makes copy-paste much less painful 29 | options(continue = ' ') 30 | options(width = 90) 31 | options(prompt = '> ') 32 | 33 | options(SweaveHooks = list(fig=function() par(mgp=c(2.5,1,0), 34 | mar=c(4,4,2,1), 35 | oma=c(0,0,1,0), 36 | cex.main=0.8))) 37 | @ 38 | 39 | \section{Introduction} 40 | The MODISTools R package is a set of tools for downloading and working with NASA's MODIS remotely-sensed data. The package retrieves data from the LP DAAC data archive, via their SOAP web service. Functions download data as a batch process, and save subsets in text files that can be returned to at a later date. 
Additional functions can provide summaries of these data and prepare them in a format ready for use in R; if you have other data that you wish to relate to the MODIS data, the downloaded data can be appended to your original dataset. Other ancillary functions can help get input arguments into the correct format.
The dates can be specified as years or as POSIXlt date-time objects (see \code{?POSIXlt}). In this case we can simply use years.
89 | <>= 90 | GetDates(Product = "MOD13Q1", Lat = modis.subset$lat[1], Long = modis.subset$long[1]) 91 | @ 92 | The time-period available for the Vegetation Indices product covers 2003-2006 (the maximum shown is at the time this vignette was built), so we can proceed. When we download we also need to decide how large we want the tiles of data for each location to be. We specify this by entering the distance (km) above and below in each direction away from the central pixel, where the input coordinate is located, and then doing the same for left and right. The input must be whole km (integers) for each direction. As an example, if we specify \code{Size=c(1,1)} for this EVI data at 250m pixel resolution, it will retrieve a 9x9 pixel tile for each location, centred on the input coordinate. The tiles this size will be downloaded at the locations for each time-step that falls between the start and end dates. \code{Size=c(0,0)} would specify only the central pixel. The maximum size tile surrounding a location is \code{Size=c(100,100)}. 93 | 94 | \subsection{MODISSubsets} 95 | The download will write the MODIS data to ASCII files for each location subset specified. We can specify the directory that we would like to save downloaded files in, using the \code{SaveDir} argument below. In the code below, downloaded files will be written to your working directory; if you would prefer the files to be written elsewhere change \code{SaveDir}. But we will access these files later, so remember to request the files from the same directory. 96 | <>= 97 | MODISSubsets(LoadDat = modis.subset, Products = "MOD13Q1", 98 | Bands = c("250m_16_days_EVI", "250m_16_days_pixel_reliability"), 99 | Size = c(1,1)) 100 | @ 101 | Each ASCII file is a different subset location. In each ASCII file, each row is a different time-step in the time-series. If multiple data bands have been downloaded for this subset, they will all be contained in the same ASCII file for that subset. 
102 | 103 | Here is an example of the strings of data that are downloaded for pixels at each time-step and data band: 104 | <>= 105 | subset.string <- read.csv(list.files(pattern = ".asc")[1], 106 | header = FALSE, as.is = TRUE) 107 | subset.string[1, ] 108 | @ 109 | <>= 110 | subset.string <- read.csv(paste("./MODISSubsetsMOD13Q1/", 111 | list.files(path = "./MODISSubsetsMOD13Q1", pattern = ".asc")[1] 112 | , sep = ""), header = FALSE, as.is = TRUE) 113 | subset.string[1, ] 114 | @ 115 | A download log file will also be written, displaying all the unique subsets found in the dataset, and confirmation of download success for each. 116 | 117 | \subsection{MODISTransects} 118 | Alternatively, we may want transects of MODIS data. This is easily done by specifying start and end points for transects, with unique IDs for each transect and then calling \code{MODISTransects} instead of using \code{MODISSubsets}. To try this out, use the example given in the \code{MODISTransects} help documentation. 119 | 120 | \section{Process the data} 121 | \subsection{MODISSummaries} 122 | Now we have downloaded the EVI data, we can find average each pixel over time, to produce one tile of mean EVI pixels at each subset location. We can use \code{MODISSummaries} for this. The function will also take this processed data and append it to your original files containing all the subset information (\code{modis.subset}). This will write two files to the specified directory. We downloaded quality control data for each pixel alongside our EVI data, so \code{MODISSummaries} can also check for poor quality and missing data. These data will be removed and replaced with \code{NA}s. The threshold for defining what is good and poor quality is set by the user: the scores for highest quality is 0, and the score for lowest quality is 3 or 5, depending on the data band. 
To see how quality control information is defined for each data type, go to the \href{https://daacweb-dev.ornl.gov/MODIS/MODIS-menu/products.html}{\bf{MODIS Products Table}}. We also need to specify the range of valid EVI values, the value that denotes missing data, and the scale factor that is applied to the data, all of which are listed on the same web page.
144 | <<>>= 145 | dim(TileExample) 146 | dim(ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = FALSE)) 147 | head(ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = FALSE), 148 | n = 2) 149 | @ 150 | We can look at the first subset and arrange the pixels into a tile to visually show what \code{ExtractTile} has done. 151 | <<>>= 152 | matrix(TileExample[1, ], nrow = 9, ncol = 9, byrow = TRUE) 153 | ExtractTile(Data = TileExample, Rows = c(9,2), Cols = c(9,2), Grid = TRUE)[ , ,1] 154 | @ 155 | Arrangement of the pixels into tiles this way can be optionally set with a call to \code{ExtractTile}. The order for the strings of pixel data in the downloaded ASCII files is by row, so \code{matrix(..., byrow=TRUE)} can arrange the pixels correctly (see above). 156 | 157 | \subsection{LandCover} 158 | Let's do the same as above but download data on land cover classes for the same subsets. 159 | <>= 160 | dir.create('./LandCover') 161 | setwd('./LandCover') 162 | MODISSubsets(LoadDat = modis.subset, Product = "MCD12Q1", Bands = "Land_Cover_Type_1", 163 | Size = c(1,1)) 164 | @ 165 | We can use \code{LandCover} to retrieve some summaries of land cover in each tile. This will tell us the most common land cover type, the total number of distinct land cover types, and Simpson's D and evenness measures to express landscape diversity and heterogeneity in these tiles. Let's retrieve these summaries from the land cover subset files we just downloaded. 166 | <>= 167 | LandCover(Band = "Land_Cover_Type_1") 168 | 169 | land.summary <- read.csv(list.files(pattern = "MODIS_Land_Cover_Summary")) 170 | head(land.summary) 171 | @ 172 | <>= 173 | land.summary <- read.csv(paste("./LandCover/", 174 | list.files(path = "./LandCover/", 175 | pattern = "LandCoverSummary"), 176 | sep = "")) 177 | head(land.summary) 178 | @ 179 | 180 | \end{document} 181 | --------------------------------------------------------------------------------