├── .Rbuildignore ├── .gitignore ├── .travis.yml ├── DESCRIPTION ├── NAMESPACE ├── NEWS.md ├── R ├── WellRedundancyUtils.R ├── aggregateData.R ├── calcGWFlow.R ├── calcTrafficLights.R ├── createOptions.R ├── fitData.R ├── fitPSpline.R ├── fitSVM.R ├── global.R ├── importTables.R ├── interpBary.R ├── interpConc.R ├── jobqueue.R ├── launchApp.R ├── plotPlumeTimeSeries.R ├── plotSTPredictions.R ├── plotSpatialImage.R ├── plotTimeSeries.R ├── plotTrendTable.R ├── plotWellReport.R ├── plumeDiagnostics.R ├── ppt.R ├── predictValues.R ├── processData.R ├── readData.R ├── selectGamma.R ├── server.R ├── shapeFiles.R ├── sliderValues.R ├── ui.R ├── uiAnalyse.R ├── uiAnalyseOptions.R ├── uiAttr.R ├── uiCustomColourKey.R ├── uiDataManager.R ├── uiLogsJobs.R ├── uiPlumeDiagnostics.R ├── uiSTPredictions.R ├── uiSession.R ├── uiSpatialImage.R ├── uiTimeSeries.R ├── uiTrendTable.R ├── uiWellReport.R ├── user_db.R └── utilities.R ├── README.md ├── Shiny_GWSDAT.Rproj ├── cran-comments.md ├── data ├── BasicExample_WellCoords.csv ├── BasicExample_WellData.csv ├── ComprehensiveExample_WellCoords.csv ├── ComprehensiveExample_WellData.csv └── GIS_Files │ ├── GWSDATex2.cpg │ ├── GWSDATex2.dbf │ ├── GWSDATex2.shp │ └── GWSDATex2.shx ├── inst ├── application │ ├── jqdb_pspline_fit.R │ └── simple_pspline_fit.R ├── extdata │ ├── BasicExample_WellCoords.csv │ ├── BasicExample_WellData.csv │ ├── Basic_Example.xlsx │ ├── ComprehensiveExample_WellCoords.csv │ ├── ComprehensiveExample_WellData.csv │ ├── Comprehensive_Example.xlsx │ ├── GIS_Files │ │ ├── GWSDATex2.cpg │ │ ├── GWSDATex2.dbf │ │ ├── GWSDATex2.shp │ │ └── GWSDATex2.shx │ ├── GWSDAT-v3.10.xlam │ ├── GWSDAT-v3.12.xlam │ ├── GWSDAT-v3.20.xlam │ ├── GWSDAT_Basic_Example.rds │ ├── GWSDAT_Example.rds │ ├── GWSDAT_Examples.rds │ └── gwsdat_logo.png └── www │ ├── google-analytics.js │ ├── jump_to_tsplot.js │ └── logo.png ├── man ├── createOptions.Rd └── launchApp.Rd └── start_ExcelMode.R /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^.*\.Rproj$ 2 | ^\.Rproj\.user$ 3 | .git* 4 | ^Readme.md 5 | ^R/fitSVM.R 6 | ^R/shapeFiles_sp.R 7 | ^R/sliderValues.R 8 | ^start_ExcelMode.R 9 | ^data/* 10 | ^.travis.yml 11 | ^TODO.txt 12 | ^users.RData 13 | ^app.R 14 | ^users.db 15 | ^CRAN-RELEASE$ 16 | ^cran-comments.md 17 | ^CRAN-SUBMISSION$ 18 | ^dumpdata.R 19 | ^Evaldf.R 20 | ^temp.R 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Ruserdata 5 | TODO.txt 6 | users.RData 7 | app.R 8 | users.db 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: r 2 | cache: packages 3 | sudo: required 4 | apt_packages: 5 | - libudunits2-dev 6 | - gdal-bin 7 | - libgdal-dev 8 | 9 | before_install: 10 | - sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable -y 11 | - sudo apt-get update -q 12 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: GWSDAT 2 | Title: GroundWater Spatiotemporal Data Analysis Tool (GWSDAT) 3 | Version: 3.2.1 4 | Author: Wayne Jones , Ludger Evers 5 | , Andrej Aderhold 6 | 7 | Maintainer: Wayne Jones 8 | Description: Shiny application for the analysis of groundwater 9 | 
monitoring data, designed to work with simple time-series data for 10 | solute concentration and ground water elevation, but can also plot 11 | non-aqueous phase liquid (NAPL) thickness if required. Also provides 12 | the import of a site basemap in GIS shapefile format. 13 | License: GPL-3 14 | Depends: 15 | R (>= 3.5.0) 16 | Imports: 17 | deldir, 18 | digest, 19 | geometry, 20 | Kendall, 21 | lattice, 22 | lubridate, 23 | MASS, 24 | Matrix, 25 | officer (>= 0.3.8), 26 | raster, 27 | readxl, 28 | rhandsontable, 29 | sf, 30 | shiny, 31 | shinycssloaders, 32 | shinydashboard, 33 | shinyjs, 34 | sm, 35 | sp, 36 | splancs, 37 | zoo 38 | Suggests: 39 | DBI, 40 | RSQLite 41 | Encoding: UTF-8 42 | RoxygenNote: 7.2.3 43 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(predict,GWSDAT.PSpline) 4 | export(createOptions) 5 | export(launchApp) 6 | import(MASS) 7 | import(geometry) 8 | import(grDevices) 9 | import(graphics) 10 | import(officer) 11 | import(rhandsontable) 12 | import(sf) 13 | import(shiny) 14 | import(shinycssloaders) 15 | import(stats) 16 | import(zoo) 17 | importFrom(Kendall,Kendall) 18 | importFrom(Matrix,sparseMatrix) 19 | importFrom(deldir,deldir) 20 | importFrom(deldir,triang.list) 21 | importFrom(digest,digest) 22 | importFrom(lattice,panel.grid) 23 | importFrom(lattice,panel.xyplot) 24 | importFrom(lattice,xyplot) 25 | importFrom(lubridate,parse_date_time) 26 | importFrom(raster,extent) 27 | importFrom(raster,raster) 28 | importFrom(raster,rasterize) 29 | importFrom(raster,writeRaster) 30 | importFrom(readxl,excel_sheets) 31 | importFrom(readxl,read_excel) 32 | importFrom(shinyjs,delay) 33 | importFrom(shinyjs,hide) 34 | importFrom(shinyjs,onclick) 35 | importFrom(shinyjs,show) 36 | importFrom(shinyjs,useShinyjs) 37 | importFrom(sm,sm.regression) 38 | importFrom(sp,point.in.polygon) 39 | importFrom(splancs,areapl) 40 | importFrom(splancs,gridpts) 41 | importFrom(utils,globalVariables) 42 | importFrom(utils,packageVersion) 43 | importFrom(utils,read.csv) 44 | importFrom(utils,sessionInfo) 45 | importFrom(utils,write.csv) 46 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # GWSDAT 3.2.1 2 | - Bug fix for Trends and Threshold Indicator Table 3 | 4 | # GWSDAT 3.2.0 5 | - Implementation of well influence analysis: https://github.com/peterradv/Well-Influence-Analysis 6 | 7 | - Updated User Manual for version 3.2. 8 | 9 | - Updated Excel Add-in - more robust to 32 bit versus 64 bit version of Excel. Removed installation of rgdal. 10 | 11 | - Fixed bug Threshold Value does not display in export of time-series graphs: #252 12 | 13 | - Functionality to read in R data.frames directly. 14 | 15 | - Beta implementation of api style argument passing to online version. 16 | 17 | - Added GW Well Report Functionality. 18 | 19 | - Added local save options to CSV generation of Monitoring data file from Excel Add-in. More robust date format identification. 20 | 21 | - Improved messaging describing how GWSDAT behaves when concentration and NAPL data observed together, i.e. prefers to use conc data rather than substitution. 22 | 23 | - Support for Windows Meta File image format output for spatial plot. Useful for rearranging overlapping well labels. 
24 | 25 | # GWSDAT 3.1.1 26 | 27 | - Cumulative bug fixes since the release of 3.1.0 28 | 29 | # GWSDAT 3.1.0 30 | 31 | - Well Redundancy Analysis: This allows the user to very conveniently drop a well or a combination of wells from the analysis and investigate the resultant impact. 32 | 33 | - Updated User Manual: http://gwsdat.net/gwsdat_manual. A fully comprehensive description of GWSDAT, updated for version 3.1. 34 | 35 | - Excel Add-in Menu: New Excel menu designed to be clearer to navigate and easier to install. 36 | 37 | - Custom Colour Key: In response to user feedback, the latest version has the functionality to customise the colour key in the main GWSDAT spatial plot. 38 | 39 | - Updated branding: The Excel data input templates have been updated with more contemporary colour schemes. 40 | 41 | - Bug Fixes: Numerous bug fixes and enhancements. More robust data input procedures which report more thoroughly on any potential issues, e.g. missing data, incorrect units, etc. 42 | 43 | 44 | # GWSDAT 3.0.6 45 | 46 | - Fixed High resolution model fit in Shiny options menu. 47 | 48 | - Modified to be backwards compatible with tsearch function in geometry package. 49 | 50 | - Fixed Model dialog box issue to correctly show Glasgow T+Cs. 51 | 52 | - Fixed csv input date format issue 53 | 54 | -------------------------------------------------------------------------------- /R/WellRedundancyUtils.R: -------------------------------------------------------------------------------- 1 | RefitModel<-function(csite,substance,WellsToOmit){ 2 | 3 | All.Data<-csite$All.Data 4 | #All.Data$Cont.Data<-subset(All.Data$Cont.Data, !WellName %in% WellsToOmit) #Cran not keen on this. 5 | All.Data$Cont.Data<-All.Data$Cont.Data[!All.Data$Cont.Data$WellName %in% WellsToOmit,] 6 | 7 | csite[["Reduced.Fitted.Data"]]<-fitData(All.Data=All.Data,params=csite$GWSDAT_Options,showProgress = TRUE,calcTrend=FALSE)$Fitted.Data 8 | 9 | ### Refit GW flows. 10 | temp.GW.Flows <- csite$GW.Flows 11 | temp.GW.Flows<-temp.GW.Flows[!temp.GW.Flows$WellName %in% WellsToOmit,] 12 | temp.GW.Flows<-evalGWFlow(temp.GW.Flows,showErrorMessage=FALSE) 13 | csite[["Reduced.Fitted.Data.GW.Flows"]]<-temp.GW.Flows 14 | return(csite) 15 | } -------------------------------------------------------------------------------- /R/aggregateData.R: -------------------------------------------------------------------------------- 1 | 2 | pasteAggLimit <- function(timep, aggr_by, fchrin = "%d-%m-%Y", fout = "%d-%b-%Y") { 3 | 4 | 5 | aggr_by <- tolower(aggr_by) 6 | 7 | if (is.character(timep)) 8 | timep <- as.Date(timep, fchrin) 9 | 10 | 11 | # Create the string for the date or date range to print 12 | dout <- format.Date(timep, fout) 13 | 14 | # Need the end of the aggregation period 15 | if (tolower(aggr_by) != "day") { 16 | 17 | # The second element will be the last day of the month or quarter, year. 
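# Illustrative worked example (hypothetical inputs) for the active line below:
# pasteAggLimit("10-05-2017", "month") computes
#   period = seq.Date(as.Date("2017-05-10"), by = "-1 month", length.out = 2) + 1
#          = c("2017-05-11", "2017-04-11"),
# so period[2] is the *start* of a trailing one-month window ending at timep,
# and the returned label is "11-Apr-2017 to 10-May-2017" (month abbreviations
# depend on the locale).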
18 | #period <- seq.Date(timep, by = aggr_by, length.out = 2) - 1
19 | period <- seq.Date(timep, by = paste0("-1 ",aggr_by), length.out = 2) + 1
20 | dout <- paste0(format.Date(period[2], fout), " to ", dout)
21 | #dout <- paste0(dout, " to ", format.Date(period[2], fout))
22 | }
23 | 
24 | return(dout)
25 | }
26 | 
27 | 
28 | aggregateData <- function(Cont.Data, GW.Data, NAPL.Thickness.Data, Well.Coords,
29 | aggr_by, aggr_gw_type) {
30 | 
31 | 
32 | if (!(tolower(aggr_by) %in% c("day", "month", "quarter", "year" )))
33 | stop("Need to specify valid aggregation period (aggr_by): day, month, quarter, or year.")
34 | 
35 | if (!(tolower(aggr_gw_type) %in% c("mean","median","min","max")))
36 | stop("Need to specify valid GW aggregation method (aggr_gw_type): mean, median, min, or max.")
37 | 
38 | 
39 | All.Dates<-as.Date(sort(unique(c(Cont.Data$SampleDate,GW.Data$SampleDate, NAPL.Thickness.Data$SampleDate))))
40 | ##my.seq<-as.Date(sort(seq.Date(max(Cont.Data$SampleDate),min(Cont.Data$SampleDate)-500,by=paste("-1",tolower(aggr_by)))))
41 | my.seq<-as.Date(sort(seq.Date(max(All.Dates),min(All.Dates)-500,by=paste("-1",tolower(aggr_by)))))
42 | Cont.Data$AggDate<-as.Date(cut.Date(Cont.Data$SampleDate,breaks=my.seq,include.lowest=T,right=T,labels=as.character(my.seq[-1])))
43 | 
44 | #Cont.Data$AggDate <- as.Date(cut.Date(Cont.Data$SampleDate, breaks = tolower(aggr_by), include.lower = TRUE))
45 | All_Agg_Dates <- sort(unique(Cont.Data$AggDate))
46 | Agg_GW_Data <- NULL
47 | 
48 | # If there is groundwater data, generate the corresponding aggregation dates
49 | # and append to the list of contaminant aggregation dates.
50 | if (nrow(na.omit(GW.Data)) > 0) {
51 | 
52 | Agg_GW_Data <- na.omit(GW.Data)
53 | Agg_GW_Data$AggDate<-as.Date(cut.Date(Agg_GW_Data$SampleDate,breaks=my.seq,include.lowest=T,right=T,labels=as.character(my.seq[-1])))
54 | #Agg_GW_Data$AggDate <- as.Date(cut.Date(Agg_GW_Data$SampleDate, breaks = tolower(aggr_by), include.lower = TRUE))
55 | Agg_GW_Data <- createGWAggDates(Agg_GW_Data, Well.Coords, tolower(aggr_gw_type))
56 | 
57 | # Append GW aggregated dates.
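# (Binning illustration, hypothetical values: with monthly breaks
# my.seq = c("2017-03-11", "2017-04-11", "2017-05-11"), a sample taken on
# "2017-04-20" falls in the interval ("2017-04-11", "2017-05-11"] since
# right = T, and is therefore assigned AggDate "2017-05-11" by the
# cut.Date() calls above.)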
58 | All_Agg_Dates <- sort(unique(c(All_Agg_Dates, Agg_GW_Data$AggDate))) 59 | 60 | } 61 | 62 | 63 | if (!is.null(NAPL.Thickness.Data)) { 64 | 65 | #NAPL.Thickness.Data$AggDate <- as.Date(cut.Date(NAPL.Thickness.Data$SampleDate, breaks = tolower(aggr_by), include.lower = TRUE)) 66 | NAPL.Thickness.Data$AggDate<-as.Date(cut.Date(NAPL.Thickness.Data$SampleDate,breaks=my.seq,include.lowest=T,right=T,labels=as.character(my.seq[-1]))) 67 | attr(NAPL.Thickness.Data, "lev_cuts") <- pretty(seq(0,max(NAPL.Thickness.Data$Result.Corr.ND, na.rm = T), l = 13), n = 12) 68 | attr(NAPL.Thickness.Data, "NAPL.Wells") <- sort(unique(as.character(NAPL.Thickness.Data$WellName))) 69 | 70 | } 71 | 72 | return(list(All_Agg_Dates = All_Agg_Dates, 73 | Cont.Data = Cont.Data, 74 | Agg_GW_Data = Agg_GW_Data, 75 | NAPL.Thickness.Data = NAPL.Thickness.Data )) 76 | } 77 | 78 | 79 | 80 | createGWAggDates <- function(x, Well.Coords, type=c("mean","median","min","max")){ 81 | 82 | type <- match.arg(type) 83 | 84 | 85 | out <- aggregate(x$Result, by = list(AggDate = x$AggDate, WellName = x$WellName), type) 86 | names(out)[which(names(out) == "x")] <- "Result" 87 | 88 | out$XCoord <- Well.Coords[match(as.character(out$WellName), as.character(Well.Coords$WellName)),]$XCoord 89 | out$YCoord <- Well.Coords[match(as.character(out$WellName), as.character(Well.Coords$WellName)),]$YCoord 90 | 91 | if (is.factor(out$AggDate)) { 92 | out$AggDate <- as.Date(as.numeric(as.character(out$AggDate))) #Compatibility with older versions of R 93 | } 94 | 95 | return(out) 96 | } 97 | 98 | 99 | -------------------------------------------------------------------------------- /R/calcGWFlow.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | delDirNeighbours <- function(tempDelDir){ 4 | 5 | tr <- tempDelDir$delsgs 6 | my.list <- list() 7 | 8 | for (i in 1:nrow(tempDelDir$summary)) { 9 | my.list[[i]] <- sort(c(tr[,c("ind1")][which(tr[,c("ind2")] == i)],tr[,c("ind2")][which(tr[,c("ind1")] == i)])) 10 | } 11 | 12 | return(my.list) 13 | } 14 | 15 | 16 | 17 | 18 | calcGWFlow <- function(temp.GW) { 19 | 20 | 21 | if (nrow(temp.GW) < 3) {return(NULL)} 22 | 23 | if (any(duplicated(temp.GW[,c("XCoord","YCoord")]))) { 24 | 25 | #warning("non unique well Coords") 26 | new.temp.GW <- temp.GW[!duplicated(temp.GW[,c("XCoord","YCoord")]),,drop = FALSE] 27 | 28 | if (nrow(new.temp.GW) < 3) {return(NULL)} 29 | 30 | for (i in 1:nrow(new.temp.GW)) { 31 | new.temp.GW$Result[i] <- mean(temp.GW$Result[temp.GW$XCoord == new.temp.GW$XCoord[i] & temp.GW$YCoord == new.temp.GW$YCoord[i]], na.rm = T) 32 | } 33 | 34 | temp.GW <- new.temp.GW 35 | } 36 | 37 | #temp.tr <- deldir(x=temp.GW$XCoord, y=temp.GW$YCoord, duplicate = "remove",frac = 0) 38 | temp.tr <- deldir(x=temp.GW$XCoord, y=temp.GW$YCoord) 39 | 40 | temp.tr.nghbrs <- delDirNeighbours(temp.tr) 41 | temp.GW$R <- temp.GW$GradY <- temp.GW$GradX <- rep(NA,nrow(temp.GW)) 42 | 43 | for (i in 1:nrow(temp.GW)) { 44 | 45 | temp.lm <- lm(Result~XCoord+YCoord,temp.GW[c(i,temp.tr.nghbrs[[i]]),]) 46 | temp.GW$GradX[i] <- (-1)*temp.lm$coeff["XCoord"] 47 | temp.GW$GradY[i] <- (-1)*temp.lm$coeff["YCoord"] 48 | temp.GW$R[i] <- sqrt(temp.GW$GradX[i]^2 + temp.GW$GradY[i]^2) 49 | } 50 | 51 | 52 | temp.GW$RAD <- atan2(temp.GW$GradY,temp.GW$GradX) 53 | 54 | return(temp.GW) 55 | } 56 | 57 | 58 | evalGWFlow <- function(Agg_GW_Data,showErrorMessage=TRUE) { 59 | 60 | 61 | #if (showProgress) { 62 | # progress$set(value = PctDone, detail = paste("calculating groundwater")) 63 | #} 64 | 65 | GW.Flows 
<- NULL 66 | 67 | if (!is.null(Agg_GW_Data)) { 68 | 69 | tryCatch( 70 | GW.Flows <- do.call('rbind', by(Agg_GW_Data, Agg_GW_Data$AggDate, calcGWFlow)), 71 | error = function(e) { 72 | if(showErrorMessage){showNotification(paste0("Failed to calculate groundwater flows: ", e$message), type = "error", duration = 10)} 73 | }) 74 | 75 | if (!is.null(GW.Flows)) { 76 | GW.Flows$R <- GW.Flows$R/quantile(GW.Flows$R, p = 0.9, na.rm = T) 77 | GW.Flows$R[GW.Flows$R > 1] <- 1 78 | GW.Flows <- na.omit(GW.Flows) 79 | } 80 | } 81 | 82 | return(GW.Flows) 83 | } -------------------------------------------------------------------------------- /R/calcTrafficLights.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | calcTrafficLights <- function(All.Data, Fitted.Data, smThreshSe = 1.1512, smMethod = "aicc") { 4 | 5 | ND.Beta.Check <- function(Cont.Data,All.Time.Evals){ 6 | 7 | if (nrow(Cont.Data) <= 1) { 8 | return(rep(NA,length(All.Time.Evals))) 9 | } 10 | Cont.Data <- Cont.Data[order(Cont.Data$SampleDate),] 11 | Cont.Data$SampleDate[which(Cont.Data$SampleDate == max(Cont.Data$SampleDate))]<- 12 | Cont.Data$AggDate[which(Cont.Data$SampleDate == max(Cont.Data$SampleDate))] 13 | #print(Cont.Data$ND) 14 | 15 | ind.minus1<-approx(Cont.Data$SampleDate,as.numeric(Cont.Data$ND),xout=All.Time.Evals-1)$y 16 | ind<-approx(Cont.Data$SampleDate,as.numeric(Cont.Data$ND),xout=All.Time.Evals)$y 17 | ind.plus1<-approx(Cont.Data$SampleDate,as.numeric(Cont.Data$ND),xout=All.Time.Evals+1)$y 18 | cl<-cbind(ind.minus1,ind,ind.plus1) 19 | lookup<-apply(cl,1,mean,na.rm=T) 20 | lookup[is.nan(lookup)]<-NA 21 | 22 | return(floor(lookup)) 23 | 24 | } 25 | 26 | 27 | Wrap.ND.Beta.Check<-function(Cont.Data,All.Time.Evals){ 28 | 29 | out<-try(ND.Beta.Check(Cont.Data,All.Time.Evals),silent=FALSE) 30 | 31 | if(!inherits(out, "try-error")){ 32 | return(out) 33 | }else{ 34 | return(rep(NA,length(All.Time.Evals))) 35 | } 36 | 37 | } 38 | 39 | 40 | 41 | GWSDAT.sm.derivative <- function(x, y, h, eval.points = seq(min(x), max(x), length = 50)) { 42 | 43 | n <- length(x) 44 | ne <- length(eval.points) 45 | wd <- matrix(rep(eval.points, rep(n, ne)), ncol = n, byrow = T) 46 | wd <- wd - matrix(rep(x, ne), ncol = n, byrow = T) 47 | w <- exp(-.5 * (wd/h)^2) 48 | 49 | s0 <- w %*% rep(1,n) 50 | s1 <- (w * wd) %*% rep(1,n) 51 | s2 <- (w * wd^2) %*% rep(1,n) 52 | 53 | w <- w * (wd * matrix(rep(s0, n), ncol = n) - matrix(rep(s1, n), ncol = n)) 54 | w <- w / (matrix(rep(s2, n), ncol = n) * matrix(rep(s0, n), ncol = n) 55 | - matrix(rep(s1, n), ncol = n)^2) 56 | est <- -as.vector(w %*% y) 57 | invisible(list(eval.points = eval.points, estimate = est)) 58 | 59 | } 60 | 61 | 62 | #' @importFrom sm sm.regression 63 | sm.func <- function(x, All.Time.Evals, smThreshSe, smMethod) { 64 | 65 | x.obs<-as.numeric(x$SampleDate) #input data is still non-agg data! 66 | y.obs<-log(x$Result.Corr.ND) 67 | out.upper<-rep(NA,length(All.Time.Evals)) 68 | out.Betas<-rep(NA,length(All.Time.Evals)) 69 | out.index<-match(as.numeric(x$AggDate),as.numeric(All.Time.Evals)) #outindex matches the aggregate date! 
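# e.g. if this well's aggregate dates match evaluation slots c(3, NA, 5),
# the next line widens out.index to 3:5, i.e. estimates are reported over the
# contiguous span between the well's first and last aggregated observation.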
70 | out.index<-range(out.index,na.rm=T)[1]:range(out.index,na.rm=T)[2] 71 | 72 | 73 | if(length(x.obs)<3){return(list(Betas=out.Betas,trend.upper.lim=out.upper,h=NA))} 74 | if(sum(!x$ND)<2){return(list(Betas=out.Betas,trend.upper.lim=out.upper,h=NA))} 75 | 76 | sm.fit <- try(sm::sm.regression(x.obs,y.obs,display = "none",method=smMethod,verbose=0),silent=TRUE) 77 | 78 | 79 | if(inherits(sm.fit, "try-error")){ 80 | 81 | return(list(Betas=out.Betas,trend.upper.lim=out.upper,h=NA)) 82 | } 83 | 84 | if(any(is.nan(sm.fit$estimate)) || any(is.na(sm.fit$estimate)) || any(is.nan(sm.fit$se)) || any(is.na(sm.fit$se))){ 85 | 86 | return(list(Betas=out.Betas,trend.upper.lim=out.upper,h=NA)) 87 | 88 | } 89 | 90 | 91 | sm.fit <- sm::sm.regression(x.obs,y.obs,display = "none",eval.points = All.Time.Evals,h=sm.fit$h,verbose=0) 92 | sm.fit$estimate[sm.fit$se>smThreshSe]<-NA 93 | sm.est<-exp(sm.fit$estimate) 94 | sm.95up<-exp(sm.fit$estimate+2*sm.fit$se) 95 | out.upper[out.index]<-sm.95up[out.index] 96 | 97 | 98 | 99 | # Calculate the derivaties. 100 | Betas <- try(matrix(GWSDAT.sm.derivative(x = x.obs, y = y.obs, h = sm.fit$h, 101 | eval.points = as.numeric(All.Time.Evals))$estimate, ncol = 1), silent = TRUE) 102 | 103 | if (!inherits(Betas, "try-error")) { 104 | out.Betas[out.index] <- Betas[out.index] 105 | out.Betas[is.na(sm.fit$estimate)] <- NA # if se>smThreshSe then dont give Beta estimate! 106 | } 107 | 108 | return(list(Betas = out.Betas, trend.upper.lim = out.upper, h = sm.fit$h)) 109 | 110 | } 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | All.Time.Evals <- All.Data$All_Agg_Dates 119 | sample_loc_names <- All.Data$sample_loc$names 120 | 121 | 122 | 123 | 124 | tr<-try(by(data=All.Data$Cont.Data,INDICES=list(Wells=All.Data$Cont.Data$WellName,Conts=All.Data$Cont.Data$Constituent),FUN=sm.func,All.Time.Evals=All.Time.Evals,smThreshSe,smMethod)) 125 | 126 | Beta.check.ND<-try(by(data=All.Data$Cont.Data,INDICES=list(Wells=All.Data$Cont.Data$WellName,Conts=All.Data$Cont.Data$Constituent),FUN=Wrap.ND.Beta.Check,All.Time.Evals=All.Time.Evals)) 127 | 128 | Abs.Thresh.Check<-my.Betas<-my.uppers<-my.Beta.ND.Check<-array(NA,dim=c(length(sample_loc_names), length(All.Data$cont_names), length(All.Time.Evals))) 129 | 130 | dimnames(Abs.Thresh.Check)<-dimnames(my.uppers)<-dimnames(my.Betas) <- 131 | dimnames(my.Beta.ND.Check)<-list(sort(sample_loc_names, decreasing = TRUE), sort(All.Data$cont_names), as.character(All.Time.Evals)) 132 | 133 | my.h<-matrix(NA,nrow = length(sample_loc_names), ncol = length(All.Data$cont_names)) 134 | rownames(my.h) <- sample_loc_names 135 | colnames(my.h) <- All.Data$cont_names 136 | 137 | 138 | 139 | if(!inherits(tr, "try-error")){ 140 | 141 | for(i in dimnames(my.uppers)[[1]]){ 142 | for(j in dimnames(my.uppers)[[2]]){ 143 | 144 | my.uppers[i,j,] <- if(!inherits(try(tr[i,j],silent=T),"try-error") && !is.null(tr[i,j][[1]]$trend.upper.lim)) {tr[i,j][[1]]$trend.upper.lim}else{NA} 145 | my.Betas[i,j,] <- if(!inherits(try(tr[i,j],silent=T),"try-error") && !is.null(tr[i,j][[1]]$Betas)) {tr[i,j][[1]]$Betas} else{NA} 146 | my.Beta.ND.Check[i,j,] <- if(!inherits(try(tr[i,j],silent=T),"try-error") && !is.null(Beta.check.ND[i,j][[1]])) {Beta.check.ND[i,j][[1]]} else{NA} 147 | my.h[i,j] <- if(!inherits(try(tr[i,j],silent=T),"try-error") && !is.null(tr[i,j][[1]]$h)) {tr[i,j][[1]]$h} else{NA} 148 | } 149 | } 150 | } 151 | 152 | 153 | my.Betas[!is.finite(my.Betas)]<-NA 154 | my.uppers[!is.finite(my.uppers)]<-NA 155 | my.Beta.ND.Check[!is.finite(my.Beta.ND.Check)]<-NA 156 | 157 | 158 | 159 | 
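# Minimal sanity-check sketch for the local-linear derivative estimator
# GWSDAT.sm.derivative() defined above, on synthetic data (illustration only;
# wrapped in if (FALSE) so it never runs as part of the analysis):
if (FALSE) {
  x <- seq(0, 2 * pi, length.out = 200)
  y <- sin(x) + rnorm(200, sd = 0.05)
  d <- GWSDAT.sm.derivative(x, y, h = 0.3)
  plot(d$eval.points, d$estimate, type = "l")       # estimated derivative
  lines(d$eval.points, cos(d$eval.points), lty = 2) # true derivative of sin(x)
}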
160 | 161 | ################ Abs Threshold Calc ########################################################## 162 | 163 | 164 | 165 | ND.Data<- All.Data$Cont.Data[All.Data$Cont.Data$ND==TRUE,] 166 | D.Data <- All.Data$Cont.Data[All.Data$Cont.Data$ND==FALSE,] 167 | 168 | if(nrow(ND.Data)>0){ 169 | 170 | temp.AggDate<-as.character(ND.Data$AggDate) 171 | temp.Conts<-as.character(ND.Data$Constituent) 172 | temp.Wells<-as.character(ND.Data$WellName) 173 | 174 | for(i in 1:nrow(ND.Data)){ 175 | 176 | try(Abs.Thresh.Check[temp.Wells[i],temp.Conts[i],temp.AggDate[i]]<- -1) 177 | 178 | } 179 | 180 | } 181 | 182 | 183 | if(nrow(D.Data)>0){ 184 | 185 | temp.AggDate<-as.character(D.Data$AggDate) 186 | temp.Conts<-as.character(D.Data$Constituent) 187 | temp.Wells<-as.character(D.Data$WellName) 188 | 189 | for(i in 1:nrow(D.Data)){ 190 | 191 | temp.val<-Abs.Thresh.Check[temp.Wells[i],temp.Conts[i],temp.AggDate[i]] 192 | 193 | if(is.finite(D.Data$Result.Corr.ND[i])){ 194 | if(is.na(temp.val) || temp.val t) 67 | } 68 | 69 | 70 | 71 | # Computes the coefficient vector for the MAP estimate of lambda. 72 | GWSDAT.compute.map.coef <- function(B, DtD, y, ig.a=1e-3, ig.b=1e-3, lambdas, prior=function(lambda) 1) { 73 | 74 | # Prepare and do the fancy linear algebra 75 | BtB <- t(B) %*% B 76 | P.eigen <- eigen(BtB+DtD) 77 | if (any(P.eigen$valuessqrt(.Machine$double.eps)) 87 | z <- drop(t(Q.svd$u)%*%y) 88 | 89 | # Function to compute the loglikelihood 90 | loglik <- function(lambda) { 91 | # Get the coefficient 92 | coef <- drop(Xtinv[,sel]%*%(z*sqrt(d[sel]) / (d[sel]+lambda*e[sel]))) 93 | residuals <- y-B%*%coef 94 | # Get the posterior determinant 95 | log.post.det <- -sum(log(d+lambda*e))-log.det.XtX 96 | # Compute the log-posterior 97 | 0.5 * rank.D * log(lambda) + 0.5 * log.post.det - (ig.a + length(y)/2) * log(2*ig.b + sum(y*residuals)) + log(prior(lambda)) 98 | } 99 | 100 | # Get the best lambda 101 | logliks <- sapply(lambdas, loglik) 102 | lambda <- lambdas[which.max(logliks)] 103 | # alpha coefficient the the best lambda 104 | alpha <- drop(Xtinv[,sel]%*%(z*sqrt(d[sel]) / (d[sel]+lambda*e[sel]))) 105 | fitted <- drop(B %*% alpha) 106 | 107 | return(list(best.lambda=lambda,trial.lambdas=lambdas,logliks=logliks,alpha=alpha,fitted=fitted)) 108 | 109 | ## Alternative when SEs are needed. 110 | ## Quantities for Standard error calc 111 | #post.ig.a <- ig.a + length(y)/2 112 | #post.ig.b <- ig.b + sum(y*(y-fitted))/2 113 | #return(list( best.lambda=lambda,trial.lambdas=lambdas,logliks=logliks,alpha=alpha,fitted=fitted, 114 | # post.ig.a=post.ig.a,post.ig.b=post.ig.b,Xtinv=Xtinv,e=e,d=d)) 115 | 116 | 117 | } 118 | 119 | tunePSplines <- function(ContData, NIG.a, NIG.b, nseg, pord, bdeg, Trial.Lambda, verbose = FALSE) { 120 | 121 | 122 | # Prepare Data 123 | form <- log(Result.Corr.ND) ~ XCoord + YCoord + AggDate - 1 124 | #FIXME: what is variable 'form' for inside tunePSpline()? 
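# Note on the FIXME above: 'form' is used twice just below -- to build the
# design matrix X via model.matrix() and to extract the response Y via
# model.frame() -- so the comment appears to be stale.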
125 | X <- model.matrix(form,ContData) 126 | 127 | colnames(X) <- c("XCoord","YCoord","AggDate") 128 | center <- colMeans(X, na.rm = TRUE) 129 | X <- sweep(X, 2L, center) 130 | scale <- apply(X, 2, sd) 131 | scale[1:2] <- rep(min(scale[1:2]),2) 132 | X <- sweep(X, 2L, scale, "/") 133 | Y <- model.response(model.frame(form, ContData)) 134 | 135 | 136 | mat <- GWSDAT.st.matrices(X, xrange = t(apply(X, 2, range)), ndims = 3, nseg = rep(nseg,3), pord = pord, bdeg = bdeg) 137 | 138 | 139 | BestModel <- GWSDAT.compute.map.coef(mat$B, mat$P, Y, lambdas = Trial.Lambda, ig.a = NIG.a, ig.b = NIG.b, prior = GWSDAT.Prior) 140 | 141 | if(TRUE){ ##Calculate Imetrics - introduced in GWSDAT version 3.2 142 | 143 | B <- mat$B 144 | P <- mat$P 145 | 146 | # computing Hat matrix 147 | hatmat <- B %*% solve((t(B) %*% B + BestModel$best.lambda * P)) %*% t(B) 148 | df <- sum(diag(hatmat)) # effective degrees of freedom 149 | 150 | 151 | Imetrics <- data.frame(Constituent=ContData$Constituent,WellName =ContData$WellName,SampleDate=ContData$AggDate, leverage=diag(hatmat),residual=Y-BestModel$fitted) 152 | N <- nrow(Imetrics) # number of observations 153 | 154 | 155 | # calculate standard error of residuals 156 | RSE <- sqrt(sum(Imetrics$residual^2)/(N-df)) 157 | 158 | # calculate standardized residuals and add to results 159 | Imetrics$standresid = Imetrics$residual/(RSE*sqrt(1-Imetrics$leverage)) 160 | Imetrics$cd<-(1/df)*(Imetrics$standresid^2)*(Imetrics$leverage/(1-Imetrics$leverage)) 161 | Imetrics$covratio<- 1/(((((N-df-1)/(N-df))+((Imetrics$standresid^2)/(N-df)))^df)*(1-Imetrics$leverage)) 162 | ImetricsByWellSummary<-aggregate(cd~Constituent+WellName,Imetrics,mean) 163 | ImetricsByWellSummary<-ImetricsByWellSummary[order(ImetricsByWellSummary$cd,decreasing = F),] 164 | } 165 | 166 | 167 | if (verbose) { 168 | 169 | # op <- par(no.readonly = TRUE); 170 | # par(mfrow = c(1,1)) 171 | # plot(log(BestModel$trial.lambdas,base = 10), BestModel$logliks) 172 | # par(op) 173 | # 174 | } 175 | 176 | 177 | best.model <- list(Lambda = BestModel$best.lambda, 178 | xrange = mat$xrange, 179 | nseg = nseg, 180 | ndims = 3, 181 | bdeg = mat$bdeg, 182 | pord = mat$pord, 183 | scale = scale, 184 | center = center, 185 | alpha = BestModel$alpha, 186 | fitted = BestModel$fitted, 187 | Imetrics=list(Imetrics=Imetrics,ImetricsByWellSummary=ImetricsByWellSummary,Wellorder=as.character(ImetricsByWellSummary$WellName))) 188 | 189 | ##Alternative for SEs 190 | #best.model<-list( 191 | #Lambda=BestModel$best.lambda,xrange=mat$xrange,nseg=nseg,ndims=3,bdeg = mat$bdeg,pord = mat$pord,scale=scale,center=center, 192 | #alpha=BestModel$alpha,fitted=BestModel$fitted, 193 | #post.ig.a=BestModel$post.ig.a,post.ig.b=BestModel$post.ig.b ,Xtinv=BestModel$Xtinv,d=BestModel$d,e=BestModel$e 194 | #) 195 | 196 | class(best.model) <- "GWSDAT.PSpline" 197 | 198 | Model.tune <- list(Trial.Lambda = Trial.Lambda,best.model = best.model) 199 | 200 | return(Model.tune) 201 | } 202 | 203 | 204 | 205 | 206 | fitPSplines <- function(ContData, params){ 207 | 208 | #cat("* in fitPSpline()\n") 209 | 210 | names(ContData)[names(ContData) == "AggDate"] <- "AggDatekeep" 211 | names(ContData)[names(ContData) == "SampleDate"] <- "AggDate" 212 | 213 | Model.tune <- try(tunePSplines(ContData, params$NIG.a, params$NIG.b, params$nseg, 214 | params$pord, params$bdeg, params$Trial.Lambda)) 215 | 216 | 217 | if (!inherits(Model.tune, "try-error")) { 218 | 219 | #pred<-predict(Model.tune$best.model,newdata=ContData,se=TRUE) 220 | #ContData$ModelPred<-exp(pred$predicted) 221 
| #ContData$Upper95<-exp(pred$predicted+1.96*pred$predicted.sd) 222 | #ContData$Lower95<-exp(pred$predicted-1.96*pred$predicted.sd) 223 | 224 | #Alternative to SEs 225 | pred <- predict(Model.tune$best.model, newdata = ContData, se = FALSE) 226 | ContData$ModelPred <- exp(pred$predicted) 227 | ContData$Upper95 <- ContData$Lower95 <- rep(NA,nrow(ContData)) 228 | 229 | } else{ 230 | ContData$ModelPred <- rep(NA,nrow(ContData)) 231 | ContData$Upper95 <- rep(NA,nrow(ContData)) 232 | ContData$Lower95 <- rep(NA,nrow(ContData)) 233 | } 234 | 235 | names(ContData)[names(ContData) == "AggDate"] <- "SampleDate" 236 | names(ContData)[names(ContData) == "AggDatekeep"] <- "AggDate" 237 | 238 | #### Legacy func from GWSDAT SVM.R. Need to check for NAPL only data sets. 239 | ContData$Result.Corr.ND[!is.finite(ContData$Result.Corr.ND)] <- NA #Wayne V3 coerce -inf to NA for NAPL only data sets. 240 | 241 | list(Cont.Data = ContData, Model.tune = Model.tune) 242 | 243 | } 244 | 245 | #' @export 246 | #predict.GWSDAT.PSpline <- function(mod,newdata,se=FALSE) { 247 | predict.GWSDAT.PSpline <- function(object,newdata,se=FALSE,...) { 248 | 249 | X <- model.matrix(~XCoord+YCoord+AggDate-1,newdata) 250 | X <- sweep(X, 2L, object$center) 251 | X <- sweep(X, 2L, object$scale, "/") 252 | 253 | 254 | mat <- GWSDAT.st.matrices(x = X, xrange = object$xrange, ndims = object$ndims, 255 | nseg = rep(object$nseg,object$ndims), bdeg = object$bdeg, 256 | pord = object$pord, computeP = FALSE) 257 | B <- mat$B 258 | 259 | result <- list(predicted.sd = rep(NA,nrow(B))) 260 | 261 | 262 | if (se) { 263 | 264 | post.ig.a <- object$post.ig.a 265 | post.ig.b <- object$post.ig.b 266 | 267 | if (post.ig.a <= 2) { 268 | result$predicted.sd <- rep(Inf, nrow(B)) 269 | } else { 270 | result$predicted.sd <- sqrt((post.ig.b / (post.ig.a-2)) * ((B%*%object$Xtinv)^2 %*% (1 / (object$d + object$Lambda * object$e)))) 271 | } 272 | 273 | result$predicted.sd <- drop(result$predicted.sd) 274 | } 275 | 276 | result$predicted <- as.numeric(B %*% object$alpha) 277 | return(result) 278 | } 279 | 280 | # NOT CALLED BY ANY METHOD: 281 | # Recompute the huge Xtinv following loading a GWSDAT session. 
282 | # GWSDAT.RecomputeXtinv <- function(ContData, GWSDAT_Options) { 283 | # cat("* in GWSDAT.RecomputeXtinv()\n") 284 | # 285 | # # Prepare Data 286 | # names(ContData)[names(ContData) == "AggDate"] <- "AggDatekeep" 287 | # names(ContData)[names(ContData) == "SampleDate"] <- "AggDate" 288 | # 289 | # form <- log(Result.Corr.ND)~XCoord+YCoord+AggDate-1 290 | # X <- model.matrix(form,ContData) 291 | # colnames(X) <- c("XCoord","YCoord","AggDate") 292 | # center <- colMeans(X, na.rm = TRUE) 293 | # X <- sweep(X, 2L, center) 294 | # scale <- apply(X,2,sd) 295 | # scale[1:2] <- rep(min(scale[1:2]),2) 296 | # X <- sweep(X, 2L, scale, "/") 297 | # Y <- model.response(model.frame(form,ContData)) 298 | # 299 | # 300 | # # Initialise 301 | # NIG.a <- GWSDAT_Options$PSplineVars$NIG.a 302 | # NIG.b <- GWSDAT_Options$PSplineVars$NIG.b 303 | # nseg <- GWSDAT_Options$PSplineVars$nseg 304 | # pord <- GWSDAT_Options$PSplineVars$pord 305 | # bdeg <- GWSDAT_Options$PSplineVars$bdeg 306 | # Trial.Lambda <- GWSDAT_Options$PSplineVars$Trial.Lambda 307 | # 308 | # mat <- GWSDAT.st.matrices(X, xrange = xrange <- t(apply(X, 2, range)), 309 | # ndims = 3, nseg = rep(nseg,3), pord = pord, bdeg = bdeg) 310 | # 311 | # B <- mat$B 312 | # DtD <- mat$P 313 | # 314 | # BtB <- t(B) %*% B 315 | # P.eigen <- eigen(BtB + DtD) 316 | # Mt <- t(P.eigen$vectors)*(1/sqrt(P.eigen$values)) 317 | # Q.svd <- svd(B %*% t(Mt), nu = ncol(B), nv = ncol(B)) 318 | # d <- c(pmin(Q.svd$d,1), rep(0, ncol(B) - length(Q.svd$d)))^2 319 | # e <- 1 - d 320 | # Xtinv <- t(t(P.eigen$vectors)*sqrt(1/P.eigen$values)) %*% Q.svd$v 321 | # return(Xtinv) 322 | # 323 | # } 324 | 325 | -------------------------------------------------------------------------------- /R/fitSVM.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | fitSVM <- function(Cont.Data, Cont.Name, GWSDAT_Options) { 5 | 6 | 7 | temp.Cont.Data <- Cont.Data[Cont.Data$Constituent == Cont.Name,] 8 | temp.Cont.Data <- na.omit(temp.Cont.Data) 9 | names(temp.Cont.Data)[names(temp.Cont.Data) == "AggDate"] <- "AggDatekeep" 10 | names(temp.Cont.Data)[names(temp.Cont.Data) == "SampleDate"] <- "AggDate" 11 | 12 | 13 | 14 | gamma <- GWSDAT_Options[["gamma"]] 15 | cost <- GWSDAT_Options[["cost"]] 16 | 17 | gamma <- try(selectGamma(temp.Cont.Data, gamma)) 18 | 19 | if (inherits(gamma, "try-error")) { gamma = NA }#else{gamma=1/gamma}#Wayne 20 | 21 | 22 | svm.temp <- try(tune.svm(log(Result.Corr.ND) ~ AggDate + XCoord + YCoord, 23 | data = temp.Cont.Data, 24 | gamma = gamma, 25 | cost = cost, 26 | scale = TRUE, 27 | tunecontrol = tune.control(cross = min(GWSDAT_Options[["cross"]], nrow(temp.Cont.Data))))) 28 | 29 | 30 | if (!inherits(svm.temp, "try-error")) { 31 | 32 | temp.Cont.Data$ModelPred <- exp(predict(svm.temp$best.model,newdata = temp.Cont.Data)) 33 | 34 | } else { 35 | 36 | temp.Cont.Data$ModelPred <- rep(NA,nrow(temp.Cont.Data)) 37 | 38 | } 39 | 40 | names(temp.Cont.Data)[names(temp.Cont.Data) == "AggDate"] <- "SampleDate" 41 | names(temp.Cont.Data)[names(temp.Cont.Data) == "AggDatekeep"] <- "AggDate" 42 | 43 | temp.Cont.Data$Result.Corr.ND[!is.finite(temp.Cont.Data$Result.Corr.ND)] <- NA #Wayne V3 coerce -inf to NA for NAPL only data sets. 
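# Return both the relabelled data and the tuning object; callers test
# Model.tune with inherits(x, "try-error") before using its predictions.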
44 | 
45 | list(Cont.Data = temp.Cont.Data, Model.tune = svm.temp)
46 | 
47 | }
48 | 
49 | 
50 | selectGamma <- function(temp.Cont.Data,gamma){
51 | 
52 | if (gamma[1] != 0) { return(gamma) }
53 | 
54 | 
55 | tempgamma <- matrix(nrow=50,ncol=2)
56 | 
57 | for (i in 1:nrow(tempgamma)) {
58 | 
59 | tempgamma[i,] <- GWSDAT.sigest(log(Result.Corr.ND)~AggDate+XCoord+YCoord,temp.Cont.Data)
60 | 
61 | }
62 | 
63 | if (length(gamma) == 1) {
64 | 
65 | gamma <- mean(0.5*(tempgamma[,1]+tempgamma[,2]))
66 | #gamma<-median(1/apply(tempgamma,1,mean)) #Wayne 26th June 2009
67 | 
68 | }else{
69 | 
70 | #gamma<-quantile(apply(tempgamma,1,mean),p=c(0.1,0.5,0.9))
71 | #gamma<-c(mean(0.5*(tempgamma[,1]+tempgamma[,2])), quantile(tempgamma[,2],p=0.9))
72 | #gamma<-c(quantile(tempgamma[,2],p=0.95))
73 | #gamma<-sort(apply(tempgamma,2,mean))[1]+c(.3,.5,.7)*diff(sort(apply(tempgamma,2,mean)))
74 | #gamma<-quantile(1/apply(tempgamma,1,mean),p=c(.1,.5,.9)) #Wayne 26th June 2009
75 | gamma <- sort(apply(tempgamma,2,mean))[1]+c(.3,.5,.7)*diff(sort(apply(tempgamma,2,mean)))
76 | 
77 | 
78 | 
79 | 
80 | }
81 | 
82 | return(gamma)
83 | }
84 | 
85 | 
86 | 
87 | 
-------------------------------------------------------------------------------- /R/global.R: --------------------------------------------------------------------------------
1 | 
2 | 
3 | # This is the place in Shiny to define global _constant_ variables.
4 | # Note that these variables become locked if the software is packaged.
5 | #
6 | # The binding can be unlocked, but instead of doing this rather complicated step
7 | # I put all non-constant variables into the server() function (non-global) and pass them to
8 | # the functions that need them.
9 | #
10 | 
11 | coord_units <- c("","metres", "feet")
12 | conc_units <- c("ng/l", "ug/l", "mg/l", "Level",
13 | "metres", # for GW (groundwater depth)
14 | "feet", # for GW (groundwater depth)
15 | "mm", # for NAPL thickness
16 | "pH")
17 | conc_flags <- c("", "E-acc", "Omit", "NotInNAPL", "Redox")
18 | conc_header <- list("WellName", "Constituent", "SampleDate", "Result", "Units", "Flags")
19 | well_header <- list("WellName", "XCoord", "YCoord", "Aquifer")
-------------------------------------------------------------------------------- /R/importTables.R: --------------------------------------------------------------------------------
1 | 
2 | 
3 | parseTable <- function(tbl = NULL, type = NULL, wells = NULL, dsource = "") {
4 | 
5 | if (is.null(tbl)) {
6 | #stop("First argument \'tbl\' is missing\n")
7 | return(NULL)
8 | }
9 | 
10 | if (is.null(type)) {
11 | #stop("Need to specify \'type\' of table: \'contaminant\' or \'well\'\n")
12 | return(NULL)
13 | }
14 | 
15 | if (type == "contaminant" && is.null(wells)) {
16 | #stop("If type = \'contaminant\', need to specify the parameter \'wells\'.")
17 | return(NULL)
18 | }
19 | 
20 | 
21 | if (nrow(tbl) == 0)
22 | return(NULL)
23 | 
24 | if (dsource == "excel")
25 | return(tbl)
26 | 
27 | 
28 | # Create empty buffer for valid entries
29 | val_buf <- tbl[-c(1:nrow(tbl)),]
30 | 
31 | 
32 | # Parse wells table.
33 | if (type == "wells") {
34 | 
35 | duplicate_wells = c()
36 | 
37 | # Loop over rows and check every column.
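# Rows with a missing/empty WellName, XCoord or YCoord are skipped outright;
# for a duplicated WellName only its first occurrence is kept, and the
# duplicates are reported after the loop.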
38 | for (i in 1:nrow(tbl)) { 39 | 40 | if (tbl$WellName[i] == "" || is.na(tbl$WellName[i])) next 41 | if (tbl$XCoord[i] == "" || is.na(tbl$XCoord[i])) next 42 | if (tbl$YCoord[i] == "" || is.na(tbl$YCoord[i])) next 43 | 44 | if (tbl$WellName[i] %in% val_buf$WellName) { 45 | duplicate_wells <- c(duplicate_wells, tbl$WellName[i]) 46 | next 47 | } 48 | 49 | # If we made it until here, the row is ok. 50 | val_buf <- rbind(val_buf, tbl[i,]) 51 | } 52 | 53 | # Check if all wells are unique 54 | if (length(duplicate_wells) > 0) { 55 | msg <- paste("Found duplicate WellNames in Well Coordinate table: ", 56 | paste(duplicate_wells, collapse = ","), 57 | ". Only keeping first entry.") 58 | showNotification(msg, type = "warning", duration = 10) 59 | } 60 | } 61 | 62 | # Parse contaminant table. 63 | if (type == "contaminant") { 64 | 65 | # In some data sets spaces in WellName are present Well Table but not 66 | # contaminant table: Delete spaces in both to synchronize. 67 | wells <- rm_spaces(as.character(unique(wells))) 68 | 69 | # Make sure the WellName is not a factor (will break rm_spaces) 70 | tbl$WellName <- as.character(tbl$WellName) 71 | 72 | # Some data sets have "Levels" instead of "Level" as unit for GW. 73 | # Temporarily add "Levels" to units so they don't get lost. 74 | units <- tolower(c(conc_units, "Levels")) 75 | 76 | invalid_wells <- c() 77 | invalid_sampledates <- 0 78 | invalid_units <- 0 79 | invalid_flags <- 0 80 | 81 | #ptm <- proc.time() 82 | # Loop over rows and check every column. 83 | for (i in 1:nrow(tbl)) { 84 | 85 | 86 | if (tbl$WellName[i] == "") next 87 | 88 | # WellName must exist in provided 'wells' vector. 89 | if (!(rm_spaces(tbl$WellName[i]) %in% wells)) { 90 | invalid_wells <- c(invalid_wells, tbl$WellName[i]) 91 | next 92 | } 93 | 94 | if (tbl$Constituent[i] == "") next 95 | 96 | # If some weird number or a string was put in, it will show up as . 97 | if (is.na(tbl$SampleDate[i])) { 98 | invalid_sampledates <- invalid_sampledates + 1 99 | next 100 | } 101 | 102 | if (is.na(tbl$Result[i]) || tbl$Result[i] == "") next 103 | 104 | if (!(tolower(tbl$Units[i]) %in% units)) { 105 | invalid_units <- invalid_units + 1 106 | next 107 | } 108 | 109 | if (!(tbl$Flags[i] %in% conc_flags)) { 110 | invalid_flags <- invalid_flags + 1 111 | next 112 | } 113 | 114 | # If we made it until here, the row is ok. 
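# Note: growing val_buf one row at a time with rbind() is quadratic in the
# number of accepted rows; fine for typical monitoring tables, though a
# preallocated keep/drop logical vector would scale better.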
115 | val_buf <- rbind(val_buf, tbl[i,])
116 | }
117 | #print("Time to go through table (parseTable): ")
118 | #print(proc.time() - ptm)
119 | 
120 | # Report WellNames in the contaminant table that are missing from the well table.
121 | if (length(invalid_wells) > 0) {
122 | msg <- paste0("Found ", length(invalid_wells), " WellName(s) in Contaminant table not defined in Well table.")
123 | showNotification(msg, type = "warning", duration = 10)
124 | }
125 | 
126 | if (invalid_sampledates > 0) {
127 | msg <- paste0("Found ", invalid_sampledates, " invalid SampleDate(s) in Contaminant table.")
128 | showNotification(msg, type = "warning", duration = 10)
129 | }
130 | 
131 | if (invalid_units > 0) {
132 | msg <- paste0("Found ", invalid_units, " invalid Units in Contaminant table.")
133 | showNotification(msg, type = "warning", duration = 10)
134 | }
135 | 
136 | if (invalid_flags > 0) {
137 | msg <- paste0("Found ", invalid_flags, " invalid Flags in Contaminant table.")
138 | showNotification(msg, type = "warning", duration = 10)
139 | }
140 | 
141 | }
142 | 
143 | showNotification(paste0("Parsed ", nrow(val_buf), " row(s) from ", type, " table."), type = "message", duration = 10)
144 | 
145 | if (nrow(val_buf) == 0)
146 | return(NULL)
147 | 
148 | return(val_buf)
149 | 
150 | }
151 | 
152 | validateTable <- function(tbl) {
153 | 
154 | if (is.null(tbl))
155 | return(FALSE)
156 | 
157 | return(TRUE)
158 | }
-------------------------------------------------------------------------------- /R/interpBary.R: --------------------------------------------------------------------------------
1 | 
2 | ###### Barycentric Interpolation Function ####
3 | 
4 | 
5 | #' @importFrom Matrix sparseMatrix
6 | #' @importFrom sp point.in.polygon
7 | # #' @importFrom geometry delaunay
8 | interpBary <- function(model,AggDate,my.area,type=c("Predicted","Lower 95% CI","Upper 95% CI","% sd","IQR/2")) {
9 | 
10 | 
11 | type <- match.arg(type)
12 | if(length(AggDate)!=1){stop("Agg Date must be length 1")}
13 | 
14 | 
15 | my.area <- my.area[chull(my.area),,drop=F]
16 | my.exp.area <- expandpoly(my.area, fact = 1.15)
17 | my.area <- expandpoly(my.area, fact = 1.05)
18 | colnames(my.area) <- c("XCoord", "YCoord")
19 | 
20 | 
21 | 
22 | x0<-seq(min(my.area[,"XCoord"]),max(my.area[,"XCoord"]),length=100)
23 | y0<-seq(min(my.area[,"YCoord"]),max(my.area[,"YCoord"]),length=100)
24 | 
25 | x0<-sort(unique(c(x0,my.area[,1])))
26 | y0<-sort(unique(c(y0,my.area[,2])))
27 | 
28 | pred.df<-expand.grid(XCoord=x0,YCoord=y0)
29 | pred.df$AggDate<-as.numeric(AggDate)
30 | pred.df$pred<-rep(NA,nrow(pred.df))
31 | 
32 | 
33 | if(!is.null(model)){
34 | 
35 | ####################### eval.df ####################################
36 | colnames(my.exp.area)<-c("XCoord","YCoord")
37 | eval.df<-gridpts(my.exp.area,250)
38 | eval.df<-data.frame(XCoord=eval.df[,1],YCoord=eval.df[,2])
39 | eval.df<-rbind(eval.df,my.exp.area)
40 | eval.df$AggDate=as.numeric(AggDate)
41 | 
42 | 
43 | 
44 | temppred<-predict(model,newdata=eval.df,se=type!="Predicted")
45 | eval.df$pred<-temppred$predicted
46 | eval.df$pred.sd<-temppred$predicted.sd
47 | 
48 | if(type=="Lower 95% CI"){eval.df$pred<-eval.df$pred-1.96*eval.df$pred.sd}
49 | if(type=="Upper 95% CI"){eval.df$pred<-eval.df$pred+1.96*eval.df$pred.sd}
50 | if(type=="% sd") {eval.df$pred<-100*(exp(eval.df$pred.sd)-1)}
51 | ### Daniel's approx to sd(exp(x)).
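# (For X ~ N(mu, sigma^2) the lognormal sd is exactly
#  exp(mu + sigma^2/2) * sqrt(exp(sigma^2) - 1); the commented-out line below
#  is an algebraically equivalent rewrite,
#  exp(mu + sigma^2) * sqrt(1 - exp(-sigma^2)).)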
52 | #if(type=="sd") {eval.df$pred<-exp(eval.df$pred + eval.df$pred.sd^2)*sqrt(1 - exp(-eval.df$pred.sd^2))} 53 | if(type=="IQR/2") {eval.df$pred<-0.5*(exp(qnorm(p=c(0.75), mean = eval.df$pred, sd = eval.df$pred.sd))-exp(qnorm(p=c(0.25), mean = eval.df$pred, sd = eval.df$pred.sd)))} 54 | 55 | 56 | ####################### pred.df #################################### 57 | 58 | pred.df$InOut <- !sp::point.in.polygon(pred.df$XCoord,pred.df$YCoord,my.area[,1],my.area[,2]) == 0 59 | 60 | predred.df <- pred.df[pred.df$InOut,] 61 | 62 | 63 | # 64 | # The following code needs attention: 65 | # - delaunayn() will generate warnings/errors if the set of points is not 66 | # convex (check before or just catch). 67 | # - As a consequence, calls to contourLines() will produce warnings. 68 | 69 | dn <- try(delaunayn(eval.df[,c("XCoord","YCoord")],options=""), silent = T) 70 | #dn <- try(delaunayn(eval.df[,c("XCoord","YCoord")]), silent = T) 71 | if (!inherits(dn, "try-error") && class(dn)[1]=="delaunayn") {dn<-dn$tri} ## Ensuring matrix of Delaunay triangulation is returned. 72 | 73 | if (!inherits(dn, "try-error")) { 74 | 75 | ### Backwards compatibility for function tsearch in package geometry. 76 | if(utils::packageVersion("geometry")>'0.3.6'){ 77 | 78 | tri <- tsearch(eval.df[,"XCoord"], eval.df[,"YCoord"], dn ,predred.df[,"XCoord"] , 79 | predred.df[,"YCoord"], method = "orig",bary = T) 80 | }else{ 81 | 82 | tri <- tsearch(eval.df[,"XCoord"], eval.df[,"YCoord"], dn ,predred.df[,"XCoord"] , 83 | predred.df[,"YCoord"],bary = T) 84 | } 85 | 86 | active <- dn[tri$idx,] 87 | 88 | 89 | M <- Matrix::sparseMatrix(i = rep(1:nrow(predred.df), each = 3), 90 | j = as.numeric(t(active)), x = as.numeric(t(tri$p)), 91 | dims = c(nrow(predred.df), length(eval.df$pred))) 92 | 93 | predred.df$pred <- as.numeric(M %*% eval.df$pred) 94 | 95 | 96 | pred.df$pred[pred.df$InOut] <- predred.df$pred 97 | } else { 98 | cat("Encountered github issue #123 associated to delaunay(). Coordinates lack convex hull.\n") 99 | } 100 | 101 | } 102 | 103 | out <- list(x = x0, y = y0, z = matrix(pred.df$pred,nrow = length(x0), ncol = length(y0))) 104 | 105 | return(out) 106 | 107 | } 108 | 109 | -------------------------------------------------------------------------------- /R/interpConc.R: -------------------------------------------------------------------------------- 1 | 2 | #' @importFrom splancs gridpts areapl 3 | interpConc <- function(csite, substance, timepoint,UseReducedWellSet) { 4 | 5 | if(UseReducedWellSet){ 6 | model.tune <- csite$Reduced.Fitted.Data[[substance]][["Model.tune"]] 7 | tmp_cont <- csite$Reduced.Fitted.Data[[substance]]$Cont.Data 8 | }else{ 9 | model.tune <- csite$Fitted.Data[[substance]][["Model.tune"]] 10 | tmp_cont <- csite$Fitted.Data[[substance]]$Cont.Data 11 | } 12 | 13 | Well.Coords <- csite$All.Data$sample_loc$data 14 | Col.Option <- csite$ui_attr$spatial_options["Scale colours to Data"] 15 | 16 | # 17 | # Extract useable wells for given substance and timestep. 18 | # 19 | 20 | 21 | tmp_wells_earlier <- unique(tmp_cont[as.numeric(tmp_cont$AggDate) <= timepoint,]$WellName) 22 | tmp_wells_later <- unique(tmp_cont[as.numeric(tmp_cont$AggDate) >= timepoint,]$WellName) 23 | 24 | Good.Wells <- intersect(as.character(tmp_wells_earlier), as.character(tmp_wells_later)) 25 | 26 | # 27 | # Find the limits of the contour. 
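# The plot window is padded by 6% of the well-coordinate range on each axis;
# if one coordinate range is much larger than the other (ratio > 1.4), the
# padding on the smaller-range axis is dropped by the checks below.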
28 | # 29 | diffrangeX <- 0.06*(range(Well.Coords$XCoord)[2] - range(Well.Coords$XCoord)[1]) 30 | diffrangeY <- 0.06*(range(Well.Coords$YCoord)[2] - range(Well.Coords$YCoord)[1]) 31 | 32 | 33 | if (diffrangeY != 0) 34 | if ((diffrangeX/diffrangeY) > 1.4) {diffrangeY = 0} 35 | 36 | if (diffrangeX != 0) 37 | if ((diffrangeY/diffrangeX) > 1.4) {diffrangeX = 0} 38 | 39 | Contour.xlim = c(range(Well.Coords$XCoord)[1] - diffrangeX,range(Well.Coords$XCoord)[2] + diffrangeX) 40 | Contour.ylim = c(range(Well.Coords$YCoord)[1] - diffrangeY,range(Well.Coords$YCoord)[2] + diffrangeY) 41 | 42 | 43 | # 44 | # Define area with outer hull 45 | # 46 | 47 | Do.Image <- TRUE 48 | 49 | if (length(Good.Wells) < 3) { 50 | 51 | # Not enough wells to form an area. 52 | Do.Image <- FALSE 53 | my.area <- as.matrix(Well.Coords[,c("XCoord","YCoord")]) 54 | } else { 55 | my.area <- as.matrix(Well.Coords[as.character(Well.Coords$WellName) %in% as.character(Good.Wells),c("XCoord","YCoord")]) 56 | } 57 | 58 | tmp_my.area <- my.area[chull(my.area),,drop = FALSE] 59 | 60 | if (nrow(tmp_my.area) < 3) { 61 | Do.Image = FALSE 62 | my.area <- as.matrix(Well.Coords[,c("XCoord","YCoord")]) 63 | } else if ((splancs::areapl(tmp_my.area) / csite$All.Data$sample_loc$area) < 0.01) { 64 | Do.Image = FALSE 65 | my.area <- as.matrix(Well.Coords[,c("XCoord","YCoord")]) 66 | } 67 | 68 | 69 | if (!Do.Image) { 70 | 71 | # Not enough wells for the area. 72 | my.area <- cbind( 73 | c(Contour.xlim[1],Contour.xlim[1],Contour.xlim[2],Contour.xlim[2]), 74 | c(Contour.ylim[1],Contour.ylim[2],Contour.ylim[1],Contour.ylim[2]) 75 | ) 76 | 77 | colnames(my.area) <- c("XCoord","YCoord") 78 | 79 | } 80 | 81 | 82 | # 83 | # Prepare area for interpolation. 84 | # 85 | my.area <- my.area[chull(my.area),, drop = F] 86 | my.exp.area <- expandpoly(my.area, fact = 1.05) 87 | 88 | 89 | if (nrow(my.exp.area) != 1) { 90 | eval.df <- splancs::gridpts(my.exp.area, 350) 91 | eval.df <- rbind(eval.df,my.exp.area) 92 | } else { 93 | eval.df <- my.exp.area 94 | } 95 | 96 | colnames(eval.df)[1:2] <- c("XCoord","YCoord") 97 | try(rownames(eval.df) <- NULL) 98 | eval.df <- as.data.frame(eval.df) 99 | eval.df$AggDate = rep(timepoint,nrow(eval.df)) 100 | 101 | 102 | # 103 | # Interpolate values inside the area. 104 | # 105 | if (!inherits(model.tune,"try-error")) { 106 | 107 | #interp.pred<-GWSDAT.Interp(model.tune$best.mod,AggDate=eval.df$AggDate[1],eval.df,type=if(is.null(csite$ui_attr$pred_interval)){"predict"}else{as.character(csite$ui_attr$pred_interval)}) 108 | interp.pred <- try( interpBary(model.tune$best.mod, 109 | AggDate = eval.df$AggDate[1], 110 | my.area = my.area, 111 | type = as.character(csite$ui_attr$pred_interval) 112 | ) 113 | ) 114 | 115 | if (inherits(interp.pred,"try-error")) { 116 | interp.pred <- predictValues(NULL, AggDate = eval.df$AggDate[1], eval.df) 117 | } 118 | 119 | } else { 120 | 121 | interp.pred <- predictValues(NULL, AggDate = eval.df$AggDate[1], eval.df) 122 | Do.Image <- FALSE 123 | 124 | } 125 | 126 | 127 | # 128 | # Extract level cut (for interp.pred z dimension). 129 | # 130 | if (csite$ui_attr$pred_interval != "% sd") { 131 | 132 | lev_cut <- csite$ui_attr$lev_cut 133 | if (csite$ui_attr$conc_unit_selected == "mg/l") {lev_cut <- lev_cut/10} 134 | if (csite$ui_attr$conc_unit_selected == "ng/l") {lev_cut <- lev_cut*10} 135 | 136 | } else { 137 | 138 | lev_cut <- csite$ui_attr$sd_lev_cut 139 | 140 | } 141 | 142 | 143 | # 144 | # Modify z dimension. 
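# (Model predictions are on the log scale in ug/l, so below they are
# exponentiated and rescaled for display: divided by 1000 for mg/l,
# multiplied by 1000 for ng/l.)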
145 | # 146 | if (Do.Image) { 147 | 148 | if (csite$ui_attr$pred_interval %in% c("Lower 95% CI","Predicted","Upper 95% CI","IQR/2")) { 149 | 150 | if (csite$ui_attr$pred_interval != "IQR/2") {interp.pred$z <- exp(interp.pred$z)} 151 | 152 | if (csite$ui_attr$conc_unit_selected == "mg/l") {interp.pred$z <- interp.pred$z/1000} 153 | if (csite$ui_attr$conc_unit_selected == "ng/l") {interp.pred$z <- interp.pred$z*1000} 154 | 155 | } 156 | 157 | if (max(interp.pred$z,na.rm = T) > lev_cut[length(lev_cut)] && !Col.Option) { 158 | 159 | interp.pred$z[which(interp.pred$z > lev_cut[length(lev_cut)],arr.ind = T)] <- lev_cut[length(lev_cut)] 160 | } 161 | 162 | } else{ 163 | 164 | interp.pred$z[,] <- NA 165 | 166 | 167 | } 168 | 169 | 170 | return(list(data = interp.pred, Do.Image = Do.Image, 171 | Contour.xlim = Contour.xlim, Contour.ylim = Contour.ylim)) 172 | 173 | } 174 | -------------------------------------------------------------------------------- /R/jobqueue.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | infoQueue <- function(dbPath = NULL, con = NULL, verbose = FALSE) { 4 | 5 | if (!is.null(dbPath)) { 6 | drv <- DBI::dbDriver("SQLite") 7 | con <- DBI::dbConnect(drv, dbPath) 8 | } 9 | 10 | jq <- DBI::dbReadTable(con, 'jobqueue') 11 | rq <- DBI::dbReadTable(con, 'running') 12 | dq <- DBI::dbReadTable(con, 'done') 13 | 14 | if (verbose) cat(" Info: jobqueue (", nrow(jq), " records), running (", nrow(rq), " records), done (", nrow(dq), ").\n") 15 | 16 | # Only disconnect if dbPath was specified. 17 | if (!is.null(dbPath)) { 18 | DBI::dbDisconnect(con) 19 | } 20 | 21 | return(list(jq = jq, rq = rq, dq = dq)) 22 | } 23 | 24 | 25 | #FIXME (minor): Instead of calling evalQueue() and after that infoQueue (2x read all tables from DB), 26 | # use info from evalQueue() to pass back to main process. 27 | evalQueue <- function(jq_db, max_workers = 2, max_done_jobs = 1e10) { 28 | 29 | con <- jq_db$dbConn 30 | dbPath <- jq_db$dbPath 31 | 32 | # Read all tables. 33 | jq <- DBI::dbReadTable(con, "jobqueue") 34 | rq <- DBI::dbReadTable(con, "running") 35 | dq <- DBI::dbReadTable(con, "done") 36 | 37 | # Check number of running jobs 38 | active_workers <- nrow(rq) 39 | 40 | # jobs that are done and need evaluation are saved in this list will be returned. 41 | done_jobs <- list() 42 | 43 | # Execute new job from jobqueue if some job is in queue and the number of 44 | # running jobs does not exceed the maximum of allowed workers (max_workers). 45 | if (nrow(jq) != 0 && active_workers < max_workers) { 46 | 47 | # Retrieve first record. 48 | newjob <- jq[1,] 49 | 50 | # Remove job by id. 51 | SQLcmd <- paste0('DELETE FROM jobqueue WHERE job_id=\'', newjob$job_id, '\';') 52 | rs <- DBI::dbSendQuery(con, SQLcmd) 53 | DBI::dbClearResult(rs) 54 | 55 | # Note: File newjob$inputfile should contain all necessary information to 56 | # identify the job and the data. 57 | scriptPath<-system.file("application", newjob$script, package = "GWSDAT") 58 | ## Adding escaped double quotes to deal with spaces in file paths.. 59 | Rcmd <- paste0('Rscript ',"\"", scriptPath,"\"", ' ', "\"",newjob$inputfile,"\"", ' ', "\"",newjob$outputfile,"\"", ' ', "\"", dbPath,"\"") 60 | 61 | cat("Start BG process: ", Rcmd, "\n") 62 | 63 | system(Rcmd, wait = FALSE, invisible = TRUE) 64 | 65 | # Add a progress field to the job record and append to 'running' table. 
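# (Intended contract, not enforced here: the launched script writes a
# matching record into the 'done' table when it finishes, and evalQueue()
# picks that up on a later poll in the 'done' handling below. The worker
# scripts themselves live in inst/application.)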
66 | newjob$progress <- 0 67 | DBI::dbWriteTable(con, "running", newjob, append = TRUE) 68 | } 69 | 70 | # Check if any 'done' jobs have to be evaluated 71 | if (nrow(dq) > 0) { 72 | 73 | # Extract done jobs that have not been evaluated yet. 74 | not_eval <- dq[which(dq$evaluated == 0),] 75 | 76 | # If any jobs have been found.. 77 | if (nrow(not_eval) > 0) { 78 | 79 | cat("--> found ", nrow(not_eval), ' done jobs that require evaluation.\n') 80 | 81 | # .. go through each one and evaluate result in the 'output' file field. 82 | for (row in 1:nrow(not_eval)) { 83 | 84 | # Abort extracting 'done' jobs if the threshold 'max_done_jobs' is reached 85 | if (row > max_done_jobs) 86 | break 87 | 88 | job_id <- not_eval[row, "job_id"] 89 | 90 | cat(' * evaluating job_id ', job_id, '.....\n') 91 | 92 | # Do job evaluation. 93 | done_jobs[[length(done_jobs) + 1]] <- list(job_id = job_id, data_id = not_eval[row, "data_id"], 94 | script = not_eval[row, "script"], 95 | outputfile = not_eval[row, "outputfile"]) 96 | 97 | # Flag the 'evaluated' field as TRUE (1). This can be later deleted. 98 | SQLcmd = paste0('UPDATE done SET evaluated = 1 WHERE job_id = ', job_id, ';') 99 | rs <- DBI::dbSendQuery(con, SQLcmd) 100 | DBI::dbClearResult(rs) 101 | } 102 | } 103 | } 104 | 105 | # Return the list of done jobs that require evaluation 106 | if (length(done_jobs) > 0) 107 | return(done_jobs) 108 | 109 | return(NULL) 110 | } 111 | 112 | 113 | # #' @import DBI 114 | # #' @import RSQLite 115 | createJobQueue <- function() { 116 | 117 | dbPath <- tempfile(pattern = "jobqueue_", tmpdir = tempdir(), fileext = ".db") 118 | 119 | drv <- DBI::dbDriver("SQLite") 120 | con <- DBI::dbConnect(drv, dbPath) 121 | 122 | tables <- DBI::dbListTables(con) 123 | 124 | # Setup the jobqueue table. 125 | jobqueue <- data.frame('info' = character(), 'job_id' = integer(), 'script' = character(), 126 | 'data_set' = character(),'data_id' = integer(), 'inputfile' = character(), 127 | 'outputfile' = character(), stringsAsFactors = FALSE) 128 | 129 | running <- data.frame('info' = character(), 'job_id' = integer(), 'script' = character(), 130 | 'data_set' = character(), 'data_id' = integer(), 'inputfile' = character(), 131 | 'outputfile' = character(), 'progress' = integer(), stringsAsFactors = FALSE) 132 | 133 | done <- data.frame('info' = character(), 'job_id' = integer(), 'script' = character(), 134 | 'data_set' = character(), 'data_id' = integer(), 'evaluated' = integer(), 'outputfile' = character(), 135 | stringsAsFactors = FALSE) 136 | 137 | if (!("jobqueue" %in% tables)) { 138 | 139 | DBI::dbWriteTable(con, "jobqueue", jobqueue) 140 | DBI::dbWriteTable(con, "running", running) 141 | DBI::dbWriteTable(con, "done", done) 142 | 143 | } else { 144 | cat("Not creating fresh tables, found jobqueue table in database.\n") 145 | return(NULL) 146 | } 147 | 148 | return(list(dbPath = dbPath, dbConn = con)) 149 | } 150 | 151 | 152 | createUniqueJobID <- function(dbConn) { 153 | 154 | # Extract all 'job_id' from the the tables. 155 | tables <- infoQueue(con = dbConn) 156 | all_job_ids <- c(tables$jq$job_id, tables$rq$job_id, tables$dq$job_id) 157 | 158 | new_id <- 0 159 | 160 | # Loop as long as no unique data id can be found. 
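# With job ids drawn uniformly from 1..100000 and only a handful of existing
# jobs, the chance of a first-draw collision is about n/100000, so this loop
# almost always exits after a single iteration.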
161 | while (1) { 162 | new_id <- sample.int(100000, 1) 163 | 164 | if (!(new_id %in% all_job_ids)) 165 | break 166 | } 167 | 168 | return(new_id) 169 | 170 | } 171 | 172 | 173 | addQueueJob <- function(jq, script_name, info = 'short job description', data_name, 174 | data_id, pdata, params) { 175 | 176 | 177 | finput <- tempfile(pattern = "filein_", tmpdir = tempdir(), fileext = ".rds") 178 | foutput <- tempfile(pattern = "fileout_", tmpdir = tempdir(), fileext = ".rds") 179 | 180 | job_id <- createUniqueJobID(jq$dbConn) 181 | 182 | # Put all necessary parameters to run the method and identify the job and data 183 | # into this list and save it to the .rds file that is read by the target script. 184 | input_data <- list(params = params, data = pdata, job_id = job_id) 185 | saveRDS(input_data, file = finput) 186 | 187 | # Create record for the 'jobqueue' table and append to table. 188 | newjob <- data.frame('info' = info, 'job_id' = job_id, 'script' = script_name, 189 | 'data_set' = data_name, 'data_id' = data_id, 190 | 'inputfile' = finput, 'outputfile' = foutput, stringsAsFactors = FALSE) 191 | 192 | DBI::dbWriteTable(jq$dbConn, "jobqueue", newjob, append = TRUE) 193 | 194 | } -------------------------------------------------------------------------------- /R/launchApp.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Print warnings when they occur. 4 | options(warn = 1) 5 | 6 | #' Launches the GWSDAT Shiny application. 7 | #' 8 | #' The shiny application can run in multi or single data mode. If no parameter is 9 | #' specified with \code{launchApp}, the application starts in multi data mode, which 10 | #' includes a data manager and several data import facilities. If the parameter \code{session_file} 11 | #' was specified, the application launches in single data mode, which is limited to the 12 | #' analysis of the data specified by \code{session_file}. 13 | #' 14 | #' 15 | #' @param GWSDAT_Options A list of start options created with \code{\link{createOptions}}. 16 | #' @param session_file Path to .rds file containing a GWSDAT analysis session. 17 | #' 18 | #' @return None 19 | #' 20 | #' @export 21 | #' 22 | #' @import stats grDevices graphics MASS shiny shinycssloaders geometry zoo rhandsontable sf 23 | #' @importFrom shinyjs show hide delay onclick useShinyjs 24 | #' @importFrom utils sessionInfo write.csv packageVersion 25 | #' @importFrom readxl excel_sheets 26 | #' 27 | #' @examples 28 | #' if(interactive()) { 29 | #' launchApp(session_file = "path_to_GWSDAT_session.rds") # launch in single data mode. 30 | #' launchApp() # launch in multi data mode 31 | #' } 32 | launchApp <- function(GWSDAT_Options, session_file) { 33 | 34 | # For R package: Need this here or shinyjs won't work and the connection 35 | # breaks - reason unknown. 36 | # The Browser log will say: SCRIPT5009: 'shinyjs' is undefined. 37 | # The index.html will look fine. did put shinyjs::useShinyjs() into the the 38 | # start of the ui() function where it belongs on default. 
39 |   shinyjs::useShinyjs()
40 |   shiny::addResourcePath("www", system.file("www", package = "GWSDAT"))
41 |   if (missing(GWSDAT_Options) && missing(session_file)) {
42 | 
43 |     .GlobalEnv$APP_RUN_MODE <- "MultiData"
44 | 
45 |     shinyApp(ui = uiFull(), server = server)
46 | 
47 |   } else {
48 | 
49 |     .GlobalEnv$APP_RUN_MODE <- "SingleData"
50 | 
51 | 
52 |     if (!missing(session_file)) {
53 |       .GlobalEnv$session_file <- normalizePath(session_file)
54 |     } else {
55 |       .GlobalEnv$GWSDAT_Options <- GWSDAT_Options
56 |     }
57 | 
58 |     options(shiny.launch.browser = TRUE)
59 | 
60 |     shinyApp(ui = uiSimple(), server = server)
61 |   }
62 | 
63 | }
64 | 
--------------------------------------------------------------------------------
/R/plotSTPredictions.R:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | plotSTPredictions <- function(csite, substance = NULL, Wells.to.Plot = NULL,
4 |                               UseLogScale = FALSE, solute_unit = "ug/l") {
5 | 
6 | 
7 |   # Maybe show a message that nothing is selected.
8 |   if (is.null(substance) || is.null(Wells.to.Plot))
9 |     return(NULL)
10 | 
11 |   if (length(Wells.to.Plot) == 0)
12 |     return(NULL)
13 | 
14 | 
15 |   Cont.Data <- csite$All.Data$Cont.Data
16 | 
17 |   # No model prediction possible without well coords!
18 |   Cont.Data <- Cont.Data[!is.na(Cont.Data$XCoord),]
19 |   Cont.Data <- Cont.Data[!is.na(Cont.Data$YCoord),]
20 | 
21 |   SiteName <- csite$GWSDAT_Options$SiteName
22 | 
23 |   # Extract selected contaminants and wells.
24 |   Cont.Data <- Cont.Data[as.character(Cont.Data$Constituent) %in% substance,]
25 |   Cont.Data$Constituent <- factor(as.character(Cont.Data$Constituent))
26 | 
27 |   Cont.Data <- Cont.Data[as.character(Cont.Data$WellName) %in% Wells.to.Plot,]
28 | 
29 |   # Check if there is data left to plot.
30 |   if (nrow(Cont.Data) == 0) {
31 |     showNotification("No Data to Plot!", type = "error", duration = 10)
32 |     return(NULL)
33 |   }
34 | 
35 | 
36 |   Cont.Data$WellName <- factor(as.character(Cont.Data$WellName), levels = sort(Wells.to.Plot))
37 |   Cont.Data <- Cont.Data[order(Cont.Data$SampleDate),]
38 | 
39 | 
40 | 
41 |   # In the previous version, se.fit was set with this:
42 |   # 'se.fit = FALSE & panel$dlines["Conc. Trend Smoother"]'
43 |   # It seems that it was always FALSE, so the trend lines are always plotted (which is good).
44 |   # Setting it to TRUE omits the trend lines but prints the following error:
45 |   # "Error using packet X, Argument is of length zero".
46 |   # Thus, if disabling trend lines were something we needed, this is where we
47 |   # would have to correct the code.
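  # A minimal sketch of such a correction (untested; 'show_trend' is a
  # hypothetical new argument, not part of the current API):
  #   plotModelPredictions(csite, Cont.Data, SiteName = SiteName,
  #                        se.fit = isTRUE(show_trend),
  #                        UseLogScale = UseLogScale, solute_unit = solute_unit)
  # The call below pins se.fit = FALSE, so the trend lines are always drawn.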
48 | plotModelPredictions(csite, Cont.Data, SiteName = SiteName, 49 | se.fit = FALSE, 50 | UseLogScale = UseLogScale, solute_unit = solute_unit) 51 | 52 | } 53 | 54 | 55 | #' @importFrom lattice xyplot panel.grid 56 | plotModelPredictions <- function(csite, Cont.Data, SiteName = "", se.fit = FALSE, 57 | UseLogScale = FALSE, solute_unit = "ug/l"){ 58 | 59 | if (solute_unit == "mg/l") { 60 | Cont.Data$Result.Corr.ND <- Cont.Data$Result.Corr.ND / 1000 61 | } 62 | 63 | if (solute_unit == "ng/l") { 64 | Cont.Data$Result.Corr.ND <- Cont.Data$Result.Corr.ND * 1000 65 | } 66 | 67 | 68 | NAPL.Present <- any(tolower(as.character(na.omit(Cont.Data$Result))) == "napl") 69 | Cont <- as.character(unique(Cont.Data$Constituent)) 70 | 71 | 72 | #my.xlim <- as.Date(range(c(csite$Cont.Data$SampleDate, csite$All.Data$GW.Data$SampleDate))) 73 | my.xlim <- as.Date(range(Cont.Data$SampleDate)) 74 | 75 | my.xlim.orig = my.xlim 76 | my.xlim[1] <- my.xlim.orig[1] - 0.025*as.numeric(diff(my.xlim.orig)) 77 | my.xlim[2] <- my.xlim.orig[2] + 0.025*as.numeric(diff(my.xlim.orig)) 78 | 79 | 80 | my.ylim <- range(csite$Fitted.Data[[Cont]]$Cont.Data[,c("ModelPred","Upper95","Lower95")], na.rm = TRUE) 81 | 82 | if (solute_unit == "mg/l") {my.ylim <- my.ylim / 1000} 83 | if (solute_unit == "ng/l") {my.ylim <- my.ylim * 1000} 84 | 85 | my.key <- list( 86 | space = "top", 87 | border = FALSE, 88 | columns = 3, 89 | lines = list( 90 | pch = c(0,19,19), lty = c(1,1,1), cex = rep(1.4,3), lwd = c(3), col = c("grey","black","orange"), type = c("l","p", "p") 91 | ), 92 | text = list( 93 | lab = c("Spatiotemporal Prediction","Detectable Data","Non-Detect Data") 94 | ) 95 | ) 96 | 97 | if (NAPL.Present) { 98 | 99 | my.key <- list( 100 | space = "top", 101 | border = FALSE, 102 | columns = 3, 103 | lines = list( 104 | pch = c(0,19,19,19), lty = c(1,1,1,1), cex = rep(1.4,4), lwd = c(3), col = c("grey","black","orange","red"), type = c("l","p", "p","p") 105 | ), 106 | text = list( 107 | lab = c("Spatiotemporal Prediction","Detectable Data","Non-Detect Data","NAPL Substituted Data") 108 | ) 109 | ) 110 | 111 | Cont.Data$ND<-as.character(Cont.Data$ND) 112 | Cont.Data$ND[tolower(as.character(Cont.Data$Result))=="napl"]<-"NAPL" 113 | } 114 | 115 | 116 | #my.plot <- 117 | plot(lattice::xyplot(Result.Corr.ND ~ as.Date(SampleDate) | WellName, 118 | data = Cont.Data, groups = as.character(Cont.Data$ND), 119 | panel = function(x, y,groups,subscripts) { 120 | try( lattice::panel.grid(h = -1, v = 2) ) 121 | groupNDx <- x[groups[subscripts] == "TRUE"] 122 | groupNDy <- y[groups[subscripts] == "TRUE"] 123 | panel.xyplot(groupNDx, groupNDy, col = "orange", pch = 19, cex = 1.0) 124 | 125 | 126 | groupx<-x[groups[subscripts]=="FALSE"] 127 | groupy<-y[groups[subscripts]=="FALSE"] 128 | 129 | panel.xyplot(groupx,groupy,col="black",pch=19,cex=1.0) 130 | 131 | 132 | groupNAPLx <- x[groups[subscripts]=="NAPL"] 133 | groupNAPLy <- y[groups[subscripts]=="NAPL"] 134 | 135 | if(length(groupNAPLx) > 0) { panel.xyplot(groupNAPLx,groupNAPLy,col="red",pch=19,cex=1.0)} 136 | 137 | 138 | #if(sm.fit && length(x)>1){ 139 | 140 | if(length(x)>1){ 141 | 142 | Model<-csite$Fitted.Data[[as.character(Cont)]]$Model.tune 143 | 144 | if(!inherits(Model,"try-error")){ 145 | 146 | 147 | Model<-Model$best.model 148 | eval.df<-data.frame(AggDate=seq(min(x,na.rm=T),max(x,na.rm=T),l=50),XCoord=rep(Cont.Data[subscripts,"XCoord"][1],50),YCoord=rep(Cont.Data[subscripts,"YCoord"][1],50)) 149 | 150 | pred <- predict(Model,eval.df, se = se.fit) ### 151 | eval.df$pred<-pred$predicted 152 | 
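# (The model predicts on the natural-log scale, so pred +/- 1.96 * sd below
# is a pointwise 95% interval on that scale; the exp()/log() round-trips
# that follow rescale the units without leaving the log scale.)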
153 |                 if (se.fit) {
154 |                   eval.df$upper <- pred$predicted + pred$predicted.sd * 1.96
155 |                   eval.df$lower <- pred$predicted - pred$predicted.sd * 1.96
156 |                 }
157 | 
158 |                 if (solute_unit == "mg/l") {
159 | 
160 |                   eval.df$pred <- log(exp(eval.df$pred)/1000)
161 | 
162 |                   if (se.fit) {
163 |                     eval.df$upper <- log(exp(eval.df$upper)/1000)
164 |                     eval.df$lower <- log(exp(eval.df$lower)/1000)
165 |                   }
166 | 
167 |                 }
168 | 
169 |                 if (solute_unit == "ng/l") {
170 | 
171 |                   eval.df$pred <- log(exp(eval.df$pred)*1000)
172 | 
173 |                   if (se.fit) {
174 |                     eval.df$upper <- log(exp(eval.df$upper)*1000)
175 |                     eval.df$lower <- log(exp(eval.df$lower)*1000)
176 |                   }
177 | 
178 |                 }
179 | 
180 | 
181 |                 if (UseLogScale) {
182 | 
183 |                   eval.df$pred <- log(exp(eval.df$pred), base = 10)
184 |                   if (se.fit) {
185 |                     eval.df$upper <- log(exp(eval.df$upper), base = 10)
186 |                     eval.df$lower <- log(exp(eval.df$lower), base = 10)
187 |                   }
188 | 
189 |                 } else {
190 | 
191 |                   eval.df$pred <- exp(eval.df$pred)
192 | 
193 |                   if (se.fit) {
194 |                     eval.df$upper <- exp(eval.df$upper)
195 |                     eval.df$lower <- exp(eval.df$lower)
196 |                   }
197 | 
198 |                 }
199 | 
200 |                 panel.xyplot(as.Date(eval.df$AggDate), eval.df$pred, type = "l", col = "grey", lwd = 3)
201 |                 if (se.fit) {
202 |                   panel.xyplot(as.Date(eval.df$AggDate), eval.df$upper, type = "l", lty = 2, col = "grey", lwd = 2)
203 |                   panel.xyplot(as.Date(eval.df$AggDate), eval.df$lower, type = "l", lty = 2, col = "grey", lwd = 2)
204 |                 }
205 | 
206 |               }
207 |             }
208 | 
209 |           },
210 |           scales = list(y = list(log = UseLogScale)),
211 |           xlab = list("Sampling Date", cex = 1.5), ylab = list(paste("Solute concentration"," (",solute_unit,")",sep=""),cex=1.5),
212 |           #layout = if (length(levels(Cont.Data$Well)) > 30) { c(4,4)} else{NULL},
213 |           xlim = my.xlim,
214 |           #ylim=my.ylim,
215 |           main = if (csite$Aquifer == "") {paste("Spatiotemporal Predictions for ",Cont,"at",SiteName) } else {
216 |             paste("Spatiotemporal Predictions for ", Cont," at ", SiteName, ": Aquifer-",csite$Aquifer, sep = "")},
217 |           drop.unused.levels = FALSE, key = my.key))
218 | }
219 | 
220 | 
221 | plotSTPredictionsPPT <- function(csite, fileout, substance = NULL, Wells.to.Plot = NULL,
222 |                                  UseLogScale = FALSE, solute_unit = "ug/l",
223 |                                  width = 900, height = 500) {
224 | 
225 |   # Initialize Powerpoint file.
226 |   if (is.null(ppt_pres <- initPPT())) {
227 |     return(NULL)
228 |   }
229 | 
230 |   # Create temporary PNG file.
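  # (The PPT export pipeline is render-to-file, then embed: the lattice plot
  # is written to a temporary PNG, placed on a new slide by addPlotPPT() in
  # ppt.R, and the deck is written out with print(ppt_pres, target = fileout).)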
231 | mytemp <- tempfile(fileext = ".png") 232 | 233 | png(mytemp, width = width, height = height) 234 | plotSTPredictions(csite, substance, Wells.to.Plot, UseLogScale, solute_unit) 235 | dev.off() 236 | 237 | ppt_pres <- addPlotPPT(mytemp, ppt_pres, width, height) 238 | 239 | print(ppt_pres, target = fileout) %>% invisible() 240 | 241 | try(file.remove(mytemp)) 242 | 243 | } 244 | 245 | 246 | -------------------------------------------------------------------------------- /R/plotWellReport.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | plotWellReport <- function(csite, Conts.to.plot = NULL, Wells.to.Plot = NULL, 4 | UseLogScale = FALSE){ 5 | 6 | on.exit(palette("default")) 7 | 8 | Cont.Data <- csite$All.Data$Cont.Data 9 | SiteName <- csite$GWSDAT_Options$SiteName 10 | 11 | Cont.Data <- Cont.Data[as.character(Cont.Data$Constituent) %in% Conts.to.plot,] 12 | Cont.Data <- Cont.Data[as.character(Cont.Data$WellName) %in% Wells.to.Plot,] 13 | 14 | if (nrow(Cont.Data) == 0) { 15 | showNotification("Well Report: No data to plot.", type = "warning", duration = 10) 16 | return() 17 | } 18 | 19 | 20 | Cont.Data$WellName <- factor(as.character(Cont.Data$WellName), levels = sort(Wells.to.Plot)) 21 | Cont.Data <- Cont.Data[order(Cont.Data$SampleDate),] 22 | 23 | if (csite$ui_attr$conc_unit_selected == "mg/l") {Cont.Data$Result.Corr.ND <- Cont.Data$Result.Corr.ND/1000} 24 | if (csite$ui_attr$conc_unit_selected == "ng/l") {Cont.Data$Result.Corr.ND <- Cont.Data$Result.Corr.ND*1000} 25 | 26 | if (length(Conts.to.plot) == 1) { 27 | 28 | myplot <- GWSDAT.xyplotWells(csite, Cont.Data, SiteName = SiteName, 29 | sm.fit = csite$ui_attr$ts_options["Conc. Trend Smoother"], UseLogScale = UseLogScale) 30 | 31 | } else { 32 | 33 | myplot <- GWSDAT.xyplotAllContbyWells(csite, Cont.Data, SiteName = SiteName, 34 | UseLogScale = UseLogScale) 35 | 36 | } 37 | 38 | # Make the plot. If called inside renderPlot this will send it into the 39 | # corresponding csite. 40 | print(myplot) 41 | 42 | 43 | 44 | } 45 | 46 | 47 | 48 | #' @importFrom sm sm.regression 49 | #' @importFrom lattice xyplot panel.xyplot 50 | GWSDAT.xyplotWells <- function(csite, Cont.Data, SiteName = "", sm.fit=TRUE, UseLogScale=FALSE){ 51 | 52 | NAPL.Present <- any(tolower(as.character(na.omit(Cont.Data$Result))) == "napl") 53 | 54 | 55 | Cont <- unique(Cont.Data$Constituent) 56 | 57 | my.xlim <- range(c(csite$All.Data$Cont.Data$SampleDate, csite$All.Data$GW.Data$SampleDate)) 58 | 59 | my.xlim.orig = my.xlim 60 | my.xlim[1] <- my.xlim.orig[1] - 0.025*as.numeric(diff(my.xlim.orig)) 61 | my.xlim[2] <- my.xlim.orig[2] + 0.025*as.numeric(diff(my.xlim.orig)) 62 | 63 | 64 | my.key <- list( 65 | space = "top", 66 | border = FALSE, 67 | columns = 2, 68 | points = list( 69 | pch = rep(19,2), 70 | cex = rep(1.4,2), 71 | col = c("black","orange") 72 | ), 73 | text = list( 74 | lab = c("Detectable Data","Non-Detect Data") 75 | ) 76 | ) 77 | 78 | if (NAPL.Present) { 79 | 80 | my.key <- list( 81 | space = "top", 82 | border = FALSE, 83 | columns=3, 84 | points = list( 85 | pch = rep(19,3), 86 | cex = rep(1.4,3), 87 | col = c("black","orange","red") 88 | ), 89 | text = list( 90 | lab = c("Detectable Data","Non-Detect Data","NAPL Substituted Data") 91 | ) 92 | ) 93 | 94 | Cont.Data$ND <- as.character(Cont.Data$ND) 95 | Cont.Data$ND[tolower(as.character(Cont.Data$Result)) == "napl"] <- "NAPL" 96 | } 97 | 98 | # 99 | # There is too much going on in the function arguments. Untangle this.. 
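# One way to untangle it, as an illustrative sketch only (the helper name
# 'makeWellPanel' is hypothetical):
#   makeWellPanel <- function(csite, Cont, sm.fit, UseLogScale) {
#     function(x, y, groups, subscripts) { ... panel body as below ... }
#   }
#   my.plot <- lattice::xyplot(..., panel = makeWellPanel(csite, Cont, sm.fit, UseLogScale))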
100 | # 101 | 102 | my.plot <- lattice::xyplot(Result.Corr.ND ~ as.Date(SampleDate) | WellName, 103 | data = Cont.Data, 104 | groups = as.character(Cont.Data$ND), 105 | panel = function(x, y,groups,subscripts) { 106 | # try(csite.grid(h = -1, v = 2)) 107 | groupNDx <- x[groups[subscripts] == "TRUE"] 108 | groupNDy <- y[groups[subscripts] == "TRUE"] 109 | panel.xyplot(groupNDx,groupNDy,col = "orange", pch = 19, cex = 1.0) 110 | 111 | groupx<-x[groups[subscripts] == "FALSE"] 112 | groupy<-y[groups[subscripts] == "FALSE"] 113 | panel.xyplot(groupx,groupy,col = "black",pch=19,cex=1.0) 114 | 115 | groupNAPLx <- x[groups[subscripts] == "NAPL"] 116 | groupNAPLy <- y[groups[subscripts] == "NAPL"] 117 | 118 | if (length(groupNAPLx)>0) {panel.xyplot(groupNAPLx, groupNAPLy, col = "red", pch = 19,cex = 1.0)} 119 | 120 | if (sm.fit && length(x) > 1) { 121 | 122 | h = try(csite$Traffic.Lights$h[as.character(unique(Cont.Data[subscripts,"WellName"])),as.character(Cont)]) 123 | try(eval.points<-seq(min(x,na.rm=T),max(x,na.rm=T),l=40)) 124 | try(if(UseLogScale){y=log(10^y)}else{y=log(y)}) 125 | try(sr <- sm::sm.regression(x,y,h=h,display="none",eval.points=eval.points)) 126 | 127 | try(sr.keep<-sr$estimate) 128 | try(sr.keep <- if(UseLogScale){log(exp(sr$estimate),base=10)}else{exp(sr$estimate)}) 129 | try(sr.95up.keep <- if(UseLogScale){log(exp(sr$estimate+2*sr$se),base=10)}else{exp(sr$estimate+2*sr$se)}) 130 | try(sr.95low.keep<- if(UseLogScale){log(exp(sr$estimate-2*sr$se),base=10)}else{exp(sr$estimate-2*sr$se)}) 131 | try(panel.xyplot(as.Date(sr$eval.points), sr.keep,type="l",col="grey")) 132 | try(panel.xyplot(as.Date(sr$eval.points), sr.95up.keep,type="l",col="grey")) 133 | try(panel.xyplot(as.Date(sr$eval.points), sr.95low.keep,type="l",col="grey")) 134 | 135 | 136 | try(sr$estimate[sr$se > csite$GWSDAT_Options$smThreshSe]<-NA) 137 | try(sr.fit<- if(UseLogScale){log(exp(sr$estimate),base=10)}else{exp(sr$estimate)}) 138 | try(sr.95up<- if(UseLogScale){log(exp(sr$estimate+2*sr$se),base=10)}else{exp(sr$estimate+2*sr$se)}) 139 | try(sr.95low<-if(UseLogScale){log(exp(sr$estimate-2*sr$se),base=10)}else{exp(sr$estimate-2*sr$se)}) 140 | try(panel.xyplot(as.Date(sr$eval.points), sr.fit,type="l",col="blue")) 141 | try(panel.xyplot(as.Date(sr$eval.points), sr.95up,type="l",col="blue")) 142 | try(panel.xyplot(as.Date(sr$eval.points), sr.95low,type="l",col="blue")) 143 | } 144 | }, 145 | scales = list(y=list(log = UseLogScale)), 146 | xlab = list("Sampling Date",cex = 1.5), 147 | ylab = list(paste("Solute concentration"," (", csite$ui_attr$conc_unit_selected,")", sep = ""),cex=1.5), 148 | #layout = if(length(levels(Cont.Data$Well))>30){c(4,4)}else{NULL}, 149 | xlim = my.xlim, 150 | main = if (csite$Aquifer == "") {paste(Cont,"at",SiteName)} else { 151 | paste(Cont," at ",SiteName,": Aquifer-", csite$Aquifer, sep = "")}, 152 | drop.unused.levels = FALSE, key = my.key) 153 | 154 | return(my.plot) 155 | 156 | 157 | 158 | } 159 | 160 | 161 | ################### All Wells all Conts ######################################### 162 | 163 | #' @importFrom lattice xyplot 164 | GWSDAT.xyplotAllContbyWells <- function(csite, Cont.Data, SiteName = "", UseLogScale=FALSE) { 165 | 166 | my.xlim <- c(min(Cont.Data$SampleDate,na.rm = T),max(Cont.Data$AggDate,na.rm = T)) 167 | 168 | # Add GW date range into xlim - Is this needed ? Turn off for now. 
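  # (The active code below pads the x-range by 2.5% on each side so points at
  # the first and last sampling dates are not drawn on the panel border.)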
169 |   # my.xlim <- range(c(my.xlim, csite$All.Data$GW.Data$SampleDate), na.rm = T)
170 | 
171 |   my.xlim.orig <- my.xlim
172 |   my.xlim[1] <- my.xlim.orig[1] - 0.025 * as.numeric(diff(my.xlim.orig))
173 |   my.xlim[2] <- my.xlim.orig[2] + 0.025 * as.numeric(diff(my.xlim.orig))
174 | 
175 | 
176 |   palette("default")
177 |   palette(c(palette(),"purple","orange","deeppink4","springgreen","indianred"))
178 |   Num.Conts <- nlevels(Cont.Data$Constituent)
179 |   if (Num.Conts > length(palette())) {palette(rainbow(Num.Conts)) }
180 | 
181 |   my.key <- list(
182 |     space = "top",
183 |     border = FALSE,
184 |     columns = min(Num.Conts,4),
185 |     points = list(
186 |       pch = rep(19,Num.Conts),
187 |       cex = rep(1.2,Num.Conts),
188 |       col = 1:Num.Conts
189 |     ),
190 |     text = list(
191 |       lab = as.character(levels(Cont.Data$Constituent))
192 |     )
193 |   )
194 | 
195 | 
196 | 
197 |   myplot <- lattice::xyplot(Result.Corr.ND ~ SampleDate | WellName,
198 |                             groups = Cont.Data$Constituent,
199 |                             data = Cont.Data,
200 |                             scales = list( y = list(log = UseLogScale)),
201 |                             #layout = if (length(unique(Cont.Data$WellName)) > 30) { c(4,4)} else {NULL},
202 |                             type = c("b"),
203 |                             pch = 19,
204 |                             cex = 0.75,
205 |                             col = 1:Num.Conts,
206 |                             lwd = 1,
207 |                             key = my.key,
208 |                             xlab = list("Sampling Date",cex = 1.5),
209 |                             ylab = list(paste("Solute concentration"," (", csite$ui_attr$conc_unit_selected, ")",sep = ""),cex = 1.5),
210 |                             main = if (csite$Aquifer == "") {SiteName} else {paste(SiteName, ": Aquifer-", csite$Aquifer, sep = "")},
211 |                             drop.unused.levels = FALSE,
212 |                             xlim = my.xlim
213 |   )
214 | 
215 | 
216 |   return(myplot)
217 | 
218 | }
219 | 
220 | 
221 | 
222 | 
223 | 
224 | plotWellReportPPT <- function(csite, fileout, substances, locations, use_log_scale,
225 |                               width = 900, height = 500){
226 | 
227 |   # Initialize Powerpoint file.
228 |   if (is.null(ppt_pres <- initPPT())) {
229 |     return(NULL)
230 |   }
231 | 
232 |   # Create temporary PNG file.
233 |   mytemp <- tempfile(fileext = ".png")
234 | 
235 |   png(mytemp, width = width, height = height)
236 |   plotWellReport(csite, substances, locations, use_log_scale)
237 |   dev.off()
238 | 
239 |   ppt_pres <- addPlotPPT(mytemp, ppt_pres, width, height)
240 | 
241 |   print(ppt_pres, target = fileout) %>% invisible()
242 | 
243 |   try(file.remove(mytemp))
244 | 
245 | 
246 | }
--------------------------------------------------------------------------------
/R/plumeDiagnostics.R:
--------------------------------------------------------------------------------
1 | 
2 | getFullPlumeStats <- function(csite, substance, plume_thresh, ground_porosity, progressBar = NULL, UseReducedWellSet) {
3 | 
4 |   # This will become a data frame containing, in each row, the plume
5 |   # statistics for one date.
6 |   full_plume_stats <- NULL
7 | 
8 |   nr_timesteps = length(csite$All.Data$All_Agg_Dates)
9 | 
10 |   for (i in 1:nr_timesteps) {
11 | 
12 |     datetmp <- csite$All.Data$All_Agg_Dates[i]
13 | 
14 |     progressBar$set(value = (i/nr_timesteps), detail = paste("time point ", i, " / ", nr_timesteps))
15 | 
16 |     interp.pred <- interpConc(csite, substance, datetmp, UseReducedWellSet)
17 | 
18 |     plume_stats <- getPlumeStats(csite, substance, datetmp,
19 |                                  interp.pred$data, plume_thresh, ground_porosity, UseReducedWellSet)
20 | 
21 |     # Add date.
22 |     plume_stats = cbind(plume_stats, "Agg.Date" = datetmp)
23 | 
24 | 
25 |     # Append to the full plume stats table.
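    # (Note: rbind-ing inside the loop grows the data frame quadratically;
    # with many aggregation dates, collecting the rows in a list and calling
    # do.call(rbind, rows) once after the loop is the usual faster pattern.)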
26 | if (is.null(full_plume_stats)) 27 | full_plume_stats <- plume_stats 28 | else 29 | full_plume_stats <- rbind(full_plume_stats, plume_stats) 30 | 31 | } 32 | 33 | return(full_plume_stats) 34 | } 35 | 36 | 37 | # Calculate the plume statistics including mass, area, center, and average concentration for a specific time point. 38 | # 39 | # @param csite GWSDAT data object. 40 | # @param substance Name of the contaminant. 41 | # @param timepoint Time point (Date) for which to calculate the plume. 42 | # @param predicted_val Predicted concentration values of the contaminant. 43 | # @param plume_thresh Concentration limit defining the plume. 44 | # @param ground_porosity Porosity of the ground in percent. 45 | # 46 | # @return A data frame containing the plume statistics. 47 | # 48 | #' @importFrom splancs areapl 49 | getPlumeStats <- function(csite, 50 | substance, 51 | timepoint, 52 | predicted_val, 53 | plume_thresh, 54 | ground_porosity,UseReducedWellSet) { 55 | 56 | if (csite$ui_attr$conc_unit_selected == "mg/l") {plume_thresh <- plume_thresh/1000} 57 | if (csite$ui_attr$conc_unit_selected == "ng/l") {plume_thresh <- plume_thresh*1000} 58 | 59 | cL <- contourLines(predicted_val, levels = plume_thresh) 60 | 61 | model.tune <- if(UseReducedWellSet){csite$Reduced.Fitted.Data[[substance]][["Model.tune"]]}else{csite$Fitted.Data[[substance]][["Model.tune"]]} 62 | 63 | PlumeDetails = list() 64 | 65 | 66 | if (length(cL) > 0) { 67 | 68 | for (i in 1:length(cL)) { 69 | 70 | cL[[i]]$Closed <- checkPlumeClosure(cL[[i]]) 71 | 72 | if (cL[[i]]$Closed) { 73 | 74 | cL[[i]]$area <- splancs::areapl(cbind(cL[[i]]$x,cL[[i]]$y)) 75 | tempPlumeQuant <- CalcPlumeStats(model.tune$best.mod, 76 | AggDate = timepoint, 77 | cL[[i]], 78 | plume_thresh = plume_thresh, 79 | type = csite$ui_attr$pred_interval, 80 | units = csite$ui_attr$conc_unit_selected) 81 | 82 | cL[[i]]$Volume <- tempPlumeQuant$PlumeVol 83 | cL[[i]]$PlumeCentreofMass <- tempPlumeQuant$PlumeCentreofMass 84 | 85 | } else { 86 | 87 | cL[[i]]$area <- NA 88 | cL[[i]]$Volume <- NA 89 | cL[[i]]$PlumeCentreofMass <- NA 90 | 91 | } 92 | 93 | } 94 | 95 | } 96 | 97 | if (length(cL) > 0) { 98 | PlumeDetails$cL = cL 99 | } else { 100 | PlumeDetails$cL = NULL 101 | } 102 | 103 | 104 | # 105 | # Create plume statistics summary. 
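  # ('volume' below is the integral of concentration over the plume footprint,
  # so avg_conc = volume / area is the spatially averaged concentration, and
  # mass = volume * porosity approximates dissolved mass per unit depth.)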
106 | # 107 | 108 | plume_stats <- data.frame(area = NA, volume = NA, mass = NA, avg_conc = NA, 109 | mass_centre_x = NA, mass_centre_y = NA, 110 | conc_thresh = plume_thresh, 111 | substance = substance, 112 | ground_porosity = ground_porosity, 113 | coord_unit = csite$All.Data$sample_loc$coord_unit, 114 | conc_unit = csite$ui_attr$conc_unit_selected 115 | ) 116 | 117 | if (length(PlumeDetails$cL) > 0) { 118 | 119 | plume_stats$area <- sum(unlist(lapply(PlumeDetails$cL,function(l){l$area}))) 120 | plume_stats$volume <- sum(unlist(lapply(PlumeDetails$cL,function(l){l$Volume}))) 121 | plume_stats$mass <- plume_stats$volume * plume_stats$ground_porosity 122 | plume_stats$avg_conc <- plume_stats$volume / plume_stats$area 123 | 124 | COMWeights <- unlist(lapply(PlumeDetails$cL, function(l){l$Volume}))/sum(unlist(lapply(PlumeDetails$cL,function(l){l$Volume}))) 125 | plume_stats$mass_centre_x <- sum(COMWeights * unlist(lapply(PlumeDetails$cL,function(l){l$PlumeCentreofMass[1]}))) 126 | plume_stats$mass_centre_y <- sum(COMWeights * unlist(lapply(PlumeDetails$cL,function(l){l$PlumeCentreofMass[2]}))) 127 | 128 | } 129 | 130 | return(plume_stats) 131 | 132 | } 133 | 134 | #' @importFrom deldir deldir triang.list 135 | #' @importFrom splancs gridpts 136 | CalcPlumeStats <- function(model, AggDate, cL, plume_thresh, type, units){ 137 | 138 | temppts <- cbind(cL$x, cL$y) 139 | temppts <- splancs::gridpts(temppts, 100) 140 | Plume.Tri.Points <- data.frame(XCoord = temppts[,1],YCoord = temppts[,2]) 141 | Plume.Tri.Points$AggDate = as.numeric(AggDate) 142 | 143 | 144 | temppred <- predict(model, newdata = Plume.Tri.Points, se = (type != "Predicted")) 145 | if (type == "Lower 95% CI") {temppred$predicted <- temppred$predicted - temppred$predicted.sd*1.96} 146 | if (type == "Upper 95% CI") {temppred$predicted <- temppred$predicted + temppred$predicted.sd*1.96} 147 | 148 | Plume.Tri.Points$z <- exp(temppred$predicted) 149 | 150 | 151 | if(units=="mg/l"){Plume.Tri.Points$z<-Plume.Tri.Points$z/1000} 152 | if(units=="ng/l"){Plume.Tri.Points$z<-Plume.Tri.Points$z*1000} 153 | 154 | 155 | 156 | cL.Tri.Points <- data.frame(XCoord=cL$x,YCoord=cL$y,z=rep(plume_thresh,length(cL$x))) 157 | Vol.Tri.Points <- unique(rbind(Plume.Tri.Points[,c("XCoord","YCoord","z")],cL.Tri.Points)) 158 | 159 | mydeldir <- deldir::deldir(x=Vol.Tri.Points$XCoord, y = Vol.Tri.Points$YCoord, z = Vol.Tri.Points$z) 160 | mytriangs <- deldir::triang.list(mydeldir) 161 | PlumeVol <- sum(unlist(lapply(mytriangs, VolIndTri))) 162 | xPlumeVol <- sum(unlist(lapply(mytriangs, xVolIndTri))) 163 | yPlumeVol <- sum(unlist(lapply(mytriangs, yVolIndTri))) 164 | PlumeCentreofMass <- c(xPlumeVol, yPlumeVol) / PlumeVol 165 | 166 | 167 | return(list(PlumeVol=PlumeVol,PlumeCentreofMass=PlumeCentreofMass)) 168 | 169 | } 170 | 171 | 172 | PlumeUnitHandlingFunc <- function(LengthUnit, rgUnits, PlumeMass, PlumeArea){ 173 | 174 | 175 | if (is.null(LengthUnit) || LengthUnit=="" ) { 176 | 177 | PlumeMass <- 1000*PlumeMass 178 | 179 | if (rgUnits == "ng/l") {PlumeMass <- PlumeMass*10^-12} 180 | if (rgUnits == "ug/l") {PlumeMass <- PlumeMass*10^-9} 181 | if (rgUnits == "mg/l") {PlumeMass <- PlumeMass*10^-6} 182 | 183 | PlumeMassUnits <- paste("(Mass/Unit Depth)",sep = "") 184 | PlumeAreaUnits <- paste("(Unit Area)",sep = "") 185 | PlumeAverageUnits <- paste("(",rgUnits,")",sep = "") 186 | 187 | 188 | } else { 189 | 190 | if (LengthUnit == "metres") { 191 | 192 | PlumeMass <- 1000*PlumeMass 193 | 194 | if (rgUnits == "ng/l") {PlumeMass <- PlumeMass * 10^-12} 195 | if 
(rgUnits == "ug/l") {PlumeMass <- PlumeMass * 10^-9} 196 | if (rgUnits == "mg/l") {PlumeMass <- PlumeMass * 10^-6} 197 | 198 | PlumeMassUnits <- paste("(kg/m)",sep = "") 199 | PlumeAreaUnits <- paste("(m^2)",sep = "") 200 | PlumeAverageUnits <- paste("(",rgUnits,")",sep = "") 201 | 202 | } 203 | 204 | 205 | if (LengthUnit == "feet") { 206 | 207 | PlumeMass <- 1000*PlumeMass/35.315 #per cubic ft 208 | 209 | if (rgUnits == "ng/l") {PlumeMass <- PlumeMass * 10^-12} 210 | if (rgUnits == "ug/l") {PlumeMass <- PlumeMass * 10^-9} 211 | if (rgUnits == "mg/l") {PlumeMass <- PlumeMass * 10^-6} 212 | 213 | PlumeMassUnits <- paste("(kg/ft)",sep = "") 214 | PlumeAreaUnits <- paste("(ft^2)",sep = "") 215 | PlumeAverageUnits <- paste("(",rgUnits,")",sep = "") 216 | 217 | } 218 | 219 | } 220 | 221 | return(list(PlumeMass = PlumeMass, PlumeArea = PlumeArea, 222 | PlumeMassUnits = PlumeMassUnits, 223 | PlumeAreaUnits = PlumeAreaUnits, 224 | PlumeAverageUnits = PlumeAverageUnits)) 225 | 226 | } 227 | 228 | 229 | 230 | 231 | 232 | xVolIndTri <- function(l){ 233 | 234 | x <- l$x 235 | y <- l$y 236 | z <- l$z 237 | 238 | z = z*x 239 | 240 | 0.5*(x[1]*(y[2] - y[3]) + x[2]*(y[3]- y[1]) + x[3]*(y[1]- y[2]))*(z[1] + z[2] + z[3]) / 3 241 | } 242 | 243 | 244 | yVolIndTri<-function(l){ 245 | 246 | x<-l$x 247 | y<-l$y 248 | z<-l$z 249 | 250 | z=z*y 251 | 252 | 0.5*(x[1]*(y[2] - y[3]) + x[2]*(y[3]- y[1]) + x[3]*(y[1]- y[2]))*(z[1] + z[2] + z[3]) / 3 253 | } 254 | 255 | checkPlumeClosure <- function(cl){ 256 | 257 | cl$x[1] == cl$x[length(cl$x)] & cl$y[1] == cl$y[length(cl$y)] 258 | 259 | } 260 | 261 | VolIndTri <- function(l){ 262 | 263 | x <- l$x 264 | y <- l$y 265 | z <- l$z 266 | 267 | 0.5*(x[1]*(y[2] - y[3]) + x[2] * (y[3] - y[1]) + x[3] * (y[1] - y[2]))*(z[1] + z[2] + z[3]) / 3 268 | } 269 | 270 | -------------------------------------------------------------------------------- /R/ppt.R: -------------------------------------------------------------------------------- 1 | 2 | existsPPT <- function() { 3 | 4 | return(TRUE) 5 | 6 | } 7 | 8 | 9 | existsPPT_RDCOMClient <- function() { 10 | 11 | # CRAN just doesn't like RDCOMClient so I turn it off for now. 12 | return(FALSE) 13 | 14 | 15 | 16 | # This would be the way to go, if RDCOMClient would be part of the 17 | # official CRAN, but it is not (anymore), thus I can't put RDCOMClient 18 | # into the Suggests tag of the DESCRIPTIONS file and requireNamespace() 19 | # throws a warning with devtools::check(). 20 | # does_exist <- requireNamespace("RDCOMClient", quietly = TRUE) 21 | 22 | 23 | tryCatch( 24 | res <- find.package("RDCOMClient"), 25 | error = function(e) { 26 | cat("Info: RDCOMClient package not installed: Saving to Powerpoint is disabled. If on Windows, use install.packages(\"RDCOMClient\") to install it.\n") 27 | 28 | } 29 | ) 30 | 31 | if (exists("res")) 32 | return(TRUE) 33 | else return(FALSE) 34 | 35 | 36 | } 37 | 38 | 39 | initPPT <- function() { 40 | 41 | #my_pres <- read_pptx() 42 | 43 | return(read_pptx()) 44 | 45 | } 46 | 47 | initPPT_RDCOMClient <- function() { 48 | 49 | if (existsPPT()) { 50 | 51 | 52 | 53 | # tryCatch( 54 | # ppt <- RDCOMClient::COMCreate("PowerPoint.Application"), 55 | # error = function(e) { 56 | # showNotification("Initializing RDCOMClient failed: Package is probably not loaded. Use require(RDCOMClient) before launching the app.", type = "error", duration = 10) 57 | # 58 | # } 59 | # ) 60 | # 61 | # # Variable 'ppt' will not exist if an error occurs in the tryCatch() above. 
62 | # if (exists("ppt")) { 63 | # ppt[["Visible"]] <- TRUE 64 | # 65 | # myPres <- ppt[["Presentations"]]$add() 66 | # mySlides <- myPres[["Slides"]] 67 | # 68 | # return(list(ppt = ppt, pres = myPres, slides = mySlides)) 69 | # } 70 | } 71 | 72 | return(NULL) 73 | 74 | } 75 | 76 | #' @import officer 77 | addPlotPPT <- function(imgfile, ppt_pres, width, height) { 78 | 79 | ppt_pres <- officer::add_slide(ppt_pres, layout = "Title and Content", master = "Office Theme") 80 | ppt_pres <- officer::ph_with(x=ppt_pres,external_img(src = imgfile),location = ph_location_type(type = "body")) 81 | 82 | # if(utils::packageVersion("officer")>'0.3.7'){ 83 | # 84 | # ## ph_with_img is defunct and replaced with ph_with 85 | # #ppt_pres <- officer::ph_with(x=ppt_pres,external_img(src = imgfile, width = width / 90 * 0.7, height = height / 90 * 0.7),location = ph_location_left(),use_loc_size = FALSE) 86 | # ppt_pres <- officer::ph_with(x=ppt_pres,external_img(src = imgfile),location = ph_location_type(type = "body")) 87 | # 88 | # }else{ 89 | # 90 | # ppt_pres <- officer::ph_with_img(ppt_pres, src = imgfile, height = height / 90 * 0.7, width = width / 90 * 0.7 ) 91 | # 92 | # } 93 | 94 | return(ppt_pres) 95 | 96 | } 97 | 98 | addPlotPPT_RDCOMClient <- function(imgfile, ppt_lst, width, height) { 99 | 100 | slide <- ppt_lst$pres[["Slides"]]$add(as.integer(max(1, ppt_lst$pres[["Slides"]]$Count() + 1)), 101 | as.integer(12)) 102 | shapes <- slide$Shapes() 103 | 104 | shapes$AddPicture(imgfile, LinkToFile = FALSE, SaveWithDocument = TRUE, 105 | Top = 1, Left = 20, Width = width * 0.7, Height = height * 0.7) 106 | slide$Select() 107 | 108 | return(ppt_lst) 109 | } 110 | 111 | -------------------------------------------------------------------------------- /R/predictValues.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | #' @importFrom sp point.in.polygon 6 | predictValues <- function(model, AggDate, eval.df, type = c("predicted","lower","upper","sd")) { 7 | 8 | 9 | type=match.arg(type) 10 | 11 | if(length(AggDate)!=1){stop("Agg Date must be length 1")} 12 | 13 | x0 = sort(unique(eval.df[,1])) 14 | y0 = sort(unique(eval.df[,2])) 15 | my.df <- expand.grid(XCoord=x0,YCoord=y0) 16 | 17 | if(!is.null(model)){ 18 | 19 | my.df$AggDate <- as.numeric(AggDate) 20 | my.df$InOut <- !sp::point.in.polygon(my.df$XCoord,my.df$YCoord,eval.df[chull(eval.df[,1:2]),1],eval.df[chull(eval.df[,1:2]),2]) == 0 21 | my.df$pred <- rep(NA,nrow(my.df)) 22 | 23 | temppred <- predict(model, newdata = my.df[my.df$InOut, c("XCoord","YCoord","AggDate")], se=type!="predicted") 24 | my.df$pred[my.df$InOut] <- temppred$predicted 25 | my.df$pred.sd[my.df$InOut] <- temppred$predicted.sd 26 | 27 | if(type=="lower"){my.df$pred <- my.df$pred - 1.96 * my.df$pred.sd} 28 | if(type=="upper"){my.df$pred <- my.df$pred + 1.96 * my.df$pred.sd} 29 | if(type=="sd") {my.df$pred <- my.df$pred.sd} 30 | 31 | } else { 32 | 33 | my.df$pred <- rep(NA,nrow(my.df)) 34 | 35 | } 36 | 37 | list(x = x0,y = y0,z = matrix(my.df$pred,nrow = length(x0), ncol = length(y0))) 38 | 39 | } 40 | 41 | -------------------------------------------------------------------------------- /R/readData.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | #' @importFrom readxl read_excel 4 | readExcelData <- function(filein, sheet, header = NULL, get_subset = TRUE, ign_first_head = "") { 5 | 6 | # Avoid reading column header (in first row) in order to have conform input 7 | # if table is located 
somewhere inside the sheet. 'col_names = TRUE' would detect 8 | # types and which(excl[,i] == headj)) would fail if not a character. 9 | excl <- read_excel(filein, sheet = sheet, col_names = FALSE) 10 | 11 | # Find the headers in the sheet 12 | dftmp <- NULL 13 | head_err = "" 14 | end_row = 0 15 | 16 | # Detect empty table. 17 | if (ncol(excl) == 0) 18 | return(NULL) 19 | 20 | for (headj in header) { 21 | 22 | found_head <- FALSE 23 | 24 | # Look into each column 25 | for (i in 1:ncol(excl)) { 26 | #cat("sheet: ", sheet, ", header: ", headj, ", column: ", i, "\n") 27 | 28 | 29 | # Get row offset of header 30 | if (length(rowpos <- which(excl[,i] == headj)) == 1) { 31 | 32 | if (ign_first_head == headj) { 33 | ign_first_head = "" 34 | next 35 | } 36 | 37 | found_head <- TRUE 38 | 39 | if (get_subset) { 40 | 41 | if (end_row == 0) { 42 | end_row <- which(is.na(excl[ (rowpos + 1):nrow(excl) , i]))[1] 43 | 44 | if (!is.na(end_row)) 45 | end_row <- end_row + rowpos - 1 46 | else 47 | end_row <- nrow(excl) 48 | 49 | } 50 | 51 | # Extract the data for this header 52 | ctmp <- excl[(rowpos + 1):end_row, i] 53 | 54 | colnames(ctmp) <- headj 55 | 56 | if (is.null(dftmp)) 57 | dftmp <- ctmp 58 | else 59 | dftmp <- cbind(dftmp, ctmp) 60 | } 61 | 62 | break 63 | } 64 | 65 | } 66 | 67 | if (!found_head) 68 | head_err = paste0(head_err, headj) 69 | 70 | } 71 | 72 | if (head_err != "") { 73 | return(head_err) 74 | } 75 | 76 | 77 | return(dftmp) 78 | 79 | } 80 | 81 | 82 | #' @importFrom readxl excel_sheets 83 | readExcel <- function(filein, sheet = NULL) { 84 | 85 | 86 | conc_header <- list("WellName", "Constituent", "SampleDate", "Result", "Units", "Flags") 87 | well_header <- list("WellName", "XCoord", "YCoord", "Aquifer") 88 | 89 | conc_data <- NULL 90 | well_data <- NULL 91 | #coord_unit <- "metres" 92 | coord_unit <- "" 93 | 94 | # If no sheet was specified, extract them and try to find tables. 95 | if (is.null(sheet)) 96 | ls_sheets <- readxl::excel_sheets(filein$datapath) 97 | else 98 | ls_sheets <- list(sheet) 99 | 100 | # Attempt to find valid tables in sheets. 101 | for (sheet in ls_sheets) { 102 | 103 | # 104 | # Read the contaminant data 105 | # 106 | ret <- readExcelData(filein$datapath, sheet = sheet, header = conc_header) 107 | 108 | #if (class(ret) != "data.frame") { 109 | if (!is.data.frame(ret)) { 110 | try(showNotification(paste0("Sheet \'", sheet, "\': No valid contaminant table found."), duration = 10, type = "error")) 111 | next 112 | } 113 | 114 | # Check if the date input is correct. 115 | if (any(is.na(ret$SampleDate))) { 116 | 117 | ret <- ret[!is.na(ret$SampleDate),] 118 | 119 | msg <- paste0("Sheet \'", sheet, "\': Incorrect input date value(s) detected. Ommitting values.") 120 | try(showNotification(msg, type = "warning", duration = 10)) 121 | 122 | if (nrow(ret) == 0) { 123 | try(showNotification(paste0("Sheet \'", sheet, "\': Zero entries in concentration data read, skipping."), type = "error", duration = 10)) 124 | next 125 | } 126 | } 127 | 128 | # Modify some columns in order to make it display nicely in the table view. 
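  # (Excel stores dates as serial day counts, with origin "1899-12-30" in the
  # usual R convention, so excelDate2Date() below converts that serial number
  # to a Date; floor() first drops any time-of-day fraction.)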
129 | if (!"flags" %in% tolower(names(ret))) { ret$Flags <- rep("",nrow(ret))} 130 | ret$Flags[is.na(ret$Flags)] <- "" 131 | #ret$Result[is.na(ret$Result)] <- 0 132 | ret$SampleDate <- excelDate2Date(floor(as.numeric(as.character(ret$SampleDate)))) 133 | 134 | conc_data <- ret 135 | 136 | 137 | # 138 | # Read the well data 139 | # 140 | well_data <- readExcelData(filein$datapath, sheet = sheet, header = well_header, 141 | ign_first_head = "WellName") 142 | 143 | #if (class(well_data) != "data.frame") { 144 | if (!is.data.frame(well_data)) { 145 | showNotification(paste0("Sheet \'", sheet, "\': No valid well table found, skipping."), duration = 10) 146 | next 147 | } 148 | 149 | 150 | # Extract the coordinate unit (default: metres). 151 | coord_unit <- as.character(ret$CoordUnits[1]) 152 | #if (length(coord_unit) == 0 || is.na(coord_unit)) coord_unit <- "metres" 153 | if (length(coord_unit) == 0 || is.na(coord_unit)) coord_unit <- "" 154 | if(!coord_unit %in% c("","feet","metres")){coord_unit <- ""} #default to no units if erroneous unit entered. 155 | 156 | # Replace Aquifer with emptry string 157 | well_data$Aquifer[is.na(well_data$Aquifer)] <- "" 158 | 159 | # 160 | # Attempt to read shape files (if not found, ignore) 161 | # (Disabled: Shape files must be uploaded with the file input control) 162 | # 163 | # ret <- readExcelData(newfile, sheet = sheet, header = shape_header) 164 | # 165 | # shape_files <- NULL 166 | # 167 | # if (any(class(ret) == "data.frame")) { 168 | # 169 | # shape_files <- validateShapeFiles(ret, sheet) 170 | # 171 | # if (!is.null(shape_files)) 172 | # showNotification(paste0("Sheet \'", sheet, "\': Found ", length(shape_files), 173 | # " shape file(s)."), type = "message", duration = 10) 174 | # 175 | # } 176 | 177 | 178 | # If we made it until here, we were able to read some valid data. 179 | try(showNotification(paste0("Sheet \'", sheet, "\': Found valid tables."), type = "message", duration = 10)) 180 | break 181 | } 182 | 183 | if (is.null(conc_data) || is.null(well_data)) 184 | return(NULL) 185 | 186 | 187 | return(list(conc_data = conc_data, well_data = well_data, coord_unit = coord_unit)) 188 | 189 | } 190 | 191 | 192 | #' @importFrom utils read.csv 193 | #' @importFrom lubridate parse_date_time 194 | readConcData <- function(input_file, valid_header, ...) { 195 | 196 | 197 | if (length(list(...)) == 0) 198 | DF = read.csv(input_file) 199 | else 200 | DF = read.csv(input_file, header = list(...)$header, sep = list(...)$sep, quote = list(...)$quote) 201 | 202 | 203 | # Create Flags column or replace NA values with "" if exist. 204 | if (!"flags" %in% tolower(names(DF))) { DF$Flags <- rep("",nrow(DF))} 205 | 206 | DF_extract <- data.frame(matrix(nrow = nrow(DF), ncol = 0)) 207 | head_not_found <- "" 208 | 209 | # Filter input data frame for valid headers. 210 | for (vh in valid_header) { 211 | 212 | if (vh %in% colnames(DF)) { 213 | 214 | DF_extract <- cbind(DF_extract, DF[,vh]) 215 | colnames(DF_extract)[ncol(DF_extract)] <- vh 216 | } else 217 | head_not_found = paste(head_not_found, vh) 218 | 219 | } 220 | 221 | 222 | if (head_not_found != "") { 223 | msg <- paste0("Reading well coordinates failed. Header missing: ", paste(head_not_found, collapse = ", "), ". Try to change the Column Separator." ) 224 | showNotification(msg, type = "error", duration = 7) 225 | return(NULL) 226 | } 227 | 228 | # Transform the 'SampleDate' column into 'Date' class. 
229 | #if (class(DF$SampleDate) == "numeric" | class(DF$SampleDate) == "integer") 230 | if (is.numeric(DF$SampleDate) | is.integer(DF$SampleDate)) 231 | # An integer value indicates Excel time. This is _not_ Unix time! 232 | DF$SampleDate <- excelDate2Date(floor(as.numeric(as.character(DF$SampleDate)))) 233 | else 234 | # Expects string input with format "yyyy-mm-dd" or "yyyy/mm/dd". 235 | # Uses 'lubridate' 236 | DF$SampleDate <- as.Date(parse_date_time(as.character(DF$SampleDate),orders=c("dmy", "mdy", "ymd"))) 237 | 238 | 239 | # Check the dates 240 | if (any(is.na(DF$SampleDate))) { 241 | 242 | msg <- paste("Warning: Incorrect input date value(s) detected. Ommitting ",sum(is.na(DF$SampleDate)),"row(s) of data.") 243 | showNotification(msg, type = "warning", duration = 15) 244 | 245 | DF <- DF[!is.na(DF$SampleDate),] 246 | 247 | if (nrow(DF) == 0) { 248 | 249 | msg <- "Detected Zero entries in concentration data. Aborting file read." 250 | showNotification(msg, type = "error", duration = 10) 251 | return(NULL) 252 | } 253 | } 254 | 255 | 256 | # Make some basic modifications, maybe move these to the format function. 257 | # However, these mods are made before showing up in the import table, but 258 | # the format function is called only when the import button is pressed. 259 | DF$Flags[is.na(DF$Flags)] <- "" 260 | 261 | # Converting it to character (from numeric or factor) makes it possible to replace 262 | # NA values (if factor). formatData() will later convert it to numeric values. 263 | DF$Result <- as.character(DF$Result) 264 | #DF$Result[is.na(DF$Result)] <- "0" 265 | 266 | 267 | return(DF) 268 | 269 | } 270 | 271 | 272 | 273 | #' @importFrom utils read.csv 274 | readWellCoords <- function(input_file, valid_header, ...) { 275 | 276 | 277 | if (length(list(...)) == 0) 278 | DF = read.csv(input_file) 279 | else 280 | DF = read.csv(input_file, header = list(...)$header, sep = list(...)$sep, quote = list(...)$quote) 281 | 282 | 283 | coord_unit <- as.character(DF$CoordUnits[1]) 284 | 285 | if (length(coord_unit) == 0 || is.na(coord_unit)) { 286 | #coord_unit <- "metres" 287 | coord_unit <- "" 288 | 289 | } 290 | if(!coord_unit %in% c("","feet","metres")){coord_unit <- ""} #default to no units if erroneous unit entered. 291 | 292 | # If no Aquifer field was found, add one with blank strings. 293 | if (!"aquifer" %in% tolower(names(DF))) { 294 | DF$Aquifer <- rep("",nrow(DF)) 295 | } 296 | 297 | 298 | DF_extract <- data.frame(matrix(nrow = nrow(DF), ncol = 0)) 299 | head_not_found <- "" 300 | 301 | # Filter input data frame for valid headers. 302 | for (vh in valid_header) { 303 | 304 | if (vh %in% colnames(DF)) { 305 | 306 | DF_extract <- cbind(DF_extract, DF[,vh]) 307 | colnames(DF_extract)[ncol(DF_extract)] <- vh 308 | } else head_not_found = paste(head_not_found, vh) 309 | } 310 | 311 | if (head_not_found != "") { 312 | msg <- paste0("Reading well coordinates failed. Missing the following columns: ", head_not_found, "." 
) 313 | showNotification(msg, type = "error", duration = 10) 314 | return(NULL) 315 | } 316 | 317 | # Make sure its not a factor, or we get an error when introducing "" 318 | DF_extract$Aquifer <- as.character(DF$Aquifer) 319 | DF_extract$Aquifer[is.na(DF_extract$Aquifer)] <- "" 320 | 321 | return(list(data = DF_extract, coord_unit = coord_unit )) 322 | 323 | } 324 | 325 | -------------------------------------------------------------------------------- /R/selectGamma.R: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # 5 | # Might delete, from old version 2.12 but not used here. 6 | # 7 | 8 | estSig <- function(x, data = NULL, frac = 0.6, na.action = na.omit, scaled = TRUE){ 9 | 10 | call <- match.call() 11 | m <- match.call(expand.dots = FALSE) 12 | if (is.matrix(eval(m$data, parent.frame()))) 13 | m$data <- as.data.frame(data) 14 | m$formula <- m$x 15 | m$x <- NULL 16 | m$scaled <- NULL 17 | m$frac <- NULL 18 | m[[1]] <- as.name("model.frame") 19 | m <- eval(m, parent.frame()) 20 | Terms <- attr(m, "terms") 21 | attr(Terms, "intercept") <- 0 22 | x <- model.matrix(Terms, m) 23 | if (length(scaled) == 1) 24 | scaled <- rep(scaled, ncol(x)) 25 | if (any(scaled)) { 26 | remove <- unique(c(which(labels(Terms) %in% names(attr(x, 27 | "contrasts"))), which(!scaled))) 28 | scaled <- !attr(x, "assign") %in% remove 29 | } 30 | 31 | ret <- estSigMat(x, scaled = scaled, frac = frac, na.action = na.action) 32 | return(ret) 33 | } 34 | 35 | 36 | 37 | estSigMat <- function(x, frac = 0.25, scaled = TRUE, na.action = na.omit){ 38 | set.seed(1) 39 | x <- na.action(x) 40 | if (length(scaled) == 1) 41 | scaled <- rep(scaled, ncol(x)) 42 | if (any(scaled)) { 43 | co <- !apply(x[, scaled, drop = FALSE], 2, var) 44 | if (any(co)) { 45 | scaled <- rep(FALSE, ncol(x)) 46 | warning(paste("Variable(s)", paste("`", colnames(x[, 47 | scaled, drop = FALSE])[co], "'", sep = "", 48 | collapse = " and "), "constant. 
Cannot scale data."))
49 |     }
50 |     else {
51 |       xtmp <- scale(x[, scaled])
52 |       x[, scaled] <- xtmp
53 |     }
54 |   }
55 |   m <- dim(x)[1]
56 |   n <- floor(frac * m)
57 |   index <- sample(1:m, n, replace = TRUE)
58 |   index2 <- sample(1:m, n, replace = TRUE)
59 |   temp <- x[index, , drop = FALSE] - x[index2, , drop = FALSE]
60 |   dist <- rowSums(temp^2)
61 |   ds <- sort(dist[dist != 0])
62 |   sl <- ds[ceiling(0.1 * length(ds))]
63 |   su <- ds[ceiling(0.9 * length(ds))]
64 |   srange <- c(1/su, 1/sl)
65 |   names(srange) <- NULL
66 |   return(srange)
67 | }
68 | 
69 | 
70 | 
71 | selectGamma <- function(temp.Cont.Data, gamma){
72 | 
73 |   if (gamma[1] != 0) { return(gamma) }
74 | 
75 | 
76 |   tempgamma <- matrix(nrow = 50, ncol = 2)
77 | 
78 |   for (i in 1:nrow(tempgamma)) {
79 | 
80 |     tempgamma[i,] <- estSig(log(Result.Corr.ND)~AggDate+XCoord+YCoord, temp.Cont.Data)
81 | 
82 |   }
83 | 
84 |   if (length(gamma) == 1) {
85 | 
86 |     gamma <- mean(0.5*(tempgamma[,1]+tempgamma[,2]))
87 |     #gamma<-median(1/apply(tempgamma,1,mean)) #Wayne 26th June 2009
88 | 
89 |   } else {
90 | 
91 |     #gamma<-quantile(apply(tempgamma,1,mean),p=c(0.1,0.5,0.9))
92 |     #gamma<-c(mean(0.5*(tempgamma[,1]+tempgamma[,2])), quantile(tempgamma[,2],p=0.9))
93 |     #gamma<-c(quantile(tempgamma[,2],p=0.95))
94 |     #gamma<-sort(apply(tempgamma,2,mean))[1]+c(.3,.5,.7)*diff(sort(apply(tempgamma,2,mean)))
95 |     #gamma<-quantile(1/apply(tempgamma,1,mean),p=c(.1,.5,.9)) #Wayne 26th June 2009
96 |     gamma <- sort(apply(tempgamma,2,mean))[1] + c(.3,.5,.7)*diff(sort(apply(tempgamma,2,mean)))
97 | 
98 | 
99 | 
100 | 
101 |   }
102 | 
103 |   return(gamma)
104 | }
105 | 
106 | 
107 | 
108 | 
109 | 
110 | 
111 | 
112 | 
113 | 
114 | 
115 | 
116 | 
117 | 
--------------------------------------------------------------------------------
/R/shapeFiles.R:
--------------------------------------------------------------------------------
1 | #' @import sf
2 | readShapeFiles <- function(ShapeFileNames) {
3 | 
4 |   shdat <- list()
5 | 
6 |   if (!is.null(ShapeFileNames)) {
7 |     for (filein in ShapeFileNames) {
8 | 
9 |       tryCatch({
10 | 
11 |         dat <- sf::st_read(filein)
12 |         shdat[[length(shdat) + 1]] <- dat
13 | 
14 |       }, error = function(e) {
15 |         showNotification(paste0("Failed to load shape file ", filein, ": ", e$msg), type = "error")
16 |       })
17 |     }
18 |   }
19 | 
20 |   if (length(shdat) == 0)
21 |     return(NULL)
22 | 
23 |   return(shdat)
24 | 
25 | }
26 | 
27 | 
28 | createShapeFileList <- function(shfiles) {
29 |   cat("* in createShapeFileList()\n")
30 |   if (!is.null(shfiles)) {
31 | 
32 |     MAT <- matrix(nrow = 0, ncol = 2)
33 | 
34 |     # Add shape file details to matrix.
35 |     for (i in 1:length(shfiles$file_details)) {
36 |       MAT <- rbind(MAT, matrix(c(shfiles$file_details[[i]]$name,
37 |                                  shfiles$file_details[[i]]$size), ncol = 2))
38 |     }
39 | 
40 |     MAT <- as.data.frame(MAT)
41 |     colnames(MAT) <- c("Name", "Size")
42 | 
43 |     # Render the table.
44 |     tbl_out <- rhandsontable::rhandsontable(MAT, useTypes = TRUE, rowHeaders = NULL,
45 |                                             stretchH = "all", height = 400, readOnly = TRUE)# %>%
46 |     #hot_col("Name", readOnly = TRUE) %>%
47 |     #hot_col("Size", readOnly = TRUE)
48 | 
49 |     return(tbl_out)
50 |   } #else {
51 | 
52 |   # Render an empty table: to leave a blank gap when "Remove All Files" is pressed.
53 | #tbl_out <- rhandsontable::rhandsontable(data.frame(Name = character(), Size = numeric()), 54 | # useTypes = FALSE, rowHeaders = NULL, stretchH = "all", 55 | # height = 400, readOnly = TRUE) #%>% 56 | #hot_col("Name", readOnly = TRUE) %>% 57 | #hot_col("Size", readOnly = TRUE) 58 | 59 | #} 60 | 61 | # return(tbl_out) 62 | } 63 | 64 | 65 | addShapeFiles <- function(input_spf, spf) { 66 | 67 | # If no shape file has been added yet (spf is NULL), create new list. 68 | if (is.null(spf)) 69 | spf <- list(shp_files = c(), file_details = list(input_spf)) 70 | else { 71 | # Append to already existing shape file collection. 72 | lst_end <- length(spf$file_details) 73 | spf$file_details[[lst_end + 1]] <- input_spf 74 | } 75 | 76 | SHP_FILE_DETECTED <- FALSE 77 | 78 | # Rename all the temporary files to their original name, so that the sf package 79 | # reader can read all files an ones. 80 | for (i in 1:length(input_spf$name)) { 81 | 82 | # Split path and replace file name with real file name. 83 | dp <- strsplit(input_spf$datapath[i], "/")[[1]] 84 | dp[length(dp)] <- input_spf$name[i] 85 | 86 | # Put it back together, now with the new file name in the path. 87 | new_dp <- paste(dp, collapse = "/") 88 | 89 | file.rename(input_spf$datapath[i], new_dp) 90 | 91 | # Detect *.shp file and save to shp_files vector. It will be used by the 92 | # sf package to read out all other files. 93 | fa <- strsplit(input_spf$name[i], "\\.")[[1]] 94 | if (fa[length(fa)] == "shp") { 95 | SHP_FILE_DETECTED <- TRUE 96 | spf$shp_files <- c(spf$shp_files, new_dp) 97 | } 98 | } 99 | 100 | if (!SHP_FILE_DETECTED) 101 | showNotification("No .shp file detected. Need at least one .shp file to read remaining shape files.", 102 | type = "warning", duration = 10) 103 | 104 | return(spf) 105 | } 106 | 107 | 108 | # # If no shape file has been added yet, create new list. 109 | # if (is.null(spf)) 110 | # import_tables$shape_files <<- list(shp_files = c(), file_details = list(input$shape_files_csv)) 111 | # else { 112 | # # Append to already existing shape file collection. 113 | # lst_end <- length(import_tables$shape_files$file_details) 114 | # import_tables$shape_files$file_details[[lst_end + 1]] <<- input$shape_files_csv 115 | # } 116 | # 117 | # SHP_FILE_DETECTED <- FALSE 118 | # 119 | # # Rename all the temporary files to their original name, so that the sf package 120 | # # reader can read all files an ones. 121 | # for (i in 1:length(input$shape_files_csv$name)) { 122 | # 123 | # # Split path and replace file name with real file name. 124 | # dp <- strsplit(input$shape_files_csv$datapath[i], "/")[[1]] 125 | # dp[length(dp)] <- input$shape_files_csv$name[i] 126 | # 127 | # # Put it back together, now with the new file name in the path. 128 | # new_dp <- paste(dp, collapse = "/") 129 | # 130 | # file.rename(input$shape_files_csv$datapath[i], new_dp) 131 | # 132 | # # Detect *.shp file and save to shp_files vector. It will be used by the 133 | # # sf package to read out all other files. 134 | # fa <- strsplit(input$shape_files_csv$name[i], "\\.")[[1]] 135 | # if (fa[length(fa)] == "shp") { 136 | # SHP_FILE_DETECTED <- TRUE 137 | # import_tables$shape_files$shp_files <<- c(import_tables$shape_files$shp_files, new_dp) 138 | # } 139 | # } 140 | # 141 | # if (!SHP_FILE_DETECTED) 142 | # showNotification("No .shp file detected. 
Need at least one .shp file to read remaining shape files.", 143 | # type = "warning", duration = 10) 144 | -------------------------------------------------------------------------------- /R/sliderValues.R: -------------------------------------------------------------------------------- 1 | 2 | # 3 | # [Not used] Attempt on updating the sliderValues input, but it will work only 4 | # for the 'label' attribute. See updateSliderInput() for a reference. 5 | # Needs more insight I guess. 6 | # 7 | updateSliderValues <- function(session, inputId, label = NULL, from = NULL) 8 | { 9 | # vals <- shiny:::dropNulls(list(value, min, max)) 10 | # type <- unique(lapply(vals, function(x) { 11 | # if (inherits(x, "Date")) 12 | # "date" 13 | # else if (inherits(x, "POSIXt")) 14 | # "datetime" 15 | # else "number" 16 | # })) 17 | # if (length(type) > 1) { 18 | # stop("Type mismatch for value, min, and max") 19 | # } 20 | # if ((length(type) == 1) && (type == "date" || type == "datetime")) { 21 | # to_ms <- function(x) 1000 * as.numeric(as.POSIXct(x)) 22 | # if (!is.null(min)) 23 | # min <- to_ms(min) 24 | # if (!is.null(max)) 25 | # max <- to_ms(max) 26 | # if (!is.null(value)) 27 | # value <- to_ms(value) 28 | # } 29 | 30 | message <- shiny:::dropNulls(list(label = label, from = shiny:::formatNoSci(from))) 31 | session$sendInputMessage(inputId, message) 32 | } 33 | 34 | 35 | # Debounce slider inputs: The input of sliderValues() is not delayed. 36 | # To debounce input$timepoint by 'ms' milliseconds, define the following line 37 | # and use timepoint_d()' as reactive to respond to delayed changes of input$timepoint. 38 | # 39 | #timepoint_d <- debounce( reactive({input$timepoint }), ms) 40 | 41 | 42 | 43 | # 44 | # [Not used] This modified slider input allows the passing of a vector of values 45 | # instead of just limits. The original sliderInput doesn't do this. 46 | # The Problem: Can't update the attributes so far, and the grid looks too crowded 47 | # if 'values' has lots of content. 
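# (One slider stop is created per entry in 'values', so a long vector of
# dates produces grid labels that overlap; presumably why this stayed unused.)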
48 | # 49 | sliderValues <- function(inputId, 50 | label, 51 | values, 52 | from, 53 | to = NULL, 54 | grid = TRUE, 55 | width = NULL, 56 | postfix = NULL, 57 | prefix = NULL, 58 | dragRange = TRUE, 59 | disable = FALSE, 60 | animate = FALSE) { 61 | validate_fromto <- 62 | function(fromto = NULL, 63 | values = NULL, 64 | default = 0) { 65 | if (!is.null(fromto)) { 66 | if (is.character(values) & is.numeric(fromto)) { 67 | fromto <- fromto - 1 68 | } else { 69 | fromto <- which(values == fromto) - 1 70 | } 71 | } else { 72 | fromto <- default 73 | } 74 | return(fromto) 75 | } 76 | 77 | sliderProps <- shiny:::dropNulls( 78 | list( 79 | class = "js-range-slider", 80 | id = inputId, 81 | `data-type` = if (!is.null(to)) 82 | "double" 83 | else 84 | "single", 85 | `data-from` = validate_fromto(fromto = from, values = values), 86 | `data-to` = validate_fromto(fromto = to, values = values, default = length(values)), 87 | `data-grid` = grid, 88 | `data-prefix` = if (is.null(prefix)) { 89 | "null" 90 | } else { 91 | shQuote(prefix, "sh") 92 | }, 93 | `data-postfix` = if (is.null(postfix)) { 94 | "null" 95 | } else { 96 | shQuote(postfix, "sh") 97 | }, 98 | `data-drag-interval` = dragRange, 99 | `data-disable` = disable, 100 | `data-values` = if (is.numeric(values)) { 101 | paste(values, collapse = ", ") 102 | } else { 103 | paste(shQuote(values, type = "sh"), collapse = ", ") 104 | } 105 | ) 106 | ) 107 | 108 | sliderProps <- lapply( 109 | X = sliderProps, 110 | FUN = function(x) { 111 | if (identical(x, TRUE)) 112 | "true" 113 | else if (identical(x, FALSE)) 114 | "false" 115 | else 116 | x 117 | } 118 | ) 119 | 120 | sliderTag <- tags$div( 121 | class = "form-group shiny-input-container", 122 | style = if (!is.null(width)) 123 | paste0("width: ", htmltools::validateCssUnit(width), ";"), 124 | if (!is.null(label)) 125 | shiny:::controlLabel(inputId, label), 126 | do.call( 127 | tags$input, 128 | list( 129 | type = if (is.numeric(values) & 130 | is.null(to)) { 131 | "number" 132 | } else { 133 | "text" 134 | }, 135 | #class = "js-range-slider", 136 | id = inputId, 137 | name = inputId, 138 | value = "" 139 | ) 140 | ), 141 | tags$style( 142 | whisker::whisker.render( 143 | template = 144 | "input[id='{{id}}'] { 145 | -moz-appearance:textfield; 146 | } 147 | input[id='{{id}}']::-webkit-outer-spin-button, 148 | input[id='{{id}}']::-webkit-inner-spin-button { 149 | -webkit-appearance: none; 150 | margin: 0; 151 | }", data = list(id = inputId)) 152 | ), 153 | tags$script( 154 | HTML( 155 | whisker::whisker.render( 156 | template = '$("#{{id}}").ionRangeSlider({ 157 | type: "{{data-type}}", 158 | from: {{data-from}}, 159 | to: {{data-to}}, 160 | grid: {{data-grid}}, 161 | keyboard: true, 162 | keyboard_step: 1, 163 | postfix: {{data-postfix}}, 164 | prefix: {{data-prefix}}, 165 | drag_interval: {{data-drag-interval}}, 166 | values: [{{data-values}}], 167 | disable: {{data-disable}} 168 | });', 169 | data = sliderProps 170 | ) 171 | )) 172 | ) 173 | if (identical(animate, TRUE)) 174 | animate <- animationOptions() 175 | if (!is.null(animate) && !identical(animate, FALSE)) { 176 | if (is.null(animate$playButton)) 177 | animate$playButton <- icon("play", lib = "glyphicon") 178 | if (is.null(animate$pauseButton)) 179 | animate$pauseButton <- icon("pause", lib = "glyphicon") 180 | sliderTag <- htmltools::tagAppendChild( 181 | sliderTag, 182 | tags$div(class = "slider-animate-container", 183 | tags$a(href = "#", class = "slider-animate-button", 184 | `data-target-id` = inputId, `data-interval` = 
animate$interval, 185 | `data-loop` = animate$loop, span(class = "play", 186 | animate$playButton), 187 | span(class = "pause", 188 | animate$pauseButton))) 189 | ) 190 | } 191 | dep <- htmltools::htmlDependency( 192 | "ionrangeslider", 193 | "2.1.12", 194 | c(href = "shared/ionrangeslider"), 195 | script = "js/ion.rangeSlider.min.js", 196 | stylesheet = c( 197 | "css/ion.rangeSlider.css", 198 | "css/ion.rangeSlider.skinShiny.css" 199 | ) 200 | ) 201 | htmltools::attachDependencies(sliderTag, dep) 202 | } 203 | 204 | 205 | # This works for the 'label' attributes, but not for the others. 206 | # Would be great to get it to work for 'from', 'to', 'values' 207 | updateLabelSliderValues <- function(session, inputID, label) { 208 | 209 | message <- shiny:::dropNulls(list(label = as.character(label))) 210 | session$sendInputMessage(inputID, message) 211 | 212 | } 213 | 214 | 215 | 216 | 217 | # 218 | # [Not used] A slim version of sliderValues(). Does not directly provide with 219 | # 'inputId' the value in 'values', but an index value into 'values'. Kept it 220 | # to here for later testing of an updateSliderValues_slim() method. 221 | # 222 | sliderValues_slim <- function (inputId, label, values, from, to = NULL, width = NULL) { 223 | sliderProps <- shiny:::dropNulls(list(class = "js-range-slider", 224 | id = inputId, 225 | `data-type` = if (!is.null(to)) "double", 226 | `data-from` = which(values == from) - 1, 227 | `data-to` = if (!is.null(to)) which(values == to) - 1, 228 | `data-grid` = TRUE, 229 | `data-values` = paste(values, collapse = ", ") 230 | )) 231 | sliderProps <- lapply(sliderProps, function(x) { 232 | if (identical(x, TRUE)) 233 | "true" 234 | else if (identical(x, FALSE)) 235 | "false" 236 | else x 237 | }) 238 | sliderTag <- div(class = "form-group shiny-input-container", 239 | style = if (!is.null(width)) 240 | paste0("width: ", validateCssUnit(width), ";"), 241 | if (!is.null(label)) 242 | shiny:::controlLabel(inputId, label), do.call(tags$input, 243 | sliderProps)) 244 | dep <- list(htmltools::htmlDependency("ionrangeslider", "2.0.12", c(href = "shared/ionrangeslider"), 245 | script = "js/ion.rangeSlider.min.js", 246 | stylesheet = c("css/ion.rangeSlider.css", 247 | "css/ion.rangeSlider.skinShiny.css"))) 248 | htmltools::attachDependencies(sliderTag, dep) 249 | } 250 | -------------------------------------------------------------------------------- /R/ui.R: -------------------------------------------------------------------------------- 1 | #' @importFrom utils globalVariables 2 | globalVariables("APP_CUSTOM_COMPONENT") 3 | 4 | # This is the main UI file that defines the two types of interfaces: 5 | # 1. uiFull : running on a server with multiple data sets. 6 | # 2. uiSimple : running locally on a single data set (a.k.a. 
214 | 
215 | 
216 | 
217 | #
218 | # [Not used] A slim version of sliderValues(). Instead of the selected value
219 | # in 'values', 'inputId' provides an index into 'values'. Kept here for later
220 | # testing of an updateSliderValues_slim() method.
221 | #
222 | sliderValues_slim <- function(inputId, label, values, from, to = NULL, width = NULL) {
223 |   sliderProps <- shiny:::dropNulls(list(class = "js-range-slider",
224 |                                         id = inputId,
225 |                                         `data-type` = if (!is.null(to)) "double",
226 |                                         `data-from` = which(values == from) - 1,
227 |                                         `data-to` = if (!is.null(to)) which(values == to) - 1,
228 |                                         `data-grid` = TRUE,
229 |                                         `data-values` = paste(values, collapse = ", ")
230 |   ))
231 |   sliderProps <- lapply(sliderProps, function(x) {
232 |     if (identical(x, TRUE))
233 |       "true"
234 |     else if (identical(x, FALSE))
235 |       "false"
236 |     else x
237 |   })
238 |   sliderTag <- tags$div(class = "form-group shiny-input-container",
239 |                         style = if (!is.null(width))
240 |                           paste0("width: ", htmltools::validateCssUnit(width), ";"),
241 |                         if (!is.null(label))
242 |                           shiny:::controlLabel(inputId, label), do.call(tags$input,
243 |                                                                         sliderProps))
244 |   dep <- list(htmltools::htmlDependency("ionrangeslider", "2.0.12", c(href = "shared/ionrangeslider"),
245 |                                         script = "js/ion.rangeSlider.min.js",
246 |                                         stylesheet = c("css/ion.rangeSlider.css",
247 |                                                        "css/ion.rangeSlider.skinShiny.css")))
248 |   htmltools::attachDependencies(sliderTag, dep)
249 | }
250 | 
--------------------------------------------------------------------------------
/R/ui.R:
--------------------------------------------------------------------------------
1 | #' @importFrom utils globalVariables
2 | globalVariables("APP_CUSTOM_COMPONENT")
3 | 
4 | # This is the main UI file. It defines the two types of interface:
5 | #   1. uiFull   : runs on a server and manages multiple data sets.
6 | #   2. uiSimple : runs locally on a single data set (a.k.a. 'ExcelMode').
7 | #
8 | 
9 | 
10 | createLogoCSS <- function() {
11 |   tags$head(tags$style(HTML('
12 |     .main-header .logo {
13 |       background-image: url("www/logo.png");
14 |       background-repeat: no-repeat;
15 |       background-size: contain;
16 |       background-position: left;
17 |     }
18 |   ')))
19 | }
20 | 
21 | 
22 | dbHeaderFull <- function() shinydashboard::dashboardHeader(title = "GWSDAT",
23 |   shinydashboard::dropdownMenuOutput("welcomeMsg"),
24 |   shinydashboard::dropdownMenuOutput("logAction"),
25 |   shinydashboard::dropdownMenuOutput("signupAction"), tags$li(a(href = 'http://gwsdat.net', target = "_blank",
26 |     icon("home"), title = "GWSDAT Homepage and User Manual"), class = "dropdown"))
27 | 
28 | 
29 | uiFull <- function() shinydashboard::dashboardPage(skin = "black",
30 | 
31 |   dbHeaderFull(),
32 |   shinydashboard::dashboardSidebar(shinydashboard::sidebarMenu(id = "sidebar_menu",
33 |     shinydashboard::menuItem("Manage Data", tabName = "menu_data_manager", icon = icon("archive")),
34 |     shinydashboard::menuItem("Analyse", tabName = "menu_analyse", icon = icon("bar-chart")),
35 |     shinydashboard::menuItem("Log and Jobs", tabName = "logs_jobs", icon = icon("wpforms"))
36 |   )
37 |   ),
38 | 
39 |   shinydashboard::dashboardBody(
40 |     shinyjs::useShinyjs(),
41 |     createLogoCSS(),
42 |     # if (exists("APP_CUSTOM_COMPONENT", envir = .GlobalEnv))
43 |     #   APP_CUSTOM_COMPONENT(),
44 | 
45 |     # Not using includeScript because it wraps
--------------------------------------------------------------------------------
/inst/www/google-analytics.js:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/inst/www/jump_to_tsplot.js:
--------------------------------------------------------------------------------
1 | 
2 | function jumpToPlot(i, j) {
3 |   var plotWnd = document.getElementById("test"); // currently unused
4 |   window.alert('jumpToPlot executed.');
5 | 
6 |   $('.tabbable .nav.nav-tabs li a:first').click();
7 | }
8 | 
--------------------------------------------------------------------------------
/inst/www/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/WayneGitShell/GWSDAT/04f781b4991f0fb77f267d259636d3e4c9619c3b/inst/www/logo.png
--------------------------------------------------------------------------------
/man/createOptions.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/createOptions.R
3 | \name{createOptions}
4 | \alias{createOptions}
5 | \title{Create a list with default start options.}
6 | \usage{
7 | createOptions(site_name = NULL)
8 | }
9 | \arguments{
10 | \item{site_name}{An arbitrary string containing the name of the monitoring site.}
11 | }
12 | \value{
13 | A list containing essential model parameters and start options.
14 | }
15 | \description{
16 | \code{createOptions} creates a list with start options that can be modified
17 | and passed as an argument to \code{\link{launchApp}}.
18 | }
19 | \examples{
20 | opt <- createOptions("New Site 1")
21 | opt$PSplineVars$nseg <- 10 # modify model parameter for p-splines.
22 | opt$WellDataFilename <- 'path_to_concentration_file.csv'
23 | opt$WellCoordsFilename <- 'path_to_well_coordinate_file.csv'
24 | if (interactive()) {
25 |   launchApp(opt)
26 | }
27 | }
--------------------------------------------------------------------------------
/man/launchApp.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/launchApp.R
3 | \name{launchApp}
4 | \alias{launchApp}
5 | \title{Launches the GWSDAT Shiny application.}
6 | \usage{
7 | launchApp(GWSDAT_Options, session_file)
8 | }
9 | \arguments{
10 | \item{GWSDAT_Options}{A list of start options created with \code{\link{createOptions}}.}
11 | 
12 | \item{session_file}{Path to an .rds file containing a GWSDAT analysis session.}
13 | }
14 | \value{
15 | None
16 | }
17 | \description{
18 | The Shiny application can run in multi-data or single-data mode. If no parameter is
19 | passed to \code{launchApp}, the application starts in multi-data mode, which
20 | includes a data manager and several data import facilities. If \code{session_file}
21 | is specified, the application launches in single-data mode, which is limited to the
22 | analysis of the data set specified by \code{session_file}.
23 | }
24 | \examples{
25 | if (interactive()) {
26 |   launchApp(session_file = "path_to_GWSDAT_session.rds") # launch in single data mode.
27 |   launchApp()                                            # launch in multi data mode.
28 | }
29 | }
--------------------------------------------------------------------------------
/start_ExcelMode.R:
--------------------------------------------------------------------------------
1 | 
2 | GWSDAT_Options <- list()
3 | 
4 | GWSDAT_Options[['Aggby']] <- 'Month'         # one of 'Day', 'Month', 'Quarter', 'Year'
5 | GWSDAT_Options[['AggMethod']] <- 'Mean'
6 | GWSDAT_Options[['NDMethod']] <- 'Half of ND Value'
7 | 
8 | GWSDAT_Options[['cross']] <- 10
9 | GWSDAT_Options[['Tune']] <- TRUE
10 | GWSDAT_Options[['gamma']] <- c(0)
11 | GWSDAT_Options[['cost']] <- 2^c(0, 1, 2, 3, 4, 5)
12 | 
13 | # fitPSplines()
14 | GWSDAT_Options[['ModelMethod']] <- 'pspline' # currently unused; fitData() defaults to 'pspline' because no other method is implemented
15 | GWSDAT_Options[['PSplineVars']] <- list()
16 | GWSDAT_Options[['PSplineVars']][['NIG.a']] <- 0.0001
17 | GWSDAT_Options[['PSplineVars']][['NIG.b']] <- 0.0001
18 | GWSDAT_Options[['PSplineVars']][['pord']] <- 1
19 | GWSDAT_Options[['PSplineVars']][['bdeg']] <- 2
20 | GWSDAT_Options[['PSplineVars']][['Trial.Lambda']] <- 10^seq(-6, 0, length = 30)
21 | GWSDAT_Options[['PSplineVars']][['nseg']] <- 6
22 | 
23 | GWSDAT_Options[['smThreshSe']] <- 1.1512     # calcTrafficLights()
24 | GWSDAT_Options[['smMethod']] <- 'aicc'       # calcTrafficLights()
25 | 
26 | GWSDAT_Options[['DefContThresh']] <- 500
27 | GWSDAT_Options[['DefPlumeThresh']] <- 10
28 | GWSDAT_Options[['DefPorosity']] <- 0.25
29 | 
30 | GWSDAT_Options[['Version']] <- '2.11'
31 | GWSDAT_Options[['Version']] <- as.numeric(GWSDAT_Options[['Version']])
32 | GWSDAT_Options[['ShapeFileNames']] <- NULL
33 | 
34 | 
35 | # 'Basic Example'
36 | GWSDAT_Options[['SiteName']] <- 'Basic Example'
37 | GWSDAT_Options[['WellDataFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/GWSDAT/data/BasicExample_WellData.csv'
38 | GWSDAT_Options[['WellCoordsFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/GWSDAT/data/BasicExample_WellCoords.csv'
39 | 
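# For the packaged workflow, an equivalent setup can be sketched with
# createOptions() instead of building the list by hand (illustrative only;
# this assumes createOptions() pre-populates the fields above with defaults,
# as suggested by man/createOptions.Rd):
#
#   opt <- createOptions('Basic Example')
#   opt$PSplineVars$nseg <- 6
#   opt$WellDataFilename <- 'data/BasicExample_WellData.csv'
#   opt$WellCoordsFilename <- 'data/BasicExample_WellCoords.csv'
#   launchApp(opt)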
40 | # 'Comprehensive Example'
41 | #GWSDAT_Options[['SiteName']] <- 'Comprehensive Example'
42 | #GWSDAT_Options[['WellDataFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/GWSDAT/data/ComprehensiveExample_WellData.csv'
43 | #GWSDAT_Options[['WellCoordsFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/GWSDAT/data/ComprehensiveExample_WellCoords.csv'
44 | #GWSDAT_Options[['ShapeFileNames']] <- c('D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/GWSDAT/data/GIS_Files/GWSDATex2.shp')
45 | 
46 | 
47 | 
48 | #
49 | # Plume Areas (different Aquifer)
50 | #
51 | #GWSDAT_Options[['SiteName']] <- 'Site 25'
52 | #GWSDAT_Options[['WellDataFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_plumeareas_concdata.csv'
53 | #GWSDAT_Options[['WellCoordsFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_plumeareas_wellcoords.csv'
54 | 
55 | 
56 | #
57 | # Site 25, site-wide (no Aquifer groups)
58 | #
59 | 
60 | #GWSDAT_Options[['SiteName']] <- 'Site 25'
61 | #GWSDAT_Options[['WellDataFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_sitewide_concdata.csv'
62 | #GWSDAT_Options[['WellCoordsFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_sitewide_wellcoords.csv'
63 | #GWSDAT_Options[['ShapeFileNames']] <- c(GWSDAT_Options[['ShapeFileNames']], 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_Railroads.shp')
64 | #GWSDAT_Options[['ShapeFileNames']] <- c(GWSDAT_Options[['ShapeFileNames']], 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_Roads.shp')
65 | #GWSDAT_Options[['ShapeFileNames']] <- c(GWSDAT_Options[['ShapeFileNames']], 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_Boundary.shp')
66 | #GWSDAT_Options[['ShapeFileNames']] <- c(GWSDAT_Options[['ShapeFileNames']], 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/EPA_Data/Site_25_Buildings.shp')
67 | 
68 | #GWSDAT_Options[['SiteName']] <- 'New Data'
69 | #GWSDAT_Options[['WellDataFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/2_Bugtesting/VBAF11FWellData.csv'
70 | #GWSDAT_Options[['WellCoordsFilename']] <- 'D:/1_Arbeit/1_GWSDAT/3_Shiny_Dev/2_Bugtesting/VBAF11FWellCoords.csv'
71 | 
72 | # require(RDCOMClient)
73 | #
74 | # usePPT <- function() {
75 | #
76 | #   ppt <- RDCOMClient::COMCreate("PowerPoint.Application")
77 | #
78 | #   return(ppt)
79 | # }
80 | 
81 | #devtools::install_github("andrejadd/GWSDAT")
82 | #library(GWSDAT)
83 | devtools::load_all()
84 | launchApp(GWSDAT_Options)
85 | #launchApp(session_file = "data/GWSDAT_debug_shapefile.RData")
86 | 
87 | 
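# Single-data alternative (illustrative sketch, not part of this script):
# instead of an options list, launchApp() also accepts a saved analysis
# session via 'session_file' (see man/launchApp.Rd). Assuming the packaged
# example session GWSDAT_Example.rds is compatible with this version:
#
#   example_session <- system.file("extdata", "GWSDAT_Example.rds", package = "GWSDAT")
#   launchApp(session_file = example_session)
--------------------------------------------------------------------------------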