├── pyMannKendall-master
│   ├── tests
│   │   ├── __init__.py
│   │   └── test_pymannkendall.py
│   ├── requirements.txt
│   ├── .gitattributes
│   ├── MANIFEST.in
│   ├── setup.cfg
│   ├── Paper
│   │   ├── Hussain et al [2019] - pyMannKendall a python package for non parametric Mann Kendall family of trend tests.pdf
│   │   ├── paper.bib
│   │   └── paper.md
│   ├── .travis.yml
│   ├── Examples
│   │   ├── shampoo.csv
│   │   ├── AirPassengers.csv
│   │   └── daily-total-female-births.csv
│   ├── pymannkendall
│   │   └── __init__.py
│   ├── LICENSE.txt
│   ├── setup.py
│   ├── .gitignore
│   ├── CODE_OF_CONDUCT.md
│   ├── CONTRIBUTING.md
│   └── README.md
├── .DS_Store
├── data.mat
├── Model
│   ├── AR model.m
│   ├── MA model.m
│   ├── ARIMA model.m
│   └── ARMA model.m
├── bfast-master
│   ├── data
│   │   ├── som.rda
│   │   ├── ndvi.rda
│   │   ├── simts.rda
│   │   ├── harvest.rda
│   │   └── somaliadat.rda
│   ├── inst
│   │   ├── extdata
│   │   │   ├── modisraster.gri
│   │   │   └── modisraster.grd
│   │   └── CITATION
│   ├── man
│   │   ├── ndvi.Rd
│   │   ├── som.Rd
│   │   ├── create16dayts.Rd
│   │   ├── modisraster.Rd
│   │   ├── dates.Rd
│   │   ├── harvest.Rd
│   │   ├── simts.Rd
│   │   ├── plot.bfast.Rd
│   │   ├── bfastts.Rd
│   │   ├── bfast01classify.Rd
│   │   ├── bfast-package.rd
│   │   ├── bfastpp.Rd
│   │   ├── bfast01.Rd
│   │   ├── bfast.Rd
│   │   └── bfastmonitor.Rd
│   ├── R
│   │   ├── create16dayts.R
│   │   ├── print.bfast.R
│   │   ├── history_break.R
│   │   ├── history_roc.R
│   │   ├── bfastts.R
│   │   ├── print.bfastmonitor.R
│   │   ├── plot.bfastmonitor.R
│   │   ├── bfastpp.R
│   │   ├── plot.bfast.R
│   │   ├── bfastmonitor.R
│   │   ├── seasonal.R
│   │   ├── bfast.R
│   │   ├── bfast01classify.R
│   │   └── bfast01.R
│   ├── NAMESPACE
│   ├── NEWS
│   ├── MD5
│   └── DESCRIPTION
├── readme.txt
└── Dependent function
    ├── armax.m
    ├── autocorr.m
    ├── ar.m
    ├── parcorr.m
    └── iddata.m
/pyMannKendall-master/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pyMannKendall-master/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | pytest
--------------------------------------------------------------------------------
/pyMannKendall-master/.gitattributes:
--------------------------------------------------------------------------------
1 | pymannkendall/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/.DS_Store
--------------------------------------------------------------------------------
/data.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/data.mat
--------------------------------------------------------------------------------
/pyMannKendall-master/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include pymannkendall/_version.py
--------------------------------------------------------------------------------
/Model/AR model.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/Model/AR model.m
--------------------------------------------------------------------------------
/Model/MA model.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/Model/MA model.m
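Note on the Model scripts: the four .m files are stored as raw links, so their code is not shown inline. According to readme.txt further down, each script takes a time series 'y' (for the sample data, y=data(1:95)) and returns its forecasts in 'PF'. Purely as an illustrative sketch of the same fit-and-forecast workflow — written in R rather than the repository's MATLAB, with everything apart from the names y and PF assumed — it amounts to:

## Sketch only: fit AR / ARIMA models to a series y and forecast ahead,
## mirroring the 'y' in / 'PF' out convention described in readme.txt.
set.seed(1)
y <- arima.sim(model = list(ar = c(0.5, -0.2)), n = 95)  # stand-in for data(1:95)

fit_ar    <- arima(y, order = c(2, 0, 0))  # AR(2)
fit_arima <- arima(y, order = c(1, 1, 1))  # ARIMA(1,1,1)

## PF mirrors the readme's output convention: point forecasts ahead
PF <- predict(fit_arima, n.ahead = 10)$pred
PF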
-------------------------------------------------------------------------------- /Model/ARIMA model.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/Model/ARIMA model.m -------------------------------------------------------------------------------- /Model/ARMA model.m: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/Model/ARMA model.m -------------------------------------------------------------------------------- /bfast-master/data/som.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/data/som.rda -------------------------------------------------------------------------------- /bfast-master/data/ndvi.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/data/ndvi.rda -------------------------------------------------------------------------------- /bfast-master/data/simts.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/data/simts.rda -------------------------------------------------------------------------------- /bfast-master/data/harvest.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/data/harvest.rda -------------------------------------------------------------------------------- /bfast-master/data/somaliadat.rda: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/data/somaliadat.rda -------------------------------------------------------------------------------- /bfast-master/inst/extdata/modisraster.gri: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/bfast-master/inst/extdata/modisraster.gri -------------------------------------------------------------------------------- /pyMannKendall-master/setup.cfg: -------------------------------------------------------------------------------- 1 | [versioneer] 2 | VCS = git 3 | style = pep440 4 | versionfile_source = pymannkendall/_version.py 5 | versionfile_build = pymannkendall/_version.py 6 | tag_prefix = v 7 | parentdir_prefix = pymannkendall- -------------------------------------------------------------------------------- /pyMannKendall-master/Paper/Hussain et al [2019] - pyMannKendall a python package for non parametric Mann Kendall family of trend tests.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Three-Poles/Time-series-analysis/HEAD/pyMannKendall-master/Paper/Hussain et al [2019] - pyMannKendall a python package for non parametric Mann Kendall family of trend tests.pdf -------------------------------------------------------------------------------- /bfast-master/man/ndvi.Rd: -------------------------------------------------------------------------------- 1 | \name{ndvi} 2 | \alias{ndvi} 3 | \docType{data} 4 | \title{ 5 | A random NDVI time series 6 | } 7 | 
\description{
8 | A univariate time series object of class "ts". Frequency is set to 24.
9 | }
10 |
11 | \usage{data(ndvi)}
12 |
13 | \examples{
14 | plot(ndvi)
15 | }
16 | \keyword{datasets}
17 | \keyword{ts}
18 |
--------------------------------------------------------------------------------
/pyMannKendall-master/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | cache: pip
3 | python:
4 |   - 2.7
5 |   - 3.4
6 |   - 3.5
7 |   - 3.6
8 | # Enable 3.7 without globally enabling sudo and dist: xenial for other build jobs
9 | matrix:
10 |   include:
11 |     - python: 3.7
12 |       dist: xenial
13 |       sudo: true
14 | install: pip install -r requirements.txt
15 | script: pytest -v
--------------------------------------------------------------------------------
/bfast-master/R/create16dayts.R:
--------------------------------------------------------------------------------
1 | ## a function to create 16-day time series
2 |
3 | create16dayts <- function(data,dates) {
4 |   z <- zoo(data,dates)
5 |   yr <- as.numeric(format(time(z), "%Y"))
6 |   jul <- as.numeric(format(time(z), "%j"))
7 |   delta <- min(unlist(tapply(jul, yr, diff))) # 16
8 |   zz <- aggregate(z, yr + (jul - 1) / delta / 23)
9 |   (tso <- as.ts(zz))
10 |   return(tso)
11 | }
--------------------------------------------------------------------------------
/bfast-master/R/print.bfast.R:
--------------------------------------------------------------------------------
1 | print.bfast <- function(x, ...)
2 | {
3 |   cat("\n  TREND BREAKPOINTS")
4 |   niter <- length(x$output)
5 |   if(x$output[[niter]]$Vt.bp[1] != 0)
6 |     print(x$output[[niter]]$ci.Vt)
7 |   else
8 |     cat(":  None\n")
9 |   cat("\n  SEASONAL BREAKPOINTS")
10 |   if(x$output[[niter]]$Wt.bp[1] != 0)
11 |     print(x$output[[niter]]$ci.Wt)
12 |   else
13 |     cat(":  None\n")
14 |   cat("\n")
15 | }
16 |
--------------------------------------------------------------------------------
/readme.txt:
--------------------------------------------------------------------------------
1 | # Model ---- contains the AR, MA, ARMA, and ARIMA models:
2 | ## AR model.m
3 | ## MA model.m
4 | ## ARMA model.m
5 | ## ARIMA model.m
6 |
7 | # Dependent function ---- contains the functions required to run the AR, MA, ARMA, and ARIMA models:
8 | ## ar.m
9 | ## armax.m
10 | ## autocorr.m
11 | ## iddata.m
12 | ## parcorr.m
13 | ## predict.m
14 |
15 | # Operating environment: Windows 7 or later
16 |
17 | # Platform: MATLAB
18 |
19 | # Input: the parameter 'y' in AR model, MA model, ARMA model, and ARIMA model is the time-series data.
20 | # Output: the parameter 'PF' in AR model, MA model, ARMA model, and ARIMA model holds the time-series predictions.
21 |
22 | # Note: detailed comments on running each model are given in the code.
23 |
24 | # data.mat provides the example data; to run the models, set "y=data(1:95);".
--------------------------------------------------------------------------------
/bfast-master/man/som.Rd:
--------------------------------------------------------------------------------
1 | \name{som}
2 | \alias{som}
3 | \docType{data}
4 | \title{
5 | Two 16-day NDVI time series from the south of Somalia
6 | }
7 | \description{
8 | \code{som} is a dataframe containing time and two NDVI time series to
9 | illustrate how the monitoring approach works.
10 | }
11 |
12 | \usage{data(som)}
13 |
14 | \source{
15 | Needs to be added.
16 | }
17 |
18 | \examples{
19 | ## first define the data as a regular time series (i.e.
ts object) 20 | library(zoo) 21 | NDVI <- as.ts(zoo(som$NDVI.b,som$Time)) 22 | plot(NDVI) 23 | 24 | } 25 | \keyword{datasets} 26 | -------------------------------------------------------------------------------- /bfast-master/man/create16dayts.Rd: -------------------------------------------------------------------------------- 1 | \name{create16dayts} 2 | \alias{create16dayts} 3 | \title{A helper function to create time series} 4 | 5 | \description{ 6 | Time series creation 7 | } 8 | 9 | \usage{ 10 | create16dayts(data, dates) 11 | } 12 | 13 | \arguments{ 14 | \item{data}{A vector} 15 | \item{dates}{A vector ....} 16 | } 17 | 18 | \author{Achim Zeileis, Jan Verbesselt} 19 | 20 | \seealso{\code{\link[bfast]{bfastmonitor}}} 21 | 22 | \examples{ 23 | ## set up a 16-day time series 24 | #ndvi <- create16dayts(modisraster[1],dates) 25 | #plot(ndvi) 26 | } 27 | 28 | \keyword{ts} 29 | 30 | -------------------------------------------------------------------------------- /bfast-master/R/history_break.R: -------------------------------------------------------------------------------- 1 | ################################## 2 | ## Bai & Perron last breakpoint ## 3 | ################################## 4 | 5 | history_break <- function(formula, data, h = NULL, hpc = "none") { 6 | n <- nrow(data) 7 | ## rule of thumb for minimal segment size 8 | if(is.null(h)) h <- 6 * NCOL(model.matrix(formula, data = data[0,])) 9 | 10 | ## conduct breakpoints estimation 11 | bp <- breakpoints(formula, data = data, h = h, hpc = hpc) 12 | 13 | y_start <- tail(breakpoints(bp)$breakpoints, 1) 14 | y_start <- if(is.na(y_start)) 1 else y_start + 1 15 | data$time[y_start] 16 | } 17 | -------------------------------------------------------------------------------- /pyMannKendall-master/Examples/shampoo.csv: -------------------------------------------------------------------------------- 1 | "Month","Sales" 2 | "1-01",266.0 3 | "1-02",145.9 4 | "1-03",183.1 5 | "1-04",119.3 6 | "1-05",180.3 7 | "1-06",168.5 8 | "1-07",231.8 9 | "1-08",224.5 10 | "1-09",192.8 11 | "1-10",122.9 12 | "1-11",336.5 13 | "1-12",185.9 14 | "2-01",194.3 15 | "2-02",149.5 16 | "2-03",210.1 17 | "2-04",273.3 18 | "2-05",191.4 19 | "2-06",287.0 20 | "2-07",226.0 21 | "2-08",303.6 22 | "2-09",289.9 23 | "2-10",421.6 24 | "2-11",264.5 25 | "2-12",342.3 26 | "3-01",339.7 27 | "3-02",440.4 28 | "3-03",315.9 29 | "3-04",439.3 30 | "3-05",401.3 31 | "3-06",437.4 32 | "3-07",575.5 33 | "3-08",407.6 34 | "3-09",682.0 35 | "3-10",475.3 36 | "3-11",581.3 37 | "3-12",646.9 -------------------------------------------------------------------------------- /bfast-master/man/modisraster.Rd: -------------------------------------------------------------------------------- 1 | \name{modisraster} 2 | \alias{modisraster} 3 | \docType{data} 4 | \title{ 5 | A raster brick of 16-day satellite image NDVI time series for a small subset in south eastern Somalia. 6 | } 7 | \description{ 8 | A raster brick containing 16-day NDVI satellite images (MOD13C1 product). 9 | } 10 | 11 | \source{ 12 | Verbesselt, J., R. Hyndman, G. Newnham, and D. Culvenor (2012). 13 | Near real-time disturbance detection using satellite image time series. 14 | \emph{Remote Sensing of Environment}. \url{http://eeecon.uibk.ac.at/wopec2/repec/inn/wpaper/2011-18.pdf}. 
15 | } 16 | 17 | \examples{ 18 | 19 | ## see ?bfastmonitor 20 | 21 | } 22 | \keyword{datasets} 23 | \keyword{ts} 24 | -------------------------------------------------------------------------------- /bfast-master/man/dates.Rd: -------------------------------------------------------------------------------- 1 | \name{dates} 2 | \alias{dates} 3 | \docType{data} 4 | \title{ 5 | A vector with date information (a Datum type) to be linked with each NDVI layer within the modis raster brick (modisraster data set) 6 | } 7 | \description{ 8 | \code{dates} is an object of class "Date" and contains the "Date" information to create a 16-day time series object. 9 | } 10 | 11 | 12 | \source{ 13 | Verbesselt, J., R. Hyndman, G. Newnham, and D. Culvenor (2012). 14 | Near real-time disturbance detection using satellite image time series. 15 | \emph{Remote Sensing of Environment}. \url{http://eeecon.uibk.ac.at/wopec2/repec/inn/wpaper/2011-18.pdf}. 16 | } 17 | 18 | \examples{ 19 | ## see ?bfastmonitor for examples 20 | } 21 | \keyword{datasets} 22 | -------------------------------------------------------------------------------- /bfast-master/R/history_roc.R: -------------------------------------------------------------------------------- 1 | ######################################## 2 | ## Reversely Ordered CUSUM (ROC) test ## 3 | ######################################## 4 | 5 | ## A technique to verify whether or not the historical period is stable or not 6 | ## reversely order sample and perform 7 | ## recursive CUSUM test 8 | history_roc <- function(formula, data, level = 0.05) { 9 | n <- nrow(data) 10 | data_rev <- data[n:1,] 11 | data_rev$response <- ts(data_rev$response) 12 | y_rcus <- efp(formula, data = data_rev, type = "Rec-CUSUM") 13 | 14 | y_start <- if(sctest(y_rcus)$p.value < level) { 15 | length(y_rcus$process) - min(which(abs(y_rcus$process)[-1] > boundary(y_rcus)[-1])) + 1 16 | } else { 17 | 1 18 | } 19 | data$time[y_start] 20 | } -------------------------------------------------------------------------------- /pyMannKendall-master/pymannkendall/__init__.py: -------------------------------------------------------------------------------- 1 | from .pymannkendall import sens_slope, seasonal_sens_slope, original_test, hamed_rao_modification_test, yue_wang_modification_test, pre_whitening_modification_test, trend_free_pre_whitening_modification_test, multivariate_test, seasonal_test, regional_test, correlated_multivariate_test, correlated_seasonal_test, partial_test 2 | 3 | __all__ = [sens_slope, seasonal_sens_slope, original_test, hamed_rao_modification_test, yue_wang_modification_test, pre_whitening_modification_test, trend_free_pre_whitening_modification_test, multivariate_test, seasonal_test, regional_test, correlated_multivariate_test, correlated_seasonal_test, partial_test] 4 | 5 | from ._version import get_versions 6 | __version__ = get_versions()['version'] 7 | del get_versions 8 | -------------------------------------------------------------------------------- /bfast-master/man/harvest.Rd: -------------------------------------------------------------------------------- 1 | \name{harvest} 2 | \alias{harvest} 3 | \docType{data} 4 | \title{ 5 | 16-day NDVI time series for a Pinus radiata plantation. 6 | } 7 | \description{ 8 | A univariate time series object of class "ts". Frequency is set to 23 -- the approximate number of observations per year. 9 | } 10 | 11 | \usage{data(harvest)} 12 | 13 | \source{ 14 | Verbesselt, J., R. Hyndman, G. Newnham, and D. Culvenor (2009). 
15 | Detecting trend and seasonal changes in satellite image time series. 16 | \emph{Remote Sensing of Environment}. \url{http://dx.doi.org/10.1016/j.rse.2009.08.014}. 17 | Or see \url{http://robjhyndman.com/papers/bfast1}. 18 | } 19 | 20 | \examples{ 21 | plot(harvest,ylab='NDVI') 22 | # References 23 | citation("bfast") 24 | } 25 | \keyword{datasets} 26 | \keyword{ts} 27 | -------------------------------------------------------------------------------- /bfast-master/man/simts.Rd: -------------------------------------------------------------------------------- 1 | \name{simts} 2 | \alias{simts} 3 | \docType{data} 4 | \title{ 5 | Simulated seasonal 16-day NDVI time series 6 | } 7 | \description{ 8 | \code{simts} is an object of class "stl" and consists of seasonal, trend (equal to 0) and noise components. The 9 | simulated noise is typical for remotely sensed satellite data. 10 | } 11 | 12 | \usage{data(simts)} 13 | 14 | \source{ 15 | Verbesselt, J., R. Hyndman, G. Newnham, and D. Culvenor (2009). 16 | Detecting trend and seasonal changes in satellite image time series. 17 | \emph{Remote Sensing of Environment}. \url{http://dx.doi.org/10.1016/j.rse.2009.08.014}. 18 | Or see \url{http://robjhyndman.com/papers/bfast1}. 19 | } 20 | 21 | \examples{ 22 | plot(simts) 23 | # References 24 | citation("bfast") 25 | } 26 | \keyword{datasets} 27 | -------------------------------------------------------------------------------- /bfast-master/NAMESPACE: -------------------------------------------------------------------------------- 1 | import("graphics", "stats", "strucchange", "zoo", "raster", "sp", "forecast") 2 | 3 | export( 4 | "bfast", 5 | "bfastmonitor", 6 | "bfastpp", 7 | "bfastts", 8 | "bfast01", 9 | "bfast01classify" 10 | ) 11 | 12 | S3method("plot", "bfast") 13 | S3method("print", "bfast") 14 | S3method("plot", "bfastmonitor") 15 | S3method("print", "bfastmonitor") 16 | S3method("breakpoints", "bfast01") 17 | S3method("breakdates", "bfast01") 18 | S3method("logLik", "bfast01") 19 | S3method("deviance", "bfast01") 20 | S3method("model.frame", "bfast01") 21 | S3method("model.matrix", "bfast01") 22 | S3method("nobs", "bfast01") 23 | S3method("AIC", "bfast01") 24 | S3method("BIC", "bfast01") 25 | S3method("coef", "bfast01") 26 | S3method("fitted", "bfast01") 27 | S3method("residuals", "bfast01") 28 | S3method("predict", "bfast01") 29 | S3method("as.zoo", "bfast01") 30 | S3method("plot", "bfast01") -------------------------------------------------------------------------------- /bfast-master/R/bfastts.R: -------------------------------------------------------------------------------- 1 | ## for now only for conversion to a time series with 365 days 2 | ## to do solve the issue of 29 Februari 3 | ## to do and functionality for other types of time series 4 | ## e.g. 
10 day or 16 day time series 5 | bfastts <- function(data,dates, type = c("irregular", "16-day", "10-day")) { 6 | 7 | yday365 <- function(x) { 8 | x <- as.POSIXlt(x) 9 | mdays <- c(31L, 28L, 31L, 30L, 31L, 30L, 31L, 31L, 30L, 31L, 30L, 31L) 10 | cumsum(c(0L, mdays))[1L + x$mon] + x$mday 11 | } 12 | 13 | if (type == "irregular") { 14 | zz <- zoo(data,1900 + as.POSIXlt(dates)$year + (yday365(dates) - 1)/365, frequency = 365) 15 | } 16 | 17 | if (type == "16-day") { 18 | z <- zoo(data, dates) 19 | yr <- as.numeric(format(time(z), "%Y")) 20 | jul <- as.numeric(format(time(z), "%j")) 21 | delta <- min(unlist(tapply(jul, yr, diff))) # 16 22 | zz <- aggregate(z, yr + (jul - 1) / delta / 23) 23 | } 24 | 25 | if (type == "10-day") { 26 | tz <- as.POSIXlt(dates) 27 | zz <- zoo(data, 28 | 1900L + tz$year + round((tz$yday - 1L)/ 10L)/36L, 29 | frequency = 36L) 30 | } 31 | 32 | tso <- as.ts(zz) 33 | return(tso) 34 | } 35 | -------------------------------------------------------------------------------- /bfast-master/NEWS: -------------------------------------------------------------------------------- 1 | Changes in Version 1.5-7 2 | 3 | o all required packages are now in imports so you have to load the package e.g. zoo yourself now. 4 | 5 | Changes in Version 1.5-5 6 | 7 | o Bfast01 classification function added 8 | 9 | Changes in Version 1.5 10 | 11 | o Bfast01 function added 12 | 13 | Changes in Version 1.4-4 14 | 15 | o Bfastmonitor function added 16 | 17 | Changes in Version 1.4-3 18 | 19 | o Preparing helper functions for processing of different types of time series data 20 | o Preparing structure and plan for raster brick processing (satellite image time series processing) 21 | 22 | 23 | Changes in Version 1.4-1 24 | 25 | o Plotting functionality is improved for bfastmonitor() output (i.e. when dealing with daily data and lot's of missing data points) 26 | 27 | Changes in Version 1.4-0 28 | 29 | o Added bfastmonitor() for near real-time detection of breaks in BFAST-type 30 | model. Data pre-processing is handled by a new function bfastpp() whose 31 | results can easily be plugged into strucchange (or other modeling/testing 32 | functions). 33 | 34 | o New data set "som" with NDVI series from Somalia. 35 | 36 | -------------------------------------------------------------------------------- /pyMannKendall-master/LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Md. Manjurul Hussain Shourov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /bfast-master/R/print.bfastmonitor.R: -------------------------------------------------------------------------------- 1 | print.bfastmonitor <- function(x, ...) 2 | { 3 | freq <- frequency(x$data) 4 | cat("\nBFAST monitoring\n\n1. History period\n") 5 | cat(sprintf("Stable period selected: %i(%i)--%i(%i)\n", 6 | floor(x$history[1]), 7 | round((x$history[1] - floor(x$history[1])) * freq) + 1, 8 | floor(x$history[2]), 9 | round((x$history[2] - floor(x$history[2])) * freq) + 1)) 10 | cat(sprintf("Length (in years): %f\n", diff(x$history))) 11 | 12 | cat("Model fit:\n") 13 | print(coef(x$model)) 14 | cat(sprintf("R-squared: %f\n", summary(x$model)$r.squared)) 15 | 16 | cat("\n\n2. Monitoring period\n") 17 | cat(sprintf("Monitoring period assessed: %i(%i)--%i(%i)\n", 18 | floor(x$monitor[1]), 19 | round((x$monitor[1] - floor(x$monitor[1])) * freq) + 1, 20 | floor(x$monitor[2]), 21 | round((x$monitor[2] - floor(x$monitor[2])) * freq) + 1)) 22 | cat(sprintf("Length (in years): %f\n", diff(x$monitor))) 23 | if(is.na(x$breakpoint)) { 24 | cat("Break detected at: -- (no break)\n\n") 25 | } else { 26 | cat(sprintf("Break detected at: %i(%i)\n\n", floor(x$breakpoint), round((x$breakpoint - floor(x$breakpoint)) * freq) + 1)) 27 | } 28 | 29 | invisible(x) 30 | } -------------------------------------------------------------------------------- /pyMannKendall-master/setup.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | from setuptools import setup 5 | import versioneer 6 | 7 | __author__ = "Md. Manjurul Hussain Shourov" 8 | __version__ = versioneer.get_version() 9 | __email__ = "mmhs013@gmail.com" 10 | __license__ = "MIT" 11 | __copyright__ = "Copyright Md. Manjurul Hussain Shourov (2019)" 12 | 13 | with open("README.md", "r") as fh: 14 | long_description = fh.read() 15 | 16 | setup( 17 | name = "pymannkendall", 18 | version = __version__, 19 | author = __author__, 20 | author_email = __email__, 21 | description = ("A python package for non-parametric Mann-Kendall family of trend tests."), 22 | long_description = long_description, 23 | long_description_content_type = "text/markdown", 24 | url = "https://github.com/mmhs013/pymannkendall", 25 | packages = ["pymannkendall"], 26 | license = __license__, 27 | install_requires = ["numpy", "scipy"], 28 | classifiers = [ 29 | "Programming Language :: Python :: 2.7", 30 | "Programming Language :: Python :: 3.4", 31 | "Programming Language :: Python :: 3.5", 32 | "Programming Language :: Python :: 3.6", 33 | "Programming Language :: Python :: 3.7", 34 | "License :: OSI Approved :: MIT License", 35 | "Intended Audience :: Science/Research", 36 | "Operating System :: OS Independent", 37 | "Topic :: Scientific/Engineering", 38 | "Development Status :: 5 - Production/Stable" 39 | ] 40 | ) -------------------------------------------------------------------------------- /bfast-master/R/plot.bfastmonitor.R: -------------------------------------------------------------------------------- 1 | plot.bfastmonitor <- function(x, na.rm = TRUE, main = TRUE, ylab = "Data", ...) 
2 | { 3 | if(isTRUE(main)) main <- if(is.na(x$breakpoint)) { 4 | "No break detected" 5 | } else { 6 | sprintf("Break detected at: %i(%i)", floor(x$breakpoint), 7 | round((x$breakpoint - floor(x$breakpoint)) * frequency(x$data)) + 1) 8 | } 9 | 10 | y <- if(is.null(dim(x$data))) x$data else x$data[,1L] 11 | if(na.rm) y <- na.omit(as.zoo(y)) 12 | plot(y, type = "n", main = main, ylab = ylab, ...) 13 | lines(window(y, end = x$history[2]), col = "black") 14 | lines(window(y, start = x$history[1], end = x$history[2]), 15 | col = "darkgreen", type = "p", pch = 19, cex = 0.5) 16 | lines(window(y, start = x$monitor[1]), col = "red") 17 | points(window(y, start = x$monitor[1]), col = "red", pch=19, cex=0.5) # new 18 | test_pred <- predict(x$model, newdata = x$tspp) 19 | test_pred <- zoo(test_pred, x$tspp$time, frequency = frequency(y)) 20 | lines(test_pred, col = "blue", lwd = 1.5) 21 | 22 | abline(v = x$monitor[1], lty = 2, col = "black", lwd = 1) 23 | abline(v = x$breakpoint, lty = 2, col = "red", lwd = 2) 24 | 25 | legend("bottomleft", bty = "n", 26 | c("Historical data", "New data", "Stable history", "Fit based on stable history", "Start of the Monitoring period", "Time of detected break"), 27 | lty = c(1, 1, NA, 1, 2, 2), 28 | col = c("black", "red", "darkgreen", "blue", "black", "red"), 29 | pch = c(NA, NA, 19, NA, NA, NA) 30 | ) 31 | invisible(x) 32 | } 33 | -------------------------------------------------------------------------------- /pyMannKendall-master/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | # *.ipynb
75 |
76 | # pyenv
77 | .python-version
78 |
79 | # celery beat schedule file
80 | celerybeat-schedule
81 |
82 | # SageMath parsed files
83 | *.sage.py
84 |
85 | # Environments
86 | .env
87 | .venv
88 | env/
89 | venv/
90 | ENV/
91 | env.bak/
92 | venv.bak/
93 |
94 | # Spyder project settings
95 | .spyderproject
96 | .spyproject
97 |
98 | # Rope project settings
99 | .ropeproject
100 |
101 | # mkdocs documentation
102 | /site
103 |
104 | # mypy
105 | .mypy_cache/
106 |
--------------------------------------------------------------------------------
/bfast-master/man/plot.bfast.Rd:
--------------------------------------------------------------------------------
1 | \name{plot.bfast}
2 | \alias{plot.bfast}
3 |
4 | \title{
5 | Methods for objects of class "bfast".
6 | }
7 | \description{
8 | Plot methods for objects of class "bfast".
9 | }
10 | \usage{
11 | %%## S3 method for class 'bfast':
12 | \method{plot}{bfast}(x, type = c("components", "all", "data", "seasonal",
13 |   "trend", "noise"), sim = NULL, largest=FALSE, main, ANOVA = FALSE, ...)
14 | }
15 |
16 | \arguments{
17 |   \item{x}{ \code{\link[bfast]{bfast}} object }
18 |   \item{type}{Indicates the type of plot. See details.}
19 |   \item{sim}{Optional \code{\link[stats]{stl}} object containing the original components used when simulating \code{x}.}
20 |   \item{largest}{If TRUE, show the largest jump in the trend component.}
21 |   \item{ANOVA}{If TRUE, derive slope and significance values for each identified trend segment.}
22 |   \item{main}{An overall title for the plot.}
23 |   \item{...}{Further arguments passed to the \code{\link[graphics]{plot}} function.}
24 | }
25 |
26 | \details{
27 | This function creates various plots to demonstrate the results of a bfast decomposition.
28 | The type of plot shown depends on the value of \code{type}.
29 | \itemize{
30 |   \item{components}{Shows the final estimated components with breakpoints.}
31 |   \item{all}{Plots the estimated components and breakpoints from all iterations.}
32 |   \item{data}{Just plots the original time series data.}
33 |   \item{seasonal}{Shows the seasonal component including breakpoints.}
34 |   \item{trend}{Shows the trend component including breakpoints.}
35 |   \item{noise}{Plots the noise component along with its acf and pacf.}
36 |   }
37 | If \code{sim} is not \code{NULL}, the components used in simulation are also shown on each graph.
38 | }
39 |
40 | \author{
41 | Jan Verbesselt, Rob Hyndman and Rogier De Jong
42 | }
43 |
44 | \examples{
45 | ## See \code{\link[bfast]{bfast}} for examples.
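## A hypothetical minimal sketch (an editor's illustration, not from the
## package sources), using the bundled 'harvest' NDVI series:
# fit <- bfast(harvest, h = 0.15, season = "harmonic", max.iter = 1)
# plot(fit, type = "components")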
46 | } 47 | 48 | \keyword{ts} 49 | 50 | -------------------------------------------------------------------------------- /bfast-master/MD5: -------------------------------------------------------------------------------- 1 | 942ebfed20d75a141e8493d8a6520eee *DESCRIPTION 2 | 3a1a9f30c3fff8f5a494eaabb8fa56e2 *NAMESPACE 3 | d90bc73e80418b5c7ee2b849e7b5906c *NEWS 4 | 5231f83fd90aa3a5b1b71c13214783ae *R/bfast.R 5 | 12a163936b6fd742eda32342f446aba2 *R/bfast01.R 6 | da2e00d3eb80d212a8d2fed024937b01 *R/bfast01classify.R 7 | d2b34e8cfc3f514edcddf852f6e13760 *R/bfastmonitor.R 8 | 7f84e78fc9d5034bd24d947cf99b9258 *R/bfastpp.R 9 | 409bedf76b4a5b0b6ade859c579c9e44 *R/bfastts.R 10 | c1fe8c0c474c378a9abcaf0b7d6b872f *R/create16dayts.R 11 | ed9bc3ba4e954b7bf61a981eff977066 *R/history_break.R 12 | 9b7cd5fd87d68bd17ffaa1f6f6623448 *R/history_roc.R 13 | 1c5e17219d1bf25aca56f0e858ee83c3 *R/plot.bfast.R 14 | 175a3901f23910053e1ba428f7d95ecf *R/plot.bfastmonitor.R 15 | a8bad8afa7a3cae12d13e01e03c3c2ee *R/print.bfast.R 16 | 558d273a5b5aadc08328490e7d1d9dc1 *R/print.bfastmonitor.R 17 | 4a07e9cba7ee6d89b67ac71fc4b24249 *R/seasonal.R 18 | e236e040d0dcd22ef4d7986bad36350e *data/harvest.rda 19 | 7f7a0c872ecfcd186a050594f2143a6e *data/ndvi.rda 20 | 4df6ce08f55140ab2347e638df5ce4d8 *data/simts.rda 21 | 8eb23b45fa7f56c6fc5c240925cee4f7 *data/som.rda 22 | cbe79e181d10d27a9f0546010a5bac68 *data/somaliadat.rda 23 | 41aca6ad8216f81e636a0aa51acd9561 *inst/CITATION 24 | d666a343622b0a746cbc88490f662cdb *inst/extdata/modisraster.grd 25 | 123f09d1624eeda49de5b06907d91690 *inst/extdata/modisraster.gri 26 | 53f5ee8bf2a6d57bf71331cb42ceb742 *man/bfast-package.Rd 27 | 2687ed864ebd3183df30b56da5eb03e9 *man/bfast.Rd 28 | 50ee0c6b5d4d83668a5986465308d986 *man/bfast01.Rd 29 | 98cf7e12a41ee5eb40b159224b75b917 *man/bfast01classify.Rd 30 | 8db7f8d66f027c62ce9d55bbb359dbbc *man/bfastmonitor.Rd 31 | a48716ee48e55f09648a61b3adf4fa51 *man/bfastpp.Rd 32 | beecc712903e00807d06ff72cd043d88 *man/bfastts.Rd 33 | 2a569c9caa307d4e8f196abe1327655c *man/create16dayts.Rd 34 | 643adebceac3d88818ef66e53be3bef1 *man/dates.Rd 35 | 75b46b7df5e558f496a42815a1aa818e *man/harvest.Rd 36 | 5476b12c5e10464713cdc7142e85bdf5 *man/modisraster.Rd 37 | a16a3d8cb747da03aa4836c83e52e3c5 *man/ndvi.Rd 38 | 1bc7adac777edc84025d52b33c2bdce2 *man/plot.bfast.Rd 39 | 43a5d4be056c13941e29c82c1676b8c3 *man/simts.Rd 40 | 8f892e00a50fd45683bdb17c800c05dc *man/som.Rd 41 | -------------------------------------------------------------------------------- /bfast-master/DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: bfast 2 | Version: 1.5.7 3 | Date: 2014-08-27 4 | Title: Breaks For Additive Season and Trend (BFAST) 5 | Authors@R: c(person(given = "Jan", family = "Verbesselt", role = c("aut", "cre"), email = "Jan.Verbesselt@wur.nl"), 6 | person(given = "Achim", family = "Zeileis", role = "aut", email = "Achim.Zeileis@R-project.org"), 7 | person(given = "Rob", family = "Hyndman", role = "ctb", email = "Rob.Hyndman@buseco.monash.edu.au")) 8 | Author: Jan Verbesselt [aut, cre], Achim Zeileis [aut], Rob Hyndman [ctb] 9 | Maintainer: Jan Verbesselt 10 | Description: BFAST integrates the decomposition of time series into trend, 11 | seasonal, and remainder components with methods for detecting 12 | and characterizing abrupt changes within the trend and seasonal 13 | components. 
BFAST can be used to analyze different types of
14 | satellite image time series and can be applied to other disciplines
15 | dealing with seasonal or non-seasonal time series, such as hydrology,
16 | climatology, and econometrics. The algorithm can be extended to
17 | label detected changes with information on the parameters of the
18 | fitted piecewise linear models. BFAST monitoring functionality is added
19 | based on a paper that has been submitted to Remote Sensing of Environment.
20 | BFAST monitor provides functionality to detect disturbance in near real-time based on BFAST-type models.
21 | The BFAST approach is flexible and handles missing data without interpolation.
22 | Furthermore, different models can now be used to fit the time series data and detect structural changes (breaks).
23 | Depends: R (>= 2.15.0)
24 | Imports: graphics, stats, strucchange, zoo, forecast, sp, raster
25 | Suggests:
26 | License: GPL (>= 2)
27 | URL: http://bfast.R-Forge.R-project.org/
28 | LazyLoad: yes
29 | LazyData: yes
30 | Repository: CRAN
31 | Repository/R-Forge/Project: bfast
32 | Repository/R-Forge/Revision: 464
33 | Repository/R-Forge/DateTimeStamp: 2014-08-27 18:49:54
34 | Date/Publication: 2014-08-28 00:00:24
35 | Packaged: 2014-08-27 20:15:07 UTC; rforge
36 | NeedsCompilation: no
37 |
--------------------------------------------------------------------------------
/bfast-master/man/bfastts.Rd:
--------------------------------------------------------------------------------
1 | \name{bfastts}
2 | \alias{bfastts}
3 | \title{Create a regular time series object by combining data and date information}
4 | \description{
5 | Create a regular time series object by combining measurements (data) and time (dates) information.
6 | }
7 |
8 | \usage{
9 | bfastts(data, dates,
10 |        type = c("irregular", "16-day", "10-day"))
11 | }
12 |
13 | \arguments{
14 |   \item{data}{A data vector}
15 |   \item{dates}{Optional input of dates for each measurement in the 'data' variable. In
16 |   case the data is an irregular time series, a vector with 'dates' for
17 |   each measurement can be supplied using this 'dates' variable. The irregular data will be linked
18 |   with the dates vector to create a daily regular time series with a frequency = 365. Extra days
19 |   in leap years might cause problems. Please be careful using this option as it is
20 |   experimental. Feedback is welcome.}
21 |   \item{type}{\code{"irregular"} indicates that the data are collected at irregular dates and as such will be converted to a daily time series. \code{"16-day"} indicates that data are collected at a regular 16-day interval (e.g. the MODIS 16-day data products). \code{"10-day"} indicates that data are collected at the 10-day interval of the SPOT VEGETATION (S10) product. Warning: only use this option for the SPOT VEGETATION S10 time series, as other 10-day time series might require a different approach.}
22 | }
23 |
24 | \details{
25 | \code{bfastts} creates a regular time series.
26 | }
27 |
28 | \value{
29 | \code{bfastts} returns an object of class \code{"ts"}, i.e., a list with components as follows.
30 |   \item{zz}{a regular \code{"ts"} time series with a frequency equal to 365 or 23, i.e. a
16-day time series.} 31 | } 32 | 33 | \author{Achim Zeileis, Jan Verbesselt} 34 | 35 | \seealso{\code{\link[strucchange]{monitor}}, \code{\link[strucchange]{mefp}}, \code{\link[strucchange]{breakpoints}}} 36 | 37 | \examples{ 38 | 39 | library("raster") 40 | f <- system.file("extdata/modisraster.grd", package="bfast") 41 | modisbrick <- brick(f) 42 | ndvi <- bfastts(as.vector(modisbrick[1]), dates, type = c("16-day")) ## data of pixel 1 43 | plot(ndvi/10000) 44 | 45 | } 46 | 47 | \keyword{ts} 48 | 49 | -------------------------------------------------------------------------------- /bfast-master/R/bfastpp.R: -------------------------------------------------------------------------------- 1 | bfastpp <- function(data, order = 3, 2 | lag = NULL, slag = NULL, na.action = na.omit, 3 | stl = c("none", "trend", "seasonal", "both")) 4 | { 5 | ## double check what happens with 29-02 if that happens... 6 | ## we should keep it simple an remove the datum if that happens 7 | 8 | if(!is.ts(data)) data <- as.ts(data) 9 | 10 | ## STL pre-processing to try to adjust for trend or season 11 | stl <- match.arg(stl) 12 | if(stl != "none") { 13 | stl_adjust <- function(x) { 14 | x_stl <- stats::stl(x, s.window = "periodic")$time.series 15 | switch(stl, 16 | "trend" = x - x_stl[, "trend"], 17 | "seasonal" = x - x_stl[, "seasonal"], 18 | "both" = x - x_stl[, "trend"] - x_stl[, "seasonal"]) 19 | } 20 | if(NCOL(data) > 1L) { 21 | for(i in 1:NCOL(data)) data[,i] <- stl_adjust(data[,i]) 22 | } else { 23 | data <- stl_adjust(data) 24 | } 25 | } 26 | 27 | ## check for covariates 28 | if(NCOL(data) > 1L) { 29 | x <- coredata(data)[, -1L] 30 | y <- data[, 1L] 31 | } else { 32 | x <- NULL 33 | y <- data 34 | } 35 | 36 | ## data with trend and season factor 37 | rval <- data.frame( 38 | time = as.numeric(time(y)), 39 | response = y, 40 | trend = 1:NROW(y), 41 | season = factor(cycle(y)) 42 | ) 43 | 44 | ## set up harmonic trend matrix as well 45 | freq <- frequency(y) 46 | order <- min(freq, order) 47 | harmon <- outer(2 * pi * as.vector(time(y)), 1:order) 48 | harmon <- cbind(apply(harmon, 2, cos), apply(harmon, 2, sin)) 49 | colnames(harmon) <- if(order == 1) { 50 | c("cos", "sin") 51 | } else { 52 | c(paste("cos", 1:order, sep = ""), paste("sin", 1:order, sep = "")) 53 | } 54 | if((2 * order) == freq) harmon <- harmon[, -(2 * order)] 55 | rval$harmon <- harmon 56 | 57 | ## add lags 58 | nalag <- function(x, k) c(rep(NA, k), head(x, -k)) 59 | if(!is.null(lag)) { 60 | rval$lag <- sapply(lag, function(k) nalag(as.vector(y), k)) 61 | colnames(rval$lag) <- lag 62 | } 63 | if(!is.null(slag)) { 64 | rval$slag <- sapply(slag * freq, function(k) nalag(as.vector(y), k)) 65 | colnames(rval$slag) <- slag 66 | } 67 | 68 | ## add regressors 69 | rval$xreg <- x 70 | 71 | ## omit missing values 72 | rval <- na.action(rval) 73 | 74 | ## return everything 75 | return(rval) 76 | } 77 | -------------------------------------------------------------------------------- /bfast-master/man/bfast01classify.Rd: -------------------------------------------------------------------------------- 1 | \name{bfast01classify} 2 | \alias{bfast01classify} 3 | \title{Change type analysis of the bfast01 function} 4 | 5 | \description{ 6 | A function to determine the change type 7 | } 8 | 9 | \usage{ 10 | bfast01classify(object, alpha = 0.05, pct_stable = NULL) 11 | } 12 | 13 | \arguments{ 14 | \item{object}{ \code{\link[bfast]{bfast01}} object, i.e. 
the output of the \code{\link[bfast]{bfast01}} function.} 15 | \item{alpha}{ threshold for significance tests, default 0.05} 16 | \item{pct_stable}{threshold for segment stability, unit: percent change per unit time (0-100), default NULL} 17 | } 18 | 19 | \details{ 20 | \code{bfast01classify} 21 | } 22 | 23 | \value{ 24 | \code{bfast01classify} returns a data.frame with the following elements: 25 | \item{flag_type}{Type of shift: 26 | (1) monotonic increase, 27 | (2) monotonic decrease, 28 | (3) monotonic increase (with positive break), 29 | (4) monotonic decrease (with negative break), 30 | (5) interruption: increase with negative break, 31 | (6) interruption: decrease with positive break, 32 | (7) reversal: increase to decrease, 33 | (8) reversal: decrease to increase 34 | } 35 | \item{flag_significance}{SIGNIFICANCE FLAG: 36 | (0) both segments significant (or no break and significant), 37 | (1) only first segment significant, 38 | (2) only 2nd segment significant, 39 | (3) both segments insignificant (or no break and not significant) 40 | } 41 | \item{flag_pct_stable}{STABILITY FLAG: 42 | (0) change in both segments is substantial (or no break and substantial), 43 | (1) only first segment substantial, 44 | (2) only 2nd segment substantial 45 | (3) both segments are stable (or no break and stable) 46 | } 47 | and also significance and percentage of both segments before and after the potentially detected break: "p_segment1", "p_segment2", "pct_segment1", "pct_segment2". 48 | } 49 | 50 | \references{ 51 | de Jong R, Verbesselt J, Zeileis A, Schaepman M (2013). 52 | Shifts in global vegetation activity trends. 53 | \emph{Remote Sensing}, \bold{5}, 1117--1133. 54 | \url{http://dx.doi.org/10.3390/rs5031117} 55 | } 56 | 57 | \author{Rogier de Jong, Jan Verbesselt} 58 | 59 | \seealso{\code{\link[bfast]{bfast01}}} 60 | 61 | \examples{ 62 | library(zoo) 63 | ## define a regular time series 64 | ndvi <- as.ts(zoo(som$NDVI.a, som$Time)) 65 | ## fit variations 66 | bf1 <- bfast01(ndvi) 67 | bfast01classify(bf1, pct_stable = 0.25) 68 | } 69 | \keyword{ts,bfast01} 70 | 71 | -------------------------------------------------------------------------------- /pyMannKendall-master/Examples/AirPassengers.csv: -------------------------------------------------------------------------------- 1 | Month,#Passengers 2 | 1949-01,112 3 | 1949-02,118 4 | 1949-03,132 5 | 1949-04,129 6 | 1949-05,121 7 | 1949-06,135 8 | 1949-07,148 9 | 1949-08,148 10 | 1949-09,136 11 | 1949-10,119 12 | 1949-11,104 13 | 1949-12,118 14 | 1950-01,115 15 | 1950-02,126 16 | 1950-03,141 17 | 1950-04,135 18 | 1950-05,125 19 | 1950-06,149 20 | 1950-07,170 21 | 1950-08,170 22 | 1950-09,158 23 | 1950-10,133 24 | 1950-11,114 25 | 1950-12,140 26 | 1951-01,145 27 | 1951-02,150 28 | 1951-03,178 29 | 1951-04,163 30 | 1951-05,172 31 | 1951-06,178 32 | 1951-07,199 33 | 1951-08,199 34 | 1951-09,184 35 | 1951-10,162 36 | 1951-11,146 37 | 1951-12,166 38 | 1952-01,171 39 | 1952-02,180 40 | 1952-03,193 41 | 1952-04,181 42 | 1952-05,183 43 | 1952-06,218 44 | 1952-07,230 45 | 1952-08,242 46 | 1952-09,209 47 | 1952-10,191 48 | 1952-11,172 49 | 1952-12,194 50 | 1953-01,196 51 | 1953-02,196 52 | 1953-03,236 53 | 1953-04,235 54 | 1953-05,229 55 | 1953-06,243 56 | 1953-07,264 57 | 1953-08,272 58 | 1953-09,237 59 | 1953-10,211 60 | 1953-11,180 61 | 1953-12,201 62 | 1954-01,204 63 | 1954-02,188 64 | 1954-03,235 65 | 1954-04,227 66 | 1954-05,234 67 | 1954-06,264 68 | 1954-07,302 69 | 1954-08,293 70 | 1954-09,259 71 | 1954-10,229 72 | 1954-11,203 73 | 1954-12,229 74 | 
1955-01,242 75 | 1955-02,233 76 | 1955-03,267 77 | 1955-04,269 78 | 1955-05,270 79 | 1955-06,315 80 | 1955-07,364 81 | 1955-08,347 82 | 1955-09,312 83 | 1955-10,274 84 | 1955-11,237 85 | 1955-12,278 86 | 1956-01,284 87 | 1956-02,277 88 | 1956-03,317 89 | 1956-04,313 90 | 1956-05,318 91 | 1956-06,374 92 | 1956-07,413 93 | 1956-08,405 94 | 1956-09,355 95 | 1956-10,306 96 | 1956-11,271 97 | 1956-12,306 98 | 1957-01,315 99 | 1957-02,301 100 | 1957-03,356 101 | 1957-04,348 102 | 1957-05,355 103 | 1957-06,422 104 | 1957-07,465 105 | 1957-08,467 106 | 1957-09,404 107 | 1957-10,347 108 | 1957-11,305 109 | 1957-12,336 110 | 1958-01,340 111 | 1958-02,318 112 | 1958-03,362 113 | 1958-04,348 114 | 1958-05,363 115 | 1958-06,435 116 | 1958-07,491 117 | 1958-08,505 118 | 1958-09,404 119 | 1958-10,359 120 | 1958-11,310 121 | 1958-12,337 122 | 1959-01,360 123 | 1959-02,342 124 | 1959-03,406 125 | 1959-04,396 126 | 1959-05,420 127 | 1959-06,472 128 | 1959-07,548 129 | 1959-08,559 130 | 1959-09,463 131 | 1959-10,407 132 | 1959-11,362 133 | 1959-12,405 134 | 1960-01,417 135 | 1960-02,391 136 | 1960-03,419 137 | 1960-04,461 138 | 1960-05,472 139 | 1960-06,535 140 | 1960-07,622 141 | 1960-08,606 142 | 1960-09,508 143 | 1960-10,461 144 | 1960-11,390 145 | 1960-12,432 146 | -------------------------------------------------------------------------------- /Dependent function/armax.m: -------------------------------------------------------------------------------- 1 | function m = armax(varargin) 2 | %ARMAX Computes the prediction error estimate of an ARMAX model. 3 | % 4 | % M = ARMAX(Z,[na nb nc nk]) or M = ARMAX(Z,'na',na,'nb',nb,'nc',nc,'nk',nk) 5 | % 6 | % M : returns the estimated model in an IDPOLY object format 7 | % along with estimated covariances and structure information. 8 | % For the exact format of M see also help IDPOLY. 9 | % 10 | % Z : The estimation data in IDDATA object format. See help IDDATA 11 | % 12 | % [na nb nc nk] are the orders and delays of the ARMAX model 13 | % 14 | % A(q) y(t) = B(q) u(t-nk) + C(q) e(t) 15 | % 16 | % If the data have several inputs, nb and nk are row vectors with 17 | % lengths equal to the number of input channels. If the data is a time 18 | % series (no input) an ARMA model A(q) y(t) = C(q) e(t) is built. Then nb 19 | % and nk should be omitted, i.e. enter [na nc]. 20 | % 21 | % An alternative syntax is M = ARMAX(Z,Mi), where 22 | % Mi is an estimated model or created by IDPOLY. 23 | % The minimization is then initialized at the parameters given in Mi. 24 | % 25 | % By M = ARMAX(Z,nn,Property_1,Value_1, ...., Property_n,Value_n) 26 | % all properties associated with the model structure and the algorithm 27 | % can be affected. See HELP IDPOLY or IDPROPS ALGORITHM for a list of 28 | % Property/Value pairs. 29 | % 30 | % Note that ARMA models for time series is handled by ARMAX when applied 31 | % to data sets with no input. 32 | % 33 | % See also ARX, BJ, IV4, N4SID, OE, PEM. 34 | 35 | % Copyright 1986-2007 The MathWorks, Inc. 
36 | % $Revision: 1.18.4.6 $ $Date: 2007/12/14 14:43:11 $
37 |
38 | if nargin<2
39 |     disp('Usage: M = armax(Data,Orders);')
40 |     disp('       M = armax(Data,Orders,Prop/Value pairs).')
41 |     if nargout, m = []; end
42 |     return
43 | end
44 |
45 | try
46 |     [mdum,z] = pemdecod('armax',varargin{:});
47 | catch E
48 |     throw(E)
49 | end
50 |
51 | err = 0;
52 | z = setid(z);
53 | if isempty(pvget(z,'Name'))
54 |     z = pvset(z,'Name',inputname(1));
55 | end
56 | if isa(mdum,'idpoly')
57 |     nd = pvget(mdum,'nd');
58 |     nf = pvget(mdum,'nf');
59 |     if sum([nd nf])~=0
60 |         err = 1;
61 |     end
62 | else
63 |     err = 1;
64 | end
65 | if err
66 |     error('ident:estimation:invalidARMAXStructure',...
67 |         'This is not an ARMAX model. Type "help armax" for more information.')
68 | end
69 | % $$$ fixp = pvget(mdum,'FixedParameter');
70 | % $$$ if ~isempty(fixp)
71 | % $$$     warning(sprintf(['To fix a parameter, first define a nominal model.',...
72 | % $$$         '\nNote that mnemonic Parameter Names can be set by SETPNAME.']))
73 | % $$$ end
74 | try
75 |     m = pem(z,mdum);
76 | catch E
77 |     throw(E)
78 | end
79 |
80 | es = pvget(m,'EstimationInfo');
81 | es.Method = 'ARMAX';
82 | m = pvset(m,'EstimationInfo',es);
83 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
84 |
--------------------------------------------------------------------------------
/bfast-master/man/bfast-package.rd:
--------------------------------------------------------------------------------
1 | \name{bfast-package}
2 | \alias{bfast-package}
3 | \title{Breaks For Additive Season and Trend (BFAST)}
4 |
5 | \description{
6 | BFAST integrates the decomposition of time series into trend, seasonal, and remainder
7 | components with methods for detecting and characterizing abrupt changes within the trend and
8 | seasonal components. BFAST can be used to analyze different types of satellite image time
9 | series and can be applied to other disciplines dealing with seasonal or non-seasonal time
10 | series, such as hydrology, climatology, and econometrics. The algorithm can be extended to
11 | label detected changes with information on the parameters of the fitted piecewise linear
12 | models.
13 |
14 | Additionally, monitoring disturbances in BFAST-type models at the end of time series
15 | (i.e., in near real-time) is available: based on a model for stable historical behaviour,
16 | abnormal changes within newly acquired data can be detected. Different models are available
17 | for modeling the stable historical behavior. A season-trend model (with harmonic seasonal pattern) is
18 | used as a default in the regression modelling.
19 | }
20 |
21 | \details{The package contains:
22 | \itemize{
23 |   \item \code{\link[bfast]{bfast}}: Main function for iterative decomposition
24 |   and break detection as described in Verbesselt et al (2010ab).
25 |   \item \code{\link[bfast]{bfastmonitor}}: Monitoring approach for detecting disturbances in
26 |   near real-time (see Verbesselt et al. 2011, submitted to Remote Sensing and Environment).
27 |   \item \code{\link[bfast]{bfastpp}}: Data pre-processing for BFAST-type modeling.
28 |   \item Functions for plotting and printing, see \code{\link[bfast]{bfast}}.
29 |   \item \code{\link[bfast]{simts}}: Artificial example data set.
30 |   \item \code{\link[bfast]{harvest}}: NDVI time series of a P. radiata plantation that is harvested.
31 | \item \code{\link[bfast]{som}}: NDVI time series of locations in the south of Somalia to 32 | illustrate the near real-time disturbance approach 33 | } 34 | } 35 | 36 | \author{ 37 | Jan Verbesselt [aut, cre], Achim Zeileis [aut], Rob Hyndman [ctb], Rogier De Jong [ctb] 38 | } 39 | 40 | \references{ 41 | Verbesselt J, Zeileis A, Herold M (2012). 42 | Near real-time disturbance detection using satellite image time series. 43 | \emph{Remote Sensing of Environment}, \bold{123}, 98--108. 44 | \url{http://dx.doi.org/10.1016/j.rse.2012.02.022} 45 | 46 | Verbesselt J, Hyndman R, Newnham G, Culvenor D (2010). 47 | Detecting Trend and Seasonal Changes in Satellite Image Time Series. 48 | \emph{Remote Sensing of Environment}, \bold{114}(1), 106--115. 49 | \url{http://dx.doi.org/10.1016/j.rse.2009.08.014} 50 | 51 | Verbesselt J, Hyndman R, Zeileis A, Culvenor D (2010). 52 | Phenological Change Detection while Accounting for Abrupt and Gradual Trends in Satellite Image Time Series. 53 | \emph{Remote Sensing of Environment}, \bold{114}(12), 2970--2980. 54 | \url{http://dx.doi.org/10.1016/j.rse.2010.08.003} 55 | 56 | } 57 | 58 | \keyword{ts} 59 | -------------------------------------------------------------------------------- /bfast-master/inst/CITATION: -------------------------------------------------------------------------------- 1 | citHeader("To cite bfast in publications use") 2 | 3 | citEntry(entry = "Article", 4 | title = "Detecting Trend and Seasonal Changes in Satellite Image Time Series", 5 | author = personList(as.person("Jan Verbesselt"), 6 | as.person("Rob Hyndman"), 7 | as.person("Glenn Newnham"), 8 | as.person("Darius Culvenor")), 9 | journal = "Remote Sensing of Environment", 10 | year = "2010", 11 | volume = "114", 12 | number = "1", 13 | pages = "106--115", 14 | doi = "10.1016/j.rse.2009.08.014", 15 | 16 | textVersion = 17 | paste("Jan Verbesselt, Rob Hyndman, Glenn Newnham, Darius Culvenor (2010).", 18 | "Detecting Trend and Seasonal Changes in Satellite Image Time Series.", 19 | "Remote Sensing of Environment, 114(1), 106-115.", 20 | "doi:10.1016/j.rse.2009.08.014"), 21 | ) 22 | 23 | citEntry(entry = "Article", 24 | title = "Phenological Change Detection while Accounting for Abrupt and Gradual Trends in Satellite Image Time Series", 25 | author = personList(as.person("Jan Verbesselt"), 26 | as.person("Rob Hyndman"), 27 | as.person("Achim Zeileis"), 28 | as.person("Darius Culvenor")), 29 | journal = "Remote Sensing of Environment", 30 | year = "2010", 31 | volume = "114", 32 | number = "12", 33 | pages = "2970--2980", 34 | doi = "10.1016/j.rse.2010.08.003", 35 | textVersion = 36 | paste("Jan Verbesselt, Rob Hyndman, Achim Zeileis, Darius Culvenor (2010).", 37 | "Phenological Change Detection while Accounting for Abrupt and Gradual Trends in Satellite Image Time Series.", 38 | "Remote Sensing of Environment, 114(12), 2970 - 2980.", 39 | "doi:10.1016/j.rse.2010.08.003"), 40 | ) 41 | 42 | citEntry(entry = "TechReport", 43 | title = "Near Real-Time Disturbance Detection in Terrestrial Ecosystems Using Satellite Image Time Series: Drought Detection in {S}omalia", 44 | author = personList(as.person("Jan Verbesselt"), 45 | as.person("Achim Zeileis"), 46 | as.person("Martin Herold")), 47 | institution = "Working Papers in Economics and Statistics, Research Platform Empirical 48 | and Experimental Economics, Universit\\\"at Innsbruck", 49 | year = "2011", 50 | type = "Working Paper", 51 | number = "2011-18", 52 | month = "June", 53 | url = 
"http://EconPapers.RePEc.org/RePEc:inn:wpaper:2011-18", 54 | 55 | textVersion = 56 | paste("Jan Verbesselt, Achim Zeileis, Martin Herold (2011).", 57 | "Near Real-Time Disturbance Detection in Terrestrial Ecosystems Using Satellite Image Time Series: Drought Detection in Somalia.", 58 | "Working Paper 2011-18.", 59 | "Working Papers in Economics and Statistics, Research Platform Empirical and Experimental Economics, Universitaet Innsbruck.", 60 | "URL http://EconPapers.RePEc.org/RePEc:inn:wpaper:2011-18"), 61 | 62 | header = "If you use bfastmonitor() or bfastpp(), please cite:" 63 | ) 64 | 65 | -------------------------------------------------------------------------------- /pyMannKendall-master/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at mmhs013@gmail.com. 
The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ -------------------------------------------------------------------------------- /bfast-master/man/bfastpp.Rd: -------------------------------------------------------------------------------- 1 | \name{bfastpp} 2 | \alias{bfastpp} 3 | \title{Time Series Preprocessing for BFAST-Type Models} 4 | 5 | \description{ 6 | Time series preprocessing for subsequent regression modeling. 7 | Based on a (seasonal) time series, a data frame with the response, 8 | seasonal terms, a trend term, (seasonal) autoregressive terms, 9 | and covariates is computed. This can subsequently be employed in 10 | regression models. 11 | } 12 | 13 | \usage{ 14 | bfastpp(data, order = 3, 15 | lag = NULL, slag = NULL, na.action = na.omit, 16 | stl = c("none", "trend", "seasonal", "both")) 17 | } 18 | 19 | \arguments{ 20 | \item{data}{A time series of class \code{\link[stats]{ts}}, or another object that 21 | can be coerced to such. For seasonal components, a frequency greater than 1 is 22 | required.} 23 | \item{order}{numeric. Order of the harmonic term, defaulting to \code{3}.} 24 | \item{lag}{numeric. Orders of the autoregressive term, by default omitted.} 25 | \item{slag}{numeric. Orders of the seasonal autoregressive term, by default omitted.} 26 | \item{na.action}{function for handling \code{NA}s in the data (after all other 27 | preprocessing).} 28 | \item{stl}{character. Prior to all other preprocessing, STL (season-trend decomposition 29 | via LOESS smoothing) can be employed for trend-adjustment and/or season-adjustment. 30 | The \code{"trend"} or \code{"seasonal"} component or both from \code{\link[stats]{stl}} 31 | are removed from each column in \code{data}. By default (\code{"none"}), no STL 32 | adjustment is used.} 33 | } 34 | 35 | \details{ 36 | To facilitate (linear) regression models of time series data, \code{bfastpp} takes care of 37 | preprocessing and setting up regressor terms. It returns a \code{data.frame} containing the 38 | first column of the \code{data} as the \code{response} while further columns (if any) are 39 | used as covariates \code{xreg}. Additionally, a linear trend, seasonal dummies, harmonic 40 | seasonal terms, and (seasonal) autoregressive terms are provided. 41 | 42 | Optionally, each column of \code{data} can be seasonally adjusted and/or trend-adjusted via 43 | STL (season-trend decomposition via LOESS smoothing) prior to preprocessing. The idea would 44 | be to capture season and/or trend nonparametrically prior to regression modelling. 45 | } 46 | 47 | \value{ 48 | \code{bfastpp} returns a \code{"data.frame"} with the following variables (some of which may be matrices).
49 | \item{time}{numeric vector of time stamps,} 50 | \item{response}{response vector (first column of \code{data}),} 51 | \item{trend}{linear time trend (running from 1 to number of observations),} 52 | \item{season}{factor indicating season period,} 53 | \item{harmon}{harmonic seasonal terms (of specified \code{order}),} 54 | \item{lag}{autoregressive terms (of orders \code{lag}, if any),} 55 | \item{slag}{seasonal autoregressive terms (of orders \code{slag}, if any),} 56 | \item{xreg}{covariate regressor (all columns of \code{data} except the first, if any).} 57 | } 58 | 59 | \references{ 60 | Verbesselt J, Zeileis A, Herold M (2011). 61 | Near Real-Time Disturbance Detection in Terrestrial Ecosystems Using Satellite 62 | Image Time Series: Drought Detection in Somalia. 63 | Working Paper 2011-18. Working Papers in Economics and Statistics, 64 | Research Platform Empirical and Experimental Economics, Universitaet Innsbruck. 65 | \url{http://EconPapers.RePEc.org/RePEc:inn:wpaper:2011-18}. 66 | Submitted to Remote Sensing of Environment. 67 | } 68 | 69 | \author{Achim Zeileis} 70 | 71 | \seealso{\code{\link[bfast]{bfastmonitor}}} 72 | 73 | \examples{ 74 | ## set up time series 75 | library(zoo) 76 | ndvi <- as.ts(zoo(cbind(a = som$NDVI.a, b = som$NDVI.b), som$Time)) 77 | ndvi <- window(ndvi, start = c(2006, 1), end = c(2009, 23)) 78 | 79 | ## parametric season-trend model 80 | d1 <- bfastpp(ndvi, order = 2) 81 | d1lm <- lm(response ~ trend + harmon, data = d1) 82 | summary(d1lm) 83 | 84 | ## autoregressive model (after nonparametric season-trend adjustment) 85 | d2 <- bfastpp(ndvi, stl = "both", lag = 1:2) 86 | d2lm <- lm(response ~ lag, data = d2) 87 | summary(d2lm) 88 | } 89 | 90 | \keyword{ts} 91 | 92 | -------------------------------------------------------------------------------- /bfast-master/R/plot.bfast.R: -------------------------------------------------------------------------------- 1 | # modifications: 2 | # 1: plot layout (Tt higher, St and et lower) 3 | # 2: new parameter ANOVA determines significance of trend slopes 4 | # 3: plot-labels changed into Yt, Tt, St and et 5 | # 4: trend slope and significance printed in plot 6 | 7 | 8 | plot.bfast <- function (x, type = c("components", "all", "data", "seasonal", 9 | "trend", "noise"), sim = NULL, largest = FALSE, main, ANOVA = FALSE, ...) 10 | { 11 | type <- match.arg(type) 12 | # opar <- par() 13 | realdata <- is.null(sim) 14 | Trend.bp <- !x$nobp$Vt 15 | if (largest & !Trend.bp) # guard the 'largest' flag; "largest" is not a valid 'type' value 16 | stop("No trend breakpoints") 17 | title <- !missing(main) 18 | niter <- length(x$output) 19 | out <- x$output[[niter]] 20 | Tt <- out$Tt 21 | St <- out$St 22 | noise <- out$Nt 23 | if (type == "data") { 24 | if (!title) 25 | main <- "Yt" 26 | plot(x$Yt, main = main, ...) 27 | } 28 | else if (type == "components") { 29 | ft <- cbind(seasonal = out$St, trend = out$Tt, remainder = out$Nt) 30 | tsp(ft) <- tsp(x$Yt) 31 | ft <- list(time.series = ft) 32 | if (!title) 33 | main <- paste("no. iterations to estimate breakpoints:", 34 | niter) 35 | # fit = x passes the BFAST object to seasonal() for ANOVA 36 | if (ANOVA == TRUE) { seasonal(ft, out, sim = sim, main = main, fit = x) } 37 | else { seasonal(ft, out, sim = sim, main = main) } 38 | } 39 | else if (type == "noise") { 40 | if (!title) 41 | main <- "Noise component" 42 | tsdisplay(noise, main = main, ...)
43 | } 44 | else { 45 | if (type == "all") { 46 | idx <- 1:niter 47 | opar <- par(mfrow = c(2, niter)) 48 | } 49 | else idx <- niter 50 | for (i in idx) { 51 | out <- x$output[[i]] 52 | if (type != "seasonal") { 53 | if (type == "trend" & !title) 54 | main <- "Trend component" 55 | else if (!title) 56 | main <- paste("Iteration ", i, ": Trend", sep = "") 57 | plot(out$Vt, main = main, ylab = "Vt", ...) 58 | lines(out$Tt, col = 4) 59 | if (Trend.bp) { 60 | lines(out$bp.Vt) 61 | lines(out$ci.Vt) 62 | legend("topright", paste("Time of BP(s)", paste(out$Vt.bp, 63 | collapse = ",")), col = 2) 64 | } 65 | if (!realdata) { 66 | lines(sim$time.series[, "abrupt"], col = 1, 67 | lty = 2) 68 | legend("bottomleft", c("estimated", "simulated"), 69 | lty = c(1, 2), col = 1) 70 | } 71 | if (largest) { 72 | legend("bottomright", c("Magnitude of most significant change"), 73 | lty = c(1), col = 6) 74 | lines(x$jump, col = 6) 75 | points(x$jump, pch = 14, cex = 1, col = 6) 76 | } 77 | } 78 | if (type != "trend") { 79 | if (type == "seasonal" & !title) 80 | main <- "Seasonal component" 81 | else if (!title) 82 | main <- paste("Iteration ", i, ": Seasonal", 83 | sep = "") 84 | plot(out$Wt, main = main, ylab = "Wt", ...) 85 | lines(out$St, col = 2) 86 | Seas.bp <- !x$nobp$Wt 87 | if (Seas.bp) { 88 | lines(out$bp.Wt) 89 | lines(out$ci.Wt) 90 | legend("topright", paste("Time of BP(s)", paste(out$Wt.bp, 91 | collapse = ",")), col = 2) 92 | } 93 | if (!realdata) { 94 | lines(sim$time.series[, "seasonal"], col = 1, 95 | lty = 2) 96 | legend("bottomleft", c("first run seasonality", 97 | "first run estimated", "simulated"), lty = c(1, 98 | 1, 2), col = c(1, 2, 1)) 99 | } 100 | } 101 | } 102 | if (type == "all") 103 | par(opar) 104 | } 105 | # par(opar) 106 | } 107 | -------------------------------------------------------------------------------- /bfast-master/R/bfastmonitor.R: -------------------------------------------------------------------------------- 1 | bfastmonitor <- function(data, start, 2 | formula = response ~ trend + harmon, 3 | order = 3, lag = NULL, slag = NULL, 4 | history = c("ROC", "BP", "all"), 5 | type = "OLS-MOSUM", h = 0.25, end = 10, level = 0.05, 6 | hpc = "none", verbose = FALSE, plot = FALSE) 7 | { 8 | ## PREPROCESSING 9 | ## two levels needed: 1. monitoring, 2.
in ROC (if selected) 10 | level <- rep(level, length.out = 2) 11 | 12 | if(!is.ts(data)) data <- as.ts(data) 13 | 14 | ## frequency of data 15 | freq <- frequency(data) 16 | ## start on natural scale (if necessary) 17 | time2num <- function(x) if(length(x) > 1L) x[1L] + (x[2L] - 1)/freq else x 18 | start <- time2num(start) 19 | 20 | ## full data 21 | data_tspp <- bfastpp(data, order = order, lag = lag, slag = slag) 22 | 23 | ## SELECT STABLE HISTORY 24 | ## full history period 25 | history_tspp <- subset(data_tspp, time < start) 26 | 27 | ## find start of history period 28 | ## (may be specified via character, function, or time index directly) 29 | if(is.null(history)) { 30 | history <- start(history_tspp$response) 31 | } else if(all(is.character(history))) { 32 | history <- match.arg(history) 33 | history <- switch(history, 34 | "all" = start(history_tspp$response), 35 | "ROC" = history_roc(formula, data = history_tspp, level = level[2]), 36 | "BP" = history_break(formula, data = history_tspp, hpc = hpc) 37 | ) 38 | } else if(all(is.function(history))) { 39 | history <- history(formula, data = history_tspp) 40 | } 41 | history <- time2num(history) 42 | 43 | ## compute subset 44 | history_tspp <- subset(history_tspp, time >= history) 45 | 46 | ## output information (if desired) 47 | if(verbose) { 48 | cat("\nBFAST monitoring\n\n1. History period\n") 49 | cat(sprintf("Stable period selected: %i(%i)--%i(%i)\n", 50 | start(history_tspp$response)[1], start(history_tspp$response)[2], 51 | end(history_tspp$response)[1], end(history_tspp$response)[2])) 52 | cat(sprintf("Length (in years): %f\n", NROW(history_tspp)/freq)) 53 | } 54 | 55 | 56 | ## MODEL HISTORY PERIOD 57 | test_tspp <- history_tspp 58 | test_mefp <- mefp(formula, data = test_tspp, 59 | type = type, period = end, h = h, alpha = level[1]) 60 | test_lm <- lm(formula, data = test_tspp) 61 | if(floor(h * NROW(test_tspp)) <= 1 | NROW(test_tspp) <= length(coef(test_lm))) { 62 | ok <- FALSE 63 | warning("too few observations in selected history period") 64 | } else { 65 | ok <- TRUE 66 | } 67 | if(verbose) { 68 | cat("Model fit:\n") 69 | print(coef(test_lm)) 70 | } 71 | 72 | ## MONITOR CHANGES IN THE MONITORING PERIOD 73 | test_tspp <- subset(data_tspp, time >= history) 74 | if(ok) { 75 | test_mon <- monitor(test_mefp, data = test_tspp, verbose = FALSE) 76 | tbp <- if(is.na(test_mon$breakpoint)) NA else test_tspp$time[test_mon$breakpoint] 77 | if(verbose) { 78 | cat("\n\n2. 
Monitoring period\n") 79 | cat(sprintf("Monitoring starts at: %i(%i)\n", floor(start), round((start - floor(start)) * freq) + 1)) 80 | if(is.na(tbp)) { 81 | cat("Break detected at: -- (no break)\n\n") 82 | } else { 83 | cat(sprintf("Break detected at: %i(%i)\n\n", floor(tbp), round((tbp - floor(tbp)) * freq) + 1)) 84 | } 85 | } 86 | } else { 87 | test_mon <- NA 88 | tbp <- NA 89 | } 90 | 91 | ## the magnitude of change 92 | if(ok) { 93 | test_tspp$prediction <- predict(test_lm, newdata = test_tspp) 94 | new_data <- subset(test_tspp, time>=start) ## only data from the monitoring period 95 | magnitude <- median(new_data$response - new_data$prediction,na.rm=TRUE) 96 | } else { 97 | test_tspp$prediction <- NA 98 | magnitude <- NA 99 | } 100 | 101 | ## set up return object 102 | rval <- list( 103 | data = data, 104 | tspp = test_tspp, 105 | model = test_lm, 106 | mefp = test_mon, 107 | history = c(head(history_tspp$time, 1), tail(history_tspp$time, 1)), 108 | monitor = c(start, tail(test_tspp$time, 1)), 109 | breakpoint = tbp, 110 | magnitude = magnitude 111 | ) 112 | class(rval) <- "bfastmonitor" 113 | 114 | ## plot if desired 115 | if(plot) plot(rval) 116 | 117 | ## return object 118 | return(rval) 119 | } 120 | -------------------------------------------------------------------------------- /pyMannKendall-master/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to pyMannKendall 2 | 3 | First of all, thanks for considering contributing to `pyMannKendall`! 👍 It's people like you that make it rewarding for us to work on `pyMannKendall`. 4 | 5 | `pyMannKendall` is an open source project, maintained by publicly funded academic researchers and released under the [MIT](https://github.com/mmhs013/pyMannKendall/blob/master/LICENSE.txt) licence. 6 | 7 | [repo]: https://github.com/mmhs013/pyMannKendall 8 | [issues]: https://github.com/mmhs013/pyMannKendall/issues 9 | [new_issue]: https://github.com/mmhs013/pyMannKendall/issues/new 10 | [email]: mmhs013@gmail.com 11 | [code_of_conduct]: https://github.com/mmhs013/pyMannKendall/blob/master/CODE_OF_CONDUCT.md 12 | 13 | [citation]: https://zenodo.org/record/2540931 14 | [demo_notebook]: https://a-slide.github.io/pycoQC/pycoQC_usage.html 15 | 16 | ## Code of conduct 17 | 18 | Please note that this project is released with a [Contributor Code of Conduct][code_of_conduct]. By participating in this project you agree to abide by its terms. 19 | 20 | ## How you can contribute 21 | 22 | There are several ways you can contribute to this project. If you want to know more about why and how to contribute to open source projects like this one, see this [Open Source Guide](https://opensource.guide/how-to-contribute/). 23 | 24 | ### Share the love ❤️ 25 | 26 | Think `pyMannKendall` is useful? Let others discover it, by telling them in person, via Twitter or a blog post. 27 | 28 | Using `pyMannKendall` for a paper you are writing? Please cite it. 29 | 30 | ### Ask a question ⁉️ 31 | 32 | Using `pyMannKendall` and got stuck? Browse the [readme ][repo] and the [demo notebook][demo_notebook] to see if you can find a solution. 33 | 34 | Still stuck? Post your question as an [issue on GitHub][new_issue]. 35 | 36 | While we cannot offer user support, we'll try to do our best to address it, as questions often lead to better documentation or the discovery of bugs. 37 | 38 | Want to ask a question in private? Contact the package maintainer by [email][email]. 
39 | 40 | ### Propose an idea 💡 41 | 42 | Have an idea for a new `pyMannKendall` feature? Take a look at the [issue list][issues] to see whether it has already been included or suggested. If not, suggest your idea as an [issue on GitHub][new_issue]. While we can't promise to implement your idea, it helps to: 43 | 44 | * Explain in detail how it would work. 45 | * Keep the scope as narrow as possible. 46 | 47 | See below if you want to contribute code for your idea as well. 48 | 49 | ### Report a bug 🐛 50 | 51 | Using `pyMannKendall` and discovered a bug? That's annoying! Don't let others have the same experience and report it as an [issue on GitHub][new_issue] so we can fix it. A good bug report makes it easier for us to do so, so please include: 52 | 53 | * Your operating system name and version (e.g. Mac OS 10.13.6). 54 | * Any details about your local setup that might be helpful in troubleshooting. 55 | * Detailed steps to reproduce the bug. 56 | 57 | ### Improve the documentation 📖 58 | 59 | Noticed a typo on the website? Think a function could use a better example? Good documentation makes all the difference, so your help to improve it is very welcome! 60 | 61 | 1. Fork [this repo][repo] and clone it to your computer. To learn more about this process, see [this guide](https://guides.github.com/activities/forking/). 62 | 2. Edit the README.md file and submit a pull request. We will review your changes and include the fix in the next release. 63 | 64 | ### Contribute code 📝 65 | 66 | Care to fix bugs or implement new functionality for `pyMannKendall`? Awesome! 👏 Have a look at the [issue list][issues] and leave a comment on the things you want to work on. See also the development guidelines below. 67 | 68 | ## Development guidelines 69 | 70 | We try to follow the [GitHub flow](https://guides.github.com/introduction/flow/) for development and the [PEP 8](https://www.python.org/dev/peps/pep-0008/) Style Guide for Python Code. 71 | 72 | 1. Fork [this repo][repo] and clone it to your computer. To learn more about this process, see [this guide](https://guides.github.com/activities/forking/). 73 | 74 | 2. If you have forked and cloned the project before and it has been a while since you worked on it, [pull changes from the original repo](https://help.github.com/articles/merging-an-upstream-repository-into-your-fork/) to your clone by using `git pull upstream master`. 75 | 76 | 3. Make your changes and test the modified code. 77 | 78 | 4. Commit and push your changes. 79 | 80 | 5. Submit a [pull request](https://guides.github.com/activities/forking/#making-a-pull-request). 81 | 82 | 83 | 84 | --- 85 | 86 | This file was adapted from a template created by [peterdesmet](https://gist.github.com/peterdesmet/e90a1b0dc17af6c12daf6e8b2f044e7c).
-------------------------------------------------------------------------------- /pyMannKendall-master/Paper/paper.bib: -------------------------------------------------------------------------------- 1 | @book{hipel1994time, 2 | title={Time series modelling of water resources and environmental systems}, 3 | author={Hipel, Keith W and McLeod, A Ian}, 4 | volume={45}, 5 | year={1994}, 6 | publisher={Elsevier} 7 | } 8 | @article{mann1945nonparametric, 9 | title={Nonparametric tests against trend}, 10 | author={Mann, Henry B}, 11 | journal={Econometrica: Journal of the Econometric Society}, 12 | pages={245--259}, 13 | year={1945}, 14 | publisher={JSTOR}, 15 | doi={10.2307/1907187} 16 | } 17 | @article{kendall1975rank, 18 | title={Rank correlation measures}, 19 | author={Kendall, MG}, 20 | journal={Charles Griffin, London}, 21 | volume={202}, 22 | pages={15}, 23 | year={1975} 24 | } 25 | @article{bari2016analysis, 26 | title={Analysis of seasonal and annual rainfall trends in the northern region of {Bangladesh}}, 27 | author={Bari, Sheikh Hefzul and Rahman, M Tauhid Ur and Hoque, Muhammad Azizul and Hussain, Md Manjurul}, 28 | journal={Atmospheric Research}, 29 | volume={176}, 30 | pages={148--158}, 31 | year={2016}, 32 | publisher={Elsevier}, 33 | doi={10.1016/j.atmosres.2016.02.008} 34 | } 35 | @article{hirsch1982techniques, 36 | title={Techniques of trend analysis for monthly water quality data}, 37 | author={Hirsch, Robert M and Slack, James R and Smith, Richard A}, 38 | journal={Water resources research}, 39 | volume={18}, 40 | number={1}, 41 | pages={107--121}, 42 | year={1982}, 43 | publisher={Wiley Online Library}, 44 | doi={10.1029/WR018i001p00107} 45 | } 46 | @article{hamed1998modified, 47 | title={A modified {Mann}--{Kendall} trend test for autocorrelated data}, 48 | author={Hamed, Khaled H and Rao, A Ramachandra}, 49 | journal={Journal of hydrology}, 50 | volume={204}, 51 | number={1-4}, 52 | pages={182--196}, 53 | year={1998}, 54 | publisher={Elsevier}, 55 | doi={10.1016/S0022-1694(97)00125-X} 56 | } 57 | @article{cox1955some, 58 | title={Some quick sign tests for trend in location and dispersion}, 59 | author={Cox, David Roxbee and Stuart, Alan}, 60 | journal={Biometrika}, 61 | volume={42}, 62 | number={1/2}, 63 | pages={80--95}, 64 | year={1955}, 65 | publisher={JSTOR}, 66 | doi={10.2307/2333424} 67 | } 68 | @article{yue2004mann, 69 | title={The {Mann}--{Kendall} test modified by effective sample size to detect trend in serially correlated hydrological series}, 70 | author={Yue, Sheng and Wang, ChunYuan}, 71 | journal={Water resources management}, 72 | volume={18}, 73 | number={3}, 74 | pages={201--218}, 75 | year={2004}, 76 | publisher={Springer}, 77 | doi={10.1023/B:WARM.0000043140.61082.60} 78 | } 79 | @article{yue2002applicability, 80 | title={Applicability of prewhitening to eliminate the influence of serial correlation on the {Mann}--{Kendall} test}, 81 | author={Yue, Sheng and Wang, Chun Yuan}, 82 | journal={Water resources research}, 83 | volume={38}, 84 | number={6}, 85 | pages={4--1}, 86 | year={2002}, 87 | publisher={Wiley Online Library}, 88 | doi={10.1029/2001WR000861} 89 | } 90 | @article{yue2002influence, 91 | title={The influence of autocorrelation on the ability to detect trend in hydrological series}, 92 | author={Yue, Sheng and Pilon, Paul and Phinney, Bob and Cavadias, George}, 93 | journal={Hydrological processes}, 94 | volume={16}, 95 | number={9}, 96 | pages={1807--1829}, 97 | year={2002}, 98 | publisher={Wiley Online Library}, 99 | doi={10.1002/hyp.1095} 
100 | } 101 | @article{helsel2006regional, 102 | title={Regional {Kendall} test for trend}, 103 | author={Helsel, Dennis R and Frans, Lonna M}, 104 | journal={Environmental science \& technology}, 105 | volume={40}, 106 | number={13}, 107 | pages={4066--4073}, 108 | year={2006}, 109 | publisher={ACS Publications}, 110 | doi={10.1021/es051650b} 111 | } 119 | @article{libiseller2002performance, 120 | title={Performance of partial {Mann}--{Kendall} tests for trend detection in the presence of covariates}, 121 | author={Libiseller, Claudia and Grimvall, Anders}, 122 | journal={Environmetrics: The official journal of the International Environmetrics Society}, 123 | volume={13}, 124 | number={1}, 125 | pages={71--84}, 126 | year={2002}, 127 | publisher={Wiley Online Library}, 128 | doi={10.1002/env.507} 129 | } 130 | @inproceedings{theil1950rank, 131 | title={A rank-invariant method of linear and polynomial regression analysis (Parts 1-3)}, 132 | author={Theil, H}, 133 | booktitle={Ned. Akad. Wetensch. Proc. Ser. A}, 134 | volume={53}, 135 | pages={1397--1412}, 136 | year={1950} 137 | } 138 | @article{sen1968estimates, 139 | title={Estimates of the regression coefficient based on {Kendall}'s tau}, 140 | author={Sen, Pranab Kumar}, 141 | journal={Journal of the American statistical association}, 142 | volume={63}, 143 | number={324}, 144 | pages={1379--1389}, 145 | year={1968}, 146 | publisher={Taylor \& Francis Group}, 147 | doi={10.1080/01621459.1968.10480934} 148 | } 149 | -------------------------------------------------------------------------------- /bfast-master/R/seasonal.R: -------------------------------------------------------------------------------- 1 | seasonal <- function (x, out, sim = NULL, labels = colnames(X), 2 | set.pars = list(tck = -0.01, mar = c(0, 6, 0, 6), oma = c(6, 0, 4, 0)), 3 | main = NULL, range.bars = FALSE, ..., col.range = "light gray", fit = NULL) 4 | { 5 | # define plot layout 6 | # notice: mfrow parameter removed from set.pars = list() 7 | layout(matrix(c(1,2,3,4),4,1,byrow=TRUE), heights = c(1,0.75,1.5,0.75), TRUE) 8 | sers <- x$time.series 9 | ncomp <- ncol(sers) 10 | data <- drop(sers %*% rep(1, ncomp)) 11 | X <- cbind(data, sers) 12 | #colnames(X) <- c("data", colnames(sers)) 13 | colnames(X) <- c("Yt","St","Tt", "et") 14 | nplot <- ncomp + 1 15 | if (range.bars) 16 | mx <- min(apply(rx <- apply(X, 2, range), 2, diff)) 17 | if (length(set.pars)) { 18 | oldpar <- do.call("par", as.list(names(set.pars))) 19 | on.exit(par(oldpar)) 20 | do.call("par", set.pars) 21 | } 22 | ## ANOVA 23 | if (!is.null(fit)) { 24 | niter <- length(fit$output) # nr of iterations 25 | out <- fit$output[[niter]] # output of results of the final fitted seasonal and trend models and nr of breakpoints in both.
26 | out_ANOVA <- array() 27 | out_breakdates <- array() 28 | if (out$Vt.bp[1] > 0) {breaks <- length(out$Vt.bp) } else {breaks <- 0} # number of breaks 29 | if (breaks > 0) { 30 | breakdates <- out$Vt.bp # breakdates 31 | coefs <- coef(out$bp.Vt) # output coefficients per segment 32 | sl <- coefs[,2] # slopes 33 | } 34 | 35 | TS_anova <- fit$Yt - out$St # time series Yt - St for ANOVA test 36 | dataframe <- data.frame(TIME=c(1:length(fit$Yt)),DATA=TS_anova) 37 | 38 | # determine segment startpoint and endpoint, calculate ANOVA 39 | for (m in 1:(breaks+1)) { 40 | startpoint <- if(m==1) 1 else breakdates[[m-1]] 41 | endpoint <- if(m==(breaks+1)) length(fit$Yt) else breakdates[m]-1 42 | df2 <- dataframe[startpoint:endpoint,] # subset of dataframe (section) 43 | model <- lm(DATA~TIME, data=df2) # linear model 44 | modelAnova <- anova(model) # ANOVA 45 | out_ANOVA[m] <- modelAnova$Pr[1] # save p-value 46 | if(breaks==0) {sl <- model$coefficients[2]} ## JV updated -- this was causing problems! # slope of Tt if breaks == 0 47 | } 48 | } 49 | ## end ANOVA 50 | 51 | for (i in 1:nplot) { 52 | 53 | if(i == 4) { 54 | par(mar = c(0, 6, 0, 6)) 55 | } 56 | 57 | plot(X[, i], col = if(i == 1) "black" 58 | else "red", 59 | ylim = if (i == 1 | i == 3) 60 | range(X[, 1]) 61 | else range(X[, i], sim$time.series[, i - 1], na.rm = TRUE), 62 | type = if (i < nplot) 63 | "l" 64 | else "h", xlab = "", ylab = "", axes = FALSE, ...) 65 | 66 | if (range.bars) { 67 | dx <- 1/64 * diff(ux <- par("usr")[1:2]) 68 | y <- mean(rx[, i]) 69 | rect(ux[2] - dx, y + mx/2, ux[2] - 0.4 * dx, y - 70 | mx/2, col = col.range, xpd = TRUE) 71 | } 72 | if (i == 1 && !is.null(main)) { 73 | #title(main, line = 2, outer = par("oma")[3] > 0) 74 | mtext(main,side=3,font=2,line=1.25,cex=1.1) 75 | #lines(X[, i], col = "black", type = "l") 76 | if (!is.null(sim)) { 77 | lines(X[, i + 1] + X[, i + 2], col = "red", type = "l") 78 | legend("bottom", c("input", "estimated seasonal + trend "), 79 | col = c("black", "red"), lty = 1) 80 | } 81 | } 82 | if (i == 2) { 83 | lines(sim$time.series[, "seasonal"], col = "black") 84 | lines(out$bp.Wt) 85 | lines(out$ci.Wt) 86 | } 87 | if (i == 3) { 88 | lines(sim$time.series[, "abrupt"], col = "black") 89 | lines(out$bp.Vt) 90 | lines(out$ci.Vt) 91 | 92 | ## plot ANOVA 93 | if(!is.null(fit)) { 94 | for(m in 1:(breaks+1)) { 95 | # coordinates based on start time series and breakpoints 96 | x_coor <- out$bp.Wt$datatsp[[1]] 97 | if(m > 1) { x_coor <- x_coor + breakdates[[m-1]] / 98 | frequency(fit$Yt) } 99 | y_range <- range(X[, 1]) 100 | y_sl <- y_range[2] - (y_range[2] - y_range[1]) / 10 # 10% from top 101 | y_Pr <- y_range[2] - (y_range[2] - y_range[1]) / 5 # 20% from top 102 | # print slope 103 | beta <- formatC(sl[m],format="f",digits=3) 104 | text(x_coor,y_sl, bquote(beta == .(beta)), pos=4) 105 | # print p-value 106 | Pr <- formatC(out_ANOVA[m],format="f",digits=3) 107 | text(x_coor,y_Pr, bquote(p == .(Pr)), pos=4) 108 | } 109 | } 110 | ## end plot ANOVA 111 | } 112 | if (i == nplot) { 113 | abline(h = 0) 114 | lines(sim$time.series[, "remainder"], col = "black") 115 | } 116 | box() 117 | right <- i%%2 == 0 118 | axis(2, labels = !right) 119 | axis(4, labels = right) 120 | axis(1, labels = i == nplot) 121 | mtext(labels[i], side = 2, 3) 122 | } 123 | mtext("Time", side = 1, line = 3) 124 | invisible() 125 | if (!is.null(fit)) { 126 | return(data.frame(slope = sl, prob = out_ANOVA))} 127 | layout(matrix(1)) 128 | } 129 | 
-------------------------------------------------------------------------------- /pyMannKendall-master/Paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'pyMannKendall: a python package for non parametric Mann Kendall family of trend tests.' 3 | tags: 4 | - mann kendall 5 | - modified mann kendall 6 | - sen's slope 7 | authors: 8 | - name: Md. Manjurul Hussain 9 | orcid: 0000-0002-5361-0633 10 | affiliation: 1 11 | - name: Ishtiak Mahmud 12 | orcid: 0000-0002-4753-5403 13 | affiliation: 2 14 | affiliations: 15 | - name: Institute of Water and Flood Management, Bangladesh University of Engineering and Technology, Dhaka, Bangladesh 16 | index: 1 17 | - name: Shahjalal University of Science and Technology, Sylhet, Bangladesh 18 | index: 2 19 | date: 30 June 2019 20 | bibliography: paper.bib 21 | --- 22 | 23 | # Summary 24 | 25 | Trend analysis is one of the most important tasks in studying time series data. Both parametric and non-parametric tests are commonly used in trend analysis. Parametric tests require the data to be independent and normally distributed, whereas non-parametric trend tests require only that the data be independent and can tolerate outliers [@hamed1998modified]. However, parametric tests are more powerful than non-parametric ones. 26 | 27 | The Mann–Kendall trend test [@mann1945nonparametric; @kendall1975rank] is a widely used non-parametric test to detect significant trends in time series. However, the original Mann-Kendall test does not consider serial correlation or seasonality effects [@bari2016analysis; @hirsch1982techniques]. In many real situations the observed data are autocorrelated, and this autocorrelation leads to misinterpretation of trend test results [@hamed1998modified; @cox1955some]. Moreover, water quality, hydrologic, climatic, and other natural time series often exhibit seasonality. To overcome these limitations of the original Mann-Kendall test, various modified Mann-Kendall tests have been developed. 28 | 29 | Python is one of the most widely used tools for data analysis, and a large number of data analysis and research tools are built on it. Until now, however, no Python package covering the Mann-Kendall family of trend tests has been available. The ``pyMannKendall`` package fills this gap. 30 | 31 | ``pyMannKendall`` is written in pure Python and uses a vectorization approach to increase its performance. Currently, this package has 11 Mann-Kendall tests and 2 Sen's slope estimator functions. Brief descriptions of the functions are given below: 32 | 33 | 1. **Original Mann-Kendall test (*original_test*):** The original Mann-Kendall test [@mann1945nonparametric; @kendall1975rank] is a non-parametric test that does not consider serial correlation or seasonal effects. 34 | 35 | 2. **Hamed and Rao Modified MK Test (*hamed_rao_modification_test*):** This modified MK test was proposed by @hamed1998modified to address serial autocorrelation issues. They suggested a variance correction approach to improve trend analysis. Users can consider the first n significant lags by inserting the lag number in this function. By default, it considers all significant lags. 36 | 37 | 3. **Yue and Wang Modified MK Test (*yue_wang_modification_test*):** This is another variance correction method, proposed by @yue2004mann, to account for serial autocorrelation. Users can also set their desired number of significant lags for the calculation. 38 | 39 | 4. 
**Modified MK test using Pre-Whitening method (*pre_whitening_modification_test*):** Suggested by @yue2002applicability, this test pre-whitens the time series before the trend test is applied. 40 | 41 | 5. **Modified MK test using Trend-free Pre-Whitening method (*trend_free_pre_whitening_modification_test*):** Proposed by @yue2002influence, this test removes the trend component and then pre-whitens the time series before the trend test is applied. 42 | 43 | 6. **Multivariate MK Test (*multivariate_test*):** This is an MK test for multiple parameters proposed by @hirsch1982techniques. They used this method for seasonal MK tests, where they considered every month as a parameter. 44 | 45 | 7. **Seasonal MK Test (*seasonal_test*):** For seasonal time series data, @hirsch1982techniques proposed this test to calculate the seasonal trend. 46 | 47 | 8. **Regional MK Test (*regional_test*):** Based on the proposed seasonal MK test of @hirsch1982techniques, @helsel2006regional suggested a regional MK test to calculate the overall trend on a regional scale. 48 | 49 | 9. **Correlated Multivariate MK Test (*correlated_multivariate_test*):** This multivariate MK test was proposed by @hipel1994time for cases where the parameters are correlated. 50 | 51 | 10. **Correlated Seasonal MK Test (*correlated_seasonal_test*):** This method was proposed by @hipel1994time for cases where the time series is significantly correlated with the preceding one or more months/seasons. 52 | 53 | 11. **Partial MK Test (*partial_test*):** In real situations, many factors affect the main studied response parameter, which can bias the trend results. To overcome this problem, @libiseller2002performance proposed this partial MK test. It requires two parameters as input: one response parameter and one independent parameter. 54 | 55 | 12. **Theil-Sen's Slope Estimator (*sens_slope*):** This method was proposed by @theil1950rank and @sen1968estimates to estimate the magnitude of the monotonic trend. 56 | 57 | 13. **Seasonal Sen's Slope Estimator (*seasonal_sens_slope*):** This method was proposed by @hipel1994time to estimate the magnitude of the monotonic trend when the data have seasonal effects. 58 | 59 | 60 | `pyMannKendall` is a non-parametric Mann-Kendall trend analysis package implemented in pure Python that brings together almost all variants of the Mann-Kendall test, helping researchers to carry out Mann-Kendall trend analysis in Python.
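
To give a feel for the interface, here is a minimal usage sketch built on the function names listed above (the synthetic input series is invented purely for illustration, and the exact fields of the returned named tuple may differ slightly between package versions):

```python
import numpy as np
import pymannkendall as mk

# Hypothetical example data: a gentle upward trend plus noise.
rng = np.random.default_rng(0)
data = 0.01 * np.arange(120) + rng.normal(size=120)

# Original Mann-Kendall test; the result reports, among other
# fields, the trend direction, the p-value, and Sen's slope.
result = mk.original_test(data)
print(result.trend, result.p, result.slope)

# Variance-corrected test for autocorrelated series.
print(mk.hamed_rao_modification_test(data).trend)

# Seasonal test, treating the series as monthly (period = 12).
print(mk.seasonal_test(data, period=12).trend)
```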
61 | 62 | # References 63 | -------------------------------------------------------------------------------- /bfast-master/R/bfast.R: -------------------------------------------------------------------------------- 1 | bfast <- function(Yt, h=0.15, season =c("dummy","harmonic","none"), max.iter = NULL, breaks = NULL, hpc = "none", level = 0.05, type= "OLS-MOSUM") 2 | { 3 | season <- match.arg(season) 4 | level = rep(level, length.out = 2) 5 | ti <- time(Yt) 6 | f <- frequency(Yt) # one cycle every f time points (seasonal cycle) 7 | if(!is.ts(Yt)) # more robust than checking class(Yt) != "ts" directly 8 | stop("Not a time series object") 9 | ## return value 10 | output <- list() 11 | Tt <- 0 12 | 13 | # seasonal model setup 14 | if (season=="harmonic") { 15 | w <- 1/f # f = 23 when freq=23 :-) 16 | tl <- 1:length(Yt) 17 | co <- cos(2*pi*tl*w); si <- sin(2*pi*tl*w) 18 | co2 <- cos(2*pi*tl*w*2);si2 <- sin(2*pi*tl*w*2) 19 | co3 <- cos(2*pi*tl*w*3);si3 <- sin(2*pi*tl*w*3) 20 | smod <- Wt ~ co+si+co2+si2+co3+si3 21 | # Start the iterative procedure and for first iteration St=decompose result 22 | St <- stl(Yt, "periodic")$time.series[, "seasonal"] 23 | 24 | } else if (season=="dummy") { 25 | # Start the iterative procedure and for first iteration St=decompose result 26 | St <- stl(Yt, "periodic")$time.series[, "seasonal"] 27 | D <- seasonaldummy(Yt) 28 | D[rowSums(D) == 0,] <- -1 29 | smod <- Wt ~ -1 + D 30 | } else if (season == "none") { 31 | print("No seasonal model will be fitted!") 32 | St <- 0 33 | } else stop("Not a correct seasonal model is selected ('harmonic' or 'dummy') ") 34 | 35 | # number/timing of structural breaks in the trend/seasonal component 36 | Vt.bp <- 0 37 | Wt.bp <- 0 38 | CheckTimeTt <- 1 39 | CheckTimeSt <- 1 40 | i <- 0 41 | while ( (!identical(CheckTimeTt,Vt.bp) | !identical(CheckTimeSt,Wt.bp)) & i < max.iter) 42 | { 43 | CheckTimeTt <- Vt.bp 44 | CheckTimeSt <- Wt.bp 45 | # TREND 46 | Vt <- Yt-St 47 | p.Vt <- sctest(efp(Vt ~ ti, h=h, type=type)) 48 | if (p.Vt$p.value <= level[1]) 49 | { 50 | bp.Vt <- breakpoints(Vt ~ ti, h=h,breaks=breaks, hpc = hpc) 51 | nobp.Vt <- is.na(breakpoints (bp.Vt)[1]) 52 | } 53 | else 54 | { 55 | nobp.Vt <- TRUE 56 | bp.Vt <- NA 57 | } 58 | if (nobp.Vt) 59 | { 60 | fm0 <- lm(Vt ~ ti) 61 | Vt.bp <- 0 # no breaks times 62 | Tt <- ts(fitted(fm0)) # fitted trend component 63 | tsp(Tt) <- tsp(Yt) 64 | ci.Vt <- NA 65 | } 66 | else 67 | { 68 | fm1 <- lm(Vt ~ breakfactor(bp.Vt)/ti) 69 | ci.Vt <- confint(bp.Vt, het.err = FALSE) 70 | Vt.bp <- ci.Vt$confint[,2] 71 | Tt <- ts(fitted(fm1)) # fitted trend component 72 | tsp(Tt) <- tsp(Yt) 73 | } 74 | 75 | # SEASONAL COMPONENT 76 | if (season=="none") { 77 | Wt <- 0 78 | St <- 0 79 | bp.Wt <- NA; ci.Wt <- NA; nobp.Wt<- TRUE 80 | } else 81 | { 82 | Wt <- Yt-Tt 83 | p.Wt <- sctest(efp(smod, h=h, type=type)) # preliminary test 84 | if (p.Wt$p.value <= level[2]) # OR statement 85 | { 86 | bp.Wt <- breakpoints(smod, h=h,breaks=breaks, hpc = hpc) # Breakpoints in the seasonal component 87 | nobp.Wt <- is.na(breakpoints (bp.Wt)[1]) 88 | } 89 | else 90 | { 91 | nobp.Wt <- TRUE 92 | bp.Wt <- NA 93 | } 94 | if (nobp.Wt) 95 | { 96 | sm0 <- lm(smod) 97 | St <- ts(fitted(sm0)) # The fitted seasonal component 98 | tsp(St) <- tsp(Yt) 99 | Wt.bp <- 0 # no seasonal breaks 100 | ci.Wt <- NA 101 | } 102 | else 103 | { 104 | if(season=="dummy") sm1 <-lm(Wt ~ -1+D %in% breakfactor(bp.Wt)) 105 | if(season=="harmonic") sm1 <- lm(Wt ~ (co+si+co2+si2+co3+si3) %in% breakfactor(bp.Wt)) 106 | St <- ts(fitted(sm1)) # The fitted seasonal component 107 | tsp(St) <- tsp(Yt) 108 | ci.Wt <- 
confint(bp.Wt, het.err = FALSE) 109 | Wt.bp <- ci.Wt$confint[,2] 110 | } 111 | } 112 | i <- i+1 113 | output[[i]] <- list(Tt=Tt,St=St,Nt=Yt-Tt-St, 114 | Vt=Vt, bp.Vt=bp.Vt, Vt.bp=Vt.bp, ci.Vt=ci.Vt, 115 | Wt=Wt, bp.Wt=bp.Wt, Wt.bp=Wt.bp, ci.Wt=ci.Wt) 116 | } 117 | if (!nobp.Vt) # probably only works well for dummy model! 118 | { 119 | Vt.nrbp <- length(bp.Vt$breakpoints) 120 | co <- coef(fm1) # final fitted trend model 121 | Mag <- matrix(NA,Vt.nrbp,3) 122 | for (r in 1:Vt.nrbp) 123 | { 124 | if (r==1) 125 | y1 <- co[1]+co[r+Vt.nrbp+1]*ti[Vt.bp[r]] 126 | else 127 | y1 <- co[1]+co[r]+co[r+Vt.nrbp+1]*ti[Vt.bp[r]] 128 | y2 <- (co[1]+co[r+1])+co[r+Vt.nrbp+2]*ti[Vt.bp[r]+1] 129 | Mag[r,1] <- y1 130 | Mag[r,2] <- y2 131 | Mag[r,3] <- y2-y1 132 | } 133 | index <- which.max(abs(Mag[,3])) 134 | m.x <- rep(Vt.bp[index],2) 135 | m.y <- c(Mag[index,1],Mag[index,2]) #Magnitude position 136 | Magnitude <- Mag[index,3] # Magnitude of biggest change 137 | Time <- Vt.bp[index] 138 | } 139 | else 140 | { 141 | m.x <- NA; m.y <- NA 142 | Magnitude <- 0 # if we do not detect a break then the magnitude is zero 143 | Time <- NA # if we do not detect a break then we have no timing of the break 144 | Mag <- 0 145 | } 146 | return(structure(list(Yt=Yt,output=output,nobp=list(Vt=nobp.Vt,Wt=nobp.Wt),Magnitude=Magnitude,Mags=Mag, 147 | Time=Time,jump=list(x=ti[m.x],y=m.y)),class="bfast")) 148 | } 149 | -------------------------------------------------------------------------------- /bfast-master/inst/extdata/modisraster.grd: -------------------------------------------------------------------------------- 1 | [general] 2 | creator=R package 'raster' 3 | created= 2013-03-22 12:34:17 4 | [georeference] 5 | nrows= 5 6 | ncols= 5 7 | xmin= 41.9 8 | ymin= -0.15 9 | xmax= 42.15 10 | ymax= 0.1 11 | projection= +proj=longlat +ellps=clrk66 +no_defs 12 | [data] 13 | datatype= FLT4S 14 | byteorder= little 15 | nbands= 275 16 | bandorder= BIL 17 | minvalue= 4052:4190:3788:3326:3789:4481:6721:3790:5071:3950:3533:2709:3583:2749:3170:2652:3121:5993:6464:5077:4536:4281:3913:3849:3774:3267:5644:6368:5804:4695:4622:2371:3512:4350:2887:3790:2923:3175:2330:5940:7088:5118:6601:5287:4315:4015:3862:4042:2068:2847:2696:6961:5674:3152:3885:4164:3534:3941:3232:5154:5702:6348:7113:7546:6942:7329:5445:4792:4267:4188:3771:3794:3198:6271:6393:7328:6045:4718:5666:5768:4456:3786:3319:3297:2667:6771:6325:7796:6610:5640:4688:4129:3747:3753:3953:2837:4587:4449:4711:4742:4300:5344:5411:5091:3777:4016:3353:5913:6910:7596:6150:6377:5655:4490:3899:3909:3642:3631:4126:5535:6031:5017:4503:2032:3893:4266:3884:3897:3032:2737:4060:3636:6703:6675:4699:4217:4013:3836:3518:3625:3387:5077:5576:5990:3825:4285:4040:2180:4134:4040:4327:3689:3462:4675:6674:6043:7061:7489:7085:5622:5247:4425:4140:3325:3635:6540:6314:6222:6592:6191:5821:5469:4359:3563:3187:5946:5538:7272:4786:7013:5472:4830:4332:3664:3405:3136:3417:4691:6470:4518:5731:4215:4364:4646:1914:3736:3783:3305:2984:4076:3275:5915:5927:4327:4388:3533:3638:3177:3013:3104:3887:5387:5667:6574:4284:4126:4479:4114:4441:3732:3114:3492:3097:7140:6672:6198:5459:4838:4192:3469:3323:3255:3766:5476:4841:6309:5703:5011:3407:2879:4428:3585:2960:2801:3671:2583:1895:4554:3806:3507:3429:3224:3059:2992:2800:2957:2749:5189:5471:5080:4258:2558:2148:3573:2606:2171:2963:2296:5586:5626:4462:7314:6624:5827:5095 18 | maxvalue= 
5042:5376:4767:7207:8028:8042:8287:8051:6989:5550:5505:4480:4878:4468:5583:5139:6748:7405:8054:7187:6075:5563:4810:4757:4491:4692:7923:8597:7909:8565:6818:6909:5502:5356:4929:4733:4566:4342:4347:8091:8384:8576:8074:7004:5180:4885:4488:5317:3677:6433:7097:8327:7962:6536:6200:5638:5428:5060:5301:7506:8083:8315:8250:8335:8250:8223:7195:6094:5215:4725:4655:5786:6455:8345:8509:8588:8677:7661:7983:7109:6068:5934:4809:4106:4308:8632:8433:8538:7974:7370:5986:5523:4981:4528:4784:6861:7772:7450:6768:7057:8075:7835:7637:6459:5477:5596:5216:8163:8637:8808:8025:7580:6930:5533:5487:4624:4209:4216:6938:7408:7881:8007:6735:5857:7409:6110:5965:5193:4627:4037:6200:7626:7920:7945:6298:6176:5560:5086:4331:4604:4906:6906:7881:7654:6612:7515:6566:6856:5665:5361:6323:6165:5402:7547:8349:7974:8371:8509:8448:7399:6612:5696:5364:5169:6166:8532:8471:7984:8303:7706:7018:6350:6823:5553:6172:8255:8127:8001:8064:8213:7949:7424:6977:4910:4543:4207:6652:7321:8301:7674:7963:7843:7215:6231:5416:4914:6489:6773:6358:8081:7355:7562:7333:5956:6088:4829:4828:4288:4188:4653:5330:7988:7937:7723:7161:6863:7611:6849:5568:4936:4160:4538:6920:8055:7638:7935:6906:6513:5715:5201:4599:4720:6239:7173:7160:8067:7720:7428:6569:6038:6003:5397:4380:4296:4877:4792:7457:7753:6808:6056:5741:4813:4432:4499:4604:4112:4829:7657:8149:6817:6052:5335:4712:5037:4066:3813:4265:3665:7673:7907:6916:9020:7860:7084:6480 19 | nodatavalue= -3.4e+38 20 | [legend] 21 | legendtype= 22 | values= 23 | color= 24 | [description] 25 | layername= X2000.02.18:X2000.03.05:X2000.03.21:X2000.04.06:X2000.04.22:X2000.05.08:X2000.05.24:X2000.06.09:X2000.06.25:X2000.07.11:X2000.07.27:X2000.08.12:X2000.08.28:X2000.09.13:X2000.09.29:X2000.10.15:X2000.10.31:X2000.11.16:X2000.12.02:X2000.12.18:X2001.01.01:X2001.01.17:X2001.02.02:X2001.02.18:X2001.03.06:X2001.03.22:X2001.04.07:X2001.04.23:X2001.05.09:X2001.05.25:X2001.06.10:X2001.06.26:X2001.07.12:X2001.07.28:X2001.08.13:X2001.08.29:X2001.09.14:X2001.09.30:X2001.10.16:X2001.11.01:X2001.11.17:X2001.12.03:X2001.12.19:X2002.01.01:X2002.01.17:X2002.02.02:X2002.02.18:X2002.03.06:X2002.03.22:X2002.04.07:X2002.04.23:X2002.05.09:X2002.05.25:X2002.06.10:X2002.06.26:X2002.07.12:X2002.07.28:X2002.08.13:X2002.08.29:X2002.09.14:X2002.09.30:X2002.10.16:X2002.11.01:X2002.11.17:X2002.12.03:X2002.12.19:X2003.01.01:X2003.01.17:X2003.02.02:X2003.02.18:X2003.03.06:X2003.03.22:X2003.04.07:X2003.04.23:X2003.05.09:X2003.05.25:X2003.06.10:X2003.06.26:X2003.07.12:X2003.07.28:X2003.08.13:X2003.08.29:X2003.09.14:X2003.09.30:X2003.10.16:X2003.11.01:X2003.11.17:X2003.12.03:X2003.12.19:X2004.01.01:X2004.01.17:X2004.02.02:X2004.02.18:X2004.03.05:X2004.03.21:X2004.04.06:X2004.04.22:X2004.05.08:X2004.05.24:X2004.06.09:X2004.06.25:X2004.07.11:X2004.07.27:X2004.08.12:X2004.08.28:X2004.09.13:X2004.09.29:X2004.10.15:X2004.10.31:X2004.11.16:X2004.12.02:X2004.12.18:X2005.01.01:X2005.01.17:X2005.02.02:X2005.02.18:X2005.03.06:X2005.03.22:X2005.04.07:X2005.04.23:X2005.05.09:X2005.05.25:X2005.06.10:X2005.06.26:X2005.07.12:X2005.07.28:X2005.08.13:X2005.08.29:X2005.09.14:X2005.09.30:X2005.10.16:X2005.11.01:X2005.11.17:X2005.12.03:X2005.12.19:X2006.01.01:X2006.01.17:X2006.02.02:X2006.02.18:X2006.03.06:X2006.03.22:X2006.04.07:X2006.04.23:X2006.05.09:X2006.05.25:X2006.06.10:X2006.06.26:X2006.07.12:X2006.07.28:X2006.08.13:X2006.08.29:X2006.09.14:X2006.09.30:X2006.10.16:X2006.11.01:X2006.11.17:X2006.12.03:X2006.12.19:X2007.01.01:X2007.01.17:X2007.02.02:X2007.02.18:X2007.03.06:X2007.03.22:X2007.04.07:X2007.04.23:X2007.05.09:X2007.05.25:X2007.06.10:X2007.06.26:X2007.07.12:X2007.07.2
8:X2007.08.13:X2007.08.29:X2007.09.14:X2007.09.30:X2007.10.16:X2007.11.01:X2007.11.17:X2007.12.03:X2007.12.19:X2008.01.01:X2008.01.17:X2008.02.02:X2008.02.18:X2008.03.05:X2008.03.21:X2008.04.06:X2008.04.22:X2008.05.08:X2008.05.24:X2008.06.09:X2008.06.25:X2008.07.11:X2008.07.27:X2008.08.12:X2008.08.28:X2008.09.13:X2008.09.29:X2008.10.15:X2008.10.31:X2008.11.16:X2008.12.02:X2008.12.18:X2009.01.01:X2009.01.17:X2009.02.02:X2009.02.18:X2009.03.06:X2009.03.22:X2009.04.07:X2009.04.23:X2009.05.09:X2009.05.25:X2009.06.10:X2009.06.26:X2009.07.12:X2009.07.28:X2009.08.13:X2009.08.29:X2009.09.14:X2009.09.30:X2009.10.16:X2009.11.01:X2009.11.17:X2009.12.03:X2009.12.19:X2010.01.01:X2010.01.17:X2010.02.02:X2010.02.18:X2010.03.06:X2010.03.22:X2010.04.07:X2010.04.23:X2010.05.09:X2010.05.25:X2010.06.10:X2010.06.26:X2010.07.12:X2010.07.28:X2010.08.13:X2010.08.29:X2010.09.14:X2010.09.30:X2010.10.16:X2010.11.01:X2010.11.17:X2010.12.03:X2010.12.19:X2011.01.01:X2011.01.17:X2011.02.02:X2011.02.18:X2011.03.06:X2011.03.22:X2011.04.07:X2011.04.23:X2011.05.09:X2011.05.25:X2011.06.10:X2011.06.26:X2011.07.12:X2011.07.28:X2011.08.13:X2011.08.29:X2011.09.14:X2011.09.30:X2011.10.16:X2011.11.01:X2011.11.17:X2011.12.03:X2011.12.19:X2012.01.01:X2012.01.17 26 | history= 27 | -------------------------------------------------------------------------------- /bfast-master/R/bfast01classify.R: -------------------------------------------------------------------------------- 1 | ## ---------------------------------------------------- 2 | ## classification function based on the BFAST01 object 3 | ## ---------------------------------------------------- 4 | # Input: BFAST01 object (object) 5 | # Return: integer representing the class 6 | ## classification function for 1-break bfast output 7 | ## object: bfast01 object 8 | ## alpha: threshold for significance tests, default 0.05 9 | ## pct_stable: threshold for segment stability, unit: percent change per unit time (0-100), default NULL 10 | 11 | bfast01classify <- function(object, alpha=0.05, pct_stable=NULL) { 12 | ## output array 13 | out <- rep(NA,7) 14 | names(out) <- c("flag_type","flag_significance","p_segment1","p_segment2", 15 | "pct_segment1","pct_segment2","flag_pct_stable") 16 | ## classification 17 | object.zoo <- as.zoo(object) # data series 18 | ## monotonic if no break 19 | if(object$breaks == 0) { 20 | slope <- object$model[[1]]$coefficients[2] # slope 21 | if(slope > 0) out[1] <- 1 22 | if(slope < 0) out[1] <- 2 23 | } else { 24 | ## if break, list segment and break point parameters (p$..) 25 | ToB <- as.numeric(object$breakpoints[[1]]) # time of break 26 | s1 <- object$model[[2]]$coefficients[3] # slope segment 1 27 | s2 <- object$model[[2]]$coefficients[4] # slope segment 2 28 | m <- as.numeric(object.zoo$trend[ToB+1]) - as.numeric(object.zoo$trend[ToB]) # magnitude of abrupt change 29 | ## classes with break 30 | # with break, but still monotonic 31 | if(s1 > 0 && s2 > 0 && m > 0) out[1] <- 3 32 | if(s1 < 0 && s2 < 0 && m < 0) out[1] <- 4 33 | # interrupted gradual change (setback or boost) 34 | if(s1 > 0 && s2 > 0 && m < 0) out[1] <- 5 35 | if(s1 < 0 && s2 < 0 && m > 0) out[1] <- 6 36 | # trend reversal (greening to browning v.v.) 
37 | if(s1 > 0 && s2 < 0) out[1] <- 7 38 | if(s1 < 0 && s2 > 0) out[1] <- 8 39 | } 40 | ## ANOVA and PCTCHANGE 41 | for (segment in 1:(object$breaks+1)) { 42 | # subset zoo object for segment 43 | date.start <- if(segment==1) object$data$time[1] else object$data$time[ToB+1] 44 | date.end <- if(segment==2 || object$breaks==0) object$data$time[nrow(object$data)] else object$data$time[ToB] 45 | object.zoo.subset <- window(object.zoo, start=date.start, end=date.end) 46 | # Anova 47 | segment.anova <- anova(lm((object.zoo.subset$response-object.zoo.subset$season)~time(object.zoo.subset))) 48 | # linear model of deseasonalized trend versus time 49 | out[segment+2] <- segment.anova$Pr[1] 50 | # PctChange 51 | obs.start <- if(segment==1) 1 else ToB+1 52 | obs.end <- if(segment==2 || object$breaks==0) nrow(object$data) else ToB 53 | if(object.zoo$trend[[obs.end]] / object.zoo$trend[[obs.start]] > 0){ 54 | segment.pctchange <- 55 | ( (object.zoo$trend[[obs.end]] / object.zoo$trend[[obs.start]])^(1/(date.end-date.start)) -1) * 100 56 | } else { 57 | if(object.zoo$trend[[obs.start]] < object.zoo$trend[[obs.end]]){ 58 | value.start <- object.zoo$trend[[obs.start]] + 2 * abs(object.zoo$trend[[obs.start]]) 59 | value.end <- object.zoo$trend[[obs.end]] + 2 * abs(object.zoo$trend[[obs.start]]) 60 | segment.pctchange <- ( (value.end / value.start)^(1/(date.end-date.start)) -1) * 100 61 | } else { 62 | value.start <- object.zoo$trend[[obs.start]] + 2 * abs(object.zoo$trend[[obs.end]]) 63 | value.end <- object.zoo$trend[[obs.end]] + 2 * abs(object.zoo$trend[[obs.end]]) 64 | segment.pctchange <- ( (value.end / value.start)^(1/(date.end-date.start)) -1) * 100 65 | } 66 | } 67 | out[segment+4] <- segment.pctchange 68 | } 69 | ## Segment significance flag 70 | # code: 0 = both segments significant (or no break and significant), 71 | # 1 = only first segment significant, 72 | # 2 = only 2nd segment significant, 73 | # 3 = both segments insignificant (or no break and not significant) 74 | 75 | # no break 76 | if(object$breaks == 0) { 77 | if(out[3] <= alpha) out[2] <- 0 78 | if(out[3] > alpha) out[2] <- 3 79 | # with break 80 | } else { 81 | if(out[3] <= alpha && out[4] <= alpha) out[2] <- 0 82 | if(out[3] <= alpha && out[4] > alpha) out[2] <- 1 83 | if(out[3] > alpha && out[4] <= alpha) out[2] <- 2 84 | if(out[3] > alpha && out[4] > alpha) out[2] <- 3 85 | } 86 | 87 | ## Segment stability flag 88 | # code: 0 = both segments beyond stable (or no break and not stable), 89 | # 1 = only first segment beyond stable, 90 | # 2 = only 2nd segment beyond stable, 91 | # 3 = both segments stable (or no break and stable) 92 | 93 | if(!is.null(pct_stable)) { 94 | # no break 95 | if(object$breaks == 0) { 96 | if(abs(out[5]) > pct_stable) out[7] <- 0 97 | if(abs(out[5]) <= pct_stable) out[7] <- 3 98 | # with break 99 | } else { 100 | if(abs(out[5]) > pct_stable && abs(out[6]) > pct_stable) out[7] <- 0 101 | if(abs(out[5]) > pct_stable && abs(out[6]) <= pct_stable) out[7] <- 1 102 | if(abs(out[5]) <= pct_stable && abs(out[6]) > pct_stable) out[7] <- 2 103 | if(abs(out[5]) <= pct_stable && abs(out[6]) <= pct_stable) out[7] <- 3 104 | } 105 | } else { 106 | out[7] <- NA 107 | } 108 | return(as.data.frame(t(out))) 109 | } 110 | 111 | # ## print the flag labels 112 | # classlabels.bfast01 <- function() { 113 | # cat("\n*** TYPE OF SHIFT *** \n") 114 | # class_names <- c('monotonic increase','monotonic decrease','monotonic increase (with positive break)','monotonic decrease (with negative break)','interruption: increase with negative 
break','interruption: decrease with positive break','reversal: increase to decrease','reversal: decrease to increase') 115 | # for (i in 1:8) cat(i, " -- ", class_names[i], "\n") 116 | # 117 | # cat("\n*** SIGNIFICANCE FLAG *** \n") 118 | # class_names <- c('both segments significant (or no break and significant)','only first segment significant','only 2nd segment significant','both segments insignificant (or no break and not significant)') 119 | # for (i in 0:3) cat(i, " -- ", class_names[i+1], "\n") 120 | # 121 | # cat("\n*** STABILITY FLAG *** \n") 122 | # class_names <- c('change in both segments is substantial (or no break and substantial)','only first segment substantial','only 2nd segment substantial','both segments are stable (or no break and stable)') 123 | # for (i in 0:3) cat(i, " -- ", class_names[i+1], "\n") 124 | # } -------------------------------------------------------------------------------- /bfast-master/man/bfast01.Rd: -------------------------------------------------------------------------------- 1 | \name{bfast01} 2 | \alias{bfast01} 3 | \title{Checking for one major break in the time series} 4 | 5 | \description{ 6 | A function to select a suitable model for the data by choosing either a model with 0 or with 1 breakpoint. 7 | } 8 | 9 | \usage{ 10 | bfast01(data, formula = NULL, 11 | test = "OLS-MOSUM", level = 0.05, aggregate = all, 12 | trim = NULL, bandwidth = 0.15, functional = "max", 13 | order = 3, lag = NULL, slag = NULL, na.action = na.omit, stl = "none") 14 | } 15 | 16 | \arguments{ 17 | \item{data}{A time series of class \code{\link[stats]{ts}}, or another object that 18 | can be coerced to such. The time series is processed by \code{\link[bfast]{bfastpp}}. A time series of class \code{\link[stats]{ts}} can be prepared by the convenience function \code{\link[bfast]{bfastts}} in case of daily, 10-daily, or 16-daily time series. 19 | } 20 | \item{formula}{formula for the regression model. 21 | The default is intelligently guessed based on the arguments order/lag/slag, i.e., \code{response ~ trend + harmon}, a linear trend and a harmonic season component. Other specifications are possible using all terms set up by \code{\link[bfast]{bfastpp}}, i.e., \code{season} (seasonal pattern with dummy variables), \code{lag} (autoregressive terms), \code{slag} (seasonal autoregressive terms), or \code{xreg} (further covariates). See \code{\link[bfast]{bfastpp}} for details.} 22 | \item{test}{ character specifying the type of test(s) performed. Can be one or more of BIC, supLM, supF, OLS-MOSUM, ..., or any other test supported by \code{\link[strucchange]{sctest.formula}} } 23 | \item{level}{numeric. Significance for the \code{\link[strucchange]{sctest.formula}} performed.} 24 | \item{aggregate}{function that aggregates a logical vector to a single value. This is used for aggregating the individual test decisions from \code{test} to a single one.} 25 | \item{trim}{numeric. The minimal segment size passed to the \code{from} argument of the \code{\link[strucchange]{Fstats}} function. } 26 | \item{bandwidth}{numeric scalar from the interval (0,1). The \code{bandwidth} argument is passed to 27 | the \code{h} argument of the \code{\link[strucchange]{sctest.formula}}. } 28 | \item{functional}{arguments passed on to \code{\link[strucchange]{sctest.formula}}} 29 | 30 | \item{order}{numeric. Order of the harmonic term, defaulting to \code{3}.} 31 | \item{lag}{numeric. Order of the autoregressive term, by default omitted.} 32 | \item{slag}{numeric. 
Order of the seasonal autoregressive term, by default omitted.} 33 | \item{na.action}{arguments passed on to \code{\link[bfast]{bfastpp}}} 34 | \item{stl}{argument passed on to \code{\link[bfast]{bfastpp}}} 35 | } 36 | 37 | \details{ 38 | \code{bfast01} tries to select a suitable model for the data by choosing 39 | either a model with 0 or with 1 breakpoint. It proceeds in the following 40 | steps: 41 | 42 | 1. The data is preprocessed with bfastpp using the arguments 43 | order/lag/slag/na.action/stl. 44 | 45 | 2. A linear model with the given formula is fitted. By default a suitable 46 | formula is guessed based on the preprocessing parameters. 47 | 48 | 3. The model with 1 breakpoint is estimated as well, where the breakpoint 49 | is chosen to minimize the segmented residual sum of squares. 50 | 51 | 4. A sequence of tests of the null hypothesis of zero breaks is performed. 52 | Each test results in a decision for FALSE (no breaks) or TRUE (structural 53 | break(s)). The test decisions are then aggregated to a single decision 54 | (by default using all(), but any() or some other function could also be used). 55 | 56 | Available methods for the object returned include standard methods for 57 | linear models (coef, fitted, residuals, predict, AIC, BIC, logLik, deviance, 58 | nobs, model.matrix, model.frame), standard methods for breakpoints (breakpoints, 59 | breakdates), coercion to a zoo series with the decomposed components (as.zoo), 60 | and a plot method which plots such a zoo series along with the confidence 61 | interval (if the 1-break model is visualized). All methods take a 'breaks' 62 | argument which can either be 0 or 1. By default the value chosen based on the 63 | 'test' decisions is used. 64 | 65 | Note that the different tests supported have power for different types of 66 | alternatives. Some tests (such as supLM/supF or BIC) assess changes in all 67 | coefficients of the model while residual-based tests (e.g., OLS-CUSUM or 68 | OLS-MOSUM) assess changes in the conditional mean. See Zeileis (2005) for 69 | a unifying view. 70 | } 71 | 72 | \value{ 73 | \code{bfast01} returns a list of class \code{"bfast01"} with the following elements: 74 | \item{call}{the original function call.} 75 | \item{data}{the data preprocessed by \code{"bfastpp"}.} 76 | \item{formula}{the model formulae.} 77 | \item{breaks}{the number of breaks chosen based on the \code{test} decision (either 0 or 1).} 78 | \item{test}{the individual test decisions.} 79 | \item{breakpoints}{the optimal breakpoint for the model with 1 break.} 80 | \item{model}{A list of two 'lm' objects with no break and one break, respectively.} 81 | } 82 | 83 | \references{ 84 | de Jong R, Verbesselt J, Zeileis A, Schaepman M (2013). 85 | Shifts in global vegetation activity trends. 86 | \emph{Remote Sensing}, \bold{5}, 1117--1133. 87 | \url{http://dx.doi.org/10.3390/rs5031117} 88 | 89 | Zeileis A (2005). A unified approach to structural change tests 90 | based on ML scores, F statistics, and OLS residuals. 91 | \emph{Econometric Reviews}, \bold{24}, 445--466. 92 | \url{http://dx.doi.org/10.1080/07474930500406053}.
93 | }
94 |
95 | \author{Achim Zeileis, Jan Verbesselt}
96 |
97 | \seealso{\code{\link[bfast]{bfastmonitor}}, \code{\link[strucchange]{breakpoints}}}
98 |
99 | \examples{
100 | library(zoo)
101 | ## define a regular time series
102 | ndvi <- as.ts(zoo(som$NDVI.a, som$Time))
103 |
104 | ## fit variations
105 | bf1 <- bfast01(ndvi)
106 | bf2 <- bfast01(ndvi, test = c("BIC", "OLS-MOSUM", "supLM"), aggregate = any)
107 | bf3 <- bfast01(ndvi, test = c("OLS-MOSUM", "supLM"), aggregate = any, bandwidth = 0.11)
108 |
109 | ## inspect test decisions
110 | bf1$test
111 | bf1$breaks
112 | bf2$test
113 | bf2$breaks
114 | bf3$test
115 | bf3$breaks
116 |
117 | ## look at coefficients
118 | coef(bf1)
119 | coef(bf1, breaks = 0)
120 | coef(bf1, breaks = 1)
121 |
122 | ## zoo series with all components
123 | plot(as.zoo(ndvi))
124 | plot(as.zoo(bf1, breaks = 1))
125 | plot(as.zoo(bf2))
126 | plot(as.zoo(bf3))
127 |
128 | ## leveraged by plot method
129 | plot(bf1, regular = TRUE)
130 | plot(bf2)
131 | plot(bf2, plot.type = "multiple",
132 | which = c("response", "trend", "season"), screens = c(1, 1, 2))
133 | plot(bf3)
134 |
135 | }
136 | \keyword{ts}
137 |
138 |
-------------------------------------------------------------------------------- /bfast-master/man/bfast.Rd: --------------------------------------------------------------------------------
1 | \name{bfast}
2 | \alias{bfast}
3 | \title{Break Detection in the Seasonal and Trend Component of a Univariate Time Series}
4 |
5 | \description{
6 | Iterative break detection in the seasonal and trend components of a time series.
7 | \code{bfast} combines the iterative decomposition of a time series into trend,
8 | seasonal, and remainder components with the detection of significant
9 | breaks in the decomposed components.
10 | }
11 |
12 | \usage{
13 | bfast(Yt, h = 0.15, season = c("dummy", "harmonic", "none"),
14 | max.iter = NULL, breaks = NULL, hpc = "none", level = 0.05, type = "OLS-MOSUM")
15 | }
16 |
17 | \arguments{
18 | \item{Yt}{univariate time series to be analyzed. This should be an object of class "ts" with a frequency greater than one, without NA's.
19 | }
20 | \item{h}{minimal segment size between potentially detected breaks in the trend model, given as a fraction relative to the sample size
21 | (i.e., the minimal number of observations in each segment divided by the total length of the time series).
22 | }
23 | \item{season}{the seasonal model used to fit the seasonal component and detect seasonal breaks (i.e., significant phenological change).
24 | There are three options: "dummy", "harmonic", or "none",
25 | where "dummy" is the model proposed in the first Remote Sensing of Environment paper,
26 | "harmonic" is the model used in the second Remote Sensing of Environment paper (see the papers for more details), and
27 | "none" indicates that no seasonal model will be fitted (i.e., St = 0). If there is no seasonal cycle (e.g., the frequency of the time series is 1),
28 | "none" can be selected to avoid fitting a seasonal model.
29 | }
30 | \item{max.iter}{maximum number of iterations allowed for the estimation of breakpoints in the seasonal and trend components.
31 | }
32 | \item{breaks}{integer specifying the maximal number of breaks to be calculated. By default the maximal number allowed by h is used.
33 | }
34 | \item{hpc}{A character specifying the high performance computing support. Default is "none"; can be set to "foreach". Install the "foreach" package for hpc support.
35 | }
36 | \item{level}{numeric; threshold value for the \link[strucchange]{sctest.efp}
37 | test; if a length-2 vector is passed, the first value is used for the trend,
38 | the second for the seasonality.}
39 | \item{type}{character, indicating the type argument to \link[strucchange]{efp}}
40 | }
41 |
42 | \details{\code{bfast} iterates between estimating breaks in the trend and in the seasonal
component: breakpoints in the trend are estimated from the deseasonalized data \code{Yt - St},
and breakpoints in the seasonal component from the detrended data \code{Yt - Tt} (both via
\code{\link[strucchange]{breakpoints}}), until the breakpoint estimates stabilize or
\code{max.iter} is reached; see the references for full details.}
43 |
44 | \value{
45 | An object of the class "bfast" is a list with the following elements (see the sketch below for accessing them):
46 | \item{Yt}{ equals the Yt used as input.}
47 | \item{output}{ is a list with the following elements (for each iteration):
48 | \tabular{ll}{
49 | \code{Tt} \tab the fitted trend component\cr
50 | \code{St} \tab the fitted seasonal component\cr
51 | \code{Nt} \tab the noise or remainder component\cr
52 | \code{Vt} \tab equals the deseasonalized data \code{Yt - St} for each iteration\cr
53 | \code{bp.Vt} \tab output of the \code{\link[strucchange]{breakpoints}} function for the trend model\cr
54 | \code{ci.Vt} \tab output of the \code{\link[strucchange]{breakpoints}} confint function for the trend model\cr
55 | \code{Wt} \tab equals the detrended data \code{Yt - Tt} for each iteration\cr
56 | \code{bp.Wt} \tab output of the \code{\link[strucchange]{breakpoints}} function for the seasonal model\cr
57 | \code{ci.Wt} \tab output of the \code{\link[strucchange]{breakpoints}} confint function for the seasonal model
58 | }}
59 | \item{nobp}{ is a list with the following elements:
60 | \tabular{ll}{
61 | \code{nobp.Vt} \tab logical, TRUE if there are no breakpoints detected in the trend component\cr
62 | \code{nobp.Wt} \tab logical, TRUE if there are no breakpoints detected in the seasonal component
63 | }}
64 | \item{magnitude}{ magnitude of the biggest change detected in the trend component}
65 | \item{Time}{ timing of the biggest change detected in the trend component}
66 | }
67 |
68 | \references{
69 | Verbesselt J, Hyndman R, Newnham G, Culvenor D (2010).
70 | Detecting Trend and Seasonal Changes in Satellite Image Time Series.
71 | \emph{Remote Sensing of Environment}, \bold{114}(1), 106--115.
72 | \url{http://dx.doi.org/10.1016/j.rse.2009.08.014}
73 |
74 | Verbesselt J, Hyndman R, Zeileis A, Culvenor D (2010).
75 | Phenological Change Detection while Accounting for Abrupt and Gradual Trends in Satellite Image Time Series.
76 | \emph{Remote Sensing of Environment}, \bold{114}(12), 2970--2980.
77 | \url{http://dx.doi.org/10.1016/j.rse.2010.08.003}
78 | }
79 |
80 | \author{
81 | Jan Verbesselt
82 | }
83 |
84 | \seealso{
85 | \code{\link[bfast]{plot.bfast}} for plotting of bfast() results. \cr
86 | \code{\link[strucchange]{breakpoints}} for more examples and background information about estimation of breakpoints in time series.
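A brief sketch of navigating this return value, assuming a fitted object fit as produced in the examples below (component names as documented above):

niter <- length(fit$output)    # number of iterations performed
out <- fit$output[[niter]]     # results of the final iteration
out$bp.Vt$breakpoints          # trend breakpoints (if any)
out$ci.Vt                      # confidence intervals for the trend breaks
fit$Time                       # timing of the largest trend change
fit$magnitude                  # magnitude of the largest trend change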
87 | }
88 |
89 | \examples{
90 |
91 | \dontrun{
92 | rm(list = ls())
93 | install.packages("bfast", repos="http://R-Forge.R-project.org", type = "source")
94 | update.packages(checkBuilt=TRUE)
95 | # make sure all your packages are up to date
96 | # and built correctly for your current R version
97 | }
98 |
99 | ## Simulated Data
100 | plot(simts) # stl object containing simulated NDVI time series
101 | datats <- ts(rowSums(simts$time.series))
102 | # sum of all the components (season,abrupt,remainder)
103 | tsp(datats) <- tsp(simts$time.series) # assign correct time series attributes
104 | plot(datats)
105 |
106 | \dontrun{
107 | if (requireNamespace("forecast", quietly = TRUE)) {
108 | fit <- bfast(datats,h=0.15, season="dummy", max.iter=1)
109 | plot(fit,sim=simts)
110 | fit
111 | # prints out whether breakpoints are detected
112 | # in the seasonal and trend component
113 |
114 | } else {
115 | ## do something else not involving forecast-related functions
116 | ## like seasonaldummy() and tsdisplay()
117 | }
118 | }
119 |
120 |
121 | ## Real data
122 | ## The data should be a regular ts() object without NA's
123 | ## See Fig. 8 b in reference
124 | plot(harvest, ylab="NDVI")
125 | # MODIS 16-day cleaned and interpolated NDVI time series
126 |
127 | (rdist <- 10/length(harvest))
128 | # ratio of distance between breaks (time steps) and length of the time series
129 | \dontrun{
130 | if (requireNamespace("forecast", quietly = TRUE)) {
131 | fit <- bfast(harvest,h=rdist, season="harmonic", max.iter=1,breaks=2)
132 | plot(fit)
133 | ## plot ANOVA and slope of the identified trend segments
134 | #plot(fit, ANOVA=TRUE)
135 | ## plot the trend component and identify the break with
136 | ## the largest magnitude of change
137 | plot(fit,type="trend",largest=TRUE)
138 |
139 | ## plot all the different available plots
140 | plot(fit,type="all")
141 |
142 | ## output
143 | niter <- length(fit$output) # nr of iterations
144 | out <- fit$output[[niter]]
145 | # output of the final fitted seasonal and trend models and
146 | # the number of breakpoints in both.
147 | 148 | ## running bfast on yearly data 149 | t <- ts(as.numeric(harvest), frequency = 1, start = 2006) 150 | fit <- bfast(t, h = 0.23, season = "none", max.iter = 1) 151 | plot(fit) 152 | fit 153 | } 154 | } 155 | } 156 | 157 | \keyword{ts} 158 | -------------------------------------------------------------------------------- /pyMannKendall-master/Examples/daily-total-female-births.csv: -------------------------------------------------------------------------------- 1 | "Date","Births" 2 | "1959-01-01",35 3 | "1959-01-02",32 4 | "1959-01-03",30 5 | "1959-01-04",31 6 | "1959-01-05",44 7 | "1959-01-06",29 8 | "1959-01-07",45 9 | "1959-01-08",43 10 | "1959-01-09",38 11 | "1959-01-10",27 12 | "1959-01-11",38 13 | "1959-01-12",33 14 | "1959-01-13",55 15 | "1959-01-14",47 16 | "1959-01-15",45 17 | "1959-01-16",37 18 | "1959-01-17",50 19 | "1959-01-18",43 20 | "1959-01-19",41 21 | "1959-01-20",52 22 | "1959-01-21",34 23 | "1959-01-22",53 24 | "1959-01-23",39 25 | "1959-01-24",32 26 | "1959-01-25",37 27 | "1959-01-26",43 28 | "1959-01-27",39 29 | "1959-01-28",35 30 | "1959-01-29",44 31 | "1959-01-30",38 32 | "1959-01-31",24 33 | "1959-02-01",23 34 | "1959-02-02",31 35 | "1959-02-03",44 36 | "1959-02-04",38 37 | "1959-02-05",50 38 | "1959-02-06",38 39 | "1959-02-07",51 40 | "1959-02-08",31 41 | "1959-02-09",31 42 | "1959-02-10",51 43 | "1959-02-11",36 44 | "1959-02-12",45 45 | "1959-02-13",51 46 | "1959-02-14",34 47 | "1959-02-15",52 48 | "1959-02-16",47 49 | "1959-02-17",45 50 | "1959-02-18",46 51 | "1959-02-19",39 52 | "1959-02-20",48 53 | "1959-02-21",37 54 | "1959-02-22",35 55 | "1959-02-23",52 56 | "1959-02-24",42 57 | "1959-02-25",45 58 | "1959-02-26",39 59 | "1959-02-27",37 60 | "1959-02-28",30 61 | "1959-03-01",35 62 | "1959-03-02",28 63 | "1959-03-03",45 64 | "1959-03-04",34 65 | "1959-03-05",36 66 | "1959-03-06",50 67 | "1959-03-07",44 68 | "1959-03-08",39 69 | "1959-03-09",32 70 | "1959-03-10",39 71 | "1959-03-11",45 72 | "1959-03-12",43 73 | "1959-03-13",39 74 | "1959-03-14",31 75 | "1959-03-15",27 76 | "1959-03-16",30 77 | "1959-03-17",42 78 | "1959-03-18",46 79 | "1959-03-19",41 80 | "1959-03-20",36 81 | "1959-03-21",45 82 | "1959-03-22",46 83 | "1959-03-23",43 84 | "1959-03-24",38 85 | "1959-03-25",34 86 | "1959-03-26",35 87 | "1959-03-27",56 88 | "1959-03-28",36 89 | "1959-03-29",32 90 | "1959-03-30",50 91 | "1959-03-31",41 92 | "1959-04-01",39 93 | "1959-04-02",41 94 | "1959-04-03",47 95 | "1959-04-04",34 96 | "1959-04-05",36 97 | "1959-04-06",33 98 | "1959-04-07",35 99 | "1959-04-08",38 100 | "1959-04-09",38 101 | "1959-04-10",34 102 | "1959-04-11",53 103 | "1959-04-12",34 104 | "1959-04-13",34 105 | "1959-04-14",38 106 | "1959-04-15",35 107 | "1959-04-16",32 108 | "1959-04-17",42 109 | "1959-04-18",34 110 | "1959-04-19",46 111 | "1959-04-20",30 112 | "1959-04-21",46 113 | "1959-04-22",45 114 | "1959-04-23",54 115 | "1959-04-24",34 116 | "1959-04-25",37 117 | "1959-04-26",35 118 | "1959-04-27",40 119 | "1959-04-28",42 120 | "1959-04-29",58 121 | "1959-04-30",51 122 | "1959-05-01",32 123 | "1959-05-02",35 124 | "1959-05-03",38 125 | "1959-05-04",33 126 | "1959-05-05",39 127 | "1959-05-06",47 128 | "1959-05-07",38 129 | "1959-05-08",52 130 | "1959-05-09",30 131 | "1959-05-10",34 132 | "1959-05-11",40 133 | "1959-05-12",35 134 | "1959-05-13",42 135 | "1959-05-14",41 136 | "1959-05-15",42 137 | "1959-05-16",38 138 | "1959-05-17",24 139 | "1959-05-18",34 140 | "1959-05-19",43 141 | "1959-05-20",36 142 | "1959-05-21",55 143 | "1959-05-22",41 144 | "1959-05-23",45 145 | 
"1959-05-24",41 146 | "1959-05-25",37 147 | "1959-05-26",43 148 | "1959-05-27",39 149 | "1959-05-28",33 150 | "1959-05-29",43 151 | "1959-05-30",40 152 | "1959-05-31",38 153 | "1959-06-01",45 154 | "1959-06-02",46 155 | "1959-06-03",34 156 | "1959-06-04",35 157 | "1959-06-05",48 158 | "1959-06-06",51 159 | "1959-06-07",36 160 | "1959-06-08",33 161 | "1959-06-09",46 162 | "1959-06-10",42 163 | "1959-06-11",48 164 | "1959-06-12",34 165 | "1959-06-13",41 166 | "1959-06-14",35 167 | "1959-06-15",40 168 | "1959-06-16",34 169 | "1959-06-17",30 170 | "1959-06-18",36 171 | "1959-06-19",40 172 | "1959-06-20",39 173 | "1959-06-21",45 174 | "1959-06-22",38 175 | "1959-06-23",47 176 | "1959-06-24",33 177 | "1959-06-25",30 178 | "1959-06-26",42 179 | "1959-06-27",43 180 | "1959-06-28",41 181 | "1959-06-29",41 182 | "1959-06-30",59 183 | "1959-07-01",43 184 | "1959-07-02",45 185 | "1959-07-03",38 186 | "1959-07-04",37 187 | "1959-07-05",45 188 | "1959-07-06",42 189 | "1959-07-07",57 190 | "1959-07-08",46 191 | "1959-07-09",51 192 | "1959-07-10",41 193 | "1959-07-11",47 194 | "1959-07-12",26 195 | "1959-07-13",35 196 | "1959-07-14",44 197 | "1959-07-15",41 198 | "1959-07-16",42 199 | "1959-07-17",36 200 | "1959-07-18",45 201 | "1959-07-19",45 202 | "1959-07-20",45 203 | "1959-07-21",47 204 | "1959-07-22",38 205 | "1959-07-23",42 206 | "1959-07-24",35 207 | "1959-07-25",36 208 | "1959-07-26",39 209 | "1959-07-27",45 210 | "1959-07-28",43 211 | "1959-07-29",47 212 | "1959-07-30",36 213 | "1959-07-31",41 214 | "1959-08-01",50 215 | "1959-08-02",39 216 | "1959-08-03",41 217 | "1959-08-04",46 218 | "1959-08-05",64 219 | "1959-08-06",45 220 | "1959-08-07",34 221 | "1959-08-08",38 222 | "1959-08-09",44 223 | "1959-08-10",48 224 | "1959-08-11",46 225 | "1959-08-12",44 226 | "1959-08-13",37 227 | "1959-08-14",39 228 | "1959-08-15",44 229 | "1959-08-16",45 230 | "1959-08-17",33 231 | "1959-08-18",44 232 | "1959-08-19",38 233 | "1959-08-20",46 234 | "1959-08-21",46 235 | "1959-08-22",40 236 | "1959-08-23",39 237 | "1959-08-24",44 238 | "1959-08-25",48 239 | "1959-08-26",50 240 | "1959-08-27",41 241 | "1959-08-28",42 242 | "1959-08-29",51 243 | "1959-08-30",41 244 | "1959-08-31",44 245 | "1959-09-01",38 246 | "1959-09-02",68 247 | "1959-09-03",40 248 | "1959-09-04",42 249 | "1959-09-05",51 250 | "1959-09-06",44 251 | "1959-09-07",45 252 | "1959-09-08",36 253 | "1959-09-09",57 254 | "1959-09-10",44 255 | "1959-09-11",42 256 | "1959-09-12",53 257 | "1959-09-13",42 258 | "1959-09-14",34 259 | "1959-09-15",40 260 | "1959-09-16",56 261 | "1959-09-17",44 262 | "1959-09-18",53 263 | "1959-09-19",55 264 | "1959-09-20",39 265 | "1959-09-21",59 266 | "1959-09-22",55 267 | "1959-09-23",73 268 | "1959-09-24",55 269 | "1959-09-25",44 270 | "1959-09-26",43 271 | "1959-09-27",40 272 | "1959-09-28",47 273 | "1959-09-29",51 274 | "1959-09-30",56 275 | "1959-10-01",49 276 | "1959-10-02",54 277 | "1959-10-03",56 278 | "1959-10-04",47 279 | "1959-10-05",44 280 | "1959-10-06",43 281 | "1959-10-07",42 282 | "1959-10-08",45 283 | "1959-10-09",50 284 | "1959-10-10",48 285 | "1959-10-11",43 286 | "1959-10-12",40 287 | "1959-10-13",59 288 | "1959-10-14",41 289 | "1959-10-15",42 290 | "1959-10-16",51 291 | "1959-10-17",49 292 | "1959-10-18",45 293 | "1959-10-19",43 294 | "1959-10-20",42 295 | "1959-10-21",38 296 | "1959-10-22",47 297 | "1959-10-23",38 298 | "1959-10-24",36 299 | "1959-10-25",42 300 | "1959-10-26",35 301 | "1959-10-27",28 302 | "1959-10-28",44 303 | "1959-10-29",36 304 | "1959-10-30",45 305 | "1959-10-31",46 306 | 
"1959-11-01",48 307 | "1959-11-02",49 308 | "1959-11-03",43 309 | "1959-11-04",42 310 | "1959-11-05",59 311 | "1959-11-06",45 312 | "1959-11-07",52 313 | "1959-11-08",46 314 | "1959-11-09",42 315 | "1959-11-10",40 316 | "1959-11-11",40 317 | "1959-11-12",45 318 | "1959-11-13",35 319 | "1959-11-14",35 320 | "1959-11-15",40 321 | "1959-11-16",39 322 | "1959-11-17",33 323 | "1959-11-18",42 324 | "1959-11-19",47 325 | "1959-11-20",51 326 | "1959-11-21",44 327 | "1959-11-22",40 328 | "1959-11-23",57 329 | "1959-11-24",49 330 | "1959-11-25",45 331 | "1959-11-26",49 332 | "1959-11-27",51 333 | "1959-11-28",46 334 | "1959-11-29",44 335 | "1959-11-30",52 336 | "1959-12-01",45 337 | "1959-12-02",32 338 | "1959-12-03",46 339 | "1959-12-04",41 340 | "1959-12-05",34 341 | "1959-12-06",33 342 | "1959-12-07",36 343 | "1959-12-08",49 344 | "1959-12-09",43 345 | "1959-12-10",43 346 | "1959-12-11",34 347 | "1959-12-12",39 348 | "1959-12-13",35 349 | "1959-12-14",52 350 | "1959-12-15",47 351 | "1959-12-16",52 352 | "1959-12-17",39 353 | "1959-12-18",40 354 | "1959-12-19",42 355 | "1959-12-20",42 356 | "1959-12-21",53 357 | "1959-12-22",39 358 | "1959-12-23",40 359 | "1959-12-24",38 360 | "1959-12-25",44 361 | "1959-12-26",34 362 | "1959-12-27",37 363 | "1959-12-28",52 364 | "1959-12-29",48 365 | "1959-12-30",55 366 | "1959-12-31",50 -------------------------------------------------------------------------------- /Dependent function/autocorr.m: -------------------------------------------------------------------------------- 1 | function varargout = autocorr(Series , nLags , Q , nSTDs) 2 | %AUTOCORR Compute or plot sample auto-correlation function. 3 | % Compute or plot the sample auto-correlation function (ACF) of a univariate, 4 | % stochastic time series. When called with no output arguments, AUTOCORR 5 | % displays the ACF sequence with confidence bounds. 6 | % 7 | % [ACF, Lags, Bounds] = autocorr(Series) 8 | % [ACF, Lags, Bounds] = autocorr(Series , nLags , M , nSTDs) 9 | % 10 | % Optional Inputs: nLags , M , nSTDs 11 | % 12 | % Inputs: 13 | % Series - Vector of observations of a univariate time series for which the 14 | % sample ACF is computed or plotted. The last row of Series contains the 15 | % most recent observation of the stochastic sequence. 16 | % 17 | % Optional Inputs: 18 | % nLags - Positive, scalar integer indicating the number of lags of the ACF 19 | % to compute. If empty or missing, the default is to compute the ACF at 20 | % lags 0,1,2, ... T = minimum[20 , length(Series)-1]. Since an ACF is 21 | % symmetric about zero lag, negative lags are ignored. 22 | % 23 | % M - Non-negative integer scalar indicating the number of lags beyond which 24 | % the theoretical ACF is deemed to have died out. Under the hypothesis that 25 | % the underlying Series is really an MA(M) process, the large-lag standard 26 | % error is computed (via Bartlett's approximation) for lags > M as an 27 | % indication of whether the ACF is effectively zero beyond lag M. On the 28 | % assumption that the ACF is zero beyond lag M, Bartlett's approximation 29 | % is used to compute the standard deviation of the ACF for lags > M. If M 30 | % is empty or missing, the default is M = 0, in which case Series is 31 | % assumed to be Gaussian white noise. If Series is a Gaussian white noise 32 | % process of length N, the standard error will be approximately 1/sqrt(N). 33 | % M must be less than nLags. 
34 | %
35 | % nSTDs - Positive scalar indicating the number of standard deviations of the
36 | % sample ACF estimation error to compute assuming the theoretical ACF of
37 | % Series is zero beyond lag M. When M = 0 and Series is a Gaussian white
38 | % noise process of length N, specifying nSTDs will result in confidence
39 | % bounds at +/-(nSTDs/sqrt(N)). If empty or missing, default is nSTDs = 2
40 | % (i.e., approximate 95% confidence interval).
41 | %
42 | % Outputs:
43 | % ACF - Sample auto-correlation function of Series. ACF is a vector of
44 | % length nLags + 1 corresponding to lags 0,1,2,...,nLags. The first
45 | % element of ACF is unity (i.e., ACF(1) = 1 = lag 0 correlation).
46 | %
47 | % Lags - Vector of lags corresponding to ACF (0,1,2,...,nLags).
48 | %
49 | % Bounds - Two element vector indicating the approximate upper and lower
50 | % confidence bounds assuming that Series is an MA(M) process. Note that
51 | % Bounds is approximate for lags > M only.
52 | %
53 | % Example:
54 | % Create an MA(2) process from a sequence of 1000 Gaussian deviates, then
55 | % visually assess whether the ACF is effectively zero for lags > 2:
56 | %
57 | % randn('state',0) % Start from a known state.
58 | % x = randn(1000,1); % 1000 Gaussian deviates ~ N(0,1).
59 | % y = filter([1 -1 1] , 1 , x); % Create an MA(2) process.
60 | % autocorr(y , [] , 2) % Inspect the ACF with 95% confidence.
61 | %
62 | % See also CROSSCORR, PARCORR, FILTER.
63 |
64 | % Copyright 1999-2003 The MathWorks, Inc.
65 | % $Revision: 1.6.2.2 $ $Date: 2007/09/11 11:45:55 $
66 |
67 | %
68 | % Reference:
69 | % Box, G.E.P., Jenkins, G.M., Reinsel, G.C., "Time Series Analysis:
70 | % Forecasting and Control", 3rd edition, Prentice Hall, 1994.
71 |
72 | %
73 | % Ensure the sample data is a VECTOR.
74 | %
75 |
76 | [rows , columns] = size(Series);
77 |
78 | if (rows ~= 1) && (columns ~= 1)
79 | error('econ:autocorr:NonVectorInput' , ' Input ''Series'' must be a vector.');
80 | end
81 |
82 | rowSeries = size(Series,1) == 1;
83 |
84 | Series = Series(:); % Ensure a column vector
85 | n = length(Series); % Sample size.
86 | defaultLags = 20; % BJR recommend about 20 lags for ACFs.
87 |
88 | %
89 | % Ensure the number of lags, nLags, is a positive
90 | % integer scalar and set default if necessary.
91 | %
92 |
93 | if (nargin >= 2) && ~isempty(nLags)
94 | if numel(nLags) > 1
95 | error('econ:autocorr:NonScalarLags' , ' Number of lags ''nLags'' must be a scalar.');
96 | end
97 | if (round(nLags) ~= nLags) || (nLags <= 0)
98 | error('econ:autocorr:NonPositiveInteger' , ' Number of lags ''nLags'' must be a positive integer.');
99 | end
100 | if nLags > (n - 1)
101 | error('econ:autocorr:LagsTooLarge' , ' Number of lags ''nLags'' must not exceed ''Series'' length - 1.');
102 | end
103 | else
104 | nLags = min(defaultLags , n - 1);
105 | end
106 |
107 | %
108 | % Ensure the hypothesized number of lags, Q, is a non-negative integer
109 | % scalar, and set default if necessary.
110 | %
111 | if (nargin >= 3) && ~isempty(Q)
112 | if numel(Q) > 1
113 | error('econ:autocorr:NonScalarQ' , ' Number of lags ''Q'' must be a scalar.');
114 | end
115 | if (round(Q) ~= Q) || (Q < 0)
116 | error('econ:autocorr:NegativeInteger' , ' Number of lags ''Q'' must be a non-negative integer.');
117 | end
118 | if Q >= nLags
119 | error('econ:autocorr:QTooLarge' , ' ''Q'' must be less than ''nLags''.');
120 | end
121 | else
122 | Q = 0; % Default is 0 (Gaussian white noise hypothesis).
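    % With this default the sigmaQ expression below reduces to sqrt(1/n),
    % so the plotted bounds are simply +/-(nSTDs/sqrt(N)), matching the
    % white noise statement in the help text above.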
123 | end
124 |
125 | %
126 | % Ensure the number of standard deviations, nSTDs, is a positive
127 | % scalar and set default if necessary.
128 | %
129 |
130 | if (nargin >= 4) && ~isempty(nSTDs)
131 | if numel(nSTDs) > 1
132 | error('econ:autocorr:NonScalarSTDs' , ' Number of standard deviations ''nSTDs'' must be a scalar.');
133 | end
134 | if nSTDs < 0
135 | error('econ:autocorr:NegativeSTDs' , ' Number of standard deviations ''nSTDs'' must be non-negative.');
136 | end
137 | else
138 | nSTDs = 2; % Default is 2 standard errors (95% confidence interval).
139 | end
140 |
141 | %
142 | % Convolution, polynomial multiplication, and FIR digital filtering are
143 | % all the same operation. The FILTER command could be used to compute
144 | % the ACF (by computing the correlation by convolving the de-meaned
145 | % Series with a flipped version of itself), but FFT-based computation
146 | % is significantly faster for large data sets.
147 | %
148 | % The ACF computation is based on Box, Jenkins, Reinsel, pages 30-34, 188.
149 | %
150 |
151 | nFFT = 2^(nextpow2(length(Series)) + 1);
152 | F = fft(Series-mean(Series) , nFFT);
153 | F = F .* conj(F);
154 | ACF = ifft(F);
155 | ACF = ACF(1:(nLags + 1)); % Retain non-negative lags.
156 | ACF = ACF ./ ACF(1); % Normalize.
157 | ACF = real(ACF);
158 |
159 | %
160 | % Compute approximate confidence bounds using the Box-Jenkins-Reinsel
161 | % approach, equations 2.1.13 and 6.2.2, on pages 33 and 188, respectively.
162 | %
163 |
164 | sigmaQ = sqrt((1 + 2*(ACF(2:Q+1)'*ACF(2:Q+1)))/n);
165 | bounds = sigmaQ * [nSTDs ; -nSTDs];
166 | Lags = [0:nLags]';
167 |
168 | if nargout == 0 % Make plot if requested.
169 |
170 | %
171 | % Plot the sample ACF.
172 | %
173 | lineHandles = stem(Lags , ACF , 'filled' , 'r-o');
174 | set (lineHandles(1) , 'MarkerSize' , 4)
175 | grid ('on')
176 | xlabel('Lag')
177 | ylabel('Sample Autocorrelation')
178 | title ('Sample Autocorrelation Function (ACF)')
179 | hold ('on')
180 | %
181 | % Plot the confidence bounds under the hypothesis that the underlying
182 | % Series is really an MA(Q) process. Bartlett's approximation gives
183 | % an indication of whether the ACF is effectively zero beyond lag Q.
184 | % For this reason, the confidence bounds (horizontal lines) appear
185 | % over the ACF ONLY for lags GREATER than Q (i.e., Q+1, Q+2, ... nLags).
186 | % In other words, the confidence bounds enclose ONLY those lags for
187 | % which the null hypothesis is assumed to hold.
188 | %
189 |
190 | plot([Q+0.5 Q+0.5 ; nLags nLags] , [bounds([1 1]) bounds([2 2])] , '-b');
191 |
192 | plot([0 nLags] , [0 0] , '-k');
193 | hold('off')
194 | a = axis;
195 | axis([a(1:3) 1]);
196 |
197 | else
198 |
199 | %
200 | % Re-format outputs for compatibility with the SERIES input. When SERIES is
201 | % input as a row vector, then pass the outputs as row vectors; when SERIES
202 | % is a column vector, then pass the outputs as column vectors.
203 | %
204 | if rowSeries
205 | ACF = ACF.';
206 | Lags = Lags.';
207 | bounds = bounds.';
208 | end
209 |
210 | varargout = {ACF , Lags , bounds};
211 |
212 | end
213 |
-------------------------------------------------------------------------------- /Dependent function/ar.m: --------------------------------------------------------------------------------
1 | function [th,ref]=ar(data,n,varargin)
2 | %AR Computes AR-models of signals using various approaches.
3 | % Model = AR(Y,N) or TH = AR(Y,N,Approach) or TH = AR(Y,N,Approach,Win)
4 | %
5 | % Model: returned as an IDPOLY model with the estimated parameters of the
6 | % AR-model, see HELP IDPOLY.
7 | %
8 | % Y: The time series to be modelled, an IDDATA object. (See HELP IDDATA)
9 | % N: The order of the AR-model
10 | % Approach: The method used, one of the following ones:
11 | % 'fb' : The forward-backward approach (default)
12 | % 'ls' : The Least Squares method
13 | % 'yw' : The Yule-Walker method
14 | % 'burg': Burg's method
15 | % 'gl' : A geometric lattice method
16 | % For the two latter ones, reflection coefficients and loss functions
17 | % are returned in REFL by [Model,REFL] = AR(y,n,approach)
18 | % Win : Windows employed, one of the following ones:
19 | % 'now' : No windowing (default, except when approach='yw')
20 | % 'prw' : Prewindowing
21 | % 'pow' : Postwindowing
22 | % 'ppw' : pre- and post-windowing
23 | %
24 | % The Property/Value pairs 'MaxSize'/maxsize and 'Ts'/Ts can be added to
25 | % set the MaxSize property (see also IDPROPS ALG) and to override the sampling
26 | % interval of the data: Example: Model = AR(Y,N,Approach,'MaxSize',500).
27 | % The Property/Value pair 'CovarianceMatrix'/'None' will suppress the
28 | % calculation of the covariance matrix.
29 | %
30 | % See also IVAR, ARX, N4SID.
31 |
32 | % L. Ljung 10-7-87
33 | % Copyright 1986-2007 The MathWorks, Inc.
34 | % $Revision: 1.15.4.7 $ $Date: 2007/12/14 14:43:10 $
35 |
36 |
37 | if nargin <2
38 | disp('Usage: TH = AR(Y,ORDER)')
39 | disp(' TH = AR(Y,ORDER,APPROACH,WINDOW)')
40 | disp(' APPROACH is one of ''fb'', ''ls'', ''yw'', ''burg'', ''gl''.')
41 | disp(' WINDOW is one of ''now'', ''prw'', ''pow'', ''ppw''.')
42 | return
43 | end
44 | ref = [];
45 | maxsize = 'auto';
46 | T = 1;
47 | approach = 'fb';
48 | win = 'now';
49 | pt = 1;
50 | Tflag = 0;
51 | % Some initial tests on the input arguments
52 | indc = 1;
53 | list = {'Maxsize','Ts','fb','ls','yw','burg','gl','now',...
54 | 'prw','pow','ppw','CovarianceMatrix','None','Estimate'};
55 | while indc<=length(varargin)
56 | arg = varargin{indc};
57 | if ischar(arg)
58 | if arg(end)=='0'
59 | pt = 0;
60 | arg=arg(1:end-1);
61 | end
62 | try
63 | [prop,im] = pnmatchd(arg,list,7,0);
64 | catch E
65 | throw(E)
66 | end
67 | if im==1
68 | maxsize = varargin{indc+1};
69 | indc = indc+1;
70 | elseif im==2
71 | T = varargin{indc+1};
72 | indc=indc+1;
73 | Tflag = 1;
74 | elseif im<8
75 | approach = prop;
76 | elseif im < 12
77 | win = prop;
78 | elseif im == 13
79 | pt = 0;
80 | end
81 |
82 | elseif indc == 3
83 | maxsize = varargin{indc};
84 | elseif indc==4
85 | T = varargin{indc};
86 | Tflag = 1;
87 | end
88 | indc=indc+1;
89 | end
90 | pt1 = pt;
91 | errn=0;
92 | if ~isa(n,'double')
93 | errn=1;
94 | elseif n~=fix(n) || n<=0 || ~isreal(n)
95 | errn=1;
96 | end
97 |
98 | if errn
99 | error('ident:estimation:arInvalidOrder','The order, n, must be a positive integer.')
100 | end
101 | if isa(data,'frd') || isa(data,'idfrd') || (isa(data,'iddata') ...
102 | && strcmp(pvget(data,'Domain'),'Frequency'))
103 | error('ident:estimation:arWithFrequencyData','For frequency domain data, use ARX instead of AR.')
104 | end
105 |
106 | if ~isa(data,'iddata')
107 | [N,ny]=size(data);
108 | if min(N,ny)~=1
109 | error('ident:estimation:multiVariableTimeSeries',...
110 | 'Only scalar time series can be handled. Use "arx" command for multivariate signals.')
111 | end
112 | if N1
124 | error('ident:estimation:multiVariableTimeSeries',...
125 | 'Only scalar time series can be handled. Use "arx" command for multivariate signals.')
126 | end
127 | if nu>0
128 | error('ident:estimation:notTimeSeries',...
129 | 'This routine is for scalar time series only. Use "arx" command for the case with input.')
130 | end
131 | maxsdef=idmsize(max(Ncaps),n);
132 | if isempty(maxsize) || ischar(maxsize),
133 | maxsize=maxsdef;
134 | maxs = 1;
135 | else
136 | maxs = 0;
137 | end
138 |
139 | if strcmp(approach,'yw')
140 | win='ppw';
141 | end
142 | if strcmp(win,'prw') || strcmp(win,'ppw')
143 | for kexp = 1:Ne
144 | y{kexp}=[zeros(n,1);y{kexp}];
145 | end
146 | Ncaps = Ncaps+n;
147 | end
148 | if strcmp(win,'pow') || strcmp(win,'ppw')
149 | for kexp =1:Ne
150 | y{kexp} = [y{kexp};zeros(n,1)];
151 | end
152 | Ncaps = Ncaps+n;
153 |
154 | end
155 | th = idpoly;
156 | if maxs
157 | Max = 'auto';
158 | else
159 | Max = maxsize;
160 | end
161 | th = pvset(th,'MaxSize',Max);
162 | % First the lattice based algorithms
163 |
164 | if any(strcmp(approach,{'burg','gl'}))
165 | ef=y;eb=y;
166 | rho = zeros(1,n+1);
167 | r = zeros(1,n);
168 | A = r;
169 | [ss,l] = sumcell(y,1,Ncaps);
170 | rho(1) = ss/l;
171 | for p=1:n
172 | nef = sumcell(ef,p+1,Ncaps);
173 | neb=sumcell(eb,p,Ncaps-1);
174 | if strcmp(approach,'gl')
175 | den=sqrt(nef*neb);
176 | else
177 | den=(nef+neb)/2;
178 | end
179 | ss=0;
180 | for kexp=1:Ne
181 | ss=ss+(-eb{kexp}(p:Ncaps(kexp)-1)'*ef{kexp}(p+1:Ncaps(kexp)));
182 | end
183 |
184 | r(p)=ss/den;
185 | A(p)=r(p);
186 | A(1:p-1)=A(1:p-1)+r(p)*conj(A(p-1:-1:1));
187 | rho(p+1)=rho(p)*(1-r(p)*r(p));
188 | efold=ef;
189 | for kexp = 1:Ne
190 | Ncap = Ncaps(kexp);
191 | ef{kexp}(2:Ncap)=ef{kexp}(2:Ncap)+r(p)*eb{kexp}(1:Ncap-1);
192 | eb{kexp}(2:Ncap)=eb{kexp}(1:Ncap-1)+conj(r(p))*efold{kexp}(2:Ncap);
193 | end
194 | end
195 | th = pvset(th,'a',[1 A]);
196 | ref=[0 r;rho];
197 | else
198 | pt1 = 1; %override pt for the other approaches
199 |
200 | end
201 | % Now compute the regression matrix
202 | if pt1
203 | nmax=n;
204 | M=floor(maxsize/n);
205 | R1 = zeros(0,n+1);
206 | fb=strcmp(approach,'fb');
207 | if strcmp(approach,'fb')
208 | R2 = zeros(0,n+1);
209 | yb = cell(1,Ne);
210 | for kexp = 1:Ne
211 | yb{kexp}=conj(y{kexp}(Ncaps(kexp):-1:1));
212 | end
213 | end
214 | for kexp = 1:Ne
215 | Ncap = Ncaps(kexp);
216 | yy = y{kexp};
217 | for k=nmax:M:Ncap-1
218 | jj=(k+1:min(Ncap,k+M));
219 | phi=zeros(length(jj),n);
220 | if fb,
221 | phib=zeros(length(jj),n);
222 | end
223 | for k1=1:n,
224 | phi(:,k1)=-yy(jj-k1);
225 | end
226 | if fb
227 | for k2=1:n,
228 | phib(:,k2)=-yb{kexp}(jj-k2);
229 | end
230 | end
231 | if fb,
232 | R2 = triu(qr([R2;[[phi;phib],[yy(jj);yb{kexp}(jj)]]]));
233 | [nRr,nRc] =size(R2);
234 | R2 = R2(1:min(nRr,nRc),:);
235 | end
236 | R1 = triu(qr([R1;[phi,yy(jj)]]));
237 | [nRr,nRc] =size(R1);
238 | R1 = R1(1:min(nRr,nRc),:);
239 | %end
240 | end
241 | end
242 | P = pinv(R1(1:n,1:n));
243 |
244 | if ~any(strcmp(approach,{'burg','gl'}))
245 | if ~fb
246 | A = (P * R1(1:n,n+1)).';
247 | else
248 | A = (pinv(R2(1:n,1:n)) * R2(1:n,n+1)).';
249 | end
250 | th = pvset(th,'a',[1 A]);
251 | end
252 | P = P*P';
253 | else
254 | P = [];
255 | end
256 | if ~pt
257 | P = [];
258 | end
259 | e = [];
260 | for kexp = 1:length(yor);
261 | tt=filter([1 A],1,yor{kexp});
262 | tt(1:n)=zeros(n,1);
263 | e = [e;tt];
264 | end
265 |
266 | lam=e'*e/(length(e)-n);
267 | es = pvget(th,'EstimationInfo');
268 | es.FPE = lam*(1+n/sum(Ncaps))/(1-n/sum(Ncaps));
269 | es.Status = 'Estimated Model (AR)';
270 | es.Method = ['AR
(''',approach,'''/''',win,''')']; 271 | es.DataLength = sum(Ncaps); 272 | es.LossFcn = lam; 273 | es.DataTs = T; 274 | es.DataName = Name; 275 | es.DataInterSample = 'Not Applicable'; 276 | idm=pvget(th,'idmodel'); 277 | idm=pvset(idm,'Ts',T,'CovarianceMatrix',lam*P,'NoiseVariance',lam,... 278 | 'EstimationInfo',es,... 279 | 'OutputName',pvget(data,'OutputName'),'OutputUnit',... 280 | pvget(data,'OutputUnit')); 281 | th = pvset(th,'idmodel',idm); 282 | th = timemark(th); 283 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 284 | function [s,ln] = sumcell(y,p,N) 285 | ln = 0; 286 | s = 0; 287 | for kexp = 1:length(y) 288 | y1 = y{kexp}; 289 | s=s+y1(p:N(kexp))'*y1(p:N(kexp)); 290 | ln = ln + length(y1); 291 | end 292 | -------------------------------------------------------------------------------- /Dependent function/parcorr.m: -------------------------------------------------------------------------------- 1 | function varargout = parcorr(Series , nLags , P , nSTDs) 2 | %PARCORR Compute or plot sample partial auto-correlation function. 3 | % Compute or plot the sample partial auto-correlation function (partial ACF) 4 | % of a univariate, stochastic time series. The partial ACF is computed by 5 | % fitting successive autoregressive models of orders 1,2, ... by ordinary 6 | % least squares, retaining the last coefficient of each regression. When 7 | % called with no output arguments, PARCORR displays the sequence with 8 | % confidence bounds. 9 | % 10 | % [PartialACF, Lags, Bounds] = parcorr(Series) 11 | % [PartialACF, Lags, Bounds] = parcorr(Series , nLags , R , nSTDs) 12 | % 13 | % Optional Inputs: nLags , R , nSTDs 14 | % 15 | % Inputs: 16 | % Series - Vector of observations of a univariate time series for which the 17 | % sample partial ACF is returned or plotted. The last row of Series 18 | % contains the most recent observation of the stochastic sequence. 19 | % 20 | % Optional Inputs: 21 | % nLags - Positive, scalar integer indicating the number of lags of the 22 | % partial ACF to compute. If empty or missing, the default is to compute 23 | % the partial ACF sequence at lags 0,1,2, ... T, where T is equal to the 24 | % minimum[20 , length(Series)-1]. 25 | % 26 | % R - Non-negative integer scalar indicating the number of lags beyond which 27 | % the theoretical partial ACF is assumed zero. Under the hypothesis that 28 | % the underlying Series is really an AR(R) process, the estimated partial 29 | % ACF coefficients at lags > R are approximately zero-mean, independently 30 | % distributed Gaussian variates. In this case, the standard error of the 31 | % estimated partial ACF coefficients of a fitted Series with N observations 32 | % is approximately 1/sqrt(N) for lags > R. If R is empty or missing, the 33 | % default is R = 0. R must be less than nLags. 34 | % 35 | % nSTDs - Positive scalar indicating the number of standard deviations of the 36 | % sample partial ACF estimation error to display assuming that Series is 37 | % an AR(R) process. If the Rth regression coefficient (i.e., the last OLS 38 | % regression coefficient of Series regressed on a constant and R of its 39 | % lags) is fitted with N observations, specifying nSTDs will result in 40 | % confidence bounds at +/-(nSTDs/sqrt(N)). If empty or missing, default is 41 | % nSTDs = 2 (i.e., approximate 95% confidence interval). 42 | % 43 | % Outputs: 44 | % PartialACF - Sample partial ACF of Series. PartialACF is a vector of length 45 | % nLags + 1 corresponding to lags 0,1,2,...,nLags. 
The first element of 46 | % PartialACF is defined to be unity (i.e., PartialACF(1) = 1 = OLS 47 | % regression coefficient of Series regressed upon itself), and is included 48 | % as a reference. 49 | % 50 | % Lags - Vector of lags corresponding to PartialACF (0,1,2,...,nLags). 51 | % 52 | % Bounds - Two element vector indicating the approximate upper and lower 53 | % confidence bounds assuming that Series is an AR(R) process. Note that 54 | % Bounds is approximate for lags > R only. 55 | % 56 | % Example: 57 | % Create a stationary AR(2) process from a sequence of 1000 Gaussian deviates, 58 | % then visually assess whether the partial ACF is zero for lags > 2: 59 | % 60 | % randn('state',0) % Start from a known state. 61 | % x = randn(1000,1); % 1000 Gaussian deviates ~ N(0,1). 62 | % y = filter(1,[1 -0.6 0.08],x); % Create a stationary AR(2) process. 63 | % parcorr(y , [] , 2) % Inspect the P-ACF with 95% confidence. 64 | % 65 | % See also CROSSCORR, AUTOCORR, FILTER. 66 | 67 | % Copyright 1999-2003 The MathWorks, Inc. 68 | % $Revision: 1.6.2.2 $ $Date: 2007/09/11 11:46:47 $ 69 | 70 | % 71 | % References: 72 | % Box, G.E.P., Jenkins, G.M., Reinsel, G.C., "Time Series Analysis: 73 | % Forecasting and Control", 3rd edition, Prentice Hall, 1994. 74 | % Hamilton, J.D., "Time Series Analysis", Princeton University Press, 1994. 75 | % 76 | 77 | % 78 | % Ensure the sample data is a VECTOR. 79 | % 80 | 81 | [rows , columns] = size(Series); 82 | 83 | if (rows ~= 1) && (columns ~= 1) 84 | error('econ:parcorr:NonVectorInput' , ' Input ''Series'' must be a vector.'); 85 | end 86 | 87 | rowSeries = size(Series,1) == 1; 88 | 89 | Series = Series(:); % Ensure a column vector 90 | n = length(Series); % Raw sample size. 91 | defaultLags = 20; % BJR recommend about 20 lags for partial ACFs. 92 | 93 | % 94 | % Ensure the number of lags, nLags, is a positive 95 | % integer scalar and set default if necessary. 96 | % 97 | 98 | if (nargin >= 2) && ~isempty(nLags) 99 | if numel(nLags) > 1 100 | error('econ:parcorr:NonScalarLags' , ' Number of lags ''nLags'' must be a scalar.'); 101 | end 102 | if (round(nLags) ~= nLags) || (nLags <= 0) 103 | error('econ:parcorr:NonPositiveIntegerLags' , ' Number of lags ''nLags'' must be a positive integer.'); 104 | end 105 | if nLags > (n - 1) 106 | error('econ:parcorr:LagsTooLarge' , ' Number of lags ''nLags'' must not exceed ''Series'' length - 1.'); 107 | end 108 | else 109 | nLags = min(defaultLags , n - 1); 110 | end 111 | 112 | % 113 | % Ensure the hypothesized number of lags, P, is a non-negative integer 114 | % scalar, and set default if necessary. 115 | % 116 | 117 | if (nargin >= 3) && ~isempty(P) 118 | if numel(P) > 1 119 | error('econ:parcorr:NonScalarP' , ' Number of lags ''P'' must be a scalar.'); 120 | end 121 | if (round(P) ~= P) || (P < 0) 122 | error('econ:parcorr:NegativeIntegerP' , ' Number of lags ''P'' must be a non-negative integer.'); 123 | end 124 | if P >= nLags 125 | error('econ:parcorr:PTooLarge' , ' ''P'' must be less than ''nLags''.'); 126 | end 127 | else 128 | P = 0; % Set default. 129 | end 130 | 131 | % 132 | % Ensure the number of standard deviations, nSTDs, is a positive 133 | % scalar and set default if necessary. 
134 | %
135 |
136 | if (nargin >= 4) && ~isempty(nSTDs)
137 | if numel(nSTDs) > 1
138 | error('econ:parcorr:NonScalarSTDs' , ' Number of standard deviations ''nSTDs'' must be a scalar.');
139 | end
140 | if nSTDs < 0
141 | error('econ:parcorr:NegativeSTDs' , ' Number of standard deviations ''nSTDs'' must be non-negative.');
142 | end
143 | else
144 | nSTDs = 2; % Default is 2 standard errors (~95% confidence interval).
145 | end
146 |
147 | %
148 | % Create a lagged regression matrix & allocate storage for the partial ACF.
149 | %
150 |
151 | X = lagmatrix(Series , [1:nLags]);
152 | partialACF = [1 ; zeros(nLags , 1)];
153 |
154 | %
155 | % Compute partial ACF by fitting successive order AR models
156 | % by OLS, retaining the last coefficient of each regression.
157 | %
158 |
159 | for order = 1:nLags
160 | [Q , R] = qr([ones((length(Series)-order),1) X(order+1:end,1:order)] , 0);
161 | b = R\(Q'*Series(order+1:end));
162 | partialACF(order+1) = b(end);
163 | end
164 |
165 | %
166 | % Compute approximate confidence bounds using the Box-Jenkins-Reinsel
167 | % approach, equations 3.2.36 and 6.2.3, on pages 68 and 188, respectively.
168 | %
169 | % Note a subtle point here: The Pth autoregressive model 'fit' via OLS
170 | % makes use of only the most recent (n - P) observations. Since the
171 | % approximate confidence bounds for the hypothesized P are of interest
172 | % only for lags > P, and the (P+1)th AR model uses n - (P + 1) = n - P - 1
173 | % observations, the 'n' in BJR equation 3.2.36 (i.e., the number of
174 | % observations used in 'fitting') is taken to be (n - P - 1) rather than
175 | % the original length of Series. Moreover, the effective number of
176 | % observations used in 'fitting' each successive AR model will decrease
177 | % by one observation for each lag. For even moderate sample sizes, this
178 | % approximation should make little difference.
179 | %
180 |
181 | bounds = [nSTDs ; -nSTDs] ./ sqrt(n - P - 1);
182 | Lags = [0:nLags]';
183 |
184 | if nargout == 0
185 |
186 | %
187 | % Plot the sample partial ACF. Note the partial ACF at lag 0 is defined to be 1.
188 | %
189 | lineHandles = stem(Lags , partialACF , 'filled' , 'r-o');
190 | set (lineHandles(1) , 'MarkerSize' , 4)
191 | grid ('on')
192 | xlabel('Lag')
193 | ylabel('Sample Partial Autocorrelations')
194 | title ('Sample Partial Autocorrelation Function')
195 | hold ('on')
196 | %
197 | % Plot the confidence bounds under the hypothesis that the underlying
198 | % Series is really an AR(P) process. The following approximation gives
199 | % an indication of whether the partial ACF is effectively zero beyond
200 | % lag P. For this reason, the confidence bounds (horizontal lines) appear
201 | % over the partial ACF ONLY for lags > P (i.e., P+1, P+2, ... nLags).
202 | % In other words, the confidence bounds enclose ONLY those lags for
203 | % which the null hypothesis is assumed to hold.
204 | %
205 |
206 | plot([P+0.5 P+0.5 ; nLags nLags] , [bounds([1 1]) bounds([2 2])] , '-b');
207 | plot([0 nLags] , [0 0] , '-k');
208 | hold('off')
209 |
210 | if max(partialACF) <= 1
211 | a = axis;
212 | axis([a(1:3) 1]);
213 | end
214 |
215 | else
216 |
217 | %
218 | % Re-format outputs for compatibility with the SERIES input. When SERIES is
219 | % input as a row vector, then pass the outputs as row vectors; when SERIES
220 | % is a column vector, then pass the outputs as column vectors.
221 | %
222 | if rowSeries
223 | partialACF = partialACF.';
224 | Lags = Lags.';
225 | bounds = bounds.';
226 | end
227 |
228 | varargout = {partialACF , Lags , bounds};
229 |
230 | end
231 |
-------------------------------------------------------------------------------- /Dependent function/iddata.m: --------------------------------------------------------------------------------
1 | function dat = iddata(varargin)
2 | % IDDATA Create DATA OBJECT to be used for Identification routines
3 | %
4 | % Very Basic Use:
5 | % DAT = IDDATA(Y,U,Ts) to create a data object with output Y and
6 | % input U and sampling interval Ts. Default Ts=1. If U=[],
7 | % or not assigned, DAT defines a signal or a time series.
8 | % With Y =[], DAT describes just the input.
9 | % Y is a N-by-Ny matrix with N being the number of data and Ny
10 | % the number of output channels, and similarly for U.
11 | % Y and U must have the same number of rows.
12 | %
13 | % For Frequency Domain Data use
14 | % DAT = IDDATA(Y,U,Ts,'FREQ',Freqs),
15 | % where Freqs is a column vector containing the frequencies. It is
16 | % of the same length as Y and U. Note that Ts may be equal to 0
17 | % for frequency domain data, to indicate continuous time data.
18 | %
19 | % Retrieve data by DAT.y, DAT.u and DAT.Ts
20 | % Select portions by DAT1 = DAT(1:300) etc.
21 | %
22 | % Basic Use:
23 | % DAT = IDDATA(Y,U,Ts,'OutputName',String,....) or
24 | % SET(DAT,'OutputName',String,.... to add properties to
25 | % the data object, for logistics and plotting.
26 | % Type SET(IDDATA) for a complete list of properties.
27 | % Some basic ones are
28 | % OutputData, InputData: refer to Y and U above.
29 | % OutputName: String. For multioutput, use cell arrays, e.g.
30 | % {'Speed','Voltage'}
31 | % OutputUnit: String. For multioutput use cell arrays, e.g.
32 | % {'mph','volt'}
33 | % InputName, InputUnit, analogously.
34 | % Tstart: Starting time for the samples.
35 | % TimeUnit: String.
36 | % Properties can be set and retrieved either by SET and GET
37 | % or by subfields:
38 | % GET(DAT,'OutputName') or DAT.OutputName
39 | % SET(DAT,'OutputName','Current') or
40 | % DAT.OutputName = {'Current'};
41 | % Referencing is case insensitive and 'y' is synonymous to
42 | % 'Output' and 'u' is synonymous to 'Input'. Autofill is used as
43 | % soon as the property is unique, so DAT.yna is the same as
44 | % DAT.OutputName etc.
45 | %
46 | % For frequency domain data, the property Frequency contains the
47 | % frequency vector and the property Unit defines the frequency
48 | % unit.
49 | %
50 | % To assign names and units to specific channels use
51 | % DAT.un(3)={'Speed'} or DAT.uu([3 7])={'Volt','m^3/s'}
52 | % See IDPROPS IDDATA for a complete list of properties.
53 | %
54 | % Manipulating Channels:
55 | % An easy way to set and retrieve channel properties is to use
56 | % subscripting. The subscripts are defined as
57 | % DAT(SAMPLES,OUTPUTS,INPUTS), so DAT(:,3,:) is the data object
58 | % obtained from DAT by keeping all input channels,
59 | % but only output channel 3. (Trailing ':'s can be omitted so
60 | % DAT(:,3,:)=DAT(:,3).)
61 | % The channels can also be retrieved by their names, so that
62 | % DAT(:,{'speed','flow'},[]) is the data object where the
63 | % indicated output channels have been selected and no input
64 | % channels are selected.
Moreover
65 | % DAT1(101:200,[3 4],[1 3]) = DAT2(1001:1100,[1 2],[6 7])
66 | % will change samples 101 to 200 of output channels 3 and 4 and
67 | % input channels 1 and 3 in the iddata object DAT1 to the
68 | % indicated values from iddata object DAT2. The names and units
69 | % of these channels will then also be changed accordingly.
70 | %
71 | % To add new channels, use horizontal concatenation of IDDATA
72 | % objects:
73 | % DAT =[DAT1, DAT2];
74 | % or add the data record directly: DAT.u(:,5) = U will add a
75 | % fifth input to DAT.
76 | % See also IDDATA/SUBSREF, IDDATA/SUBSASGN, and IDDATA/HORZCAT
77 | %
78 | % Non-Equal Sampling:
79 | % The Property 'SamplingInstants' gives the sampling instants
80 | % of the data points. It can always be retrieved by
81 | % get(DAT,'SamplingInstants') (or DAT.s) and is then computed
82 | % from DAT.Ts and DAT.Tstart. 'SamplingInstants' can also be
83 | % set to an arbitrary vector of the same length as the data,
84 | % so that non-equal sampling can be handled. Ts is then
85 | % automatically set to [].
86 | %
87 | % Handling Multiple Experiments:
88 | % The IDDATA object can also store data from separate
89 | % experiments. The property 'ExperimentName' is used to
90 | % separate the experiments. The number of data as well as
91 | % the sampling properties can vary from experiment to
92 | % experiment, but the number of input and output channels
93 | % must be the same. (Use NaN to fill unmeasured channels in
94 | % certain experiments.)
95 | % The data records will be cell arrays, where the cells
96 | % contain data from each experiment.
97 | % Multiple experiments can be defined directly by letting the
98 | % 'y' and 'u' properties as well as 'Ts' and 'Tstart' be cell
99 | % arrays.
100 | % It is easier to merge two experiments by
101 | % DAT = MERGE(DAT1,DAT2). (See HELP IDDATA/MERGE)
102 | % Particular experiments can be retrieved by the command GETEXP:
103 | % GETEXP(DAT,3) is experiment number 3 and GETEXP(DAT,{'Day1','Day4'}) retrieves
104 | % the two experiments with the indicated names.
105 | % Particular experiments can also be addressed by a fourth index to DAT as in
106 | %
107 | % DAT1 = DAT(Samples,Outputs,Inputs,Experiments).
108 | % See also IDDATA/SUBSREF and IDDATA/SUBSASGN.
109 |
110 | % Copyright 1986-2007 The MathWorks, Inc.
111 | % $Revision: 1.20.4.7 $ $Date: 2007/12/14 14:43:46 $
112 |
113 | ni = nargin;
114 | if ni && isempty(varargin{1}) % To allow for u = iddata([],u), u iddata
115 | if ni==2 && isa(varargin{2},'iddata')
116 | if size(varargin{2},'ny')==0
117 | dat = varargin{2};
118 | return
119 | end
120 | end
121 | end
122 |
123 | if ni && isa(varargin{1},'iddata')
124 | % Quick exit
125 | if ni==2 % forgiving syntax dat = iddata(y,u) with y and u
126 | %iddata objects will be allowed.
127 | if isa(varargin{2},'iddata')
128 | if size(varargin{1},'nu')==0 && size(varargin{2},'ny')==0
129 | dat = horzcat(varargin{1},varargin{2});
130 | return
131 | end
132 | end
133 | else
134 | dat = varargin{1};
135 | if ni>1,
136 | error('ident:general:useSetForProp',...
137 | 'Use "set" command to modify the properties of %s objects.','IDDATA');
138 |
139 | end
140 | return
141 | end
142 | end
143 |
144 | dat = ...
145 | struct('Domain','Time','Name',[],...
146 | 'OutputData',{{[]}},'OutputName',{{}},'OutputUnit',{{}},...
147 | 'InputData',{{[]}},'InputName',{{}},'InputUnit',{{}},...
148 | 'Period',[],'InterSample',{''},...
149 | 'Ts',{{1}},'Tstart',{{[]}},'SamplingInstants',{{[]}},'TimeUnit',{''},
150 | 'ExperimentName',{{}},'Notes',[]','UserData',[],...
151 | 'Version',idutils.ver,'Utility',[]);
152 |
153 | % Note: version was string '1.0' before R2008a and '0.1' in first version (R13?)
154 |
155 | % Dissect input list
156 | DoubleInputs = 0;
157 | PVstart = 0;
158 | while DoubleInputs < ni && PVstart==0,
159 | nextarg = varargin{DoubleInputs+1};
160 | if ischar(nextarg) || (~isempty(nextarg) && iscellstr(nextarg))
161 | PVstart = DoubleInputs+1;
162 | else
163 | DoubleInputs = DoubleInputs+1;
164 | end
165 | end
166 | % Process numerical data
167 | if ni==0
168 | y = [];
169 | end
170 |
171 | %switch DoubleInputs,
172 | if DoubleInputs > 0
173 | % Output only
174 | [Value,error_str] = datachk(varargin{1},'OutputData');
175 | if ~isempty(error_str.message), error(error_str), end
176 | dat.OutputData=Value; y = Value;
177 | varargin = varargin(2:end);
178 | if DoubleInputs > 1
179 | [Value,error_str] = datachk(varargin{1},'InputData');
180 | if ~isempty(error_str.message), error(error_str), end
181 | dat.InputData = Value;
182 | varargin = varargin(2:end);
183 | if DoubleInputs > 2
184 | [Value,error_str] = datachk(varargin{1},'Ts');
185 | if ~isempty(error_str.message), error(error_str), end
186 | for kk = 1:length(Value)
187 | if length(Value{kk})>1 || Value{kk}<0 % Check also the others
188 | error('ident:iddata:nonPositiveTs',...
189 | 'For time domain data, the sampling interval ("Ts") must be [] or a positive scalar for each experiment.')
190 | end
191 | end
192 | dat.Ts = Value;
193 | varargin = varargin(2:end);
194 | end
195 | end
196 | else
197 | y = [];
198 | end
199 |
200 | dat = class(dat,'iddata');
201 | dat = timemark(dat,'c');
202 | % Finally, set any PV pairs
203 | if isempty(varargin)
204 | try
205 | dat = pvset(dat,'OutputData',y); % This is to force the consistency checks
206 | catch E
207 | throw(E)
208 | end
209 | end
210 |
211 | if ni && ~isempty(varargin)
212 | try
213 | set(dat,'OutputData',y,varargin{:})
214 | catch E
215 | throw(E)
216 | end
217 | end
218 | if strcmp(dat.Domain,'Frequency') && isempty(dat.Tstart{1})
219 | dat.Tstart =repmat({'rad/s'},1,size(dat.SamplingInstants,2));
220 | end
221 | Ts = dat.Ts;
222 | if ~iscell(Ts),Ts={Ts};end
223 | for kk=1:length(Ts)
224 | if strcmp(dat.Domain,'Time')
225 | if Ts{kk}<=0
226 | error('ident:iddata:nonPositiveTs',...
227 | 'For time domain data, the sampling interval ("Ts") must be [] or a positive scalar for each experiment.')
228 | end
229 | else
230 | if isempty(Ts{kk})
231 | error('ident:iddata:freDataTsRequired',...
232 | ['For frequency domain data, a sampling interval ("Ts") must always be supplied.',...
233 | '\nUse Ts = 0 for continuous time data.'])
234 | end
235 | end
236 | end
237 |
-------------------------------------------------------------------------------- /bfast-master/man/bfastmonitor.Rd: --------------------------------------------------------------------------------
1 | \name{bfastmonitor}
2 | \alias{bfastmonitor}
3 | \title{Near Real-Time Disturbance Detection Based on BFAST-Type Models}
4 |
5 | \description{
6 | Monitoring disturbances in time series models (with trend/season/regressor
7 | terms) at the end of time series (i.e., in near real-time). Based on a model
8 | for stable historical behavior, abnormal changes within newly acquired data
9 | can be detected. Different models are available for modeling the stable
10 | historical behavior. A season-trend model (with harmonic seasonal pattern) is
11 | used as a default in the regression modelling.
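A minimal usage sketch, using the som data shipped with the package as in the examples below (start marks the beginning of the monitoring period):

library(zoo)
ndvi <- as.ts(zoo(som$NDVI.a, som$Time))
mon <- bfastmonitor(ndvi, start = c(2010, 13))   # ROC-selected stable history
mon$breakpoint   # timing of a detected disturbance, if any
mon$magnitude    # median deviation from the stable model during monitoring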
12 | }
13 |
14 | \usage{
15 | bfastmonitor(data, start,
16 | formula = response ~ trend + harmon, order = 3, lag = NULL, slag = NULL,
17 | history = c("ROC", "BP", "all"),
18 | type = "OLS-MOSUM", h = 0.25, end = 10, level = 0.05,
19 | hpc = "none", verbose = FALSE, plot = FALSE)
20 | }
21 |
22 | \arguments{
23 | \item{data}{A time series of class \code{\link[stats]{ts}}, or another object that
24 | can be coerced to such. For seasonal components, a frequency greater than 1 is
25 | required.}
26 | \item{start}{numeric. The starting date of the monitoring period. Can either be
27 | given as a float (e.g., \code{2000.5}) or a vector giving period/cycle
28 | (e.g., \code{c(2000, 7)}).}
29 | \item{formula}{formula for the regression model. The default is
30 | \code{response ~ trend + harmon}, i.e., a linear trend and a harmonic season
31 | component. Other specifications are possible using all terms set up by
32 | \code{\link[bfast]{bfastpp}}, i.e., \code{season} (seasonal pattern with dummy
33 | variables), \code{lag} (autoregressive terms), \code{slag} (seasonal autoregressive
34 | terms), or \code{xreg} (further covariates). See \code{\link[bfast]{bfastpp}}
35 | for details.}
36 | \item{order}{numeric. Order of the harmonic term, defaulting to \code{3}.}
37 | \item{lag}{numeric. Order of the autoregressive term, by default omitted.}
38 | \item{slag}{numeric. Order of the seasonal autoregressive term, by default omitted.}
39 | \item{history}{specification of the start of the stable history period. Can either
40 | be a character, numeric, or a function. If character, then selection is possible
41 | between reverse-ordered CUSUM (\code{"ROC"}, default), Bai and Perron breakpoint
42 | estimation (\code{"BP"}), or all available observations (\code{"all"}). If numeric,
43 | the start date can be specified in the same form as \code{start}. If a function
44 | is supplied it is called as \code{history(formula, data)} to compute a numeric
45 | start date.}
46 | \item{type}{character specifying the type of monitoring process. By default, a MOSUM
47 | process based on OLS residuals is employed. See \code{\link[strucchange]{mefp}} for
48 | alternatives.}
49 | \item{h}{numeric scalar from interval (0,1) specifying the bandwidth relative to the
50 | sample size in MOSUM/ME monitoring processes.}
51 | \item{end}{numeric. Maximum time (relative to the history period) that will be monitored
52 | (in MOSUM/ME processes). Default is 10 times the history period.}
53 | \item{level}{numeric. Significance level of the monitoring (and ROC, if selected) procedure,
54 | i.e., probability of type I error.}
55 | \item{hpc}{character specifying the high performance computing support. Default is
56 | \code{"none"}, can be set to \code{"foreach"}. See \code{\link[strucchange]{breakpoints}}
57 | for more details.}
58 | \item{verbose}{logical. Should information about the monitoring be printed during
59 | computation?}
60 | \item{plot}{logical. Should the result be plotted?}
61 | }
62 |
63 | \details{
64 | \code{bfastmonitor} provides monitoring of disturbances (or structural changes) in near
65 | real-time based on a wide class of time series regression models with optional
66 | season/trend/autoregressive/covariate terms. See Verbesselt et al. (2011) for details.
67 |
68 | Based on a given time series (typically, but not necessarily, with frequency greater than 1),
69 | the data is first preprocessed for regression modeling.
Trend/season/autoregressive/covariate 70 | terms are (optionally) computed using \code{\link[bfast]{bfastpp}}. Second, the data 71 | is split into a history and monitoring period (starting with \code{start}). Third, a subset 72 | of the history period is determined which is considered to be stable (see also below). 73 | Fourth, a regression model is fitted to the preprocessed data in the stable history period. 74 | Fifth, a monitoring procedure is used to determine whether the observations in the monitoring 75 | period conform with this stable regression model or whether a change is detected. 76 | 77 | The regression model can be specified by the user. The default is to use a linear trend and 78 | a harmonic season: \code{response ~ trend + harmon}. However, all other terms set up by 79 | \code{bfastpp} can also be omitted/added, e.g., \code{response ~ 1} (just a constant), 80 | \code{response ~ season} (seasonal dummies for each period), etc. Further terms precomputed 81 | by \code{bfastpp} can be \code{lag} (autoregressive terms of specified order), \code{slag} 82 | (seasonal autoregressive terms of specified order), \code{xreg} (covariates, if \code{data} 83 | has more than one column). 84 | 85 | For determining the size of the stable history period, various approaches are available. 86 | First, the user can set a start date based on subject-matter knowledge. Second, data-driven 87 | methods can be employed. By default, this is a reverse-ordered CUSUM test (ROC). Alternatively, 88 | breakpoints can be estimated (Bai and Perron method) and only the data after the last 89 | breakpoint are employed for the stable history. Finally, the user can also supply a function 90 | for his/her own data-driven method. 91 | } 92 | 93 | \value{ 94 | \code{bfastmonitor} returns an object of class \code{"bfastmonitor"}, i.e., a list with components as follows. 95 | \item{data}{original \code{"ts"} time series,} 96 | \item{tspp}{preprocessed \code{"data.frame"} for regression modeling,} 97 | \item{model}{fitted \code{"lm"} model for the stable history period,} 98 | \item{mefp}{fitted \code{"mefp"} process for the monitoring period,} 99 | \item{history}{start and end time of history period,} 100 | \item{monitor}{start and end time of monitoring period,} 101 | \item{breakpoint}{breakpoint detected (if any).} 102 | \item{magnitude}{median of the difference between the data and the model prediction in the monitoring period.} 103 | } 104 | 105 | \references{ 106 | Verbesselt J, Zeileis A, Herold M (2012). 107 | Near real-time disturbance detection using satellite image time series. 108 | \emph{Remote Sensing Of Environment}, \bold{123}, 98--108. 109 | \url{http://dx.doi.org/10.1016/j.rse.2012.02.022} 110 | } 111 | 112 | \author{Achim Zeileis, Jan Verbesselt} 113 | 114 | \seealso{\code{\link[strucchange]{monitor}}, \code{\link[strucchange]{mefp}}, \code{\link[strucchange]{breakpoints}}} 115 | 116 | \examples{ 117 | 118 | ## See Fig. 6 a and b in Verbesselt et al. 
(2011) 119 | ## for more information about the data time series and acknowledgements 120 | 121 | library(zoo) 122 | NDVIa <- as.ts(zoo(som$NDVI.a, som$Time)) 123 | plot(NDVIa) 124 | ## apply the bfast monitor function on the data 125 | ## start of the monitoring period is c(2010, 13) 126 | ## and the ROC method is used as a method to automatically identify a stable history 127 | mona <- bfastmonitor(NDVIa, start = c(2010, 13)) 128 | mona 129 | plot(mona) 130 | ## fitted season-trend model in history period 131 | summary(mona$model) 132 | ## OLS-based MOSUM monitoring process 133 | plot(mona$mefp, functional = NULL) 134 | ## the pattern in the running mean of residuals 135 | ## this illustrates the empirical fluctuation process 136 | ## and the significance of the detected break. 137 | 138 | NDVIb <- as.ts(zoo(som$NDVI.b, som$Time)) 139 | plot(NDVIb) 140 | monb <- bfastmonitor(NDVIb, start = c(2010, 13)) 141 | monb 142 | plot(monb) 143 | summary(monb$model) 144 | plot(monb$mefp, functional = NULL) 145 | 146 | ## set the stable history period manually and use a 4th order harmonic model 147 | bfastmonitor(NDVIb, start = c(2010, 13), 148 | history = c(2008, 7), order = 4, plot = TRUE) 149 | 150 | ## just use a 6th order harmonic model without trend 151 | mon <- bfastmonitor(NDVIb, formula = response ~ harmon, 152 | start = c(2010, 13), order = 6, plot = TRUE) 153 | summary(mon$model) 154 | 155 | ## For more info 156 | ?bfastmonitor 157 | 158 | 159 | ## TUTORIAL for processing raster bricks (satellite image time series of 16-day NDVI images) 160 | f <- system.file("extdata/modisraster.grd", package="bfast") 161 | library("raster") 162 | modisbrick <- brick(f) 163 | data <- as.vector(modisbrick[1]) 164 | ndvi <- bfastts(data, dates, type = c("16-day")) 165 | plot(ndvi/10000) 166 | 167 | ## derive median NDVI of a NDVI raster brick 168 | medianNDVI <- calc(modisbrick, fun=function(x) median(x, na.rm = TRUE)) 169 | plot(medianNDVI) 170 | 171 | ## helper function to be used with the calc() function 172 | xbfastmonitor <- function(x,dates) { 173 | ndvi <- bfastts(x, dates, type = c("16-day")) 174 | ndvi <- window(ndvi,end=c(2011,14))/10000 175 | ## delete end of the time to obtain a dataset similar to RSE paper (Verbesselt et al.,2012) 176 | bfm <- bfastmonitor(data = ndvi, start=c(2010,12), history = c("ROC")) 177 | return(cbind(bfm$breakpoint, bfm$magnitude)) 178 | } 179 | 180 | ## apply on one pixel for testing 181 | ndvi <- bfastts(as.numeric(modisbrick[1])/10000, dates, type = c("16-day")) 182 | plot(ndvi) 183 | 184 | bfm <- bfastmonitor(data = ndvi, start=c(2010,12), history = c("ROC")) 185 | bfm$magnitude 186 | plot(bfm) 187 | xbfastmonitor(modisbrick[1], dates) ## helper function applied on one pixel 188 | 189 | \dontrun{ 190 | ## apply the bfastmonitor function onto a raster brick 191 | library(raster) 192 | timeofbreak <- calc(modisbrick, fun=function(x){ 193 | res <- t(apply(x, 1, xbfastmonitor, dates)) 194 | return(res) 195 | }) 196 | 197 | plot(timeofbreak) ## time of break and magnitude of change 198 | plot(timeofbreak,2) ## magnitude of change 199 | 200 | ## create a KMZ file and look at the output 201 | KML(timeofbreak, "timeofbreak.kmz") 202 | } 203 | 204 | } 205 | 206 | \keyword{ts} 207 | 208 | -------------------------------------------------------------------------------- /bfast-master/R/bfast01.R: -------------------------------------------------------------------------------- 1 | bfast01 <- function(data, formula = NULL, 2 | test = "OLS-MOSUM", level = 0.05, aggregate = all, 3 
| trim = NULL, bandwidth = 0.15, functional = "max", 4 | order = 3, lag = NULL, slag = NULL, na.action = na.omit, stl = "none") 5 | { 6 | ## data preprocessing 7 | stl <- match.arg(stl, c("none", "trend", "seasonal", "both")) 8 | if(!inherits(data, "data.frame")) data <- bfastpp(data, 9 | order = order, lag = lag, slag = slag, na.action = na.action, stl = stl) 10 | 11 | if(is.null(formula)) { 12 | formula <- c( 13 | trend = !(stl %in% c("trend", "both")), 14 | harmon = order > 0 & !(stl %in% c("seasonal", "both")), 15 | lag = !is.null(lag), 16 | slag = !is.null(slag) 17 | ) 18 | formula <- as.formula(paste("response ~", 19 | paste(names(formula)[formula], collapse = " + "))) 20 | } 21 | 22 | ## fit 1-segment model 23 | model1 <- lm(formula, data = data) 24 | 25 | ## determine optimal single breakpoint 26 | if(is.null(trim)) trim <- 5 * length(coef(model1)) 27 | fs <- Fstats(formula, data = data, from = trim) 28 | bp <- breakpoints(fs) 29 | 30 | ## fit 2-segment model 31 | data$segment <- breakfactor(bp) 32 | levels(data$segment) <- c("1", "2") 33 | formula2 <- update(update(formula, . ~ segment/(.)), . ~ . - 1) 34 | model2 <- lm(formula2, data = data) 35 | 36 | ## compute BIC values 37 | bic <- c(BIC(model1), BIC(model2) + log(nrow(data))) 38 | 39 | ## perform tests 40 | improvement01 <- function(test) { 41 | trim01 <- if(trim > 1) trim/nrow(data) else trim 42 | if(test == "BIC") return(bic[2] < bic[1]) 43 | if(test %in% c("supF", "aveF", "expF")) return(sctest(fs, type = test)$p.value < level) 44 | if(test == "supLM") return(sctest(gefp(formula, data = data), functional = supLM(trim01))$p.value < level) 45 | sctest(formula, data = data, type = test, h = bandwidth, functional = functional)$p.value < level 46 | } 47 | test <- structure(sapply(test, improvement01), names = test) 48 | 49 | rval <- list( 50 | call = match.call(), 51 | data = data, 52 | formula = formula, 53 | breaks = as.numeric(aggregate(test)), 54 | breakpoints = bp$breakpoints, 55 | test = test, 56 | model = list(model1, model2) 57 | ) 58 | class(rval) <- "bfast01" 59 | rval$confint <- .confint01(rval, level = 1 - level) 60 | return(rval) 61 | } 62 | 63 | breakpoints.bfast01 <- function(obj, breaks = NULL, ...) { 64 | if(is.null(breaks)) breaks <- obj$breaks 65 | n <- nrow(obj$data) 66 | rval <- list( 67 | breakpoints = if(breaks > 0) obj$breakpoints else NA, 68 | RSS = if(breaks > 0) deviance(obj$model[[1]]) else deviance(obj$model[[2]]), 69 | nobs = n, 70 | nreg = length(coef(obj$model[[1]])), 71 | call = match.call(), 72 | datatsp = c(1/n, 1, n) 73 | ) 74 | class(rval) <- "breakpoints" 75 | return(rval) 76 | } 77 | 78 | breakdates.bfast01 <- function(obj, format.times = NULL, breaks = NULL, ...) { 79 | if(is.null(breaks)) breaks <- obj$breaks 80 | if(breaks > 0) obj$data$time[obj$breakpoints] else NA 81 | } 82 | 83 | logLik.bfast01 <- function(object, breaks = NULL, ...) { 84 | breaks <- .breaks01(object, breaks) 85 | rval <- logLik(object$model[[breaks + 1]]) 86 | attr(rval, "df") <- attr(rval, "df") + breaks 87 | rval 88 | } 89 | 90 | deviance.bfast01 <- function(object, breaks = NULL, ...) { 91 | breaks <- .breaks01(object, breaks) 92 | deviance(object$model[[breaks + 1]]) 93 | } 94 | 95 | model.frame.bfast01 <- function(formula, breaks = NULL, ...) model.frame(formula$model[[1]]) 96 | 97 | model.matrix.bfast01 <- function(object, breaks = NULL, ...) { 98 | breaks <- .breaks01(object, breaks) 99 | model.matrix(object$model[[breaks + 1]]) 100 | } 101 | 102 | nobs.bfast01 <- function(object, breaks = NULL, ...) 
nrow(object$data) 103 | 104 | AIC.bfast01 <- function(object, breaks = NULL, ...) AIC(logLik(object, breaks = breaks), ...) 105 | BIC.bfast01 <- function(object, breaks = NULL, ...) BIC(logLik(object, breaks = breaks), ...) 106 | 107 | coef.bfast01 <- function(object, breaks = NULL, ...) { 108 | breaks <- .breaks01(object, breaks) 109 | cf0 <- coef(object$model[[1]]) 110 | if(breaks < 1) return(cf0) 111 | cf <- matrix(coef(object$model[[2]]), nrow = 2) 112 | colnames(cf) <- names(cf0) 113 | bd <- object$data$time[c(1, object$breakpoints, object$breakpoints + 1, nrow(object$data))] 114 | bd <- format(round(bd, digits = 3)) 115 | rownames(cf) <- paste(bd[c(1, 3)], bd[c(2, 4)], sep = "--") 116 | cf 117 | } 118 | 119 | fitted.bfast01 <- function(object, breaks = NULL, ...) { 120 | breaks <- .breaks01(object, breaks) 121 | fitted(object$model[[breaks + 1]]) 122 | } 123 | 124 | residuals.bfast01 <- function(object, breaks = NULL, ...) { 125 | breaks <- .breaks01(object, breaks) 126 | residuals(object$model[[breaks + 1]]) 127 | } 128 | 129 | predict.bfast01 <- function(object, newdata, breaks = NULL, ...) { 130 | breaks <- .breaks01(object, breaks) 131 | predict(object$model[[breaks + 1]], newdata, ...) 132 | } 133 | 134 | as.zoo.bfast01 <- function(x, breaks = NULL, ...) { 135 | breaks <- .breaks01(x, breaks) 136 | 137 | ## fitted values 138 | d <- x$data 139 | fit <- predict(x, newdata = d, breaks = breaks) 140 | 141 | ## residuals 142 | res <- x$data$response - fit 143 | 144 | ## eliminate seasonal effects 145 | if(!is.null(d$harmon)) d$harmon <- d$harmon * 0 146 | if(!is.null(d$season)) d$season <- levels(d$season)[1] 147 | season <- fit - predict(x, newdata = d, breaks = breaks) 148 | 149 | ## eliminate (auto)regressive effects 150 | for(i in c("lag", "slag", "xreg")) if(!is.null(d[[i]])) d[[i]] <- d[[i]] * 0 151 | reg <- fit - season - predict(x, newdata = d, breaks = breaks) 152 | 153 | ## compute fit = trend + season + reg 154 | trend <- fit - season - reg 155 | 156 | ## include mean in trend instead of reg 157 | m <- if(breaks > 0) tapply(reg, x$data$segment, mean)[x$data$segment] else mean(reg) 158 | trend <- trend + m 159 | reg <- reg - m 160 | 161 | rval <- cbind(x$data$response, fit, trend, season, reg, res) 162 | colnames(rval) <- c("response", "fitted", "trend", "season", "reg", "residuals") 163 | zoo(rval, x$data$time) 164 | } 165 | 166 | plot.bfast01 <- function(x, breaks = NULL, which = c("response", "fitted", "trend"), 167 | plot.type = "single", panel = NULL, screens = NULL, 168 | col = NULL, lwd = NULL, 169 | main = "", xlab = "Time", ylab = NULL, ci = NULL, regular = TRUE, ...) 
170 | { 171 | ## set up zoo series and select series to be plotted 172 | breaks <- .breaks01(x, breaks) 173 | z <- as.zoo(x, breaks = breaks) 174 | which <- sapply(which, function(x) match.arg(x, colnames(z))) 175 | z <- z[, which] 176 | 177 | ## try making intelligent guesses about default col/lwd 178 | plot.type <- match.arg(plot.type, c("single", "multiple")) 179 | if(is.null(col)) { 180 | col0 <- c("gray", "black", "blue", "red", "green", "black") 181 | if(plot.type == "single") { 182 | col <- col0 183 | names(col) <- c("response", "fitted", "trend", "season", "reg", "residuals") 184 | col <- col[which] 185 | } else { 186 | col <- if(is.null(screens)) 1 else unlist(lapply(unique(screens), 187 | function(i) if((n <- sum(screens == i)) == 1) "black" else rep(col0, length.out = n))) 188 | } 189 | } 190 | if(is.null(lwd)) { 191 | if(plot.type == "single") { 192 | lwd <- c(2, 1, 2, 1, 1, 1) 193 | names(lwd) <- c("response", "fitted", "trend", "season", "reg", "residuals") 194 | lwd <- lwd[which] 195 | } else { 196 | lwd <- 1 197 | } 198 | } 199 | 200 | ## default y-axis labels 201 | if(is.null(ylab)) { 202 | ylab <- which 203 | for(i in seq_along(ylab)) substr(ylab[i], 1, 1) <- toupper(substr(ylab[i], 1, 1)) 204 | if(plot.type == "single") { 205 | ylab <- paste(ylab, collapse = " / ") 206 | } else { 207 | if(!is.null(screens)) ylab <- sapply(unique(screens), function(i) paste(ylab[screens == i], collapse = " / ")) 208 | } 209 | } 210 | 211 | ## set up panel function with confidence intervals 212 | if(is.null(panel)) panel <- .make_confint_lines01(x, breaks = breaks, ci = ci) 213 | 214 | if(regular) z <- as.zoo(as.ts(z)) 215 | 216 | plot(z, plot.type = plot.type, panel = panel, screens = screens, 217 | col = col, lwd = lwd, main = main, xlab = xlab, ylab = ylab, ...) 218 | } 219 | 220 | .breaks01 <- function(object, breaks) { 221 | if(is.null(breaks)) breaks <- object$breaks 222 | breaks <- breaks[1] 223 | if(!breaks %in% 0:1) stop("breaks can only be 0 or 1") 224 | breaks 225 | } 226 | 227 | .make_confint_lines01 <- function(object, breaks = NULL, col = 1, lty = 2, lwd = 1, ci = list(), ...) 228 | { 229 | breaks <- .breaks01(object, breaks) 230 | if(breaks < 1) return(lines) 231 | 232 | function(x, y, ...) { 233 | lines(x, y, ...) 
234 | abline(v = breakdates(object, breaks = breaks), lty = lty, col = col, lwd = lwd) 235 | if(!identical(ci, FALSE)) { 236 | if(!is.list(ci)) ci <- list() 237 | if(is.null(ci$col)) ci$col <- 2 238 | if(is.null(ci$angle)) ci$angle <- 90 239 | if(is.null(ci$length)) ci$length <- 0.05 240 | if(is.null(ci$code)) ci$code <- 3 241 | if(is.null(ci$at)) { 242 | at <- par("usr")[3:4] 243 | at <- diff(at)/1.08 * 0.02 + at[1] 244 | } else { 245 | at <- ci$at 246 | } 247 | ci$at <- NULL 248 | do.call("arrows", c(list( 249 | x0 = object$data$time[object$confint[1]], 250 | y0 = at, 251 | x1 = object$data$time[object$confint[3]], 252 | y1 = at), 253 | ci)) 254 | } 255 | } 256 | } 257 | 258 | .confint01 <- function(object, level = 0.95, het.reg = TRUE, het.err = TRUE) 259 | { 260 | ## data and arguments 261 | X <- model.matrix(object$model[[1]]) 262 | y <- model.response(model.frame(object$model[[1]])) 263 | n <- nrow(object$data) 264 | a2 <- (1 - level)/2 265 | bp <- c(0, object$breakpoints, n) 266 | 267 | ## auxiliary functions 268 | myfun <- function(x, level = 0.975, xi = 1, phi1 = 1, phi2 = 1) 269 | (pargmaxV(x, xi = xi, phi1 = phi1, phi2 = phi2) - level) 270 | myprod <- function(delta, mat) as.vector(crossprod(delta, mat) %*% delta) 271 | 272 | ## overall fits 273 | res <- residuals(object$model[[2]]) 274 | beta <- coef(object, breaks = 1) 275 | sigma1 <- sigma2 <- sum(res^2)/n 276 | Q1 <- Q2 <- crossprod(X)/n 277 | Omega1 <- Omega2 <- sigma1 * Q1 278 | 279 | ## subsample fits 280 | X1 <- X[(bp[1]+1):bp[2],,drop = FALSE] 281 | X2 <- X[(bp[2]+1):bp[3],,drop = FALSE] 282 | y1 <- y[(bp[1]+1):bp[2]] 283 | y2 <- y[(bp[2]+1):bp[3]] 284 | beta1 <- beta[1,] 285 | beta2 <- beta[2,] 286 | if(het.reg) { 287 | Q1 <- crossprod(X1)/nrow(X1) 288 | Q2 <- crossprod(X2)/nrow(X2) 289 | } 290 | if(het.err) { 291 | sigma1 <- sum(res[(bp[1]+1):(bp[2])]^2)/nrow(X1) 292 | sigma2 <- sum(res[(bp[2]+1):(bp[3])]^2)/nrow(X2) 293 | Omega1 <- sigma1 * Q1 294 | Omega2 <- sigma2 * Q2 295 | } 296 | delta <- beta2 - beta1 297 | 298 | Oprod1 <- myprod(delta, Omega1) 299 | Oprod2 <- myprod(delta, Omega2) 300 | Qprod1 <- myprod(delta, Q1) 301 | Qprod2 <- myprod(delta, Q2) 302 | 303 | xi <- if(het.reg) Qprod2/Qprod1 else 1 304 | phi1 <- sqrt(Oprod1/Qprod1) 305 | phi2 <- sqrt(Oprod2/Qprod2) 306 | 307 | p0 <- pargmaxV(0, phi1 = phi1, phi2 = phi2, xi = xi) 308 | if(is.nan(p0) || p0 < a2 || p0 > (1-a2)) { 309 | warning(paste("Confidence interval cannot be computed: P(argmax V <= 0) =", round(p0, digits = 4))) 310 | upper <- NA 311 | lower <- NA 312 | } else { 313 | ub <- lb <- 0 314 | while(pargmaxV(ub, phi1 = phi1, phi2 = phi2, xi = xi) < (1 - a2)) ub <- ub + 1000 315 | while(pargmaxV(lb, phi1 = phi1, phi2 = phi2, xi = xi) > a2) lb <- lb - 1000 316 | 317 | upper <- uniroot(myfun, c(0, ub), level = (1-a2), xi = xi, phi1 = phi1, phi2 = phi2)$root 318 | lower <- uniroot(myfun, c(lb, 0), level = a2, xi = xi, phi1 = phi1, phi2 = phi2)$root 319 | 320 | upper <- upper * phi1^2 / Qprod1 321 | lower <- lower * phi1^2 / Qprod1 322 | } 323 | 324 | bp <- c(bp[2] - ceiling(upper), bp[2], bp[2] - floor(lower)) 325 | a2 <- round(a2 * 100, digits = 1) 326 | names(bp) <- c(paste(a2, "%"), "breakpoints", paste(100 - a2, "%")) 327 | bp 328 | } 329 | -------------------------------------------------------------------------------- /pyMannKendall-master/README.md: -------------------------------------------------------------------------------- 1 | # pyMannKendall 2 | [![Build 
Status](https://travis-ci.org/mmhs013/pyMannKendall.svg?branch=master)](https://travis-ci.org/mmhs013/pyMannKendall)
3 | [![PyPI](https://img.shields.io/pypi/v/pymannkendall.svg)](https://pypi.org/project/pymannkendall/)
4 | [![PyPI - License](https://img.shields.io/pypi/l/pymannkendall.svg)](https://pypi.org/project/pymannkendall/)
5 | [![PyPI - Status](https://img.shields.io/pypi/status/pymannkendall.svg)](https://pypi.org/project/pymannkendall/)
6 | [![Downloads](https://pepy.tech/badge/pymannkendall)](https://pepy.tech/project/pymannkendall)
7 | [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pymannkendall.svg)](https://pypi.org/project/pymannkendall/)
8 | [![status](http://joss.theoj.org/papers/14903dbd55343be89105112e585d262a/status.svg)](http://joss.theoj.org/papers/14903dbd55343be89105112e585d262a)
9 | [![DOI](https://zenodo.org/badge/174495388.svg)](https://zenodo.org/badge/latestdoi/174495388)
10 |
11 | ## What is the Mann-Kendall Test?
12 | The Mann-Kendall Trend Test (sometimes called the MK test) is used to analyze time series data for consistently increasing or decreasing trends (monotonic trends). It is a non-parametric test, which means it works for all distributions (i.e., the data doesn't have to meet the assumption of normality), but the data should have no serial correlation. If the data has serial correlation, it can affect the significance level (p-value) and lead to misinterpretation. To overcome this problem, researchers have proposed several modified Mann-Kendall tests (Hamed and Rao Modified MK Test, Yue and Wang Modified MK Test, Modified MK test using the Pre-Whitening method, etc.). The Seasonal Mann-Kendall test was also developed to remove the effect of seasonality.
13 |
14 | The Mann-Kendall test is a powerful trend test, and several other modified Mann-Kendall tests, such as the Multivariate MK Test, Regional MK Test, Correlated MK Test, and Partial MK Test, were developed for special conditions. `pyMannKendall` is a pure Python implementation of non-parametric Mann-Kendall trend analysis, which brings together almost all variants of the Mann-Kendall test. Currently, this package has 11 Mann-Kendall tests and 2 Sen's slope estimator functions. Brief descriptions of the functions are given below:
15 |
16 | 1. **Original Mann-Kendall test (*original_test*):** The original Mann-Kendall test is a nonparametric test, which does not consider serial correlation or seasonal effects.
17 |
18 | 2. **Hamed and Rao Modified MK Test (*hamed_rao_modification_test*):** This modified MK test was proposed by Hamed and Rao (1998) to address serial autocorrelation issues. They suggested a variance correction approach to improve trend analysis. Users can consider only the first n significant lags by passing a lag number to this function. By default, it considers all significant lags.
19 |
20 | 3. **Yue and Wang Modified MK Test (*yue_wang_modification_test*):** This is also a variance correction method that accounts for serial autocorrelation, proposed by Yue and Wang (2004). Users can likewise set their desired number of significant lags for the calculation.
21 |
22 | 4. **Modified MK test using Pre-Whitening method (*pre_whitening_modification_test*):** This test, suggested by Yue and Wang (2002), pre-whitens the time series before the trend test is applied.
23 |
24 | 5. 
**Modified MK test using Trend free Pre-Whitening method (*trend_free_pre_whitening_modification_test*):** This test, also proposed by Yue and Wang (2002), removes the trend component and then pre-whitens the time series before the trend test is applied.
25 |
26 | 6. **Multivariate MK Test (*multivariate_test*):** This is an MK test for multiple parameters, proposed by Hirsch (1982). He used this method for the seasonal MK test, considering every month as a parameter.
27 |
28 | 7. **Seasonal MK Test (*seasonal_test*):** For seasonal time series data, Hirsch, Slack and Smith (1982) proposed this test to calculate the seasonal trend.
29 |
30 | 8. **Regional MK Test (*regional_test*):** Building on the seasonal MK test proposed by Hirsch (1982), Helsel and Frans (2006) suggested the regional MK test to calculate the overall trend at a regional scale.
31 |
32 | 9. **Correlated Multivariate MK Test (*correlated_multivariate_test*):** This multivariate MK test was proposed by Hipel (1994) for the case where the parameters are correlated.
33 |
34 | 10. **Correlated Seasonal MK Test (*correlated_seasonal_test*):** This method, proposed by Hipel (1994), is used when the time series is significantly correlated with the preceding one or more months/seasons.
35 |
36 | 11. **Partial MK Test (*partial_test*):** In reality, many factors affect the main studied response parameter, which can bias the trend results. To overcome this problem, Libiseller (2002) proposed the partial MK test. It requires two parameters as input: one is the response parameter and the other is an independent parameter.
37 |
38 | 12. **Theil-Sen's Slope Estimator (*sens_slope*):** This method, proposed by Theil (1950) and Sen (1968), estimates the magnitude of the monotonic trend.
39 |
40 | 13. **Seasonal Sen's Slope Estimator (*seasonal_sens_slope*):** This method, proposed by Hipel (1994), estimates the magnitude of the monotonic trend when the data has seasonal effects.
41 |
42 | ## Function details:
43 |
44 | All Mann-Kendall test functions have almost the same input parameters:
45 |
46 | - **x**: a data vector (list, numpy array or pandas series)
47 | - **alpha**: significance level (0.05 is the default)
48 | - **lag**: number of first significant lags (only available in hamed_rao_modification_test and yue_wang_modification_test)
49 | - **period**: seasonal cycle; 12 for monthly data, 52 for weekly data (only available in seasonal tests)
50 |
51 | And all Mann-Kendall tests return a named tuple containing:
52 |
53 | - **trend**: tells the trend (increasing, decreasing or no trend)
54 | - **h**: True (if the trend is present) or False (if the trend is absent)
55 | - **p**: p-value of the significance test
56 | - **z**: normalized test statistic
57 | - **Tau**: Kendall Tau
58 | - **s**: Mann-Kendall's score
59 | - **var_s**: variance of S
60 | - **slope**: Sen's slope
61 |
62 | The Sen's slope function requires a data vector; the seasonal Sen's slope additionally has an optional input, period, whose default value is 12. Both Sen's slope functions return only the slope value.
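
As a quick illustration of these parameters, here is a minimal sketch of passing `lag` to `hamed_rao_modification_test` and `period` to `seasonal_test` (the monthly series `data` below is a hypothetical stand-in, not shipped with the package):

```python
import numpy as np
import pymannkendall as mk

# hypothetical monthly series (10 years), for illustration only
data = np.random.rand(120)

# variance correction considering only the first 3 significant lags
print(mk.hamed_rao_modification_test(data, lag=3))

# seasonal test with a 12-month seasonal cycle
print(mk.seasonal_test(data, period=12))
```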
63 |
64 | ## Dependencies
65 |
66 | For the installation of `pyMannKendall`, the following packages are required:
67 | - [numpy](https://www.numpy.org/)
68 | - [scipy](https://www.scipy.org/)
69 |
70 | ## Installation
71 |
72 | You can install `pyMannKendall` using pip. For Linux users:
73 |
74 | ```bash
75 | sudo pip install pymannkendall
76 | ```
77 |
78 | or, for Windows users:
79 |
80 | ```bash
81 | pip install pymannkendall
82 | ```
83 |
84 | Or you can clone the repo and install it:
85 |
86 | ```bash
87 | git clone https://github.com/mmhs013/pymannkendall
88 | cd pymannkendall
89 | python setup.py install
90 | ```
91 |
92 | ## Tests
93 |
94 | `pyMannKendall` is automatically tested with the `pytest` package on each commit [here](https://travis-ci.org/mmhs013/pyMannKendall/), but the tests can also be run manually:
95 |
96 | ```
97 | pytest -v
98 | ```
99 |
100 | ## Usage
101 |
102 | A quick example of `pyMannKendall` usage is given below. Several more examples are provided [here](https://github.com/mmhs013/pyMannKendall/blob/master/Examples/Example_pyMannKendall.ipynb).
103 |
104 | ```python
105 | import numpy as np
106 | import pymannkendall as mk
107 |
108 | # Data generation for analysis
109 | data = np.random.rand(360,1)
110 |
111 | result = mk.original_test(data)
112 | print(result)
113 | ```
114 | The output looks like this:
115 | ```python
116 | Mann_Kendall_Test(trend='no trend', h=False, p=0.9535148145990886, z=0.05829353811789905, Tau=0.002073661405137728, s=134.0, var_s=5205500.0, slope=8.408683160625719e-06)
117 | ```
118 | Since the output is a named tuple, you can access a specific result by name:
119 | ```python
120 | print(result.slope)
121 | ```
122 | or, you can directly unpack your results like this:
123 | ```python
124 | trend, h, p, z, Tau, s, var_s, slope = mk.original_test(data)
125 | ```
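
As noted above, serial correlation can distort the significance level of the original test. A minimal sketch of checking this on the series from the example (the random `data` above has no serial correlation, so the two p-values should be close; for autocorrelated series they can differ noticeably):

```python
# compare the original test with a variance-corrected variant
res_orig = mk.original_test(data)
res_hr = mk.hamed_rao_modification_test(data)
print(res_orig.p, res_hr.p)
```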
126 |
127 | ## Citation
128 |
129 | If you publish results for which you used `pyMannKendall`, please give credit by citing [Hussain et al. (2019)](https://doi.org/10.21105/joss.01556):
130 |
131 | > Hussain et al., (2019). pyMannKendall: a python package for non parametric Mann Kendall family of trend tests. Journal of Open Source Software, 4(39), 1556, https://doi.org/10.21105/joss.01556
132 |
133 |
134 | ```
135 | @article{Hussain2019pyMannKendall,
136 | journal = {Journal of Open Source Software},
137 | doi = {10.21105/joss.01556},
138 | issn = {2475-9066},
139 | number = {39},
140 | publisher = {The Open Journal},
141 | title = {pyMannKendall: a python package for non parametric Mann Kendall family of trend tests.},
142 | url = {http://dx.doi.org/10.21105/joss.01556},
143 | volume = {4},
144 | author = {Hussain, Md. and Mahmud, Ishtiak},
145 | pages = {1556},
146 | date = {2019-07-25},
147 | year = {2019},
148 | month = {7},
149 | day = {25},
150 | }
151 | ```
152 |
153 | ## Contributions
154 |
155 | `pyMannKendall` is a community project and welcomes contributions. Additional information can be found in the [contribution guidelines](https://github.com/mmhs013/pyMannKendall/blob/master/CONTRIBUTING.md).
156 |
157 |
158 | ## Code of Conduct
159 |
160 | `pyMannKendall` wishes to maintain a positive community. Additional details can be found in the [Code of Conduct](https://github.com/mmhs013/pyMannKendall/blob/master/CODE_OF_CONDUCT.md).
161 |
162 |
163 | ## References
164 |
165 | 1. Bari, S. H., Rahman, M. T. U., Hoque, M. A., & Hussain, M. M. (2016). Analysis of seasonal and annual rainfall trends in the northern region of Bangladesh. *Atmospheric Research*, 176, 148–158. doi:[10.1016/j.atmosres.2016.02.008](https://doi.org/10.1016/j.atmosres.2016.02.008)
166 |
167 | 2. Cox, D. R., & Stuart, A. (1955). Some quick sign tests for trend in location and dispersion. *Biometrika*, 42(1/2), 80–95. doi:[10.2307/2333424](https://doi.org/10.2307/2333424)
168 |
169 | 3. Hamed, K. H., & Rao, A. R. (1998). A modified Mann–Kendall trend test for autocorrelated data. *Journal of Hydrology*, 204(1-4), 182–196. doi:[10.1016/S0022-1694(97)00125-X](https://doi.org/10.1016/S0022-1694(97)00125-X)
170 |
171 | 4. Helsel, D. R., & Frans, L. M. (2006). Regional Kendall test for trend. *Environmental Science & Technology*, 40(13), 4066–4073. doi:[10.1021/es051650b](https://doi.org/10.1021/es051650b)
172 |
173 | 5. Hipel, K. W., & McLeod, A. I. (1994). Time series modelling of water resources and environmental systems (Vol. 45). Elsevier.
174 |
175 | 6. Hirsch, R. M., Slack, J. R., & Smith, R. A. (1982). Techniques of trend analysis for monthly water quality data. *Water Resources Research*, 18(1), 107–121. doi:[10.1029/WR018i001p00107](https://doi.org/10.1029/WR018i001p00107)
176 |
177 | 7. Kendall, M. (1975). Rank correlation methods. Charles Griffin, London.
178 |
179 | 8. Libiseller, C., & Grimvall, A. (2002). Performance of partial Mann–Kendall tests for trend detection in the presence of covariates. *Environmetrics: The official journal of the International Environmetrics Society*, 13(1), 71–84. doi:[10.1002/env.507](https://doi.org/10.1002/env.507)
180 |
181 | 9. Mann, H. B. (1945). Nonparametric tests against trend. *Econometrica: Journal of the Econometric Society*, 245–259. doi:[10.2307/1907187](https://doi.org/10.2307/1907187)
182 |
183 | 10. Sen, P. K. (1968). Estimates of the regression coefficient based on Kendall’s tau. *Journal of the American Statistical Association*, 63(324), 1379–1389. doi:[10.1080/01621459.1968.10480934](https://doi.org/10.1080/01621459.1968.10480934)
184 |
185 | 11. Theil, H. (1950). A rank-invariant method of linear and polynomial regression analysis (parts 1-3). In *Ned. Akad. Wetensch. Proc. Ser. A* (Vol. 53, pp. 1397–1412).
186 |
187 | 12. Yue, S., & Wang, C. (2004). The Mann–Kendall test modified by effective sample size to detect trend in serially correlated hydrological series. *Water Resources Management*, 18(3), 201–218. doi:[10.1023/B:WARM.0000043140.61082.60](https://doi.org/10.1023/B:WARM.0000043140.61082.60)
188 |
189 | 13. Yue, S., & Wang, C. Y. (2002). Applicability of prewhitening to eliminate the influence of serial correlation on the Mann–Kendall test. *Water Resources Research*, 38(6), 4–1. doi:[10.1029/2001WR000861](https://doi.org/10.1029/2001WR000861)
190 |
191 | 14. Yue, S., Pilon, P., Phinney, B., & Cavadias, G. (2002). The influence of autocorrelation on the ability to detect trend in hydrological series. *Hydrological Processes*, 16(9), 1807–1829. doi:[10.1002/hyp.1095](https://doi.org/10.1002/hyp.1095)
192 |
193 |
--------------------------------------------------------------------------------
/pyMannKendall-master/tests/test_pymannkendall.py:
--------------------------------------------------------------------------------
1 | # In this unit test file, we check all functions with randomly generated no-trend, trendy, and arbitrary data. The results are compared with the R packages modifiedmk, fume, rkt and trend.
2 | 3 | import os 4 | import pytest 5 | import numpy as np 6 | import pymannkendall as mk 7 | 8 | @pytest.fixture 9 | def NoTrendData(): 10 | # Generate 360 random value with the same number 11 | NoTrendData = np.ones(360)*np.random.randint(10) 12 | return NoTrendData 13 | 14 | @pytest.fixture 15 | def NoTrend2dData(): 16 | # Generate 2 dimensional 360 random value with same number 17 | NoTrend2dData = np.ones((360,2))*np.random.randint(10) 18 | return NoTrend2dData 19 | 20 | @pytest.fixture 21 | def TrendData(): 22 | # Generate random 360 trendy data with approx. slope 1 23 | TrendData = np.arange(360) + np.random.rand(360) 24 | return TrendData 25 | 26 | @pytest.fixture 27 | def arbitrary_1d_data(): 28 | # Generate arbitrary 360 data 29 | arbitrary_1d_data = np.array([ 32., 20., 25., 189., 240., 193., 379., 278., 301., 0., 0., 30 | 82., 0., 4., np.nan, np.nan, 121., 234., 360., 262., 120., 30., 31 | 11., 1., 7., 3., 31., 31., 355., 102., 248., 274., 308., 32 | np.nan, 5., 26., 11., 16., 6., 48., 388., 539., 431., 272., 33 | 404., 186., 0., 2., 0., 4., 1., 54., 272., 459., 235., 34 | 164., 365., 135., 2., np.nan, np.nan, 4., 0., 128., 210., 163., 35 | 446., 225., 462., 467., 19., 13., 0., 3., 17., 132., 178., 36 | 338., 525., 623., 145., 31., 19., 3., 0., 29., 25., 87., 37 | 259., 756., 486., 180., 292., 43., 92., 1., 0., 16., 2., 38 | 0., 130., 253., 594., 111., 273., 30., 0., 4., 0., 27., 39 | 24., 41., 292., 378., 499., 265., 320., 227., 4., 0., 4., 40 | 14., 8., 48., 416., 240., 404., 207., 733., 105., 0., 112., 41 | 0., 14., 0., 30., 140., 202., 289., 159., 424., 106., 3., 42 | 0., 65., 3., 14., 58., 268., 466., 432., 266., 240., 95., 43 | 1., 0., 10., 26., 4., 114., 94., 289., 173., 208., 263., 44 | 156., 5., 0., 16., 16., 14., 0., 111., 475., 534., 432., 45 | 471., 117., 70., 1., 3., 28., 7., 401., 184., 283., 338., 46 | 171., 335., 176., 0., 0., 10., 11., 9., 140., 102., 208., 47 | 298., 245., 220., 29., 2., 27., 10., 13., 26., 84., 143., 48 | 367., 749., 563., 283., 353., 10., 0., 0., 0., 0., 9., 49 | 246., 265., 343., 429., 168., 133., 17., 0., 18., 35., 76., 50 | 158., 272., 250., 190., 289., 466., 84., 0., 0., 0., 0., 51 | 0., 22., 217., 299., 185., 115., 344., 203., 8., np.nan, np.nan, 52 | 0., 5., 284., 123., 254., 476., 496., 326., 27., 20., 0., 53 | 4., 53., 72., 113., 214., 364., 219., 220., 156., 264., 0., 54 | 13., 0., 0., 45., 90., 137., 638., 529., 261., 206., 251., 55 | 0., 0., 5., 9., 58., 72., 138., 130., 471., 328., 356., 56 | 523., 0., 1., 0., 0., 12., 143., 193., 184., 192., 138., 57 | 174., 69., 1., 0., 0., 18., 25., 28., 92., 732., 320., 58 | 256., 302., 131., 15., 0., 27., 0., 22., 20., 213., 393., 59 | 474., 374., 109., 159., 0., 0., 0., 3., 3., 49., 205., 60 | 128., 194., 570., 169., 89., 0., 0., 0., 0., 0., 26., 61 | 185., 286., 92., 225., 244., 190., 3., 20.]) 62 | return arbitrary_1d_data 63 | 64 | @pytest.fixture 65 | def arbitrary_2d_data(): 66 | # Generate arbitrary 80, 2 dimensional data 67 | arbitrary_2d_data = np.array([[ 490., 458.], [ 540., 469.], [ 220., 4630.], [ 390., 321.], [ 450., 541.], 68 | [ 230., 1640.], [ 360., 1060.], [ 460., 264.], [ 430., 665.], [ 430., 680.], 69 | [ 620., 650.], [ 460., np.nan], [ 450., 380.], [ 580., 325.], [ 350., 1020.], 70 | [ 440., 460.], [ 530., 583.], [ 380., 777.], [ 440., 1230.], [ 430., 565.], 71 | [ 680., 533.], [ 250., 4930.], [np.nan, 3810.], [ 450., 469.], [ 500., 473.], 72 | [ 510., 593.], [ 490., 500.], [ 700., 266.], [ 420., 495.], [ 710., 245.], 73 | [ 430., 736.], [ 410., 508.], [ 700., 578.], [ 
260., 4590.], [ 260., 4670.], 74 | [ 500., 503.], [ 450., 469.], [ 500., 314.], [ 620., 432.], [ 670., 279.], 75 | [np.nan, 542.], [ 470., 499.], [ 370., 741.], [ 410., 569.], [ 540., 360.], 76 | [ 550., 513.], [ 220., 3910.], [ 460., 364.], [ 390., 472.], [ 550., 245.], 77 | [ 320., np.nan], [ 570., 224.], [ 480., 342.], [ 520., 732.], [ 620., 240.], 78 | [ 520., 472.], [ 430., 679.], [ 400., 1080.], [ 430., 920.], [ 490., 488.], 79 | [ 560., np.nan], [ 370., 595.], [ 460., 295.], [ 390., 542.], [ 330., 1500.], 80 | [ 350., 1080.], [ 480., 334.], [ 390., 423.], [ 500., 216.], [ 410., 366.], 81 | [ 470., 750.], [ 280., 1260.], [ 510., 223.], [np.nan, 462.], [ 310., 7640.], 82 | [ 230., 2340.], [ 470., 239.], [ 330., 1400.], [ 320., 3070.], [ 500., 244.]]) 83 | return arbitrary_2d_data 84 | 85 | 86 | def test_original_test(NoTrendData, TrendData, arbitrary_1d_data): 87 | # check with no trend data 88 | NoTrendRes = mk.original_test(NoTrendData) 89 | assert NoTrendRes.trend == 'no trend' 90 | assert NoTrendRes.h == False 91 | assert NoTrendRes.p == 1.0 92 | assert NoTrendRes.z == 0 93 | assert NoTrendRes.Tau == 0.0 94 | assert NoTrendRes.s == 0.0 95 | assert NoTrendRes.var_s == 0.0 96 | assert NoTrendRes.slope == 0.0 97 | 98 | # check with trendy data 99 | TrendRes = mk.original_test(TrendData) 100 | assert TrendRes.trend == 'increasing' 101 | assert TrendRes.h == True 102 | assert TrendRes.p == 0.0 103 | assert TrendRes.Tau == 1.0 104 | assert TrendRes.s == 64620.0 105 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 106 | 107 | # check with arbitrary data 108 | result = mk.original_test(arbitrary_1d_data) 109 | assert result.trend == 'no trend' 110 | assert result.h == False 111 | assert result.p == 0.37591058740506833 112 | assert result.z == -0.8854562842589916 113 | assert result.Tau == -0.03153167653875869 114 | assert result.s == -1959.0 115 | assert result.var_s == 4889800.333333333 116 | assert result.slope == -0.0064516129032258064 117 | 118 | def test_hamed_rao_modification_test(NoTrendData, TrendData, arbitrary_1d_data): 119 | # check with no trend data 120 | NoTrendRes = mk.hamed_rao_modification_test(NoTrendData) 121 | assert NoTrendRes.trend == 'no trend' 122 | assert NoTrendRes.h == False 123 | assert NoTrendRes.p == 1.0 124 | assert NoTrendRes.z == 0 125 | assert NoTrendRes.Tau == 0.0 126 | assert NoTrendRes.s == 0.0 127 | assert NoTrendRes.slope == 0.0 128 | 129 | # check with trendy data 130 | TrendRes = mk.hamed_rao_modification_test(TrendData) 131 | assert TrendRes.trend == 'increasing' 132 | assert TrendRes.h == True 133 | assert TrendRes.p == 0.0 134 | assert TrendRes.Tau == 1.0 135 | assert TrendRes.s == 64620.0 136 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 137 | 138 | # check with arbitrary data 139 | result = mk.hamed_rao_modification_test(arbitrary_1d_data) 140 | assert result.trend == 'decreasing' 141 | assert result.h == True 142 | assert result.p == 0.00011372459883540742 143 | assert result.z == -3.859273515045842 144 | assert result.Tau == -0.03153167653875869 145 | assert result.s == -1959.0 146 | assert result.var_s == 257403.38678462413 147 | assert result.slope == -0.0064516129032258064 148 | 149 | def test_hamed_rao_modification_test_lag3(NoTrendData, TrendData, arbitrary_1d_data): 150 | # check with no trend data 151 | NoTrendRes = mk.hamed_rao_modification_test(NoTrendData, lag=3) 152 | assert NoTrendRes.trend == 'no trend' 153 | assert NoTrendRes.h == False 154 | assert NoTrendRes.p == 1.0 155 | assert NoTrendRes.z == 0 
156 | assert NoTrendRes.Tau == 0.0 157 | assert NoTrendRes.s == 0.0 158 | assert NoTrendRes.slope == 0.0 159 | 160 | # check with trendy data 161 | TrendRes = mk.hamed_rao_modification_test(TrendData, lag=3) 162 | assert TrendRes.trend == 'increasing' 163 | assert TrendRes.h == True 164 | assert TrendRes.p == 0.0 165 | assert TrendRes.Tau == 1.0 166 | assert TrendRes.s == 64620.0 167 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 168 | 169 | # check with arbitrary data 170 | result = mk.hamed_rao_modification_test(arbitrary_1d_data, lag=3) 171 | assert result.trend == 'no trend' 172 | assert result.h == False 173 | assert result.p == 0.603684460662274 174 | assert result.z == -0.5191093899188985 175 | assert result.Tau == -0.03153167653875869 176 | assert result.s == -1959.0 177 | assert result.var_s == 14226812.425138814 178 | assert result.slope == -0.0064516129032258064 179 | 180 | def test_yue_wang_modification_test(NoTrendData, TrendData, arbitrary_1d_data): 181 | # check with no trend data 182 | NoTrendRes = mk.yue_wang_modification_test(NoTrendData) 183 | assert NoTrendRes.trend == 'no trend' 184 | assert NoTrendRes.h == False 185 | assert NoTrendRes.p == 1.0 186 | assert NoTrendRes.z == 0 187 | assert NoTrendRes.Tau == 0.0 188 | assert NoTrendRes.s == 0.0 189 | assert NoTrendRes.slope == 0.0 190 | 191 | # check with trendy data 192 | TrendRes = mk.yue_wang_modification_test(TrendData) 193 | assert TrendRes.trend == 'increasing' 194 | assert TrendRes.h == True 195 | assert TrendRes.p == 0.0 196 | assert TrendRes.Tau == 1.0 197 | assert TrendRes.s == 64620.0 198 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 199 | 200 | # check with arbitrary data 201 | result = mk.yue_wang_modification_test(arbitrary_1d_data) 202 | assert result.trend == 'decreasing' 203 | assert result.h == True 204 | np.testing.assert_allclose(result.p, 0.008344656549921448) 205 | np.testing.assert_allclose(result.z, -2.6377968071103193) 206 | assert result.Tau == -0.03153167653875869 207 | assert result.s == -1959.0 208 | np.testing.assert_allclose(result.var_s, 550988.7079774942) 209 | assert result.slope == -0.0064516129032258064 210 | 211 | def test_yue_wang_modification_test_lag1(NoTrendData, TrendData, arbitrary_1d_data): 212 | # check with no trend data 213 | NoTrendRes = mk.yue_wang_modification_test(NoTrendData, lag=1) 214 | assert NoTrendRes.trend == 'no trend' 215 | assert NoTrendRes.h == False 216 | assert NoTrendRes.p == 1.0 217 | assert NoTrendRes.z == 0 218 | assert NoTrendRes.Tau == 0.0 219 | assert NoTrendRes.s == 0.0 220 | assert NoTrendRes.slope == 0.0 221 | 222 | # check with trendy data 223 | TrendRes = mk.yue_wang_modification_test(TrendData, lag=1) 224 | assert TrendRes.trend == 'increasing' 225 | assert TrendRes.h == True 226 | assert TrendRes.p == 0.0 227 | assert TrendRes.Tau == 1.0 228 | assert TrendRes.s == 64620.0 229 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 230 | 231 | # check with arbitrary data 232 | result = mk.yue_wang_modification_test(arbitrary_1d_data, lag=1) 233 | assert result.trend == 'no trend' 234 | assert result.h == False 235 | assert result.p == 0.5433110592605916 236 | assert result.z == -0.6078136738097195 237 | assert result.Tau == -0.03153167653875869 238 | assert result.s == -1959.0 239 | assert result.var_s == 10377301.691383107 240 | assert result.slope == -0.0064516129032258064 241 | 242 | def test_pre_whitening_modification_test(NoTrendData, TrendData, arbitrary_1d_data): 243 | # check with no trend data 244 | 
NoTrendRes = mk.pre_whitening_modification_test(NoTrendData) 245 | assert NoTrendRes.trend == 'no trend' 246 | assert NoTrendRes.h == False 247 | assert NoTrendRes.p == 1.0 248 | assert NoTrendRes.z == 0 249 | assert NoTrendRes.Tau == 0.0 250 | assert NoTrendRes.s == 0.0 251 | 252 | # check with trendy data 253 | TrendRes = mk.pre_whitening_modification_test(TrendData) 254 | assert TrendRes.trend == 'increasing' 255 | assert TrendRes.h == True 256 | assert TrendRes.p == 0.0 257 | np.testing.assert_allclose(TrendRes.slope, 0, atol=.01) 258 | 259 | # check with arbitrary data 260 | result = mk.pre_whitening_modification_test(arbitrary_1d_data) 261 | assert result.trend == 'no trend' 262 | assert result.h == False 263 | assert result.p == 0.9212742990272651 264 | assert result.z == -0.09882867695903437 265 | assert result.Tau == -0.003545066045066045 266 | assert result.s == -219.0 267 | assert result.var_s == 4865719.0 268 | np.testing.assert_allclose(result.slope, -0.0005373555273865899) 269 | 270 | def test_trend_free_pre_whitening_modification_test(NoTrendData, TrendData, arbitrary_1d_data): 271 | # check with no trend data 272 | NoTrendRes = mk.trend_free_pre_whitening_modification_test(NoTrendData) 273 | assert NoTrendRes.trend == 'no trend' 274 | assert NoTrendRes.h == False 275 | assert NoTrendRes.p == 1.0 276 | assert NoTrendRes.z == 0 277 | assert NoTrendRes.Tau == 0.0 278 | assert NoTrendRes.s == 0.0 279 | 280 | # check with trendy data 281 | TrendRes = mk.trend_free_pre_whitening_modification_test(TrendData) 282 | assert TrendRes.trend == 'increasing' 283 | assert TrendRes.h == True 284 | assert TrendRes.p == 0.0 285 | np.testing.assert_allclose(TrendRes.Tau, 1.0, rtol=1e-02) 286 | np.testing.assert_allclose(TrendRes.slope, 1.0, rtol=1e-02) 287 | 288 | # check with arbitrary data 289 | result = mk.trend_free_pre_whitening_modification_test(arbitrary_1d_data) 290 | assert result.trend == 'no trend' 291 | assert result.h == False 292 | assert result.p == 0.7741578265217384 293 | assert result.z == -0.2869405688895601 294 | assert result.Tau == -0.010262885262885263 295 | assert result.s == -634.0 296 | assert result.var_s == 4866576.0 297 | assert result.slope == -0.004174019670423232 298 | 299 | def test_seasonal_test(NoTrendData, TrendData, arbitrary_1d_data): 300 | # check with no trend data 301 | NoTrendRes = mk.seasonal_test(NoTrendData, period=12) 302 | assert NoTrendRes.trend == 'no trend' 303 | assert NoTrendRes.h == False 304 | assert NoTrendRes.p == 1.0 305 | assert NoTrendRes.z == 0 306 | assert NoTrendRes.Tau == 0.0 307 | assert NoTrendRes.s == 0.0 308 | assert NoTrendRes.slope == 0.0 309 | 310 | # check with trendy data 311 | TrendRes = mk.seasonal_test(TrendData, period=12) 312 | assert TrendRes.trend == 'increasing' 313 | assert TrendRes.h == True 314 | assert TrendRes.p == 0.0 315 | assert TrendRes.Tau == 1.0 316 | assert TrendRes.s == 5220.0 317 | np.testing.assert_allclose(TrendRes.slope, 12, rtol=1e-02) 318 | 319 | # check with arbitrary data 320 | result = mk.seasonal_test(arbitrary_1d_data, period=12) 321 | assert result.trend == 'decreasing' 322 | assert result.h == True 323 | assert result.p == 0.03263834596177739 324 | assert result.z == -2.136504114534638 325 | assert result.Tau == -0.0794979079497908 326 | assert result.s == -399.0 327 | assert result.var_s == 34702.333333333336 328 | assert result.slope == -0.16666666666666666 329 | 330 | def test_regional_test(NoTrend2dData,arbitrary_2d_data): 331 | # check with no trend data 332 | NoTrendRes = 
mk.regional_test(NoTrend2dData) 333 | assert NoTrendRes.trend == 'no trend' 334 | assert NoTrendRes.h == False 335 | assert NoTrendRes.p == 1.0 336 | assert NoTrendRes.z == 0 337 | assert NoTrendRes.Tau == 0.0 338 | assert NoTrendRes.s == 0.0 339 | assert NoTrendRes.var_s == 0.0 340 | assert NoTrendRes.slope == 0.0 341 | 342 | # check with arbitrary data 343 | result = mk.regional_test(arbitrary_2d_data) 344 | assert result.trend == 'no trend' 345 | assert result.h == False 346 | assert result.p == 0.2613018311185482 347 | assert result.z == -1.1233194854000186 348 | assert result.Tau == -0.06185919343814081 349 | assert result.s == -362.0 350 | assert result.var_s == 103278.0 351 | assert result.slope == -0.6997144022847817 352 | 353 | def test_correlated_multivariate_test(NoTrend2dData,arbitrary_2d_data): 354 | # check with no trend data 355 | NoTrendRes = mk.correlated_multivariate_test(NoTrend2dData) 356 | assert NoTrendRes.trend == 'no trend' 357 | assert NoTrendRes.h == False 358 | assert NoTrendRes.Tau == 0.0 359 | assert NoTrendRes.s == 0.0 360 | assert NoTrendRes.var_s == 0.0 361 | assert NoTrendRes.slope == 0.0 362 | 363 | # check with arbitrary data 364 | result = mk.correlated_multivariate_test(arbitrary_2d_data) 365 | assert result.trend == 'no trend' 366 | assert result.h == False 367 | assert result.p == 0.05777683185903615 368 | assert result.z == -1.8973873659119118 369 | assert result.Tau == -0.05868196964087375 370 | assert result.s == -317.0 371 | assert result.var_s == 27913.000000000007 372 | assert result.slope == -0.6997144022847817 373 | 374 | def test_correlated_seasonal_test(NoTrendData, TrendData, arbitrary_1d_data): 375 | # check with no trend data 376 | NoTrendRes = mk.correlated_seasonal_test(NoTrendData, period=12) 377 | assert NoTrendRes.trend == 'no trend' 378 | assert NoTrendRes.h == False 379 | assert NoTrendRes.Tau == 0.0 380 | assert NoTrendRes.s == 0.0 381 | assert NoTrendRes.slope == 0.0 382 | 383 | # check with trendy data 384 | TrendRes = mk.correlated_seasonal_test(TrendData, period=12) 385 | assert TrendRes.trend == 'increasing' 386 | assert TrendRes.h == True 387 | np.testing.assert_allclose(TrendRes.p, 0.0, atol=.01) 388 | assert TrendRes.Tau == 1.0 389 | assert TrendRes.s == 5220.0 390 | np.testing.assert_allclose(TrendRes.slope, 12, rtol=1e-02) 391 | 392 | # check with arbitrary data 393 | result = mk.correlated_seasonal_test(arbitrary_1d_data, period=12) 394 | assert result.trend == 'no trend' 395 | assert result.h == False 396 | assert result.p == 0.06032641537423844 397 | assert result.z == -1.878400366918792 398 | assert result.Tau == -0.10054347826086957 399 | assert result.s == -333.0 400 | assert result.var_s == 31427.666666666664 401 | assert result.slope == -0.16666666666666666 402 | 403 | def test_partial_test(NoTrend2dData,arbitrary_2d_data): 404 | # check with no trend data 405 | NoTrendRes = mk.partial_test(NoTrend2dData) 406 | assert NoTrendRes.trend == 'no trend' 407 | assert NoTrendRes.h == False 408 | assert NoTrendRes.p == 1.0 409 | assert NoTrendRes.z == 0 410 | assert NoTrendRes.Tau == 0.0 411 | assert NoTrendRes.s == 0.0 412 | assert NoTrendRes.var_s == 5205500.0 413 | assert NoTrendRes.slope == 0.0 414 | 415 | # check with arbitrary data 416 | result = mk.partial_test(arbitrary_2d_data) 417 | assert result.trend == 'no trend' 418 | assert result.h == False 419 | assert result.p == 0.06670496348739152 420 | assert result.z == -1.8336567432191642 421 | assert result.Tau == -0.07552758237689744 422 | assert result.s == 
-282.53012319329804 423 | assert result.var_s == 23740.695506142725 424 | assert result.slope == -0.6382978723404256 --------------------------------------------------------------------------------