├── .Rbuildignore ├── .github ├── .gitignore └── workflows │ ├── R-CMD-check-full.yaml │ ├── R-CMD-check.yaml │ ├── pkgdown.yaml │ ├── pr-commands.yaml │ └── recheck.yml ├── .gitignore ├── DESCRIPTION ├── LICENSE.md ├── NAMESPACE ├── NEWS.md ├── R ├── 00_specials.R ├── VARIMA.R ├── ar.R ├── arima.R ├── checks.R ├── compat-purrr.R ├── croston.R ├── ets.R ├── etsmodel.R ├── fable-package.R ├── lagwalk.R ├── lm.R ├── mean.R ├── nnetar.R ├── reexports.R ├── theta.R ├── utils.R ├── var.R └── vecm.R ├── README.Rmd ├── README.md ├── _pkgdown.yml ├── codecov.yml ├── cran-comments.md ├── fable.Rproj ├── hex ├── fable.ai ├── fable.png ├── fable.svg ├── fable_icon.png └── fable_paths.svg ├── inst └── WORDLIST ├── man ├── AR.Rd ├── ARIMA.Rd ├── CROSTON.Rd ├── ETS.Rd ├── IRF.ARIMA.Rd ├── IRF.VAR.Rd ├── IRF.VECM.Rd ├── MEAN.Rd ├── NNETAR.Rd ├── RW.Rd ├── THETA.Rd ├── TSLM.Rd ├── VAR.Rd ├── VARIMA.Rd ├── VECM.Rd ├── breusch_godfrey.Rd ├── common_xregs.Rd ├── components.ETS.Rd ├── fable-package.Rd ├── figures │ ├── README-example-1.png │ └── logo.png ├── fitted.AR.Rd ├── fitted.ARIMA.Rd ├── fitted.ETS.Rd ├── fitted.NNETAR.Rd ├── fitted.RW.Rd ├── fitted.TSLM.Rd ├── fitted.VAR.Rd ├── fitted.croston.Rd ├── fitted.fable_theta.Rd ├── fitted.model_mean.Rd ├── forecast.AR.Rd ├── forecast.ARIMA.Rd ├── forecast.ETS.Rd ├── forecast.NNETAR.Rd ├── forecast.RW.Rd ├── forecast.TSLM.Rd ├── forecast.VAR.Rd ├── forecast.croston.Rd ├── forecast.fable_theta.Rd ├── forecast.model_mean.Rd ├── generate.AR.Rd ├── generate.ARIMA.Rd ├── generate.ETS.Rd ├── generate.NNETAR.Rd ├── generate.RW.Rd ├── generate.TSLM.Rd ├── generate.VAR.Rd ├── generate.VECM.Rd ├── generate.model_mean.Rd ├── glance.AR.Rd ├── glance.ARIMA.Rd ├── glance.ETS.Rd ├── glance.NNETAR.Rd ├── glance.RW.Rd ├── glance.TSLM.Rd ├── glance.VAR.Rd ├── glance.VECM.Rd ├── glance.fable_theta.Rd ├── glance.model_mean.Rd ├── interpolate.ARIMA.Rd ├── interpolate.TSLM.Rd ├── interpolate.model_mean.Rd ├── reexports.Rd ├── refit.AR.Rd ├── refit.ARIMA.Rd ├── refit.ETS.Rd ├── refit.NNETAR.Rd ├── refit.RW.Rd ├── refit.TSLM.Rd ├── refit.model_mean.Rd ├── residuals.AR.Rd ├── residuals.ARIMA.Rd ├── residuals.ETS.Rd ├── residuals.NNETAR.Rd ├── residuals.RW.Rd ├── residuals.TSLM.Rd ├── residuals.VAR.Rd ├── residuals.croston.Rd ├── residuals.fable_theta.Rd ├── residuals.model_mean.Rd ├── tidy.AR.Rd ├── tidy.ARIMA.Rd ├── tidy.ETS.Rd ├── tidy.NNETAR.Rd ├── tidy.RW.Rd ├── tidy.TSLM.Rd ├── tidy.VAR.Rd ├── tidy.croston.Rd ├── tidy.fable_theta.Rd ├── tidy.model_mean.Rd └── unitroot_options.Rd ├── pkgdown └── favicon │ ├── apple-touch-icon-120x120.png │ ├── apple-touch-icon-152x152.png │ ├── apple-touch-icon-180x180.png │ ├── apple-touch-icon-60x60.png │ ├── apple-touch-icon-76x76.png │ ├── apple-touch-icon.png │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ └── favicon.ico ├── src ├── .gitignore ├── Makevars ├── Makevars.win ├── etsTargetFunction.cpp ├── etsTargetFunction.h ├── etsTargetFunctionWrapper.cpp ├── etscalc.c └── registerDynamicSymbol.c ├── tests ├── spelling.R ├── testthat.R └── testthat │ ├── setup-data.R │ ├── test-arima.R │ ├── test-ets.R │ ├── test-lm.R │ ├── test-mean.R │ ├── test-nnetar.R │ ├── test-rw.R │ └── test-var.R └── vignettes ├── .gitignore ├── fable.Rmd └── transformations.Rmd /.Rbuildignore: -------------------------------------------------------------------------------- 1 | ^LICENSE\.md$ 2 | ^docs$ 3 | ^_pkgdown\.yml$ 4 | ^tic\.R$ 5 | ^appveyor\.yml$ 6 | ^README\.Rmd$ 7 | Makefile 8 | ^fable\.Rproj$ 9 | ^\.Rproj\.user$ 10 | ^\.travis\.yml$ 11 | ^data-raw$ 12 | 
^codecov\.yml$ 13 | ^hex$ 14 | ^cran-comments\.md$ 15 | ^doc$ 16 | ^Meta$ 17 | ^.github$ 18 | ^\.lintr$ 19 | ^CRAN-RELEASE$ 20 | ^\.github$ 21 | ^revdep$ 22 | ^pkgdown$ 23 | ^CRAN-SUBMISSION$ 24 | -------------------------------------------------------------------------------- /.github/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check-full.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | # 4 | # NOTE: This workflow is overkill for most R packages and 5 | # check-standard.yaml is likely a better choice. 6 | # usethis::use_github_action("check-standard") will install it. 7 | on: 8 | workflow_dispatch: 9 | 10 | name: R-CMD-check-full.yaml 11 | 12 | permissions: read-all 13 | 14 | jobs: 15 | R-CMD-check: 16 | runs-on: ${{ matrix.config.os }} 17 | 18 | name: ${{ matrix.config.os }} (${{ matrix.config.r }}) 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | config: 24 | - {os: macos-latest, r: 'release'} 25 | 26 | - {os: windows-latest, r: 'release'} 27 | # use 4.0 or 4.1 to check with rtools40's older compiler 28 | - {os: windows-latest, r: 'oldrel-4'} 29 | 30 | - {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'} 31 | - {os: ubuntu-latest, r: 'release'} 32 | - {os: ubuntu-latest, r: 'oldrel-1'} 33 | - {os: ubuntu-latest, r: 'oldrel-2'} 34 | - {os: ubuntu-latest, r: 'oldrel-3'} 35 | - {os: ubuntu-latest, r: 'oldrel-4'} 36 | 37 | env: 38 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 39 | R_KEEP_PKG_SOURCE: yes 40 | 41 | steps: 42 | - uses: actions/checkout@v4 43 | 44 | - uses: r-lib/actions/setup-pandoc@v2 45 | 46 | - uses: r-lib/actions/setup-r@v2 47 | with: 48 | r-version: ${{ matrix.config.r }} 49 | http-user-agent: ${{ matrix.config.http-user-agent }} 50 | use-public-rspm: true 51 | 52 | - uses: r-lib/actions/setup-r-dependencies@v2 53 | with: 54 | extra-packages: any::rcmdcheck 55 | needs: check 56 | 57 | - uses: r-lib/actions/check-r-package@v2 58 | with: 59 | upload-snapshots: true 60 | build_args: 'c("--no-manual","--compact-vignettes=gs+qpdf")' 61 | -------------------------------------------------------------------------------- /.github/workflows/R-CMD-check.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples 2 | # Need help debugging build failures? 
Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | branches: [main, master] 6 | pull_request: 7 | branches: [main, master] 8 | 9 | name: R-CMD-check.yaml 10 | 11 | permissions: read-all 12 | 13 | jobs: 14 | R-CMD-check: 15 | runs-on: ubuntu-latest 16 | env: 17 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 18 | R_KEEP_PKG_SOURCE: yes 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - uses: r-lib/actions/setup-r@v2 23 | with: 24 | use-public-rspm: true 25 | 26 | - uses: r-lib/actions/setup-r-dependencies@v2 27 | with: 28 | extra-packages: any::rcmdcheck 29 | needs: check 30 | 31 | - uses: r-lib/actions/check-r-package@v2 32 | with: 33 | upload-snapshots: true 34 | build_args: 'c("--no-manual","--compact-vignettes=gs+qpdf")' 35 | -------------------------------------------------------------------------------- /.github/workflows/pkgdown.yaml: -------------------------------------------------------------------------------- 1 | # Workflow derived from https://github.com/r-lib/actions/tree/master/examples 2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help 3 | on: 4 | push: 5 | # Only deploy docs when master is changed 6 | branches: [main, master] 7 | release: 8 | types: [published] 9 | workflow_dispatch: 10 | 11 | name: pkgdown 12 | 13 | jobs: 14 | pkgdown: 15 | runs-on: ubuntu-latest 16 | env: 17 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }} 18 | steps: 19 | - uses: actions/checkout@v4 20 | 21 | - uses: r-lib/actions/setup-pandoc@v2 22 | 23 | - uses: r-lib/actions/setup-r@v2 24 | with: 25 | use-public-rspm: true 26 | 27 | - uses: r-lib/actions/setup-r-dependencies@v2 28 | with: 29 | # "local::." installs fable 30 | # see https://github.com/r-lib/actions/tree/v2/setup-r-dependencies#installing-the-local-package 31 | extra-packages: pkgdown, local::. 
32 | needs: website 33 | 34 | - name: Deploy package 35 | run: | 36 | git config --local user.name "$GITHUB_ACTOR" 37 | git config --local user.email "$GITHUB_ACTOR@users.noreply.github.com" 38 | Rscript -e 'pkgdown::deploy_to_branch(new_process = FALSE)' 39 | -------------------------------------------------------------------------------- /.github/workflows/pr-commands.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | issue_comment: 3 | types: [created] 4 | name: Commands 5 | jobs: 6 | document: 7 | if: startsWith(github.event.comment.body, '/document') 8 | name: document 9 | runs-on: macOS-latest 10 | steps: 11 | - uses: actions/checkout@v1 12 | - uses: r-lib/actions/pr-fetch@master 13 | with: 14 | repo-token: ${{ secrets.GH_PAT }} 15 | - uses: r-lib/actions/setup-r@master 16 | - name: Install dependencies 17 | run: Rscript -e 'install.packages(c("remotes", "roxygen2"))' -e 'remotes::install_deps(dependencies = TRUE)' 18 | - name: Document 19 | run: Rscript -e 'roxygen2::roxygenise()' 20 | - name: commit 21 | run: | 22 | git add man/\* NAMESPACE 23 | git commit -m 'Document' 24 | - uses: r-lib/actions/pr-push@master 25 | with: 26 | repo-token: ${{ secrets.GH_PAT }} 27 | style: 28 | if: startsWith(github.event.comment.body, '/style') 29 | name: document 30 | runs-on: macOS-latest 31 | steps: 32 | - uses: actions/checkout@master 33 | - uses: r-lib/actions/pr-fetch@master 34 | with: 35 | repo-token: ${{ secrets.GH_PAT }} 36 | - uses: r-lib/actions/setup-r@master 37 | - name: Install dependencies 38 | run: Rscript -e 'install.packages("styler")' 39 | - name: style 40 | run: Rscript -e 'styler::style_pkg()' 41 | - name: commit 42 | run: | 43 | git add \*.R 44 | git commit -m 'style' 45 | - uses: r-lib/actions/pr-push@master 46 | with: 47 | repo-token: ${{ secrets.GH_PAT }} 48 | -------------------------------------------------------------------------------- /.github/workflows/recheck.yml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | inputs: 4 | which: 5 | type: choice 6 | description: Which dependents to check 7 | options: 8 | - strong 9 | - most 10 | 11 | name: Reverse dependency check 12 | 13 | jobs: 14 | revdep_check: 15 | name: Reverse check ${{ inputs.which }} dependents 16 | uses: r-devel/recheck/.github/workflows/recheck.yml@v1 17 | with: 18 | which: ${{ inputs.which }} 19 | subdirectory: '' #if your package is in a git subdir -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .Rproj.user 2 | .Rhistory 3 | .RData 4 | .Ruserdata 5 | Makefile 6 | inst/doc 7 | src/*.o 8 | src/*.so 9 | src/*.dll 10 | docs/ 11 | doc 12 | Meta 13 | revdep 14 | .lintr 15 | -------------------------------------------------------------------------------- /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: fable 2 | Title: Forecasting Models for Tidy Time Series 3 | Version: 0.4.1.9000 4 | Description: Provides a collection of commonly used univariate and multivariate 5 | time series forecasting models including automatically selected exponential 6 | smoothing (ETS) and autoregressive integrated moving average (ARIMA) models. 
7 | These models work within the 'fable' framework provided by the 'fabletools' 8 | package, which provides the tools to evaluate, visualise, and combine models 9 | in a workflow consistent with the tidyverse. 10 | Authors@R: 11 | c(person(given = "Mitchell", 12 | family = "O'Hara-Wild", 13 | email = "mail@mitchelloharawild.com", 14 | role = c("aut", "cre")), 15 | person(given = "Rob", 16 | family = "Hyndman", 17 | role = "aut"), 18 | person(given = "Earo", 19 | family = "Wang", 20 | role = "aut"), 21 | person(given = "Gabriel", 22 | family = "Caceres", 23 | role = "ctb", 24 | comment = "NNETAR implementation"), 25 | person(given = "Christoph", 26 | family = "Bergmeir", 27 | role = "ctb", 28 | comment = c(ORCID = "0000-0002-3665-9021")), 29 | person(given = "Tim-Gunnar", 30 | family = "Hensel", 31 | role = "ctb"), 32 | person(given = "Timothy", 33 | family = "Hyndman", 34 | role = "ctb")) 35 | License: GPL-3 36 | URL: https://fable.tidyverts.org, https://github.com/tidyverts/fable 37 | BugReports: https://github.com/tidyverts/fable/issues 38 | Depends: 39 | R (>= 3.4.0), 40 | fabletools (>= 0.3.0) 41 | Imports: 42 | Rcpp (>= 0.11.0), 43 | rlang (>= 0.4.6), 44 | stats, 45 | dplyr (>= 1.0.0), 46 | tsibble (>= 0.9.0), 47 | tibble, 48 | tidyr, 49 | utils, 50 | distributional 51 | Suggests: 52 | covr, 53 | feasts, 54 | forecast, 55 | knitr, 56 | MTS, 57 | nnet, 58 | rmarkdown, 59 | spelling, 60 | testthat, 61 | tsibbledata (>= 0.2.0) 62 | LinkingTo: 63 | Rcpp (>= 0.11.0) 64 | VignetteBuilder: 65 | knitr 66 | ByteCompile: true 67 | Encoding: UTF-8 68 | Language: en-GB 69 | Roxygen: list(markdown = TRUE, roclets=c('rd', 'collate', 70 | 'namespace')) 71 | RoxygenNote: 7.3.2 72 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | # Generated by roxygen2: do not edit by hand 2 | 3 | S3method(IRF,ARIMA) 4 | S3method(IRF,VAR) 5 | S3method(IRF,VARIMA) 6 | S3method(IRF,VECM) 7 | S3method(breusch_godfrey,TSLM) 8 | S3method(components,ETS) 9 | S3method(fitted,AR) 10 | S3method(fitted,ARIMA) 11 | S3method(fitted,ETS) 12 | S3method(fitted,NNETAR) 13 | S3method(fitted,RW) 14 | S3method(fitted,TSLM) 15 | S3method(fitted,VAR) 16 | S3method(fitted,VARIMA) 17 | S3method(fitted,VECM) 18 | S3method(fitted,croston) 19 | S3method(fitted,fable_theta) 20 | S3method(fitted,model_mean) 21 | S3method(forecast,AR) 22 | S3method(forecast,ARIMA) 23 | S3method(forecast,ETS) 24 | S3method(forecast,NNETAR) 25 | S3method(forecast,RW) 26 | S3method(forecast,TSLM) 27 | S3method(forecast,VAR) 28 | S3method(forecast,VARIMA) 29 | S3method(forecast,VECM) 30 | S3method(forecast,croston) 31 | S3method(forecast,fable_theta) 32 | S3method(forecast,model_mean) 33 | S3method(generate,AR) 34 | S3method(generate,ARIMA) 35 | S3method(generate,ETS) 36 | S3method(generate,NNETAR) 37 | S3method(generate,RW) 38 | S3method(generate,TSLM) 39 | S3method(generate,VAR) 40 | S3method(generate,VARIMA) 41 | S3method(generate,VECM) 42 | S3method(generate,model_mean) 43 | S3method(glance,AR) 44 | S3method(glance,ARIMA) 45 | S3method(glance,ETS) 46 | S3method(glance,NNETAR) 47 | S3method(glance,RW) 48 | S3method(glance,TSLM) 49 | S3method(glance,VAR) 50 | S3method(glance,VARIMA) 51 | S3method(glance,VECM) 52 | S3method(glance,fable_theta) 53 | S3method(glance,model_mean) 54 | S3method(hfitted,ARIMA) 55 | S3method(hfitted,ETS) 56 | S3method(interpolate,ARIMA) 57 | S3method(interpolate,TSLM) 58 | S3method(interpolate,model_mean) 59 | 
S3method(model_sum,AR) 60 | S3method(model_sum,ARIMA) 61 | S3method(model_sum,ETS) 62 | S3method(model_sum,NNETAR) 63 | S3method(model_sum,RW) 64 | S3method(model_sum,TSLM) 65 | S3method(model_sum,VAR) 66 | S3method(model_sum,VARIMA) 67 | S3method(model_sum,VECM) 68 | S3method(model_sum,fable_theta) 69 | S3method(model_sum,model_mean) 70 | S3method(refit,AR) 71 | S3method(refit,ARIMA) 72 | S3method(refit,ETS) 73 | S3method(refit,NNETAR) 74 | S3method(refit,RW) 75 | S3method(refit,TSLM) 76 | S3method(refit,model_mean) 77 | S3method(report,AR) 78 | S3method(report,ARIMA) 79 | S3method(report,ETS) 80 | S3method(report,NNETAR) 81 | S3method(report,RW) 82 | S3method(report,TSLM) 83 | S3method(report,VAR) 84 | S3method(report,VARIMA) 85 | S3method(report,VECM) 86 | S3method(report,fable_theta) 87 | S3method(report,model_mean) 88 | S3method(residuals,AR) 89 | S3method(residuals,ARIMA) 90 | S3method(residuals,ETS) 91 | S3method(residuals,NNETAR) 92 | S3method(residuals,RW) 93 | S3method(residuals,TSLM) 94 | S3method(residuals,VAR) 95 | S3method(residuals,VARIMA) 96 | S3method(residuals,VECM) 97 | S3method(residuals,croston) 98 | S3method(residuals,fable_theta) 99 | S3method(residuals,model_mean) 100 | S3method(tidy,AR) 101 | S3method(tidy,ARIMA) 102 | S3method(tidy,ETS) 103 | S3method(tidy,NNETAR) 104 | S3method(tidy,RW) 105 | S3method(tidy,TSLM) 106 | S3method(tidy,VAR) 107 | S3method(tidy,VARIMA) 108 | S3method(tidy,VECM) 109 | S3method(tidy,croston) 110 | S3method(tidy,fable_theta) 111 | S3method(tidy,model_mean) 112 | export("%>%") 113 | export(AR) 114 | export(ARIMA) 115 | export(CROSTON) 116 | export(ETS) 117 | export(MEAN) 118 | export(NAIVE) 119 | export(NNETAR) 120 | export(RW) 121 | export(SNAIVE) 122 | export(THETA) 123 | export(TSLM) 124 | export(VAR) 125 | export(VARIMA) 126 | export(VECM) 127 | export(as_tsibble) 128 | export(breusch_godfrey) 129 | export(unitroot_options) 130 | import(Rcpp) 131 | import(fabletools) 132 | import(rlang, except = invoke) 133 | import(tsibble) 134 | importFrom(dplyr,"%>%") 135 | importFrom(dplyr,filter) 136 | importFrom(dplyr,lag) 137 | importFrom(dplyr,left_join) 138 | importFrom(dplyr,mutate) 139 | importFrom(dplyr,select) 140 | importFrom(dplyr,transmute) 141 | importFrom(fabletools,forecast) 142 | importFrom(stats,KalmanForecast) 143 | importFrom(stats,KalmanRun) 144 | importFrom(stats,approx) 145 | importFrom(stats,ar) 146 | importFrom(stats,coef) 147 | importFrom(stats,complete.cases) 148 | importFrom(stats,diffinv) 149 | importFrom(stats,formula) 150 | importFrom(stats,lm) 151 | importFrom(stats,makeARIMA) 152 | importFrom(stats,model.frame) 153 | importFrom(stats,model.matrix) 154 | importFrom(stats,na.omit) 155 | importFrom(stats,predict) 156 | importFrom(stats,qnorm) 157 | importFrom(stats,residuals) 158 | importFrom(stats,sd) 159 | importFrom(stats,time) 160 | importFrom(stats,ts) 161 | importFrom(tsibble,as_tsibble) 162 | importFrom(utils,tail) 163 | useDynLib(fable, .registration = TRUE) 164 | -------------------------------------------------------------------------------- /NEWS.md: -------------------------------------------------------------------------------- 1 | # fable (development version) 2 | 3 | ## Improvements 4 | 5 | * Documentation improvements. 6 | 7 | ## Bug fixes 8 | 9 | * Fixed `VARIMA()` not working with integer data. 10 | 11 | # fable 0.4.1 12 | 13 | ## Bug fixes 14 | 15 | * Fix indexing error of short-run exogenous regressors in the `VECM()` model. 16 | * Fix `generate()` method for `VECM()` models producing array errors. 
17 | 18 | # fable 0.4.0 19 | 20 | ## New features 21 | 22 | * Added `generate()` and `IRF()` methods for VAR models. 23 | * Added `IRF()` method for ARIMA models. 24 | * Added `VECM()` and `VARIMA()` models. 25 | 26 | ## Improvements 27 | 28 | * Documentation improvements. 29 | 30 | # fable 0.3.4 31 | 32 | Small patch to resolve issues in C++ R headers. 33 | 34 | ## Improvements 35 | 36 | * Documentation improvements. 37 | 38 | # fable 0.3.3 39 | 40 | Small patch to resolve CRAN check issues. 41 | 42 | ## Improvements 43 | 44 | * Documentation improvements. 45 | 46 | ## Bug fixes 47 | 48 | * Fixed `generate()` method for some variable names. 49 | * Fixed df in `generate()`. 50 | 51 | # fable 0.3.2 52 | 53 | ## Improvements 54 | 55 | * Documentation improvements. 56 | * Added `approx_normal` argument to `forecast()`. This allows you to 57 | optionally return forecasts from the more appropriate Student's T distribution 58 | instead of approximating to a Normal distribution. The default behaviour 59 | remains the same, which is to provide approximate Normal distribution 60 | forecasts which are nicer to work with in model combination and reconciliation 61 | (#343). 62 | * `ETS()` will now ignore the smoothing parameter's range when specific 63 | parameter value is given (#317). 64 | * Modified initial parameter values for `ETS()` when bounds = "admissible". 65 | * Updated RW forecasts to use an unbiased estimate of sigma2 (#368). 66 | 67 | ## Bug fixes 68 | 69 | * Fixed issue with characteristic equation test for admissibility of ETS 70 | parameters (#341). 71 | * Fixed ARIMA selecting differences that don't satisfy the `order_constraint` 72 | (#360). 73 | * Fixed issue with forecasting ARIMA models with intercept and exogenous 74 | regressors. 75 | * Fixed issue with VAR models not storing lagged regressor data for forecasting. 76 | 77 | 78 | # fable 0.3.1 79 | 80 | Small release to resolve check issues with the development and patched versions 81 | of R. The release includes some minor improvements to the output consistency of 82 | initial states in `ETS()` models, the passing of arguments in `ARIMA()` models, 83 | and handling of missing values in `NNETAR()`. 84 | 85 | ## Improvements 86 | 87 | * Display of ETS initial states now use a `state[t]` notation to describe the 88 | state's position in time (#329, #261). 89 | * Allowed specifying `method` argument in `ARIMA()` (#330). 90 | * Improved handling of missing values in `NNETAR()` (#327). 91 | 92 | ## Bug fixes 93 | 94 | * Fixed error with forecasting and simulating from `NNETAR()` estimated using 95 | a short series (#326). 96 | * Fixed `AR()` fitted values not being re-scaled to match original data (#318). 97 | 98 | # fable 0.3.0 99 | 100 | The release of fabletools v0.3.0 introduced general support for computing h-step 101 | ahead fitted values, using the `hfitted(, h = ???)` function. This release 102 | adds model-specific `hfitted()` support to ARIMA and ETS models for improved 103 | performance and accuracy. 104 | 105 | This release adds improved support for refitting models, largely in thanks to 106 | contributions by @Tim-TU. 107 | 108 | It is also now possible to specify an arbitrary model selection criterion 109 | function for automatic `ARIMA()` model selection. 110 | 111 | ## New features 112 | 113 | * Added `refit()` method for NNETAR, MEAN, RW, SNAIVE, and NAIVE models 114 | (#287, #289, #321. @Tim-TU). 115 | * Added `hfitted()` method for ETS and ARIMA, this allows fast estimation of 116 | h-step ahead fitted values. 
117 | * Added `generate()` method for AR, the `forecast()` method now supports 118 | bootstrap forecasting via this new method. 119 | 120 | ## Improvements 121 | 122 | * Added the `selection_metric` argument to `ARIMA()`, which allows more control 123 | over the measure used to select the best model. By default this function will 124 | extract the information criteria specified by the `ic` argument. 125 | * Added `trace` argument for tracing the selection procedure used in `ARIMA()` 126 | 127 | ## Bug fixes 128 | 129 | * Fixed unnecessary warning when forecasting short horizons using `NNETAR()`. 130 | * Fixed `generate()` method for NNETAR models when data isn't scaled (#302). 131 | * Fixed `refit.ARIMA()` re-selecting constant instead of using the provided 132 | model's constant usage. 133 | * Fixed use of exogenous regressors in `AR()` models. 134 | 135 | # fable 0.2.1 136 | 137 | This release coincides with v0.2.0 of the fabletools package, which contains 138 | some substantial changes to the output of `forecast()` methods. 139 | These changes to fabletools emphasise the distribution in the fable 140 | object. The most noticeable is a change in column names of the fable, with the 141 | distribution now stored in the column matching the response variable, and the 142 | forecast mean now stored in the `.mean` column. 143 | For a complete summary of these changes, refer to the fabletools v0.2.0 release 144 | news: https://fabletools.tidyverts.org/news/index.html 145 | 146 | ## New features 147 | 148 | * Added the `THETA()` method. 149 | 150 | ## Improvements 151 | 152 | * Forecasts distributions are now provided by the distributional package. They 153 | are now more space efficient and allows calculation of distributional 154 | statistics including the `mean()`, `median()`, `variance()`, `quantile()`, 155 | `cdf()`, and `density()`. 156 | * The uncertainty of the drift parameter in random walk models (`RW()`, 157 | `NAIVE()` and `SNAIVE()`) is now included in data generated with `generate()`. 158 | * Added Syntetos-Boylan and Shale-Boylan-Johnston variants of `CROSTON()` method. 159 | * Performance improvements. 160 | 161 | ## Bug fixes 162 | 163 | * Fixed issue with approximation being used when refitting ARIMA models and when 164 | a specific model is requested. 165 | * Fixed `glance()` for `TSLM()` models when the data contains missing values. 166 | * Fixed typo in `glance()` output of `ETS()` models. 167 | 168 | ## Breaking changes 169 | 170 | * The sample path means are now used instead of analytical means when forecasts 171 | are produced from sample paths. 172 | 173 | # fable 0.2.0 174 | 175 | ## Improvements 176 | 177 | * Added autoregressive modelling with `AR()`. 178 | * Better handling of rank deficiency in `ARIMA()`. 179 | * Added `generate.ARIMA()` method. 180 | * Added bootstrap forecast paths for `ARIMA()` models. 181 | * `ARIMA()` specials now allow specifying fixed coefficients via the `fixed` argument. 182 | * Documentation improvements. 183 | 184 | # fable 0.1.2 185 | 186 | ## Improvements 187 | 188 | * Added `CROSTON()` for Croston's method of intermittent demand forecasting. 189 | * Documentation improvements 190 | 191 | ## Bug fixes 192 | 193 | * Fixed NNETAR & VAR handling of missing values (#215). 194 | * Fix ETS forecasting with forecast horizons less than the seasonal period (#219). 195 | * Fixed season() special for non-seasonally based time indices (#220) 196 | * Fix issue with simulation forecasting from damped ETS models. 
197 | 198 | # fable 0.1.1 199 | 200 | ## Improvements 201 | 202 | * Added interpolation method for `MEAN()` model (#203). 203 | * Added rolling mean option for `MEAN()` model (#204). 204 | 205 | ## Bug fixes 206 | 207 | * Corrected forecast standard error for drift models. 208 | 209 | # fable 0.1.0 210 | 211 | * First release. 212 | 213 | ## New features 214 | 215 | * Support for 9 models and relevant methods: `ARIMA`, `ETS`, `TSLM`, `MEAN`, `RW`, `NAIVE`, `SNAIVE`, `NNETAR`, `VAR`. 216 | -------------------------------------------------------------------------------- /R/00_specials.R: -------------------------------------------------------------------------------- 1 | globalVariables(c("self", "origin")) 2 | 3 | #' @importFrom stats model.frame 4 | model_xreg <- function(...) { 5 | model_formula <- new_formula( 6 | lhs = NULL, 7 | rhs = reduce(enexprs(...), function(.x, .y) call2("+", .x, .y)) 8 | ) 9 | env <- map(enquos(...), get_env) 10 | env[map_lgl(env, compose(is_empty, env_parents))] <- NULL 11 | env <- if (!is_empty(env)) get_env(env[[1]]) else base_env() 12 | out <- model.frame(model_formula, data = env, na.action = stats::na.pass) 13 | } 14 | 15 | no_xreg <- function(...) { 16 | abort("Exogenous regressors are not supported for this model type.") 17 | } 18 | 19 | trend <- function(x, knots = NULL, origin = NULL) { 20 | UseMethod("trend") 21 | } 22 | 23 | trend.tbl_ts <- function(x, knots = NULL, origin = NULL) { 24 | idx_num <- as.double(x[[index_var(x)]]) 25 | knots_num <- if (is.null(knots)) { 26 | NULL 27 | } else { 28 | as.double(knots) 29 | } 30 | index_interval <- default_time_units(interval(x)) 31 | idx_num <- idx_num / index_interval 32 | knots_num <- knots_num / index_interval 33 | if (!is.null(origin)) { 34 | origin <- as.double(origin) / index_interval 35 | } 36 | 37 | trend(idx_num, knots_num, origin) 38 | } 39 | 40 | trend.numeric <- function(x, knots = NULL, origin = NULL) { 41 | if (!is.null(origin)) { 42 | origin <- origin - 1 # trend should count from 1 43 | x <- x - origin 44 | knots <- knots - origin 45 | } 46 | knots_exprs <- map(knots, function(.x) pmax(0, x - .x)) 47 | knots_exprs <- set_names( 48 | knots_exprs, 49 | map_chr(knots, function(.x) paste0("trend_", format(.x))) 50 | ) 51 | tibble( 52 | trend = x, 53 | !!!knots_exprs 54 | ) 55 | } 56 | 57 | season <- function(x, period) { 58 | UseMethod("season") 59 | } 60 | 61 | season.tbl_ts <- function(x, period) { 62 | idx_num <- as.double(x[[index_var(x)]]) 63 | index_interval <- default_time_units(interval(x)) 64 | idx_num <- idx_num / index_interval 65 | period <- get_frequencies(period, x, .auto = "smallest") 66 | 67 | season(idx_num, period) 68 | } 69 | 70 | season.numeric <- function(x, period) { 71 | season_exprs <- map(period, function(.x) expr(factor(floor((x %% (!!.x)) + 1), levels = seq_len(!!.x)))) 72 | season_exprs <- set_names(season_exprs, names(period) %||% paste0("season_", period)) 73 | tibble(!!!season_exprs) 74 | } 75 | 76 | fourier <- function(x, period, K, origin = NULL) { 77 | UseMethod("fourier") 78 | } 79 | 80 | fourier.tbl_ts <- function(x, period, K, origin = NULL) { 81 | idx_num <- as.double(x[[index_var(x)]]) 82 | index_interval <- default_time_units(interval(x)) 83 | idx_num <- idx_num / index_interval 84 | if (!is.null(origin)) { 85 | origin <- as.double(origin) / index_interval 86 | } 87 | period <- get_frequencies(period, x, .auto = "smallest") 88 | 89 | fourier(idx_num, period, K, origin) 90 | } 91 | 92 | fourier.numeric <- function(x, period, K, origin = NULL) { 93 | if 
(length(period) != length(K)) { 94 | abort("Number of periods does not match number of orders") 95 | } 96 | if (any(2 * K > period)) { 97 | abort("K must be not be greater than period/2") 98 | } 99 | 100 | fourier_exprs <- map2( 101 | as.numeric(period), K, 102 | function(period, K) { 103 | set_names(seq_len(K) / period, paste0(seq_len(K), "_", round(period))) 104 | } 105 | ) %>% 106 | invoke(c, .) %>% 107 | .[!duplicated(.)] %>% 108 | map2(., names(.), function(p, name) { 109 | out <- exprs(C = cospi(2 * !!p * x)) 110 | if (abs(2 * p - round(2 * p)) > .Machine$double.eps) { 111 | out <- c(out, exprs(S = sinpi(2 * !!p * x))) 112 | } 113 | names(out) <- paste0(names(out), name) 114 | out 115 | }) %>% 116 | set_names(NULL) %>% 117 | unlist(recursive = FALSE) 118 | 119 | tibble(!!!fourier_exprs) 120 | } 121 | 122 | #' Common exogenous regressors 123 | #' 124 | #' These special functions provide interfaces to more complicated functions within 125 | #' the model formulae interface. 126 | #' 127 | #' @section Specials: 128 | #' 129 | #' \subsection{trend}{ 130 | #' The `trend` special includes common linear trend regressors in the model. It also supports piecewise linear trend via the `knots` argument. 131 | #' \preformatted{ 132 | #' trend(knots = NULL, origin = NULL) 133 | #' } 134 | #' 135 | #' \tabular{ll}{ 136 | #' `knots` \tab A vector of times (same class as the data's time index) identifying the position of knots for a piecewise linear trend.\cr 137 | #' `origin` \tab An optional time value to act as the starting time for the trend. 138 | #' } 139 | #' } 140 | #' 141 | #' \subsection{season}{ 142 | #' The `season` special includes seasonal dummy variables in the model. 143 | #' \preformatted{ 144 | #' season(period = NULL) 145 | #' } 146 | #' 147 | #' \tabular{ll}{ 148 | #' `period` \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). 149 | #' } 150 | #' } 151 | #' 152 | #' \subsection{fourier}{ 153 | #' The `fourier` special includes seasonal fourier terms in the model. The maximum order of the fourier terms must be specified using `K`. 154 | #' \preformatted{ 155 | #' fourier(period = NULL, K, origin = NULL) 156 | #' } 157 | #' 158 | #' \tabular{ll}{ 159 | #' `period` \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). \cr 160 | #' `K` \tab The maximum order of the fourier terms.\cr 161 | #' `origin` \tab An optional time value to act as the starting time for the fourier series. 
162 | #' } 163 | #' } 164 | #' 165 | #' @format NULL 166 | #' @keywords internal 167 | #' @rdname common_xregs 168 | common_xregs <- list( 169 | trend = function(knots = NULL, origin = NULL) { 170 | if (is.null(origin)) { 171 | if (is.null(self$origin)) { 172 | self$origin <- self$data[[index_var(self$data)]][[1]] 173 | } 174 | origin <- self$origin 175 | } 176 | as.matrix(fable:::trend(self$data, knots, origin)) 177 | }, 178 | season = function(period = NULL) { 179 | as_model_matrix(fable:::season(self$data, period)) 180 | }, 181 | fourier = function(period = NULL, K, origin = NULL) { 182 | if (is.null(origin)) { 183 | if (is.null(self$origin)) { 184 | self$origin <- self$data[[index_var(self$data)]][[1]] 185 | } 186 | origin <- self$origin 187 | } 188 | as.matrix(fable:::fourier(self$data, period, K, origin)) 189 | } 190 | ) 191 | 192 | as_model_matrix <- function(tbl) { 193 | stats::model.matrix(~., data = tbl)[, -1, drop = FALSE] 194 | } 195 | -------------------------------------------------------------------------------- /R/checks.R: -------------------------------------------------------------------------------- 1 | check_gaps <- function(x) { 2 | if (any(tsibble::has_gaps(x)[[".gaps"]])) { 3 | abort(sprintf("%s contains implicit gaps in time. You should check your data and convert implicit gaps into explicit missing values using `tsibble::fill_gaps()` if required.", deparse(substitute(x)))) 4 | } 5 | } 6 | 7 | check_regular <- function(x) { 8 | if (!is_regular(x)) { 9 | abort(sprintf("%s is an irregular time series, which this model does not support. You should consider if your data can be made regular, and use `tsibble::update_tsibble(%s, regular = TRUE)` if appropriate.", deparse(substitute(x)), deparse(substitute(x)))) 10 | } 11 | } 12 | 13 | check_ordered <- function(x) { 14 | if (!is_ordered(x)) { 15 | abort(sprintf( 16 | "%s is an unordered time series. To use this model, you first must sort the data in time order using `dplyr::arrange(%s, %s)`", 17 | deparse(substitute(x)), paste(c(deparse(substitute(x)), key_vars(x)), collapse = ", "), index_var(x) 18 | )) 19 | } 20 | } 21 | 22 | all_tsbl_checks <- function(.data) { 23 | check_gaps(.data) 24 | check_regular(.data) 25 | check_ordered(.data) 26 | if (NROW(.data) == 0) { 27 | abort("There is no data to model. Please provide a dataset with at least one observation.") 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /R/compat-purrr.R: -------------------------------------------------------------------------------- 1 | # nocov start 2 | 3 | # This file serves as a reference for compatibility functions for 4 | # purrr. They are not drop-in replacements but allow a similar style 5 | # of programming. This is useful in cases where purrr is too heavy a 6 | # package to depend on. 7 | 8 | # Slightly adapted from the version found in rlang 9 | 10 | map <- function(.x, .f, ...) { 11 | lapply(.x, .f, ...) 12 | } 13 | map_mold <- function(.x, .f, .mold, ...) { 14 | out <- vapply(.x, .f, .mold, ..., USE.NAMES = FALSE) 15 | names(out) <- names(.x) 16 | out 17 | } 18 | map_lgl <- function(.x, .f, ...) { 19 | map_mold(.x, .f, logical(1), ...) 20 | } 21 | map_int <- function(.x, .f, ...) { 22 | map_mold(.x, .f, integer(1), ...) 23 | } 24 | map_dbl <- function(.x, .f, ...) { 25 | map_mold(.x, .f, double(1), ...) 26 | } 27 | map_chr <- function(.x, .f, ...) { 28 | map_mold(.x, .f, character(1), ...) 29 | } 30 | map_cpl <- function(.x, .f, ...) { 31 | map_mold(.x, .f, complex(1), ...) 
32 | } 33 | 34 | pluck <- function(.x, .f) { 35 | map(.x, `[[`, .f) 36 | } 37 | pluck_lgl <- function(.x, .f) { 38 | map_lgl(.x, `[[`, .f) 39 | } 40 | pluck_int <- function(.x, .f) { 41 | map_int(.x, `[[`, .f) 42 | } 43 | pluck_dbl <- function(.x, .f) { 44 | map_dbl(.x, `[[`, .f) 45 | } 46 | pluck_chr <- function(.x, .f) { 47 | map_chr(.x, `[[`, .f) 48 | } 49 | pluck_cpl <- function(.x, .f) { 50 | map_cpl(.x, `[[`, .f) 51 | } 52 | 53 | map2 <- function(.x, .y, .f, ...) { 54 | mapply(.f, .x, .y, MoreArgs = list(...), SIMPLIFY = FALSE) 55 | } 56 | map2_lgl <- function(.x, .y, .f, ...) { 57 | as.vector(map2(.x, .y, .f, ...), "logical") 58 | } 59 | map2_int <- function(.x, .y, .f, ...) { 60 | as.vector(map2(.x, .y, .f, ...), "integer") 61 | } 62 | map2_dbl <- function(.x, .y, .f, ...) { 63 | as.vector(map2(.x, .y, .f, ...), "double") 64 | } 65 | map2_chr <- function(.x, .y, .f, ...) { 66 | as.vector(map2(.x, .y, .f, ...), "character") 67 | } 68 | map2_cpl <- function(.x, .y, .f, ...) { 69 | as.vector(map2(.x, .y, .f, ...), "complex") 70 | } 71 | 72 | args_recycle <- function(args) { 73 | lengths <- map_int(args, length) 74 | n <- max(lengths) 75 | 76 | stopifnot(all(lengths == 1L | lengths == n)) 77 | to_recycle <- lengths == 1L 78 | args[to_recycle] <- map(args[to_recycle], function(x) rep.int(x, n)) 79 | 80 | args 81 | } 82 | pmap <- function(.l, .f, ...) { 83 | args <- args_recycle(.l) 84 | do.call("mapply", c( 85 | FUN = list(quote(.f)), 86 | args, MoreArgs = quote(list(...)), 87 | SIMPLIFY = FALSE, USE.NAMES = FALSE 88 | )) 89 | } 90 | pmap_dbl <- function(.l, .f, ...) { 91 | args <- args_recycle(.l) 92 | as.vector(do.call("mapply", c( 93 | FUN = list(quote(.f)), 94 | args, MoreArgs = quote(list(...)), 95 | SIMPLIFY = FALSE, USE.NAMES = FALSE 96 | )), mode = "double") 97 | } 98 | 99 | pmap_chr <- function(.l, .f, ...) { 100 | args <- args_recycle(.l) 101 | as.vector(do.call("mapply", c( 102 | FUN = list(quote(.f)), 103 | args, MoreArgs = quote(list(...)), 104 | SIMPLIFY = FALSE, USE.NAMES = FALSE 105 | )), mode = "character") 106 | } 107 | 108 | probe <- function(.x, .p, ...) { 109 | if (is_logical(.p)) { 110 | stopifnot(length(.p) == length(.x)) 111 | .p 112 | } else { 113 | map_lgl(.x, .p, ...) 114 | } 115 | } 116 | 117 | keep <- function(.x, .f, ...) { 118 | .x[probe(.x, .f, ...)] 119 | } 120 | discard <- function(.x, .p, ...) { 121 | sel <- probe(.x, .p, ...) 122 | .x[is.na(sel) | !sel] 123 | } 124 | map_if <- function(.x, .p, .f, ...) { 125 | matches <- probe(.x, .p) 126 | .x[matches] <- map(.x[matches], .f, ...) 127 | .x 128 | } 129 | 130 | compact <- function(.x) { 131 | Filter(length, .x) 132 | } 133 | 134 | transpose <- function(.l) { 135 | inner_names <- names(.l[[1]]) 136 | if (is.null(inner_names)) { 137 | fields <- seq_along(.l[[1]]) 138 | } else { 139 | fields <- set_names(inner_names) 140 | } 141 | 142 | map(fields, function(i) { 143 | map(.l, .subset2, i) 144 | }) 145 | } 146 | 147 | every <- function(.x, .p, ...) { 148 | for (i in seq_along(.x)) { 149 | if (!rlang::is_true(.p(.x[[i]], ...))) { 150 | return(FALSE) 151 | } 152 | } 153 | TRUE 154 | } 155 | some <- function(.x, .p, ...) { 156 | for (i in seq_along(.x)) { 157 | if (rlang::is_true(.p(.x[[i]], ...))) { 158 | return(TRUE) 159 | } 160 | } 161 | FALSE 162 | } 163 | negate <- function(.p) { 164 | function(...) !.p(...) 165 | } 166 | 167 | reduce <- function(.x, .f, ..., .init) { 168 | f <- function(x, y) .f(x, y, ...) 
169 | Reduce(f, .x, init = .init) 170 | } 171 | reduce_right <- function(.x, .f, ..., .init) { 172 | f <- function(x, y) .f(y, x, ...) 173 | Reduce(f, .x, init = .init, right = TRUE) 174 | } 175 | accumulate <- function(.x, .f, ..., .init) { 176 | f <- function(x, y) .f(x, y, ...) 177 | Reduce(f, .x, init = .init, accumulate = TRUE) 178 | } 179 | accumulate_right <- function(.x, .f, ..., .init) { 180 | f <- function(x, y) .f(y, x, ...) 181 | Reduce(f, .x, init = .init, right = TRUE, accumulate = TRUE) 182 | } 183 | 184 | invoke <- function(.f, .x, ..., .env = NULL) { 185 | .env <- .env %||% parent.frame() 186 | args <- c(as.list(.x), list(...)) 187 | do.call(.f, args, envir = .env) 188 | } 189 | imap <- function(.x, .f, ...) { 190 | map2(.x, names(.x) %||% seq_along(.x), .f, ...) 191 | } 192 | 193 | capture_error <- function(code, otherwise = NULL, quiet = TRUE) { 194 | tryCatch(list(result = code, error = NULL), error = function(e) { 195 | if (!quiet) { 196 | message("Error: ", e$message) 197 | } 198 | list(result = otherwise, error = e) 199 | }, interrupt = function(e) { 200 | stop("Terminated by user", call. = FALSE) 201 | }) 202 | } 203 | safely <- function(.f, otherwise = NULL, quiet = TRUE) { 204 | function(...) capture_error(.f(...), otherwise, quiet) 205 | } 206 | possibly <- function(.f, otherwise, quiet = TRUE) { 207 | force(otherwise) 208 | function(...) capture_error(.f(...), otherwise, quiet)$result 209 | } 210 | quietly <- function(.f) { 211 | function(...) suppressMessages(suppressWarnings(.f(...))) 212 | } 213 | compose <- function(...) { 214 | fs <- lapply(list(...), match.fun) 215 | n <- length(fs) 216 | last <- fs[[n]] 217 | rest <- fs[-n] 218 | function(...) { 219 | out <- last(...) 220 | for (f in rev(rest)) { 221 | out <- f(out) 222 | } 223 | out 224 | } 225 | } 226 | # nocov end 227 | 228 | slide_dbl <- function(.x, .fn, ..., .size = 1, .partial = FALSE) { 229 | out <- numeric(if(.partial) length(.x) else length(.x) - .size + 1) 230 | for(i in seq_along(out)) { 231 | idx <- seq.int(i+.size*(-1L+!.partial) + .partial, i + (.size*!.partial) - 1 + .partial, by = 1L) 232 | idx[idx<=0] <- NA_integer_ 233 | out[i] <- .fn(.x[idx], ...) 234 | } 235 | out 236 | } -------------------------------------------------------------------------------- /R/fable-package.R: -------------------------------------------------------------------------------- 1 | #' @keywords internal 2 | "_PACKAGE" 3 | 4 | globalVariables(".") 5 | 6 | #' @rawNamespace import(rlang, except = invoke) 7 | #' @import tsibble 8 | #' @import fabletools 9 | #' @import Rcpp 10 | #' @importFrom dplyr mutate transmute filter lag left_join select 11 | #' 12 | #' @useDynLib fable, .registration = TRUE 13 | NULL 14 | -------------------------------------------------------------------------------- /R/reexports.R: -------------------------------------------------------------------------------- 1 | #' @importFrom dplyr %>% 2 | #' @export 3 | dplyr::`%>%` 4 | 5 | #' @export 6 | tsibble::as_tsibble 7 | -------------------------------------------------------------------------------- /R/theta.R: -------------------------------------------------------------------------------- 1 | #' @importFrom stats sd 2 | train_theta <- function(.data, specials, ...) 
{ 3 | if (length(measured_vars(.data)) > 1) { 4 | abort("Only univariate responses are supported by the Theta method") 5 | } 6 | 7 | y <- unclass(.data)[[measured_vars(.data)]] 8 | n <- length(y) 9 | 10 | if (all(is.na(y))) { 11 | abort("All observations are missing, a model cannot be estimated without data.") 12 | } 13 | 14 | # Check seasonality 15 | m <- specials$season[[1]]$period 16 | y <- ts(y, frequency = m) 17 | if (m > 1 && !is.constant(y) && n > 2 * m) { 18 | r <- as.numeric(stats::acf(y, lag.max = m, plot = FALSE)$acf)[-1] 19 | stat <- sqrt((1 + 2 * sum(r[-m]^2)) / n) 20 | if(!(abs(r[m]) / stat > qnorm(0.95))) { 21 | m <- 1L 22 | } 23 | } else { 24 | m <- 1L 25 | } 26 | 27 | # Seasonal decomposition 28 | if (m > 1L) { 29 | dcmp <- stats::decompose(y, type = specials$season[[1]]$method) 30 | if (any(abs(dcmp$seasonal) < 1e-4)) { 31 | warning("Seasonal indexes equal to zero. Using non-seasonal Theta method") 32 | } else { 33 | y_sa <- if(dcmp$type == "additive") dcmp$x - dcmp$seasonal else dcmp$x / dcmp$seasonal 34 | } 35 | } else { 36 | y_sa <- y 37 | } 38 | 39 | # Find theta lines 40 | ses <- etsmodel(y_sa, m, "A", "N", "N", FALSE, opt.crit = "mse", nmse = 3, bounds = "both") 41 | alpha <- pmax(1e-10, ses$par["alpha"]) 42 | sigma2 <- sum(ses$residuals ^ 2, na.rm = TRUE) / (n - length(ses$par)) 43 | drift <- stats::lsfit(0:(n - 1), y_sa)$coefficients[2] / 2 44 | 45 | # Reseasonalize 46 | if (m > 1L) { 47 | ses$fitted <- if(dcmp$type == "additive") ses$fitted + dcmp$seasonal else ses$fitted * dcmp$seasonal 48 | ses$residuals <- y - ses$fitted 49 | } 50 | 51 | structure( 52 | list( 53 | fitted = as.numeric(ses$fitted), 54 | resid = as.numeric(ses$residuals), 55 | period = m, 56 | alpha = alpha, 57 | l0 = ses$par["l"], 58 | lT = ses$states[n+1,1], 59 | drift = drift, 60 | sigma2 = sigma2, 61 | dcmp = specials$season[[1]]$method, 62 | season = if(m > 1L) dcmp$seasonal[seq(n-m+1, n)] else NULL 63 | ), 64 | class = "fable_theta" 65 | ) 66 | } 67 | 68 | specials_theta <- new_specials( 69 | season = function(period = NULL, method = c("multiplicative", "additive")) { 70 | period <- get_frequencies(period, self$data, .auto = "smallest") 71 | method <- match.arg(method) 72 | list(period = period, method = method) 73 | }, 74 | .required_specials = "season" 75 | ) 76 | 77 | #' Theta method 78 | #' 79 | #' The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to 80 | #' simple exponential smoothing with drift. This is demonstrated in Hyndman and 81 | #' Billah (2003). 82 | #' 83 | #' The series is tested for seasonality using the test outlined in A&N. If 84 | #' deemed seasonal, the series is seasonally adjusted using a classical 85 | #' multiplicative decomposition before applying the theta method. The resulting 86 | #' forecasts are then reseasonalized. 87 | #' 88 | #' More general theta methods are available in the forecTheta package. 89 | #' 90 | #' @param formula Model specification. 91 | #' @param ... Not used. 92 | #' 93 | #' @section Specials: 94 | #' 95 | #' \subsection{season}{ 96 | #' The `season` special is used to specify the parameters of the seasonal adjustment via classical decomposition. 97 | #' \preformatted{ 98 | #' season(period = NULL, method = c("multiplicative", "additive")) 99 | #' } 100 | #' 101 | #' \tabular{ll}{ 102 | #' `period` \tab The periodic nature of the seasonality. 
This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). \cr 103 | #' `method` \tab The type of classical decomposition to apply. The original Theta method always used multiplicative seasonal decomposition, and so this is the default. 104 | #' } 105 | #' } 106 | #' 107 | #' @return A model specification. 108 | #' 109 | #' @references 110 | #' Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: 111 | #' a decomposition approach to forecasting. \emph{International Journal of 112 | #' Forecasting} \bold{16}, 521-530. 113 | #' 114 | #' Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. 115 | #' \emph{International J. Forecasting}, \bold{19}, 287-290. 116 | #' 117 | #' @examples 118 | #' # Theta method with transform 119 | #' deaths <- as_tsibble(USAccDeaths) 120 | #' deaths %>% 121 | #' model(theta = THETA(log(value))) %>% 122 | #' forecast(h = "4 years") %>% 123 | #' autoplot(deaths) 124 | #' 125 | #' # Compare seasonal specifications 126 | #' library(tsibbledata) 127 | #' library(dplyr) 128 | #' aus_retail %>% 129 | #' filter(Industry == "Clothing retailing") %>% 130 | #' model(theta_multiplicative = THETA(Turnover ~ season(method = "multiplicative")), 131 | #' theta_additive = THETA(Turnover ~ season(method = "additive"))) %>% 132 | #' accuracy() 133 | #' @author Rob J Hyndman, Mitchell O'Hara-Wild 134 | #' @export 135 | THETA <- function(formula, ...) { 136 | theta_model <- new_model_class("theta", 137 | train = train_theta, 138 | specials = specials_theta 139 | ) 140 | new_model_definition(theta_model, !!enquo(formula), ...) 141 | } 142 | 143 | #' @importFrom fabletools forecast 144 | #' 145 | #' @inherit forecast.ARIMA 146 | #' @export 147 | forecast.fable_theta <- function(object, new_data, specials = NULL, bootstrap = FALSE, times = 5000, ...) { 148 | if (bootstrap) { 149 | abort("Bootstrapped forecasts are not yet supported for the Theta method.") 150 | } 151 | h <- NROW(new_data) 152 | 153 | n <- length(object$resid) 154 | alpha <- object$alpha 155 | drift <- object$drift 156 | sigma2 <- object$sigma2 157 | m <- object$period 158 | 159 | # Produce forecasts 160 | fc <- ets_fc_class1(h, object$lT, "N", "N", FALSE, m, sigma2, par = alpha) 161 | fc <- fc$mu + drift * (0:(h - 1) + (1 - (1 - alpha)^n) / alpha) 162 | 163 | # Re-seasonalise 164 | if(m > 1L){ 165 | seas_fc <- rep(object$season, trunc(1 + h / m))[1:h] 166 | fc <- if(object$dcmp == "additive") fc + seas_fc else fc * seas_fc 167 | } 168 | 169 | se <- sqrt(sigma2) * sqrt((0:(h - 1)) * alpha^2 + 1) 170 | distributional::dist_normal(fc, se) 171 | } 172 | 173 | #' @inherit fitted.ARIMA 174 | #' 175 | #' @examples 176 | #' library(tsibbledata) 177 | #' vic_elec %>% 178 | #' model(avg = MEAN(Demand)) %>% 179 | #' fitted() 180 | #' @export 181 | fitted.fable_theta <- function(object, ...) { 182 | object$fitted 183 | } 184 | 185 | #' @inherit residuals.ARIMA 186 | #' 187 | #' @examples 188 | #' library(tsibbledata) 189 | #' vic_elec %>% 190 | #' model(avg = MEAN(Demand)) %>% 191 | #' residuals() 192 | #' @export 193 | residuals.fable_theta <- function(object, ...) { 194 | object$resid 195 | } 196 | 197 | #' Glance a theta method 198 | #' 199 | #' Construct a single row summary of the average method model. 200 | #' 201 | #' Contains the variance of residuals (`sigma2`). 
202 | #' 203 | #' @inheritParams generics::glance 204 | #' 205 | #' @return A one row tibble summarising the model's fit. 206 | #' @export 207 | glance.fable_theta <- function(x, ...) { 208 | tibble(sigma2 = x$sigma2) 209 | } 210 | 211 | #' @inherit tidy.ARIMA 212 | #' 213 | #' @export 214 | tidy.fable_theta <- function(x, ...) { 215 | tibble( 216 | term = c("alpha", "level", "drift"), 217 | estimate = c(x$alpha, x$l0, x$drift) 218 | ) 219 | } 220 | 221 | #' @export 222 | report.fable_theta <- function(object, ...) { 223 | cat("\n") 224 | cat(paste("Alpha:", round(object$alpha, 4), "\n")) 225 | cat(paste("Drift:", round(object$drift, 4), "\n")) 226 | cat(paste("sigma^2:", round(object$sigma2, 4), "\n")) 227 | } 228 | 229 | #' @export 230 | model_sum.fable_theta <- function(x) { 231 | paste0("THETA") 232 | } 233 | -------------------------------------------------------------------------------- /R/utils.R: -------------------------------------------------------------------------------- 1 | is.constant <- function(x) { 2 | x <- as.numeric(x) 3 | y <- rep(x[1], length(x)) 4 | return(isTRUE(all.equal(x, y))) 5 | } 6 | 7 | assignSpecials <- function(x, env = caller_env()) { 8 | imap(x, function(.x, nm) { 9 | if (length(.x) > 1) warn(sprintf("Only one special for `%s` is allowed, defaulting to the first usage", nm)) 10 | imap(.x[[1]], function(.x, .y) assign(.y, .x, envir = env)) 11 | }) 12 | } 13 | 14 | require_package <- function(pkg) { 15 | if (!requireNamespace(pkg, quietly = TRUE)) { 16 | abort( 17 | sprintf('The `%s` package must be installed to use this functionality. It can be installed with install.packages("%s")', pkg, pkg) 18 | ) 19 | } 20 | } 21 | 22 | `%||%` <- function(x, y) if (is_null(x)) y else x 23 | -------------------------------------------------------------------------------- /README.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | output: github_document 3 | --- 4 | 5 | 6 | 7 | ```{r setup, include = FALSE} 8 | knitr::opts_chunk$set( 9 | collapse = TRUE, 10 | comment = "#>", 11 | fig.path = "man/figures/README-", 12 | out.width = "100%" 13 | ) 14 | ``` 15 | 16 | # fable 17 | 18 | [![R-CMD-check](https://github.com/tidyverts/fable/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/tidyverts/fable/actions/workflows/R-CMD-check.yaml) 19 | [![Coverage status](https://codecov.io/gh/tidyverts/fable/branch/master/graph/badge.svg)](https://app.codecov.io/github/tidyverts/fable?branch=master) 20 | [![CRAN status](https://www.r-pkg.org/badges/version/fable)](https://CRAN.R-project.org/package=fable) 21 | [![Lifecycle: stable](https://img.shields.io/badge/lifecycle-stable-brightgreen.svg)](https://lifecycle.r-lib.org/articles/stages.html#stable) 22 | 23 | 24 | The R package *fable* provides a collection of commonly used univariate and multivariate time series forecasting models including exponential smoothing via state space models and automatic ARIMA modelling. These models work within the fable framework, which provides the tools to evaluate, visualise, and combine models in a workflow consistent with the tidyverse. 
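For example, estimated models can be combined and evaluated within the same pipeline. The following sketch uses an arbitrary example dataset, default model settings, and equal combination weights purely for illustration (it is not run here):

```{r model-combination, eval = FALSE}
library(fable)
library(tsibble)
library(dplyr)

# Estimate two candidate models, average them into an equal-weight
# combination, and compare the in-sample accuracy of all three.
as_tsibble(USAccDeaths) %>%
  model(ets = ETS(value), arima = ARIMA(value)) %>%
  mutate(combination = (ets + arima) / 2) %>%
  accuracy()
```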
25 | 26 | ## Installation 27 | 28 | You can install the **stable** version from [CRAN](https://cran.r-project.org/package=fable): 29 | 30 | ```{r cran-installation, eval = FALSE} 31 | install.packages("fable") 32 | ``` 33 | 34 | You can install the **development** version from 35 | [GitHub](https://github.com/tidyverts/fable) 36 | 37 | ```{r gh-installation, eval = FALSE} 38 | # install.packages("remotes") 39 | remotes::install_github("tidyverts/fable") 40 | ``` 41 | 42 | Installing this software requires a compiler 43 | 44 | ## Example 45 | 46 | ```{r example, message=FALSE} 47 | library(fable) 48 | library(tsibble) 49 | library(tsibbledata) 50 | library(lubridate) 51 | library(dplyr) 52 | aus_retail %>% 53 | filter( 54 | State %in% c("New South Wales", "Victoria"), 55 | Industry == "Department stores" 56 | ) %>% 57 | model( 58 | ets = ETS(box_cox(Turnover, 0.3)), 59 | arima = ARIMA(log(Turnover)), 60 | snaive = SNAIVE(Turnover) 61 | ) %>% 62 | forecast(h = "2 years") %>% 63 | autoplot(filter(aus_retail, year(Month) > 2010), level = NULL) 64 | ``` 65 | 66 | ## Learning to forecast with fable 67 | 68 | - The pkgdown site describes all models provided by fable, and how they are used: https://fable.tidyverts.org/ 69 | - The forecasting principles and practices online textbook provides an introduction to time series forecasting using fable: https://otexts.com/fpp3/ (WIP) 70 | 71 | 72 | ## Getting help 73 | 74 | - Questions about forecasting can be asked on [Cross Validated](https://stats.stackexchange.com/tags/forecasting). 75 | 76 | - Common questions about the fable package are often found on [Stack Overflow](https://stackoverflow.com/tags/fable+r). You can use this to ask for help if the question isn't already answered. A [minimally reproducible example](https://www.tidyverse.org/help/) that describes your issue is the best way to ask for help! 77 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | # fable 5 | 6 | 7 | 8 | [![R-CMD-check](https://github.com/tidyverts/fable/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/tidyverts/fable/actions/workflows/R-CMD-check.yaml) 9 | [![Coverage 10 | status](https://codecov.io/gh/tidyverts/fable/branch/master/graph/badge.svg)](https://app.codecov.io/github/tidyverts/fable?branch=master) 11 | [![CRAN 12 | status](https://www.r-pkg.org/badges/version/fable)](https://CRAN.R-project.org/package=fable) 13 | [![Lifecycle: 14 | stable](https://img.shields.io/badge/lifecycle-stable-brightgreen.svg)](https://lifecycle.r-lib.org/articles/stages.html#stable) 15 | 16 | 17 | The R package *fable* provides a collection of commonly used univariate 18 | and multivariate time series forecasting models including exponential 19 | smoothing via state space models and automatic ARIMA modelling. These 20 | models work within the fable framework, which provides the tools to 21 | evaluate, visualise, and combine models in a workflow consistent with 22 | the tidyverse. 
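For example, estimated models can be combined and evaluated within the same
pipeline. The following sketch uses an arbitrary example dataset, default
model settings, and equal combination weights purely for illustration (it is
not run here):

``` r
library(fable)
library(tsibble)
library(dplyr)

# Estimate two candidate models, average them into an equal-weight
# combination, and compare the in-sample accuracy of all three.
as_tsibble(USAccDeaths) %>%
  model(ets = ETS(value), arima = ARIMA(value)) %>%
  mutate(combination = (ets + arima) / 2) %>%
  accuracy()
```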
23 | 24 | ## Installation 25 | 26 | You can install the **stable** version from 27 | [CRAN](https://cran.r-project.org/package=fable): 28 | 29 | ``` r 30 | install.packages("fable") 31 | ``` 32 | 33 | You can install the **development** version from 34 | [GitHub](https://github.com/tidyverts/fable) 35 | 36 | ``` r 37 | # install.packages("remotes") 38 | remotes::install_github("tidyverts/fable") 39 | ``` 40 | 41 | Installing this software requires a compiler 42 | 43 | ## Example 44 | 45 | ``` r 46 | library(fable) 47 | library(tsibble) 48 | library(tsibbledata) 49 | library(lubridate) 50 | library(dplyr) 51 | aus_retail %>% 52 | filter( 53 | State %in% c("New South Wales", "Victoria"), 54 | Industry == "Department stores" 55 | ) %>% 56 | model( 57 | ets = ETS(box_cox(Turnover, 0.3)), 58 | arima = ARIMA(log(Turnover)), 59 | snaive = SNAIVE(Turnover) 60 | ) %>% 61 | forecast(h = "2 years") %>% 62 | autoplot(filter(aus_retail, year(Month) > 2010), level = NULL) 63 | ``` 64 | 65 | 66 | 67 | ## Learning to forecast with fable 68 | 69 | - The pkgdown site describes all models provided by fable, and how they 70 | are used: 71 | - The forecasting principles and practices online textbook provides an 72 | introduction to time series forecasting using fable: 73 | (WIP) 74 | 75 | 76 | ## Getting help 77 | 78 | - Questions about forecasting can be asked on [Cross 79 | Validated](https://stats.stackexchange.com/tags/forecasting). 80 | 81 | - Common questions about the fable package are often found on [Stack 82 | Overflow](https://stackoverflow.com/tags/fable+r). You can use this to 83 | ask for help if the question isn’t already answered. A [minimally 84 | reproducible example](https://www.tidyverse.org/help/) that describes 85 | your issue is the best way to ask for help! 86 | -------------------------------------------------------------------------------- /_pkgdown.yml: -------------------------------------------------------------------------------- 1 | url: https://fable.tidyverts.org 2 | 3 | template: 4 | params: 5 | bootswatch: cosmo 6 | includes: 7 | in_header: | 8 | 9 | 10 | development: 11 | mode: auto 12 | 13 | authors: 14 | Rob Hyndman: 15 | href: http://robjhyndman.com 16 | Mitchell O'Hara-Wild: 17 | href: https://mitchelloharawild.com 18 | Earo Wang: 19 | href: https://earo.me 20 | 21 | navbar: 22 | type: default 23 | left: 24 | - text: Reference 25 | href: reference/index.html 26 | - text: Changelog 27 | href: news/index.html 28 | - text: Vignettes 29 | menu: 30 | - text: Introduction to fable 31 | href: articles/fable.html 32 | - text: Forecasting with transformations 33 | href: articles/transformations.html 34 | right: 35 | - text: Feedback 36 | href: https://docs.google.com/forms/d/e/1FAIpQLSfc66U8e8e-x_7TeWpuBAtxqdJD4UXozdkLgmBI3mlRuKPkzA/viewform?usp=sf_link 37 | - icon: fa-github fa-lg 38 | href: https://github.com/tidyverts/fable 39 | 40 | 41 | reference: 42 | - title: ARIMA 43 | desc: > 44 | The ARIMA model and its supported methods. 45 | contents: 46 | - ends_with("ARIMA") 47 | - unitroot_options 48 | - title: ETS 49 | desc: > 50 | Exponential smoothing state space models. 51 | contents: 52 | - ends_with("ETS") 53 | - title: TSLM 54 | desc: > 55 | Time series linear models. 56 | contents: 57 | - ends_with("TSLM") 58 | - breusch_godfrey 59 | - title: Simple forecasting methods 60 | desc: > 61 | A collection of simple forecasting methods that are commonly used as benchmarks. 
62 | contents: 63 | - MEAN 64 | - ends_with("model_mean") 65 | - ends_with("RW") 66 | - NAIVE 67 | - SNAIVE 68 | - title: Neural network autoregression 69 | desc: > 70 | Feed-forward neural networks with a single hidden layer and lagged inputs for forecasting univariate time series. 71 | contents: 72 | - ends_with("NNETAR") 73 | - title: Croston's method 74 | desc: > 75 | Croston's (1972) method for intermittent demand forecasting 76 | contents: 77 | - CROSTON 78 | - ends_with("croston") 79 | - title: Theta method 80 | desc: > 81 | The Theta method of Assimakopoulos and Nikolopoulos (2000) 82 | contents: 83 | - THETA 84 | - ends_with("fable_theta") 85 | - title: Autoregression 86 | desc: > 87 | Autoregressive time series models 88 | contents: 89 | - ends_with("AR") 90 | - title: Vector autoregression 91 | desc: > 92 | Estimates a VAR(p) model with support for exogenous regressors. 93 | contents: 94 | - ends_with("VAR") 95 | - title: Vector autoregression integrated moving average 96 | desc: > 97 | Estimates a VARIMA(p,d,q) model 98 | contents: 99 | - ends_with("VARIMA") 100 | - title: Vector error correction models 101 | desc: > 102 | Estimates a VECM(p, r) model with support for exogenous regressors. 103 | contents: 104 | - ends_with("VECM") 105 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | comment: false 2 | 3 | coverage: 4 | status: 5 | project: 6 | default: 7 | target: auto 8 | threshold: 1% 9 | patch: 10 | default: 11 | target: auto 12 | threshold: 1% 13 | -------------------------------------------------------------------------------- /cran-comments.md: -------------------------------------------------------------------------------- 1 | ## Test environments 2 | * local ubuntu 20.04 install, R 4.1.2 3 | * ubuntu-latest (on GitHub actions), R-devel, R-release, R-oldrel 4 | * macOS (on GitHub actions), R-release 5 | * windows (on GitHub actions), R-release 6 | * win-builder, R-devel 7 | 8 | ## R CMD check results 9 | 10 | 0 errors | 0 warnings | 0 notes 11 | 12 | ## Revdep checks 13 | 14 | All reverse dependencies have been checked, none have changed to worse. 
15 | -------------------------------------------------------------------------------- /fable.Rproj: -------------------------------------------------------------------------------- 1 | Version: 1.0 2 | 3 | RestoreWorkspace: No 4 | SaveWorkspace: No 5 | AlwaysSaveHistory: No 6 | 7 | EnableCodeIndexing: Yes 8 | UseSpacesForTab: Yes 9 | NumSpacesForTab: 2 10 | Encoding: UTF-8 11 | 12 | RnwWeave: knitr 13 | LaTeX: pdfLaTeX 14 | 15 | BuildType: Package 16 | PackageUseDevtools: Yes 17 | PackageInstallArgs: --no-multiarch --with-keep.source 18 | PackageRoxygenize: rd,collate,namespace,vignette 19 | -------------------------------------------------------------------------------- /hex/fable.ai: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/hex/fable.ai -------------------------------------------------------------------------------- /hex/fable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/hex/fable.png -------------------------------------------------------------------------------- /hex/fable_icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/hex/fable_icon.png -------------------------------------------------------------------------------- /inst/WORDLIST: -------------------------------------------------------------------------------- 1 | AAM 2 | AICc 3 | AMM 4 | AR 5 | Akram 6 | Assimakopoulos 7 | Billah 8 | Boylan 9 | Breusch 10 | CMD 11 | Grose 12 | KPSS 13 | Koehler 14 | Kourentzes 15 | Lifecycle 16 | MMA 17 | Md 18 | NNAR 19 | NNETAR 20 | Nikolopoulos 21 | ORCID 22 | RW 23 | SAR 24 | SES 25 | SMA 26 | SNAIVE 27 | STL 28 | Syntetos 29 | TSLM 30 | VAR 31 | VARIMA 32 | VECM 33 | Verlag 34 | WIP 35 | arima 36 | autoregressions 37 | backtransformations 38 | backtransforming 39 | cointegrating 40 | df 41 | dfrac 42 | fabletools 43 | forecTheta 44 | fourier 45 | kpss 46 | mable 47 | nonseasonal 48 | parameterisations 49 | parameterised 50 | pdq 51 | pkgdown 52 | rd 53 | reseasonalized 54 | tibble 55 | tidyverse 56 | tsibble 57 | underly 58 | xreg 59 | -------------------------------------------------------------------------------- /man/AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{AR} 4 | \alias{AR} 5 | \alias{report.AR} 6 | \title{Estimate a AR model} 7 | \usage{ 8 | AR(formula, ic = c("aicc", "aic", "bic"), ...) 9 | } 10 | \arguments{ 11 | \item{formula}{Model specification (see "Specials" section).} 12 | 13 | \item{ic}{The information criterion used in selecting the model.} 14 | 15 | \item{...}{Further arguments for arima} 16 | } 17 | \value{ 18 | A model specification. 19 | } 20 | \description{ 21 | Searches through the vector of lag orders to find the best AR model which 22 | has lowest AIC, AICc or BIC value. It is implemented using OLS, and behaves 23 | comparably to \code{\link[stats:ar.ols]{stats::ar.ols()}}. 24 | } 25 | \details{ 26 | Exogenous regressors and \code{\link{common_xregs}} can be specified in the model 27 | formula. 
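To make the order selection and exogenous-regressor support described above concrete, here is a minimal sketch (not part of the package documentation; the candidate orders and the added `trend()` regressor are illustrative choices, assuming `common_xregs` work in `AR()` formulae as stated):

```r
library(fable)
library(tsibble)

# Compare AR orders 1 to 5 and keep the one minimising the default
# information criterion (AICc); trend() adds a linear trend regressor.
as_tsibble(lh) %>%
  model(ar = AR(value ~ order(1:5) + trend())) %>%
  report()
```

The selected lag order and the estimated coefficients are printed by `report()`.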
28 | } 29 | \section{Specials}{ 30 | 31 | 32 | \subsection{pdq}{ 33 | The \code{order} special is used to specify the lag order for the auto-regression. 34 | \preformatted{ 35 | order(p = 0:15, fixed = list()) 36 | } 37 | 38 | \tabular{ll}{ 39 | \code{p} \tab The order of the auto-regressive (AR) terms. If multiple values are provided, the one which minimises \code{ic} will be chosen.\cr 40 | \code{fixed} \tab A named list of fixed parameters for coefficients. The names identify the coefficient, beginning with \code{ar}, and then followed by the lag order. For example, \code{fixed = list(ar1 = 0.3, ar3 = 0)}. 41 | } 42 | } 43 | 44 | \subsection{xreg}{ 45 | Exogenous regressors can be included in an AR model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}. 46 | 47 | The inclusion of a constant in the model follows the similar rules to \code{\link[stats:lm]{stats::lm()}}, where including \code{1} will add a constant and \code{0} or \code{-1} will remove the constant. If left out, the inclusion of a constant will be determined by minimising \code{ic}. 48 | 49 | \preformatted{ 50 | xreg(..., fixed = list()) 51 | } 52 | 53 | \tabular{ll}{ 54 | \code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)})\cr 55 | \code{fixed} \tab A named list of fixed parameters for coefficients. The names identify the coefficient, and should match the name of the regressor. For example, \code{fixed = list(constant = 20)}. 56 | } 57 | } 58 | } 59 | 60 | \examples{ 61 | luteinizing_hormones <- as_tsibble(lh) 62 | fit <- luteinizing_hormones \%>\% 63 | model(AR(value ~ order(3))) 64 | 65 | report(fit) 66 | 67 | fit \%>\% 68 | forecast() \%>\% 69 | autoplot(luteinizing_hormones) 70 | } 71 | \seealso{ 72 | \href{https://otexts.com/fpp3/AR.html}{Forecasting: Principles and Practices, Vector autoregressions (section 11.2)} 73 | } 74 | -------------------------------------------------------------------------------- /man/CROSTON.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/croston.R 3 | \name{CROSTON} 4 | \alias{CROSTON} 5 | \title{Croston's method} 6 | \usage{ 7 | CROSTON( 8 | formula, 9 | opt_crit = c("mse", "mae"), 10 | type = c("croston", "sba", "sbj"), 11 | ... 12 | ) 13 | } 14 | \arguments{ 15 | \item{formula}{Model specification (see "Specials" section).} 16 | 17 | \item{opt_crit}{The optimisation criterion used to optimise the parameters.} 18 | 19 | \item{type}{Which variant of Croston's method to use. Defaults to \code{"croston"} for 20 | Croston's method, but can also be set to \code{"sba"} for the Syntetos-Boylan 21 | approximation, and \code{"sbj"} for the Shale-Boylan-Johnston method.} 22 | 23 | \item{...}{Not used.} 24 | } 25 | \value{ 26 | A model specification. 27 | } 28 | \description{ 29 | Based on Croston's (1972) method for intermittent demand forecasting, also described in Shenstone and Hyndman (2005). Croston's method involves using simple exponential smoothing (SES) on the non-zero elements of the time series and a separate application of SES to the times between non-zero elements of the time series. 
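As a quick illustration of the `type` argument described above, the following sketch (the simulated series mirrors the package's own example; the comparison itself is purely illustrative) fits Croston's original method alongside the Syntetos-Boylan and Shale-Boylan-Johnston variants:

```r
library(fable)
library(tsibble)

set.seed(1)
sim_poisson <- tsibble(
  time = yearmonth("2012 Dec") + seq_len(24),
  count = rpois(24, lambda = 0.3),
  index = time
)

# The three variants differ only by a multiplicative correction factor
# applied to the point forecasts.
sim_poisson %>%
  model(
    croston = CROSTON(count),
    sba     = CROSTON(count, type = "sba"),
    sbj     = CROSTON(count, type = "sbj")
  ) %>%
  forecast(h = "1 year")
```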
30 | } 31 | \details{ 32 | Note that forecast distributions are not computed as Croston's method has no 33 | underlying stochastic model. In a later update, we plan to support distributions via 34 | the equivalent stochastic models that underly Croston's method (Shenstone and 35 | Hyndman, 2005) 36 | 37 | There are two variant methods available which apply multiplicative correction factors 38 | to the forecasts that result from the original Croston's method. For the 39 | Syntetos-Boylan approximation (\code{type = "sba"}), this factor is \eqn{1 - \alpha / 2}, 40 | and for the Shale-Boylan-Johnston method (\code{type = "sbj"}), this factor is 41 | \eqn{1 - \alpha / (2 - \alpha)}, where \eqn{\alpha} is the smoothing parameter for 42 | the interval SES application. 43 | } 44 | \section{Specials}{ 45 | 46 | 47 | \subsection{demand}{ 48 | The \code{demand} special specifies parameters for the demand SES application. 49 | \preformatted{ 50 | demand(initial = NULL, param = NULL, param_range = c(0, 1)) 51 | } 52 | 53 | \tabular{ll}{ 54 | \code{initial} \tab The initial value for the demand application of SES. \cr 55 | \code{param} \tab The smoothing parameter for the demand application of SES. \cr 56 | \code{param_range} \tab If \code{param = NULL}, the range of values over which to search for the smoothing parameter. 57 | } 58 | } 59 | 60 | \subsection{interval}{ 61 | The \code{interval} special specifies parameters for the interval SES application. 62 | \preformatted{ 63 | interval(initial = NULL, param = NULL, param_range = c(0, 1)) 64 | } 65 | 66 | \tabular{ll}{ 67 | \code{initial} \tab The initial value for the interval application of SES. \cr 68 | \code{param} \tab The smoothing parameter for the interval application of SES. \cr 69 | \code{param_range} \tab If \code{param = NULL}, the range of values over which to search for the smoothing parameter. 70 | } 71 | } 72 | } 73 | 74 | \examples{ 75 | library(tsibble) 76 | sim_poisson <- tsibble( 77 | time = yearmonth("2012 Dec") + seq_len(24), 78 | count = rpois(24, lambda = 0.3), 79 | index = time 80 | ) 81 | 82 | sim_poisson \%>\% 83 | autoplot(count) 84 | 85 | sim_poisson \%>\% 86 | model(CROSTON(count)) \%>\% 87 | forecast(h = "2 years") \%>\% 88 | autoplot(sim_poisson) 89 | } 90 | \references{ 91 | Croston, J. (1972) "Forecasting and stock control for 92 | intermittent demands", \emph{Operational Research Quarterly}, \bold{23}(3), 93 | 289-303. 94 | 95 | Shenstone, L., and Hyndman, R.J. (2005) "Stochastic models underlying 96 | Croston's method for intermittent demand forecasting". \emph{Journal of 97 | Forecasting}, \bold{24}, 389-402. 98 | 99 | Kourentzes, N. (2014) "On intermittent demand model optimisation and 100 | selection". \emph{International Journal of Production Economics}, \bold{156}, 101 | 180-190. \doi{10.1016/j.ijpe.2014.06.007}. 102 | } 103 | -------------------------------------------------------------------------------- /man/ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{ETS} 4 | \alias{ETS} 5 | \alias{report.ETS} 6 | \title{Exponential smoothing state space model} 7 | \usage{ 8 | ETS( 9 | formula, 10 | opt_crit = c("lik", "amse", "mse", "sigma", "mae"), 11 | nmse = 3, 12 | bounds = c("both", "usual", "admissible"), 13 | ic = c("aicc", "aic", "bic"), 14 | restrict = TRUE, 15 | ... 
16 | ) 17 | } 18 | \arguments{ 19 | \item{formula}{Model specification (see "Specials" section).} 20 | 21 | \item{opt_crit}{The optimization criterion. Defaults to the log-likelihood 22 | \code{"lik"}, but can also be set to \code{"mse"} (Mean Square Error), \code{"amse"} 23 | (Average MSE over first \code{nmse} forecast horizons), \code{"sigma"} (Standard 24 | deviation of residuals), or \code{"mae"} (Mean Absolute Error).} 25 | 26 | \item{nmse}{If \code{opt_crit == "amse"}, \code{nmse} provides the number of steps for 27 | average multistep MSE (\verb{1<=nmse<=30}).} 28 | 29 | \item{bounds}{Type of parameter space to impose: \code{"usual"} indicates 30 | all parameters must lie between specified lower and upper bounds; 31 | \code{"admissible"} indicates parameters must lie in the admissible space; 32 | \code{"both"} (default) takes the intersection of these regions.} 33 | 34 | \item{ic}{The information criterion used in selecting the model.} 35 | 36 | \item{restrict}{If TRUE (default), the models with infinite variance will not 37 | be allowed. These restricted model components are AMM, AAM, AMA, and MMA.} 38 | 39 | \item{...}{Other arguments} 40 | } 41 | \value{ 42 | A model specification. 43 | } 44 | \description{ 45 | Returns ETS model specified by the formula. 46 | } 47 | \details{ 48 | Based on the classification of methods as described in Hyndman et al (2008). 49 | 50 | The methodology is fully automatic. The model is chosen automatically if not 51 | specified. This methodology performed extremely well on the M3-competition 52 | data. (See Hyndman, et al, 2002, below.) 53 | } 54 | \section{Specials}{ 55 | 56 | 57 | The \emph{specials} define the methods and parameters for the components (error, trend, and seasonality) of an ETS model. If more than one method is specified, \code{ETS} will consider all combinations of the specified models and select the model which best fits the data (minimising \code{ic}). The method argument for each specials have reasonable defaults, so if a component is not specified an appropriate method will be chosen automatically. 58 | 59 | There are a couple of limitations to note about ETS models: 60 | \itemize{ 61 | \item It does not support exogenous regressors. 62 | \item It does not support missing values. You can complete missing values in the data with imputed values (e.g. with \code{\link[tidyr:fill]{tidyr::fill()}}, or by fitting a different model type and then calling \code{\link[fabletools:reexports]{fabletools::interpolate()}}) before fitting the model. 63 | } 64 | 65 | \subsection{error}{ 66 | The \code{error} special is used to specify the form of the error term. 67 | \preformatted{ 68 | error(method = c("A", "M")) 69 | } 70 | 71 | \tabular{ll}{ 72 | \code{method} \tab The form of the error term: either additive ("A") or multiplicative ("M"). If the error is multiplicative, the data must be non-negative. All specified methods are tested on the data, and the one that gives the best fit (lowest \code{ic}) will be kept. 73 | } 74 | } 75 | 76 | \subsection{trend}{ 77 | The \code{trend} special is used to specify the form of the trend term and associated parameters. 78 | \preformatted{ 79 | trend(method = c("N", "A", "Ad"), 80 | alpha = NULL, alpha_range = c(1e-04, 0.9999), 81 | beta = NULL, beta_range = c(1e-04, 0.9999), 82 | phi = NULL, phi_range = c(0.8, 0.98)) 83 | } 84 | 85 | \tabular{ll}{ 86 | \code{method} \tab The form of the trend term: either none ("N"), additive ("A"), multiplicative ("M") or damped variants ("Ad", "Md"). 
All specified methods are tested on the data, and the one that gives the best fit (lowest \code{ic}) will be kept.\cr 87 | \code{alpha} \tab The value of the smoothing parameter for the level. If \code{alpha = 0}, the level will not change over time. Conversely, if \code{alpha = 1} the level will update similarly to a random walk process. \cr 88 | \code{alpha_range} \tab If \code{alpha=NULL}, \code{alpha_range} provides bounds for the optimised value of \code{alpha}.\cr 89 | \code{beta} \tab The value of the smoothing parameter for the slope. If \code{beta = 0}, the slope will not change over time. Conversely, if \code{beta = 1} the slope will have no memory of past slopes. \cr 90 | \code{beta_range} \tab If \code{beta=NULL}, \code{beta_range} provides bounds for the optimised value of \code{beta}.\cr 91 | \code{phi} \tab The value of the dampening parameter for the slope. If \code{phi = 0}, the slope will be dampened immediately (no slope). Conversely, if \code{phi = 1} the slope will not be dampened. \cr 92 | \code{phi_range} \tab If \code{phi=NULL}, \code{phi_range} provides bounds for the optimised value of \code{phi}. 93 | } 94 | } 95 | 96 | \subsection{season}{ 97 | The \code{season} special is used to specify the form of the seasonal term and associated parameters. To specify a nonseasonal model you would include \code{season(method = "N")}. 98 | \preformatted{ 99 | season(method = c("N", "A", "M"), period = NULL, 100 | gamma = NULL, gamma_range = c(1e-04, 0.9999)) 101 | } 102 | 103 | \tabular{ll}{ 104 | \code{method} \tab The form of the seasonal term: either none ("N"), additive ("A") or multiplicative ("M"). All specified methods are tested on the data, and the one that gives the best fit (lowest \code{ic}) will be kept.\cr 105 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). \cr 106 | \code{gamma} \tab The value of the smoothing parameter for the seasonal pattern. If \code{gamma = 0}, the seasonal pattern will not change over time. Conversely, if \code{gamma = 1} the seasonality will have no memory of past seasonal periods. \cr 107 | \code{gamma_range} \tab If \code{gamma=NULL}, \code{gamma_range} provides bounds for the optimised value of \code{gamma}. 108 | } 109 | } 110 | } 111 | 112 | \examples{ 113 | as_tsibble(USAccDeaths) \%>\% 114 | model(ETS(log(value) ~ season("A"))) 115 | } 116 | \references{ 117 | Hyndman, R.J., Koehler, A.B., Snyder, R.D., and Grose, S. (2002) 118 | "A state space framework for automatic forecasting using exponential 119 | smoothing methods", \emph{International J. Forecasting}, \bold{18}(3), 120 | 439--454. 121 | 122 | Hyndman, R.J., Akram, Md., and Archibald, B. (2008) "The admissible 123 | parameter space for exponential smoothing models". \emph{Annals of 124 | Statistical Mathematics}, \bold{60}(2), 407--426. 125 | 126 | Hyndman, R.J., Koehler, A.B., Ord, J.K., and Snyder, R.D. (2008) 127 | \emph{Forecasting with exponential smoothing: the state space approach}, 128 | Springer-Verlag. \url{http://www.exponentialsmoothing.net}. 
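To connect the specials above with the automatic selection behaviour, here is a minimal sketch (the manual ETS(A,Ad,A) specification is an illustrative choice, not a recommendation from the documentation) comparing an automatically selected model with a fully specified one:

```r
library(fable)
library(tsibble)

as_tsibble(USAccDeaths) %>%
  model(
    # methods for error, trend and season chosen by minimising ic
    auto   = ETS(log(value)),
    # fully specified ETS(A,Ad,A) model
    manual = ETS(log(value) ~ error("A") + trend("Ad") + season("A"))
  ) %>%
  glance()
```

`glance()` returns the fit statistics (including AICc) for both specifications side by side.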
129 | } 130 | \seealso{ 131 | \href{https://otexts.com/fpp3/expsmooth.html}{Forecasting: Principles and Practices, Exponential smoothing (chapter 8)} 132 | } 133 | -------------------------------------------------------------------------------- /man/IRF.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{IRF.ARIMA} 4 | \alias{IRF.ARIMA} 5 | \title{Calculate impulse responses from a fable model} 6 | \usage{ 7 | \method{IRF}{ARIMA}(x, new_data, specials, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \description{ 19 | Calculate impulse responses from a fable model 20 | } 21 | -------------------------------------------------------------------------------- /man/IRF.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{IRF.VAR} 4 | \alias{IRF.VAR} 5 | \title{Calculate impulse responses from a fable model} 6 | \usage{ 7 | \method{IRF}{VAR}(x, new_data, specials, impulse = NULL, orthogonal = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{impulse}{A character string specifying the name of the variable that is shocked (the impulse variable).} 17 | 18 | \item{orthogonal}{If TRUE, orthogonalised impulse responses will be computed.} 19 | 20 | \item{...}{Other arguments passed to methods} 21 | } 22 | \description{ 23 | Simulates future paths from a dataset using a fitted model. Innovations are 24 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 25 | innovations will be sampled from the model's residuals. If \code{new_data} 26 | contains the \code{.innov} column, those values will be treated as innovations. 27 | } 28 | -------------------------------------------------------------------------------- /man/IRF.VECM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/vecm.R 3 | \name{IRF.VECM} 4 | \alias{IRF.VECM} 5 | \title{Calculate impulse responses from a fable model} 6 | \usage{ 7 | \method{IRF}{VECM}(x, new_data, specials, impulse = NULL, orthogonal = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{impulse}{A character string specifying the name of the variable that is shocked (the impulse variable).} 17 | 18 | \item{orthogonal}{If TRUE, orthogonalised impulse responses will be computed.} 19 | 20 | \item{...}{Other arguments passed to methods} 21 | } 22 | \description{ 23 | Simulates future paths from a dataset using a fitted model. 
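The method above is the engine behind the user-facing `IRF()` generic from fabletools. A hedged sketch of how it might be called on a fitted VAR follows; the `h`, `impulse`, and `orthogonal` arguments are assumed to be forwarded to this method, so treat the exact interface as an assumption rather than documented behaviour:

```r
library(fable)
library(tsibble)

lung_deaths <- cbind(mdeaths, fdeaths) %>%
  as_tsibble(pivot_longer = FALSE)

fit <- lung_deaths %>%
  model(VAR(vars(mdeaths, fdeaths) ~ AR(3)))

# Orthogonalised impulse responses to a shock in mdeaths over 12 periods
# (argument names assumed, as noted above).
fit %>% IRF(h = 12, impulse = "mdeaths", orthogonal = TRUE)
```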
Innovations are 24 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 25 | innovations will be sampled from the model's residuals. If \code{new_data} 26 | contains the \code{.innov} column, those values will be treated as innovations. 27 | } 28 | -------------------------------------------------------------------------------- /man/MEAN.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{MEAN} 4 | \alias{MEAN} 5 | \alias{report.model_mean} 6 | \title{Mean models} 7 | \usage{ 8 | MEAN(formula, ...) 9 | } 10 | \arguments{ 11 | \item{formula}{Model specification.} 12 | 13 | \item{...}{Not used.} 14 | } 15 | \value{ 16 | A model specification. 17 | } 18 | \description{ 19 | \code{MEAN()} returns an iid model applied to the formula's response variable. 20 | } 21 | \section{Specials}{ 22 | 23 | 24 | \subsection{window}{ 25 | The \code{window} special is used to specify a rolling window for the mean. 26 | \preformatted{ 27 | window(size = NULL) 28 | } 29 | 30 | \tabular{ll}{ 31 | \code{size} \tab The size (number of observations) for the rolling window. If NULL (default), a rolling window will not be used. 32 | } 33 | } 34 | } 35 | 36 | \examples{ 37 | library(tsibbledata) 38 | vic_elec \%>\% 39 | model(avg = MEAN(Demand)) 40 | } 41 | \seealso{ 42 | \href{https://otexts.com/fpp3/simple-methods.html}{Forecasting: Principles and Practices, Some simple forecasting methods (section 3.2)} 43 | } 44 | -------------------------------------------------------------------------------- /man/NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{NNETAR} 4 | \alias{NNETAR} 5 | \alias{report.NNETAR} 6 | \title{Neural Network Time Series Forecasts} 7 | \usage{ 8 | NNETAR(formula, n_nodes = NULL, n_networks = 20, scale_inputs = TRUE, ...) 9 | } 10 | \arguments{ 11 | \item{formula}{Model specification (see "Specials" section).} 12 | 13 | \item{n_nodes}{Number of nodes in the hidden layer. Default is half of the 14 | number of input nodes (including external regressors, if given) plus 1.} 15 | 16 | \item{n_networks}{Number of networks to fit with different random starting 17 | weights. These are then averaged when producing forecasts.} 18 | 19 | \item{scale_inputs}{If TRUE, inputs are scaled by subtracting the column 20 | means and dividing by their respective standard deviations. Scaling is 21 | applied after transformations.} 22 | 23 | \item{...}{Other arguments passed to \code{\link[nnet:nnet]{nnet::nnet()}}.} 24 | } 25 | \value{ 26 | A model specification. 27 | } 28 | \description{ 29 | Feed-forward neural networks with a single hidden layer and lagged inputs 30 | for forecasting univariate time series. 31 | } 32 | \details{ 33 | A feed-forward neural network is fitted with lagged values of the response as 34 | inputs and a single hidden layer with \code{size} nodes. The inputs are for 35 | lags 1 to \code{p}, and lags \code{m} to \code{mP} where 36 | \code{m} is the seasonal period specified. 37 | 38 | If exogenous regressors are provided, its columns are also used as inputs. 39 | Missing values are currently not supported by this model. 40 | A total of \code{repeats} networks are 41 | fitted, each with random starting weights. These are then averaged when 42 | computing forecasts. 
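To illustrate the `window()` special of `MEAN()` documented above, a rolling-window mean can be specified in the model formula; the one-week window below (336 half-hourly observations) is an arbitrary illustration:

```r
library(fable)
library(tsibbledata)

# Rolling mean over the most recent 336 observations (one week of
# half-hourly data) rather than the full-sample mean.
vic_elec %>%
  model(rolling_avg = MEAN(Demand ~ window(size = 336)))
```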
The network is trained for one-step forecasting. 43 | Multi-step forecasts are computed recursively. 44 | 45 | For non-seasonal data, the fitted model is denoted as an NNAR(p,k) model, 46 | where k is the number of hidden nodes. This is analogous to an AR(p) model 47 | but with non-linear functions. For seasonal data, the fitted model is called 48 | an NNAR(p,P,k)[m] model, which is analogous to an ARIMA(p,0,0)(P,0,0)[m] 49 | model but with non-linear functions. 50 | } 51 | \section{Specials}{ 52 | 53 | 54 | \subsection{AR}{ 55 | The \code{AR} special is used to specify auto-regressive components in each of the 56 | nodes of the neural network. 57 | 58 | \preformatted{ 59 | AR(p = NULL, P = 1, period = NULL) 60 | } 61 | 62 | \tabular{ll}{ 63 | \code{p} \tab The order of the non-seasonal auto-regressive (AR) terms. If \code{p = NULL}, an optimal number of lags will be selected for a linear AR(p) model via AIC. For seasonal time series, this will be computed on the seasonally adjusted data (via STL decomposition). \cr 64 | \code{P} \tab The order of the seasonal auto-regressive (SAR) terms. \cr 65 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). 66 | } 67 | } 68 | 69 | \subsection{xreg}{ 70 | Exogenous regressors can be included in an NNETAR model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}. 71 | \preformatted{ 72 | xreg(...) 73 | } 74 | 75 | \tabular{ll}{ 76 | \code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)}) 77 | } 78 | } 79 | } 80 | 81 | \examples{ 82 | as_tsibble(airmiles) \%>\% 83 | model(nn = NNETAR(box_cox(value, 0.15))) 84 | } 85 | \seealso{ 86 | \href{https://otexts.com/fpp2/nnetar.html}{Forecasting: Principles and Practices, Neural network models (section 11.3)} 87 | } 88 | -------------------------------------------------------------------------------- /man/RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{RW} 4 | \alias{RW} 5 | \alias{report.RW} 6 | \alias{NAIVE} 7 | \alias{SNAIVE} 8 | \title{Random walk models} 9 | \usage{ 10 | RW(formula, ...) 11 | 12 | NAIVE(formula, ...) 13 | 14 | SNAIVE(formula, ...) 15 | } 16 | \arguments{ 17 | \item{formula}{Model specification (see "Specials" section).} 18 | 19 | \item{...}{Not used.} 20 | } 21 | \value{ 22 | A model specification. 23 | } 24 | \description{ 25 | \code{RW()} returns a random walk model, which is equivalent to an ARIMA(0,1,0) 26 | model with an optional drift coefficient included using \code{drift()}. \code{naive()} is simply a wrapper 27 | to \code{rwf()} for simplicity. \code{snaive()} returns forecasts and 28 | prediction intervals from an ARIMA(0,0,0)(0,1,0)m model where m is the 29 | seasonal period. 30 | } 31 | \details{ 32 | The random walk with drift model is \deqn{Y_t=c + Y_{t-1} + Z_t}{Y[t]=c + 33 | Y[t-1] + Z[t]} where \eqn{Z_t}{Z[t]} is a normal iid error. Forecasts are 34 | given by \deqn{Y_n(h)=ch+Y_n}{Y[n+h]=ch+Y[n]}. 
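As a concrete illustration of the `AR()` special for `NNETAR()` described above, an NNAR(3,1)[12]-style model with explicit non-seasonal and seasonal lags might be specified as follows (the lag orders, transformation, and small number of simulated sample paths are illustrative):

```r
library(fable)
library(tsibble)

as_tsibble(USAccDeaths) %>%
  model(nn = NNETAR(log(value) ~ AR(p = 3, P = 1))) %>%
  forecast(h = "1 year", times = 10)  # times = 10 keeps the simulation cheap
```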
If there is no drift (as in 35 | \code{naive}), the drift parameter c=0. Forecast standard errors allow for 36 | uncertainty in estimating the drift parameter (unlike the corresponding 37 | forecasts obtained by fitting an ARIMA model directly). 38 | 39 | The seasonal naive model is \deqn{Y_t= Y_{t-m} + Z_t}{Y[t]=Y[t-m] + Z[t]} 40 | where \eqn{Z_t}{Z[t]} is a normal iid error. 41 | } 42 | \section{Specials}{ 43 | 44 | 45 | \subsection{lag}{ 46 | The \code{lag} special is used to specify the lag order for the random walk process. 47 | If left out, this special will automatically be included. 48 | 49 | \preformatted{ 50 | lag(lag = NULL) 51 | } 52 | 53 | \tabular{ll}{ 54 | \code{lag} \tab The lag order for the random walk process. If \code{lag = m}, forecasts will return the observation from \code{m} time periods ago. This can also be provided as text indicating the duration of the lag window (for example, annual seasonal lags would be "1 year"). 55 | } 56 | } 57 | 58 | \subsection{drift}{ 59 | The \code{drift} special can be used to include a drift/trend component into the model. By default, drift is not included unless \code{drift()} is included in the formula. 60 | 61 | \preformatted{ 62 | drift(drift = TRUE) 63 | } 64 | 65 | \tabular{ll}{ 66 | \code{drift} \tab If \code{drift = TRUE}, a drift term will be included in the model. 67 | } 68 | } 69 | } 70 | 71 | \examples{ 72 | library(tsibbledata) 73 | aus_production \%>\% 74 | model(rw = RW(Beer ~ drift())) 75 | 76 | as_tsibble(Nile) \%>\% 77 | model(NAIVE(value)) 78 | library(tsibbledata) 79 | aus_production \%>\% 80 | model(snaive = SNAIVE(Beer ~ lag("year"))) 81 | } 82 | \seealso{ 83 | \href{https://otexts.com/fpp3/simple-methods.html}{Forecasting: Principles and Practices, Some simple forecasting methods (section 3.2)} 84 | } 85 | -------------------------------------------------------------------------------- /man/THETA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{THETA} 4 | \alias{THETA} 5 | \title{Theta method} 6 | \usage{ 7 | THETA(formula, ...) 8 | } 9 | \arguments{ 10 | \item{formula}{Model specification.} 11 | 12 | \item{...}{Not used.} 13 | } 14 | \value{ 15 | A model specification. 16 | } 17 | \description{ 18 | The theta method of Assimakopoulos and Nikolopoulos (2000) is equivalent to 19 | simple exponential smoothing with drift. This is demonstrated in Hyndman and 20 | Billah (2003). 21 | } 22 | \details{ 23 | The series is tested for seasonality using the test outlined in A&N. If 24 | deemed seasonal, the series is seasonally adjusted using a classical 25 | multiplicative decomposition before applying the theta method. The resulting 26 | forecasts are then reseasonalized. 27 | 28 | More general theta methods are available in the forecTheta package. 29 | } 30 | \section{Specials}{ 31 | 32 | 33 | \subsection{season}{ 34 | The \code{season} special is used to specify the parameters of the seasonal adjustment via classical decomposition. 35 | \preformatted{ 36 | season(period = NULL, method = c("multiplicative", "additive")) 37 | } 38 | 39 | \tabular{ll}{ 40 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). 
\cr 41 | \code{method} \tab The type of classical decomposition to apply. The original Theta method always used multiplicative seasonal decomposition, and so this is the default. 42 | } 43 | } 44 | } 45 | 46 | \examples{ 47 | # Theta method with transform 48 | deaths <- as_tsibble(USAccDeaths) 49 | deaths \%>\% 50 | model(theta = THETA(log(value))) \%>\% 51 | forecast(h = "4 years") \%>\% 52 | autoplot(deaths) 53 | 54 | # Compare seasonal specifications 55 | library(tsibbledata) 56 | library(dplyr) 57 | aus_retail \%>\% 58 | filter(Industry == "Clothing retailing") \%>\% 59 | model(theta_multiplicative = THETA(Turnover ~ season(method = "multiplicative")), 60 | theta_additive = THETA(Turnover ~ season(method = "additive"))) \%>\% 61 | accuracy() 62 | } 63 | \references{ 64 | Assimakopoulos, V. and Nikolopoulos, K. (2000). The theta model: 65 | a decomposition approach to forecasting. \emph{International Journal of 66 | Forecasting} \bold{16}, 521-530. 67 | 68 | Hyndman, R.J., and Billah, B. (2003) Unmasking the Theta method. 69 | \emph{International J. Forecasting}, \bold{19}, 287-290. 70 | } 71 | \author{ 72 | Rob J Hyndman, Mitchell O'Hara-Wild 73 | } 74 | -------------------------------------------------------------------------------- /man/TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{TSLM} 4 | \alias{TSLM} 5 | \alias{report.TSLM} 6 | \title{Fit a linear model with time series components} 7 | \usage{ 8 | TSLM(formula) 9 | } 10 | \arguments{ 11 | \item{formula}{Model specification.} 12 | } 13 | \value{ 14 | A model specification. 15 | } 16 | \description{ 17 | The model formula will be handled using \code{\link[stats:model.matrix]{stats::model.matrix()}}, and so 18 | the the same approach to include interactions in \code{\link[stats:lm]{stats::lm()}} applies when 19 | specifying the \code{formula}. In addition to \code{\link[stats:lm]{stats::lm()}}, it is possible to 20 | include \code{\link{common_xregs}} in the model formula, such as \code{trend()}, \code{season()}, 21 | and \code{fourier()}. 22 | } 23 | \section{Specials}{ 24 | 25 | 26 | \subsection{xreg}{ 27 | Exogenous regressors can be included in a TSLM model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}. 28 | \preformatted{ 29 | xreg(...) 
30 | } 31 | 32 | \tabular{ll}{ 33 | \code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)}) 34 | } 35 | } 36 | } 37 | 38 | \examples{ 39 | as_tsibble(USAccDeaths) \%>\% 40 | model(lm = TSLM(log(value) ~ trend() + season())) 41 | 42 | library(tsibbledata) 43 | olympic_running \%>\% 44 | model(TSLM(Time ~ trend())) \%>\% 45 | interpolate(olympic_running) 46 | } 47 | \seealso{ 48 | \code{\link[stats:lm]{stats::lm()}}, \code{\link[stats:model.matrix]{stats::model.matrix()}} 49 | \href{https://otexts.com/fpp3/regression.html}{Forecasting: Principles and Practices, Time series regression models (chapter 6)} 50 | } 51 | -------------------------------------------------------------------------------- /man/VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{VAR} 4 | \alias{VAR} 5 | \alias{report.VAR} 6 | \title{Estimate a VAR model} 7 | \usage{ 8 | VAR(formula, ic = c("aicc", "aic", "bic"), ...) 9 | } 10 | \arguments{ 11 | \item{formula}{Model specification (see "Specials" section).} 12 | 13 | \item{ic}{The information criterion used in selecting the model.} 14 | 15 | \item{...}{Further arguments for arima} 16 | } 17 | \value{ 18 | A model specification. 19 | } 20 | \description{ 21 | Searches through the vector of lag orders to find the best VAR model which 22 | has lowest AIC, AICc or BIC value. It is implemented using OLS per equation. 23 | } 24 | \details{ 25 | Exogenous regressors and \code{\link{common_xregs}} can be specified in the model 26 | formula. 27 | } 28 | \section{Specials}{ 29 | 30 | 31 | \subsection{AR}{ 32 | The \code{AR} special is used to specify the lag order for the auto-regression. 33 | \preformatted{ 34 | AR(p = 0:5) 35 | } 36 | 37 | \tabular{ll}{ 38 | \code{p} \tab The order of the auto-regressive (AR) terms. If multiple values are provided, the one which minimises \code{ic} will be chosen.\cr 39 | } 40 | } 41 | 42 | \subsection{xreg}{ 43 | Exogenous regressors can be included in an VAR model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}. 44 | 45 | The inclusion of a constant in the model follows the similar rules to \code{\link[stats:lm]{stats::lm()}}, where including \code{1} will add a constant and \code{0} or \code{-1} will remove the constant. If left out, the inclusion of a constant will be determined by minimising \code{ic}. 46 | 47 | \preformatted{ 48 | xreg(...) 
49 | } 50 | 51 | \tabular{ll}{ 52 | \code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)}) 53 | } 54 | } 55 | } 56 | 57 | \examples{ 58 | 59 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 60 | as_tsibble(pivot_longer = FALSE) 61 | 62 | fit <- lung_deaths \%>\% 63 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) 64 | 65 | report(fit) 66 | 67 | fit \%>\% 68 | forecast() \%>\% 69 | autoplot(lung_deaths) 70 | } 71 | \seealso{ 72 | \href{https://otexts.com/fpp2/VAR.html}{Forecasting: Principles and Practices, Vector autoregressions (section 11.2)} 73 | } 74 | -------------------------------------------------------------------------------- /man/VECM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/vecm.R 3 | \name{VECM} 4 | \alias{VECM} 5 | \title{Estimate a VECM model} 6 | \usage{ 7 | VECM(formula, ic = c("aicc", "aic", "bic"), r = 1L, ...) 8 | } 9 | \arguments{ 10 | \item{formula}{Model specification (see "Specials" section).} 11 | 12 | \item{ic}{The information criterion used in selecting the model.} 13 | 14 | \item{r}{The number of cointegrating relationships} 15 | 16 | \item{...}{Further arguments for arima} 17 | } 18 | \value{ 19 | A model specification. 20 | } 21 | \description{ 22 | Searches through the vector of lag orders to find the best VECM model which 23 | has lowest AIC, AICc or BIC value. The model is estimated using the Johansen 24 | procedure (maximum likelihood). 25 | } 26 | \details{ 27 | Exogenous regressors and \code{\link{common_xregs}} can be specified in the model 28 | formula. 29 | } 30 | \section{Specials}{ 31 | 32 | 33 | \subsection{AR}{ 34 | The \code{AR} special is used to specify the lag order for the auto-regression. 35 | \preformatted{ 36 | AR(p = 0:5) 37 | } 38 | 39 | \tabular{ll}{ 40 | \code{p} \tab The order of the auto-regressive (AR) terms. If multiple values are provided, the one which minimises \code{ic} will be chosen.\cr 41 | } 42 | } 43 | 44 | \subsection{xreg}{ 45 | Exogenous regressors can be included in an VECM model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}. 46 | 47 | The inclusion of a constant in the model follows the similar rules to \code{\link[stats:lm]{stats::lm()}}, where including \code{1} will add a constant and \code{0} or \code{-1} will remove the constant. If left out, the inclusion of a constant will be determined by minimising \code{ic}. 48 | 49 | \preformatted{ 50 | xreg(...) 
51 | } 52 | 53 | \tabular{ll}{ 54 | \code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)}) 55 | } 56 | } 57 | } 58 | 59 | \examples{ 60 | 61 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 62 | as_tsibble(pivot_longer = FALSE) 63 | 64 | fit <- lung_deaths \%>\% 65 | model(VECM(vars(mdeaths, fdeaths) ~ AR(3))) 66 | 67 | report(fit) 68 | 69 | fit \%>\% 70 | forecast() \%>\% 71 | autoplot(lung_deaths) 72 | } 73 | -------------------------------------------------------------------------------- /man/breusch_godfrey.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{breusch_godfrey} 4 | \alias{breusch_godfrey} 5 | \alias{breusch_godfrey.TSLM} 6 | \title{Breusch-Godfrey Test} 7 | \usage{ 8 | breusch_godfrey(x, ...) 9 | 10 | \method{breusch_godfrey}{TSLM}(x, order = 1, type = c("Chisq", "F"), ...) 11 | } 12 | \arguments{ 13 | \item{x}{A model object to be tested.} 14 | 15 | \item{...}{Further arguments for methods.} 16 | 17 | \item{order}{The maximum order of serial correlation to test for.} 18 | 19 | \item{type}{The type of test statistic to use.} 20 | } 21 | \description{ 22 | Breusch-Godfrey test for higher-order serial correlation. 23 | } 24 | \seealso{ 25 | \code{\link[lmtest:bgtest]{lmtest::bgtest()}} 26 | } 27 | -------------------------------------------------------------------------------- /man/common_xregs.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/00_specials.R 3 | \docType{data} 4 | \name{common_xregs} 5 | \alias{common_xregs} 6 | \title{Common exogenous regressors} 7 | \usage{ 8 | common_xregs 9 | } 10 | \description{ 11 | These special functions provide interfaces to more complicated functions within 12 | the model formulae interface. 13 | } 14 | \section{Specials}{ 15 | 16 | 17 | \subsection{trend}{ 18 | The \code{trend} special includes common linear trend regressors in the model. It also supports piecewise linear trend via the \code{knots} argument. 19 | \preformatted{ 20 | trend(knots = NULL, origin = NULL) 21 | } 22 | 23 | \tabular{ll}{ 24 | \code{knots} \tab A vector of times (same class as the data's time index) identifying the position of knots for a piecewise linear trend.\cr 25 | \code{origin} \tab An optional time value to act as the starting time for the trend. 26 | } 27 | } 28 | 29 | \subsection{season}{ 30 | The \code{season} special includes seasonal dummy variables in the model. 31 | \preformatted{ 32 | season(period = NULL) 33 | } 34 | 35 | \tabular{ll}{ 36 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). 37 | } 38 | } 39 | 40 | \subsection{fourier}{ 41 | The \code{fourier} special includes seasonal fourier terms in the model. The maximum order of the fourier terms must be specified using \code{K}. 42 | \preformatted{ 43 | fourier(period = NULL, K, origin = NULL) 44 | } 45 | 46 | \tabular{ll}{ 47 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). 
\cr 48 | \code{K} \tab The maximum order of the fourier terms.\cr 49 | \code{origin} \tab An optional time value to act as the starting time for the fourier series. 50 | } 51 | } 52 | } 53 | 54 | \keyword{internal} 55 | -------------------------------------------------------------------------------- /man/components.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{components.ETS} 4 | \alias{components.ETS} 5 | \title{Extract estimated states from an ETS model.} 6 | \usage{ 7 | \method{components}{ETS}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{An estimated model.} 11 | 12 | \item{...}{Unused.} 13 | } 14 | \value{ 15 | A \code{\link[fabletools:dable]{fabletools::dable()}} containing estimated states. 16 | } 17 | \description{ 18 | Extract estimated states from an ETS model. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 23 | components() 24 | } 25 | -------------------------------------------------------------------------------- /man/fable-package.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/fable-package.R 3 | \docType{package} 4 | \name{fable-package} 5 | \alias{fable} 6 | \alias{fable-package} 7 | \title{fable: Forecasting Models for Tidy Time Series} 8 | \description{ 9 | \if{html}{\figure{logo.png}{options: style='float: right' alt='logo' width='120'}} 10 | 11 | Provides a collection of commonly used univariate and multivariate time series forecasting models including automatically selected exponential smoothing (ETS) and autoregressive integrated moving average (ARIMA) models. These models work within the 'fable' framework provided by the 'fabletools' package, which provides the tools to evaluate, visualise, and combine models in a workflow consistent with the tidyverse. 
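The `trend()`, `season()`, and `fourier()` regressors documented above can be used directly inside model formulae such as `TSLM()`. A minimal sketch (the Fourier order K = 2 and the log transformation are illustrative choices):

```r
library(fable)
library(tsibble)

# Harmonic regression: linear trend plus Fourier terms of order K = 2.
as_tsibble(USAccDeaths) %>%
  model(harmonic = TSLM(log(value) ~ trend() + fourier(K = 2))) %>%
  report()
```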
12 | } 13 | \seealso{ 14 | Useful links: 15 | \itemize{ 16 | \item \url{https://fable.tidyverts.org} 17 | \item \url{https://github.com/tidyverts/fable} 18 | \item Report bugs at \url{https://github.com/tidyverts/fable/issues} 19 | } 20 | 21 | } 22 | \author{ 23 | \strong{Maintainer}: Mitchell O'Hara-Wild \email{mail@mitchelloharawild.com} 24 | 25 | Authors: 26 | \itemize{ 27 | \item Rob Hyndman 28 | \item Earo Wang 29 | } 30 | 31 | Other contributors: 32 | \itemize{ 33 | \item Gabriel Caceres (NNETAR implementation) [contributor] 34 | \item Christoph Bergmeir (\href{https://orcid.org/0000-0002-3665-9021}{ORCID}) [contributor] 35 | \item Tim-Gunnar Hensel [contributor] 36 | \item Timothy Hyndman [contributor] 37 | } 38 | 39 | } 40 | \keyword{internal} 41 | -------------------------------------------------------------------------------- /man/figures/README-example-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/man/figures/README-example-1.png -------------------------------------------------------------------------------- /man/figures/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/man/figures/logo.png -------------------------------------------------------------------------------- /man/fitted.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{fitted.AR} 4 | \alias{fitted.AR} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{AR}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | as_tsibble(lh) \%>\% 22 | model(AR(value ~ order(3))) \%>\% 23 | fitted() 24 | } 25 | -------------------------------------------------------------------------------- /man/fitted.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{fitted.ARIMA} 4 | \alias{fitted.ARIMA} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{ARIMA}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | USAccDeaths \%>\% 22 | as_tsibble() \%>\% 23 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 24 | fitted() 25 | } 26 | -------------------------------------------------------------------------------- /man/fitted.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{fitted.ETS} 4 | \alias{fitted.ETS} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{ETS}(object, ...) 
8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 23 | fitted() 24 | } 25 | -------------------------------------------------------------------------------- /man/fitted.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{fitted.NNETAR} 4 | \alias{fitted.NNETAR} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{NNETAR}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | as_tsibble(airmiles) \%>\% 22 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 23 | fitted() 24 | } 25 | -------------------------------------------------------------------------------- /man/fitted.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{fitted.RW} 4 | \alias{fitted.RW} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{RW}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | as_tsibble(Nile) \%>\% 22 | model(NAIVE(value)) \%>\% 23 | fitted() 24 | 25 | library(tsibbledata) 26 | aus_production \%>\% 27 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 28 | fitted() 29 | } 30 | -------------------------------------------------------------------------------- /man/fitted.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{fitted.TSLM} 4 | \alias{fitted.TSLM} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{TSLM}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 23 | fitted() 24 | } 25 | -------------------------------------------------------------------------------- /man/fitted.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{fitted.VAR} 4 | \alias{fitted.VAR} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{VAR}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 
16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 22 | as_tsibble(pivot_longer = FALSE) 23 | 24 | lung_deaths \%>\% 25 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 26 | fitted() 27 | } 28 | -------------------------------------------------------------------------------- /man/fitted.croston.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/croston.R 3 | \name{fitted.croston} 4 | \alias{fitted.croston} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{croston}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | library(tsibble) 22 | sim_poisson <- tsibble( 23 | time = yearmonth("2012 Dec") + seq_len(24), 24 | count = rpois(24, lambda = 0.3), 25 | index = time 26 | ) 27 | 28 | sim_poisson \%>\% 29 | model(CROSTON(count)) \%>\% 30 | tidy() 31 | } 32 | -------------------------------------------------------------------------------- /man/fitted.fable_theta.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{fitted.fable_theta} 4 | \alias{fitted.fable_theta} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{fable_theta}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | library(tsibbledata) 22 | vic_elec \%>\% 23 | model(avg = MEAN(Demand)) \%>\% 24 | fitted() 25 | } 26 | -------------------------------------------------------------------------------- /man/fitted.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{fitted.model_mean} 4 | \alias{fitted.model_mean} 5 | \title{Extract fitted values from a fable model} 6 | \usage{ 7 | \method{fitted}{model_mean}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted values. 16 | } 17 | \description{ 18 | Extracts the fitted values. 19 | } 20 | \examples{ 21 | library(tsibbledata) 22 | vic_elec \%>\% 23 | model(avg = MEAN(Demand)) \%>\% 24 | fitted() 25 | } 26 | -------------------------------------------------------------------------------- /man/forecast.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{forecast.AR} 4 | \alias{forecast.AR} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{AR}( 8 | object, 9 | new_data = NULL, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | times = 5000, 13 | ... 
14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 24 | 25 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \value{ 30 | A list of forecasts. 31 | } 32 | \description{ 33 | Produces forecasts from a trained model. 34 | } 35 | \examples{ 36 | as_tsibble(lh) \%>\% 37 | model(AR(value ~ order(3))) \%>\% 38 | forecast() 39 | } 40 | -------------------------------------------------------------------------------- /man/forecast.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{forecast.ARIMA} 4 | \alias{forecast.ARIMA} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{ARIMA}( 8 | object, 9 | new_data = NULL, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | times = 5000, 13 | ... 14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 24 | 25 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \value{ 30 | A list of forecasts. 31 | } 32 | \description{ 33 | Produces forecasts from a trained model. 34 | } 35 | \examples{ 36 | USAccDeaths \%>\% 37 | as_tsibble() \%>\% 38 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 39 | forecast() 40 | } 41 | -------------------------------------------------------------------------------- /man/forecast.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{forecast.ETS} 4 | \alias{forecast.ETS} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{ETS}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | simulate = FALSE, 12 | bootstrap = FALSE, 13 | times = 5000, 14 | ... 
15 | ) 16 | } 17 | \arguments{ 18 | \item{object}{A model for which forecasts are required.} 19 | 20 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 21 | 22 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 23 | 24 | \item{simulate}{If \code{TRUE}, prediction intervals are produced by simulation rather than using analytic formulae.} 25 | 26 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 27 | 28 | \item{times}{The number of sample paths to use in estimating the forecast distribution if simulated intervals are used.} 29 | 30 | \item{...}{Other arguments passed to methods} 31 | } 32 | \value{ 33 | A list of forecasts. 34 | } 35 | \description{ 36 | Produces forecasts from a trained model. 37 | } 38 | \examples{ 39 | as_tsibble(USAccDeaths) \%>\% 40 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 41 | forecast() 42 | } 43 | -------------------------------------------------------------------------------- /man/forecast.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{forecast.NNETAR} 4 | \alias{forecast.NNETAR} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{NNETAR}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | simulate = TRUE, 12 | bootstrap = FALSE, 13 | times = 5000, 14 | ... 15 | ) 16 | } 17 | \arguments{ 18 | \item{object}{A model for which forecasts are required.} 19 | 20 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 21 | 22 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 23 | 24 | \item{simulate}{If \code{TRUE}, forecast distributions are produced by sampling from a normal distribution. Without simulation, forecast uncertainty cannot be estimated for this model and instead a degenerate distribution with the forecast mean will be produced.} 25 | 26 | \item{bootstrap}{If \code{TRUE}, forecast distributions are produced by sampling from the model's training residuals.} 27 | 28 | \item{times}{The number of sample paths to use in producing the forecast distribution. Setting \code{simulate = FALSE} or \code{times = 0} will produce degenerate forecast distributions of the forecast mean.} 29 | 30 | \item{...}{Other arguments passed to methods} 31 | } 32 | \value{ 33 | A list of forecasts. 34 | } 35 | \description{ 36 | Produces forecasts from a trained model. 37 | } 38 | \examples{ 39 | as_tsibble(airmiles) \%>\% 40 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 41 | forecast(times = 10) 42 | } 43 | -------------------------------------------------------------------------------- /man/forecast.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{forecast.RW} 4 | \alias{forecast.RW} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{RW}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | simulate = FALSE, 12 | bootstrap = FALSE, 13 | times = 5000, 14 | ... 
15 | ) 16 | } 17 | \arguments{ 18 | \item{object}{A model for which forecasts are required.} 19 | 20 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 21 | 22 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 23 | 24 | \item{simulate}{If \code{TRUE}, prediction intervals are produced by simulation rather than using analytic formulae.} 25 | 26 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 27 | 28 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 29 | 30 | \item{...}{Other arguments passed to methods} 31 | } 32 | \value{ 33 | A list of forecasts. 34 | } 35 | \description{ 36 | Produces forecasts from a trained model. 37 | } 38 | \examples{ 39 | as_tsibble(Nile) \%>\% 40 | model(NAIVE(value)) \%>\% 41 | forecast() 42 | 43 | library(tsibbledata) 44 | aus_production \%>\% 45 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 46 | forecast() 47 | } 48 | -------------------------------------------------------------------------------- /man/forecast.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{forecast.TSLM} 4 | \alias{forecast.TSLM} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{TSLM}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | approx_normal = TRUE, 13 | times = 5000, 14 | ... 15 | ) 16 | } 17 | \arguments{ 18 | \item{object}{A model for which forecasts are required.} 19 | 20 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 21 | 22 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 23 | 24 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 25 | 26 | \item{approx_normal}{Should the resulting forecast distributions be 27 | approximated as a Normal distribution instead of a Student's T 28 | distribution. Returning Normal distributions (the default) is a useful 29 | approximation to make it easier for using TSLM models in model combinations 30 | or reconciliation processes.} 31 | 32 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 33 | 34 | \item{...}{Other arguments passed to methods} 35 | } 36 | \value{ 37 | A list of forecasts. 38 | } 39 | \description{ 40 | Produces forecasts from a trained model. 41 | } 42 | \examples{ 43 | as_tsibble(USAccDeaths) \%>\% 44 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 45 | forecast() 46 | } 47 | -------------------------------------------------------------------------------- /man/forecast.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{forecast.VAR} 4 | \alias{forecast.VAR} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{VAR}( 8 | object, 9 | new_data = NULL, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | times = 5000, 13 | ... 
14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 24 | 25 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \value{ 30 | A list of forecasts. 31 | } 32 | \description{ 33 | Produces forecasts from a trained model. 34 | } 35 | \examples{ 36 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 37 | as_tsibble(pivot_longer = FALSE) 38 | 39 | lung_deaths \%>\% 40 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 41 | forecast() 42 | } 43 | -------------------------------------------------------------------------------- /man/forecast.croston.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/croston.R 3 | \name{forecast.croston} 4 | \alias{forecast.croston} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{croston}(object, new_data, specials = NULL, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \value{ 19 | A list of forecasts. 20 | } 21 | \description{ 22 | Produces forecasts from a trained model. 23 | } 24 | \examples{ 25 | library(tsibble) 26 | sim_poisson <- tsibble( 27 | time = yearmonth("2012 Dec") + seq_len(24), 28 | count = rpois(24, lambda = 0.3), 29 | index = time 30 | ) 31 | 32 | sim_poisson \%>\% 33 | model(CROSTON(count)) \%>\% 34 | forecast() 35 | } 36 | -------------------------------------------------------------------------------- /man/forecast.fable_theta.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{forecast.fable_theta} 4 | \alias{forecast.fable_theta} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{fable_theta}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | times = 5000, 13 | ... 14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 24 | 25 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \value{ 30 | A list of forecasts. 31 | } 32 | \description{ 33 | Produces forecasts from a trained model. 
34 | } 35 | \examples{ 36 | USAccDeaths \%>\% 37 | as_tsibble() \%>\% 38 | model(theta = THETA(log(value))) \%>\% 39 | forecast() 40 | } 41 | -------------------------------------------------------------------------------- /man/forecast.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{forecast.model_mean} 4 | \alias{forecast.model_mean} 5 | \title{Forecast a model from the fable package} 6 | \usage{ 7 | \method{forecast}{model_mean}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | bootstrap = FALSE, 12 | times = 5000, 13 | ... 14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 24 | 25 | \item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \value{ 30 | A list of forecasts. 31 | } 32 | \description{ 33 | Produces forecasts from a trained model. 34 | } 35 | \examples{ 36 | library(tsibbledata) 37 | vic_elec \%>\% 38 | model(avg = MEAN(Demand)) \%>\% 39 | forecast() 40 | } 41 | -------------------------------------------------------------------------------- /man/generate.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{generate.AR} 4 | \alias{generate.AR} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{AR}(x, new_data = NULL, specials = NULL, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Simulates future paths from a dataset using a fitted model. Innovations are 22 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 23 | innovations will be sampled from the model's residuals. If \code{new_data} 24 | contains the \code{.innov} column, those values will be treated as innovations.
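A minimal sketch (not from the generated documentation) of the .innov mechanism described above, assuming the standard fabletools/tsibble interface; the model, horizon, and innovation values are purely illustrative.

library(fable)
library(tsibble)
library(dplyr)

# Fit an AR(3) to the lh series, as in the example below.
fit <- as_tsibble(lh) %>%
  model(AR(value ~ order(3)))

# Future time points with user-supplied innovations in the `.innov` column;
# generate() will use these values instead of sampling new errors.
future <- new_data(as_tsibble(lh), 12) %>%
  mutate(.innov = rnorm(12, sd = 0.1))

fit %>% generate(new_data = future)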
25 | } 26 | \examples{ 27 | as_tsibble(lh) \%>\% 28 | model(AR(value ~ order(3))) \%>\% 29 | generate() 30 | } 31 | \seealso{ 32 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 33 | } 34 | -------------------------------------------------------------------------------- /man/generate.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{generate.ARIMA} 4 | \alias{generate.ARIMA} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{ARIMA}(x, new_data, specials, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Simulates future paths from a dataset using a fitted model. Innovations are 22 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 23 | innovations will be sampled from the model's residuals. If \code{new_data} 24 | contains the \code{.innov} column, those values will be treated as innovations. 25 | } 26 | \examples{ 27 | fable_fit <- as_tsibble(USAccDeaths) \%>\% 28 | model(model = ARIMA(value ~ 0 + pdq(0,1,1) + PDQ(0,1,1))) 29 | fable_fit \%>\% generate(times = 10) 30 | 31 | } 32 | \seealso{ 33 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 34 | } 35 | -------------------------------------------------------------------------------- /man/generate.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{generate.ETS} 4 | \alias{generate.ETS} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{ETS}(x, new_data, specials, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Simulates future paths from a dataset using a fitted model. Innovations are 22 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 23 | innovations will be sampled from the model's residuals. If \code{new_data} 24 | contains the \code{.innov} column, those values will be treated as innovations. 
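A companion sketch (not from the generated documentation) of the bootstrap option described above, which resamples the model's own residuals rather than drawing from the assumed error distribution.

library(fable)

as_tsibble(USAccDeaths) %>%
  model(ETS(log(value) ~ season("A"))) %>%
  generate(times = 100, bootstrap = TRUE)  # innovations resampled from residuals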
25 | } 26 | \examples{ 27 | as_tsibble(USAccDeaths) \%>\% 28 | model(ETS(log(value) ~ season("A"))) \%>\% 29 | generate(times = 100) 30 | } 31 | \seealso{ 32 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 33 | } 34 | -------------------------------------------------------------------------------- /man/generate.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{generate.NNETAR} 4 | \alias{generate.NNETAR} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{NNETAR}(x, new_data, specials = NULL, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Simulates future paths from a dataset using a fitted model. Innovations are 22 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 23 | innovations will be sampled from the model's residuals. If \code{new_data} 24 | contains the \code{.innov} column, those values will be treated as innovations. 25 | } 26 | \examples{ 27 | as_tsibble(airmiles) \%>\% 28 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 29 | generate() 30 | } 31 | \seealso{ 32 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 33 | } 34 | -------------------------------------------------------------------------------- /man/generate.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{generate.RW} 4 | \alias{generate.RW} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{RW}(x, new_data, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \description{ 19 | Simulates future paths from a dataset using a fitted model. Innovations are 20 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 21 | innovations will be sampled from the model's residuals. If \code{new_data} 22 | contains the \code{.innov} column, those values will be treated as innovations. 
23 | } 24 | \examples{ 25 | as_tsibble(Nile) \%>\% 26 | model(NAIVE(value)) \%>\% 27 | generate() 28 | 29 | library(tsibbledata) 30 | aus_production \%>\% 31 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 32 | generate() 33 | } 34 | \seealso{ 35 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 36 | } 37 | -------------------------------------------------------------------------------- /man/generate.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{generate.TSLM} 4 | \alias{generate.TSLM} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{TSLM}(x, new_data, specials, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Simulates future paths from a dataset using a fitted model. Innovations are 22 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 23 | innovations will be sampled from the model's residuals. If \code{new_data} 24 | contains the \code{.innov} column, those values will be treated as innovations. 25 | } 26 | \examples{ 27 | as_tsibble(USAccDeaths) \%>\% 28 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 29 | generate() 30 | } 31 | \seealso{ 32 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 33 | } 34 | -------------------------------------------------------------------------------- /man/generate.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{generate.VAR} 4 | \alias{generate.VAR} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{VAR}(x, new_data, specials, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \description{ 19 | Simulates future paths from a dataset using a fitted model. Innovations are 20 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 21 | innovations will be sampled from the model's residuals. If \code{new_data} 22 | contains the \code{.innov} column, those values will be treated as innovations. 
23 | } 24 | \examples{ 25 | cbind(mdeaths, fdeaths) \%>\% as_tsibble(pivot_longer = FALSE) \%>\% 26 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 27 | generate(times = 10) 28 | } 29 | \seealso{ 30 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 31 | } 32 | -------------------------------------------------------------------------------- /man/generate.VECM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/vecm.R 3 | \name{generate.VECM} 4 | \alias{generate.VECM} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{VECM}(x, new_data, specials, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \description{ 19 | Simulates future paths from a dataset using a fitted model. Innovations are 20 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 21 | innovations will be sampled from the model's residuals. If \code{new_data} 22 | contains the \code{.innov} column, those values will be treated as innovations. 23 | } 24 | \examples{ 25 | as_tsibble(USAccDeaths) \%>\% 26 | model(ETS(log(value) ~ season("A"))) \%>\% 27 | generate(times = 100) 28 | } 29 | \seealso{ 30 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 31 | } 32 | -------------------------------------------------------------------------------- /man/generate.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{generate.model_mean} 4 | \alias{generate.model_mean} 5 | \title{Generate new data from a fable model} 6 | \usage{ 7 | \method{generate}{model_mean}(x, new_data, bootstrap = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{x}{A fitted model.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \description{ 19 | Simulates future paths from a dataset using a fitted model. Innovations are 20 | sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE}, 21 | innovations will be sampled from the model's residuals. If \code{new_data} 22 | contains the \code{.innov} column, those values will be treated as innovations. 23 | } 24 | \examples{ 25 | library(tsibbledata) 26 | vic_elec \%>\% 27 | model(avg = MEAN(Demand)) \%>\% 28 | generate() 29 | } 30 | \seealso{ 31 | \code{\link[fabletools:generate.mdl_df]{fabletools::generate.mdl_df}} 32 | } 33 | -------------------------------------------------------------------------------- /man/glance.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{glance.AR} 4 | \alias{glance.AR} 5 | \title{Glance an AR} 6 | \usage{ 7 | \method{glance}{AR}(x, ...)
8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the AR model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}), the log-likelihood (\code{log_lik}), 22 | and information criterion (\code{AIC}, \code{AICc}, \code{BIC}). 23 | } 24 | \examples{ 25 | as_tsibble(lh) \%>\% 26 | model(AR(value ~ order(3))) \%>\% 27 | glance() 28 | } 29 | -------------------------------------------------------------------------------- /man/glance.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{glance.ARIMA} 4 | \alias{glance.ARIMA} 5 | \title{Glance an ARIMA model} 6 | \format{ 7 | A data frame with 1 row, with columns: 8 | \describe{ 9 | \item{sigma2}{The unbiased variance of residuals. Calculated as \code{sum(residuals^2) / (num_observations - num_parameters + 1)}} 10 | \item{log_lik}{The log-likelihood} 11 | \item{AIC}{Akaike information criterion} 12 | \item{AICc}{Akaike information criterion, corrected for small sample sizes} 13 | \item{BIC}{Bayesian information criterion} 14 | \item{ar_roots, ma_roots}{The model's characteristic roots} 15 | } 16 | } 17 | \usage{ 18 | \method{glance}{ARIMA}(x, ...) 19 | } 20 | \arguments{ 21 | \item{x}{model or other R object to convert to single-row data frame} 22 | 23 | \item{...}{other arguments passed to methods} 24 | } 25 | \value{ 26 | A one row tibble summarising the model's fit. 27 | } 28 | \description{ 29 | Construct a single row summary of the ARIMA model. 30 | } 31 | \examples{ 32 | USAccDeaths \%>\% 33 | as_tsibble() \%>\% 34 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 35 | glance() 36 | } 37 | -------------------------------------------------------------------------------- /man/glance.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{glance.ETS} 4 | \alias{glance.ETS} 5 | \title{Glance an ETS model} 6 | \usage{ 7 | \method{glance}{ETS}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the ETS model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}), the log-likelihood (\code{log_lik}), 22 | and information criterion (\code{AIC}, \code{AICc}, \code{BIC}). 23 | } 24 | \examples{ 25 | as_tsibble(USAccDeaths) \%>\% 26 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 27 | glance() 28 | } 29 | -------------------------------------------------------------------------------- /man/glance.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{glance.NNETAR} 4 | \alias{glance.NNETAR} 5 | \title{Glance an NNETAR model} 6 | \usage{ 7 | \method{glance}{NNETAR}(x, ...)
8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the NNETAR model. 19 | Contains the variance of residuals (\code{sigma2}). 20 | } 21 | \examples{ 22 | as_tsibble(airmiles) \%>\% 23 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 24 | glance() 25 | } 26 | -------------------------------------------------------------------------------- /man/glance.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{glance.RW} 4 | \alias{glance.RW} 5 | \title{Glance a lag walk model} 6 | \usage{ 7 | \method{glance}{RW}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the lag walk model. 19 | Contains the variance of residuals (\code{sigma2}). 20 | } 21 | \examples{ 22 | as_tsibble(Nile) \%>\% 23 | model(NAIVE(value)) \%>\% 24 | glance() 25 | 26 | library(tsibbledata) 27 | aus_production \%>\% 28 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 29 | glance() 30 | } 31 | -------------------------------------------------------------------------------- /man/glance.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{glance.TSLM} 4 | \alias{glance.TSLM} 5 | \title{Glance a TSLM} 6 | \usage{ 7 | \method{glance}{TSLM}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the TSLM model. 19 | } 20 | \details{ 21 | Contains the R squared (\code{r_squared}), variance of residuals (\code{sigma2}), 22 | the log-likelihood (\code{log_lik}), and information criterion (\code{AIC}, \code{AICc}, \code{BIC}). 23 | } 24 | \examples{ 25 | as_tsibble(USAccDeaths) \%>\% 26 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 27 | glance() 28 | } 29 | -------------------------------------------------------------------------------- /man/glance.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{glance.VAR} 4 | \alias{glance.VAR} 5 | \title{Glance a VAR} 6 | \usage{ 7 | \method{glance}{VAR}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the VAR model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}), the log-likelihood (\code{log_lik}), 22 | and information criterion (\code{AIC}, \code{AICc}, \code{BIC}). 
23 | } 24 | \examples{ 25 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 26 | as_tsibble(pivot_longer = FALSE) 27 | 28 | lung_deaths \%>\% 29 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 30 | glance() 31 | } 32 | -------------------------------------------------------------------------------- /man/glance.VECM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/vecm.R 3 | \name{glance.VECM} 4 | \alias{glance.VECM} 5 | \title{Glance a VECM} 6 | \usage{ 7 | \method{glance}{VECM}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the VECM model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}), the log-likelihood 22 | (\code{log_lik}), the cointegrating vector (\code{beta}) and information criterion 23 | (\code{AIC}, \code{AICc}, \code{BIC}). 24 | } 25 | -------------------------------------------------------------------------------- /man/glance.fable_theta.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{glance.fable_theta} 4 | \alias{glance.fable_theta} 5 | \title{Glance a theta method} 6 | \usage{ 7 | \method{glance}{fable_theta}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the theta method model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}). 22 | } 23 | -------------------------------------------------------------------------------- /man/glance.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{glance.model_mean} 4 | \alias{glance.model_mean} 5 | \title{Glance an average method model} 6 | \usage{ 7 | \method{glance}{model_mean}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{model or other R object to convert to single-row data frame} 11 | 12 | \item{...}{other arguments passed to methods} 13 | } 14 | \value{ 15 | A one row tibble summarising the model's fit. 16 | } 17 | \description{ 18 | Construct a single row summary of the average method model. 19 | } 20 | \details{ 21 | Contains the variance of residuals (\code{sigma2}). 22 | } 23 | \examples{ 24 | library(tsibbledata) 25 | vic_elec \%>\% 26 | model(avg = MEAN(Demand)) \%>\% 27 | glance() 28 | } 29 | -------------------------------------------------------------------------------- /man/interpolate.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{interpolate.ARIMA} 4 | \alias{interpolate.ARIMA} 5 | \title{Interpolate missing values from a fable model} 6 | \usage{ 7 | \method{interpolate}{ARIMA}(object, new_data, specials, ...)
8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \value{ 19 | A tibble of the same dimension of \code{new_data} with missing values interpolated. 20 | } 21 | \description{ 22 | Applies a model-specific estimation technique to predict the values of missing values in a \code{tsibble}, and replace them. 23 | } 24 | \examples{ 25 | library(tsibbledata) 26 | 27 | olympic_running \%>\% 28 | model(arima = ARIMA(Time ~ trend())) \%>\% 29 | interpolate(olympic_running) 30 | } 31 | -------------------------------------------------------------------------------- /man/interpolate.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{interpolate.TSLM} 4 | \alias{interpolate.TSLM} 5 | \title{Interpolate missing values from a fable model} 6 | \usage{ 7 | \method{interpolate}{TSLM}(object, new_data, specials, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \value{ 19 | A tibble of the same dimension of \code{new_data} with missing values interpolated. 20 | } 21 | \description{ 22 | Applies a model-specific estimation technique to predict the values of missing values in a \code{tsibble}, and replace them. 23 | } 24 | \examples{ 25 | library(tsibbledata) 26 | 27 | olympic_running \%>\% 28 | model(lm = TSLM(Time ~ trend())) \%>\% 29 | interpolate(olympic_running) 30 | } 31 | -------------------------------------------------------------------------------- /man/interpolate.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{interpolate.model_mean} 4 | \alias{interpolate.model_mean} 5 | \title{Interpolate missing values from a fable model} 6 | \usage{ 7 | \method{interpolate}{model_mean}(object, new_data, specials, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{...}{Other arguments passed to methods} 17 | } 18 | \value{ 19 | A tibble of the same dimension of \code{new_data} with missing values interpolated. 20 | } 21 | \description{ 22 | Applies a model-specific estimation technique to predict the values of missing values in a \code{tsibble}, and replace them. 
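A small sketch (not from the generated documentation) of this interpolation workflow, assuming the standard fable/tsibble interface; the missing values are injected at arbitrary positions purely for illustration.

library(fable)
library(tsibble)
library(dplyr)

# Inject two artificial gaps into the Nile series.
nile <- as_tsibble(Nile) %>%
  mutate(value = replace(value, c(10, 25), NA))

# interpolate() returns the same tsibble with the gaps filled by the fitted mean.
nile %>%
  model(MEAN(value)) %>%
  interpolate(nile)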
23 | } 24 | \examples{ 25 | library(tsibbledata) 26 | 27 | olympic_running \%>\% 28 | model(mean = MEAN(Time)) \%>\% 29 | interpolate(olympic_running) 30 | } 31 | -------------------------------------------------------------------------------- /man/reexports.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/reexports.R 3 | \docType{import} 4 | \name{reexports} 5 | \alias{reexports} 6 | \alias{\%>\%} 7 | \alias{as_tsibble} 8 | \title{Objects exported from other packages} 9 | \keyword{internal} 10 | \description{ 11 | These objects are imported from other packages. Follow the links 12 | below to see their documentation. 13 | 14 | \describe{ 15 | \item{dplyr}{\code{\link[dplyr:reexports]{\%>\%}}} 16 | 17 | \item{tsibble}{\code{\link[tsibble:as-tsibble]{as_tsibble}}} 18 | }} 19 | 20 | -------------------------------------------------------------------------------- /man/refit.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{refit.AR} 4 | \alias{refit.AR} 5 | \title{Refit an AR model} 6 | \usage{ 7 | \method{refit}{AR}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the coefficients for the fitted model will be re-estimated to suit the new data.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \value{ 21 | A refitted model. 22 | } 23 | \description{ 24 | Applies a fitted AR model to a new dataset. 25 | } 26 | \examples{ 27 | lung_deaths_male <- as_tsibble(mdeaths) 28 | lung_deaths_female <- as_tsibble(fdeaths) 29 | 30 | fit <- lung_deaths_male \%>\% 31 | model(AR(value ~ 1 + order(10))) 32 | 33 | report(fit) 34 | 35 | fit \%>\% 36 | refit(lung_deaths_female) \%>\% 37 | report() 38 | } 39 | -------------------------------------------------------------------------------- /man/refit.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{refit.ARIMA} 4 | \alias{refit.ARIMA} 5 | \title{Refit an ARIMA model} 6 | \usage{ 7 | \method{refit}{ARIMA}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the coefficients for the fitted model will be re-estimated to suit the new data.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \value{ 21 | A refitted model. 22 | } 23 | \description{ 24 | Applies a fitted ARIMA model to a new dataset. 
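A brief sketch (not from the generated documentation) of what the reestimate argument changes; it reuses the fit and lung_deaths_female objects defined in the example that follows.

# reestimate = FALSE (the default) keeps the coefficients estimated on the
# male series and applies them to the new data; reestimate = TRUE re-estimates
# them on the female series.
fit %>% refit(lung_deaths_female) %>% tidy()
fit %>% refit(lung_deaths_female, reestimate = TRUE) %>% tidy()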
25 | } 26 | \examples{ 27 | lung_deaths_male <- as_tsibble(mdeaths) 28 | lung_deaths_female <- as_tsibble(fdeaths) 29 | 30 | fit <- lung_deaths_male \%>\% 31 | model(ARIMA(value ~ 1 + pdq(2, 0, 0) + PDQ(2, 1, 0))) 32 | 33 | report(fit) 34 | 35 | fit \%>\% 36 | refit(lung_deaths_female) \%>\% 37 | report() 38 | } 39 | -------------------------------------------------------------------------------- /man/refit.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{refit.ETS} 4 | \alias{refit.ETS} 5 | \title{Refit an ETS model} 6 | \usage{ 7 | \method{refit}{ETS}( 8 | object, 9 | new_data, 10 | specials = NULL, 11 | reestimate = FALSE, 12 | reinitialise = TRUE, 13 | ... 14 | ) 15 | } 16 | \arguments{ 17 | \item{object}{A model for which forecasts are required.} 18 | 19 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 20 | 21 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 22 | 23 | \item{reestimate}{If \code{TRUE}, the coefficients for the fitted model will be re-estimated to suit the new data.} 24 | 25 | \item{reinitialise}{If TRUE, the initial parameters will be re-estimated to suit the new data.} 26 | 27 | \item{...}{Other arguments passed to methods} 28 | } 29 | \description{ 30 | Applies a fitted ETS model to a new dataset. 31 | } 32 | \examples{ 33 | lung_deaths_male <- as_tsibble(mdeaths) 34 | lung_deaths_female <- as_tsibble(fdeaths) 35 | 36 | fit <- lung_deaths_male \%>\% 37 | model(ETS(value)) 38 | 39 | report(fit) 40 | 41 | fit \%>\% 42 | refit(lung_deaths_female, reinitialise = TRUE) \%>\% 43 | report() 44 | } 45 | -------------------------------------------------------------------------------- /man/refit.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{refit.NNETAR} 4 | \alias{refit.NNETAR} 5 | \title{Refit a NNETAR model} 6 | \usage{ 7 | \method{refit}{NNETAR}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the networks will be initialized with random 17 | starting weights to suit the new data. If \code{FALSE}, for every network the best 18 | individual set of weights found in the pre-estimation process is used as the 19 | starting weight vector.} 20 | 21 | \item{...}{Other arguments passed to methods} 22 | } 23 | \value{ 24 | A refitted model. 25 | } 26 | \description{ 27 | Applies a fitted NNETAR model to a new dataset. 
28 | } 29 | \examples{ 30 | lung_deaths_male <- as_tsibble(mdeaths) 31 | lung_deaths_female <- as_tsibble(fdeaths) 32 | 33 | fit <- lung_deaths_male \%>\% 34 | model(NNETAR(value)) 35 | 36 | report(fit) 37 | 38 | fit \%>\% 39 | refit(new_data = lung_deaths_female, reestimate = FALSE) \%>\% 40 | report() 41 | } 42 | -------------------------------------------------------------------------------- /man/refit.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{refit.RW} 4 | \alias{refit.RW} 5 | \title{Refit a lag walk model} 6 | \usage{ 7 | \method{refit}{RW}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the lag walk model will be re-estimated 17 | to suit the new data.} 18 | 19 | \item{...}{Other arguments passed to methods} 20 | } 21 | \description{ 22 | Applies a fitted random walk model to a new dataset. 23 | } 24 | \details{ 25 | The models \code{NAIVE} and \code{SNAIVE} have no specific model parameters. Using \code{refit} 26 | for one of these models gives the same estimation results as calling 27 | \code{fabletools::model(NAIVE(...))} (or \code{fabletools::model(SNAIVE(...))}) on the new data. 28 | } 29 | \examples{ 30 | lung_deaths_male <- as_tsibble(mdeaths) 31 | lung_deaths_female <- as_tsibble(fdeaths) 32 | 33 | fit <- lung_deaths_male \%>\% 34 | model(RW(value ~ drift())) 35 | 36 | report(fit) 37 | 38 | fit \%>\% 39 | refit(lung_deaths_female) \%>\% 40 | report() 41 | } 42 | -------------------------------------------------------------------------------- /man/refit.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{refit.TSLM} 4 | \alias{refit.TSLM} 5 | \title{Refit a \code{TSLM}} 6 | \usage{ 7 | \method{refit}{TSLM}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the coefficients for the fitted model will be re-estimated to suit the new data.} 17 | 18 | \item{...}{Other arguments passed to methods} 19 | } 20 | \description{ 21 | Applies a fitted \code{TSLM} to a new dataset.
22 | } 23 | \examples{ 24 | lung_deaths_male <- as_tsibble(mdeaths) 25 | lung_deaths_female <- as_tsibble(fdeaths) 26 | 27 | fit <- lung_deaths_male \%>\% 28 | model(TSLM(value ~ trend() + season())) 29 | 30 | report(fit) 31 | 32 | fit \%>\% 33 | refit(lung_deaths_female) \%>\% 34 | report() 35 | } 36 | -------------------------------------------------------------------------------- /man/refit.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{refit.model_mean} 4 | \alias{refit.model_mean} 5 | \title{Refit a MEAN model} 6 | \usage{ 7 | \method{refit}{model_mean}(object, new_data, specials = NULL, reestimate = FALSE, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.} 13 | 14 | \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).} 15 | 16 | \item{reestimate}{If \code{TRUE}, the mean for the fitted model will be re-estimated 17 | to suit the new data.} 18 | 19 | \item{...}{Other arguments passed to methods} 20 | } 21 | \description{ 22 | Applies a fitted average method model to a new dataset. 23 | } 24 | \examples{ 25 | lung_deaths_male <- as_tsibble(mdeaths) 26 | lung_deaths_female <- as_tsibble(fdeaths) 27 | 28 | fit <- lung_deaths_male \%>\% 29 | model(MEAN(value)) 30 | 31 | report(fit) 32 | 33 | fit \%>\% 34 | refit(lung_deaths_female) \%>\% 35 | report() 36 | } 37 | -------------------------------------------------------------------------------- /man/residuals.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{residuals.AR} 4 | \alias{residuals.AR} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{AR}(object, type = c("innovation", "regression"), ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{type}{The type of residuals to extract.} 13 | 14 | \item{...}{Other arguments passed to methods} 15 | } 16 | \value{ 17 | A vector of fitted residuals. 18 | } 19 | \description{ 20 | Extracts the residuals. 21 | } 22 | \examples{ 23 | as_tsibble(lh) \%>\% 24 | model(AR(value ~ order(3))) \%>\% 25 | residuals() 26 | } 27 | -------------------------------------------------------------------------------- /man/residuals.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{residuals.ARIMA} 4 | \alias{residuals.ARIMA} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{ARIMA}(object, type = c("innovation", "regression"), ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{type}{The type of residuals to extract.} 13 | 14 | \item{...}{Other arguments passed to methods} 15 | } 16 | \value{ 17 | A vector of fitted residuals. 18 | } 19 | \description{ 20 | Extracts the residuals. 
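A short sketch (not from the generated documentation) of the type argument shown in the usage above; as I understand the fable interface, type = "innovation" (the default) returns one-step innovation residuals, while type = "regression" returns the response with only the regression effects removed.

library(fable)
library(tsibble)

fit <- USAccDeaths %>%
  as_tsibble() %>%
  model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1)))

fit %>% residuals(type = "innovation")  # default: innovation residuals
fit %>% residuals(type = "regression")  # regression errors, ARIMA structure retained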
21 | } 22 | \examples{ 23 | USAccDeaths \%>\% 24 | as_tsibble() \%>\% 25 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 26 | residuals() 27 | } 28 | -------------------------------------------------------------------------------- /man/residuals.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{residuals.ETS} 4 | \alias{residuals.ETS} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{ETS}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 23 | residuals() 24 | } 25 | -------------------------------------------------------------------------------- /man/residuals.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{residuals.NNETAR} 4 | \alias{residuals.NNETAR} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{NNETAR}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | as_tsibble(airmiles) \%>\% 22 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 23 | residuals() 24 | } 25 | -------------------------------------------------------------------------------- /man/residuals.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{residuals.RW} 4 | \alias{residuals.RW} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{RW}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | as_tsibble(Nile) \%>\% 22 | model(NAIVE(value)) \%>\% 23 | residuals() 24 | 25 | library(tsibbledata) 26 | aus_production \%>\% 27 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 28 | residuals() 29 | } 30 | -------------------------------------------------------------------------------- /man/residuals.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{residuals.TSLM} 4 | \alias{residuals.TSLM} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{TSLM}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 
19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 23 | residuals() 24 | } 25 | -------------------------------------------------------------------------------- /man/residuals.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{residuals.VAR} 4 | \alias{residuals.VAR} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{VAR}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 22 | as_tsibble(pivot_longer = FALSE) 23 | 24 | lung_deaths \%>\% 25 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 26 | residuals() 27 | } 28 | -------------------------------------------------------------------------------- /man/residuals.croston.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/croston.R 3 | \name{residuals.croston} 4 | \alias{residuals.croston} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{croston}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | library(tsibble) 22 | sim_poisson <- tsibble( 23 | time = yearmonth("2012 Dec") + seq_len(24), 24 | count = rpois(24, lambda = 0.3), 25 | index = time 26 | ) 27 | 28 | sim_poisson \%>\% 29 | model(CROSTON(count)) \%>\% 30 | residuals() 31 | } 32 | -------------------------------------------------------------------------------- /man/residuals.fable_theta.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{residuals.fable_theta} 4 | \alias{residuals.fable_theta} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{fable_theta}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | library(tsibbledata) 22 | vic_elec \%>\% 23 | model(avg = MEAN(Demand)) \%>\% 24 | residuals() 25 | } 26 | -------------------------------------------------------------------------------- /man/residuals.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{residuals.model_mean} 4 | \alias{residuals.model_mean} 5 | \title{Extract residuals from a fable model} 6 | \usage{ 7 | \method{residuals}{model_mean}(object, ...) 8 | } 9 | \arguments{ 10 | \item{object}{A model for which forecasts are required.} 11 | 12 | \item{...}{Other arguments passed to methods} 13 | } 14 | \value{ 15 | A vector of fitted residuals. 
16 | } 17 | \description{ 18 | Extracts the residuals. 19 | } 20 | \examples{ 21 | library(tsibbledata) 22 | vic_elec \%>\% 23 | model(avg = MEAN(Demand)) \%>\% 24 | residuals() 25 | } 26 | -------------------------------------------------------------------------------- /man/tidy.AR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ar.R 3 | \name{tidy.AR} 4 | \alias{tidy.AR} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{AR}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | as_tsibble(lh) \%>\% 22 | model(AR(value ~ order(3))) \%>\% 23 | tidy() 24 | } 25 | -------------------------------------------------------------------------------- /man/tidy.ARIMA.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{tidy.ARIMA} 4 | \alias{tidy.ARIMA} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{ARIMA}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | USAccDeaths \%>\% 22 | as_tsibble() \%>\% 23 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 24 | tidy() 25 | } 26 | -------------------------------------------------------------------------------- /man/tidy.ETS.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/ets.R 3 | \name{tidy.ETS} 4 | \alias{tidy.ETS} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{ETS}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(ets = ETS(log(value) ~ season("A"))) \%>\% 23 | tidy() 24 | } 25 | -------------------------------------------------------------------------------- /man/tidy.NNETAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/nnetar.R 3 | \name{tidy.NNETAR} 4 | \alias{tidy.NNETAR} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{NNETAR}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 
16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | as_tsibble(airmiles) \%>\% 22 | model(nn = NNETAR(box_cox(value, 0.15))) \%>\% 23 | tidy() 24 | } 25 | -------------------------------------------------------------------------------- /man/tidy.RW.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lagwalk.R 3 | \name{tidy.RW} 4 | \alias{tidy.RW} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{RW}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | as_tsibble(Nile) \%>\% 22 | model(NAIVE(value)) \%>\% 23 | tidy() 24 | 25 | library(tsibbledata) 26 | aus_production \%>\% 27 | model(snaive = SNAIVE(Beer ~ lag("year"))) \%>\% 28 | tidy() 29 | } 30 | -------------------------------------------------------------------------------- /man/tidy.TSLM.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/lm.R 3 | \name{tidy.TSLM} 4 | \alias{tidy.TSLM} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{TSLM}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | as_tsibble(USAccDeaths) \%>\% 22 | model(lm = TSLM(log(value) ~ trend() + season())) \%>\% 23 | tidy() 24 | } 25 | -------------------------------------------------------------------------------- /man/tidy.VAR.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/var.R 3 | \name{tidy.VAR} 4 | \alias{tidy.VAR} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{VAR}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | lung_deaths <- cbind(mdeaths, fdeaths) \%>\% 22 | as_tsibble(pivot_longer = FALSE) 23 | 24 | lung_deaths \%>\% 25 | model(VAR(vars(mdeaths, fdeaths) ~ AR(3))) \%>\% 26 | tidy() 27 | } 28 | -------------------------------------------------------------------------------- /man/tidy.croston.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/croston.R 3 | \name{tidy.croston} 4 | \alias{tidy.croston} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{croston}(x, ...) 
8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | library(tsibble) 22 | sim_poisson <- tsibble( 23 | time = yearmonth("2012 Dec") + seq_len(24), 24 | count = rpois(24, lambda = 0.3), 25 | index = time 26 | ) 27 | 28 | sim_poisson \%>\% 29 | model(CROSTON(count)) \%>\% 30 | tidy() 31 | } 32 | -------------------------------------------------------------------------------- /man/tidy.fable_theta.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/theta.R 3 | \name{tidy.fable_theta} 4 | \alias{tidy.fable_theta} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{fable_theta}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | USAccDeaths \%>\% 22 | as_tsibble() \%>\% 23 | model(arima = ARIMA(log(value) ~ pdq(0, 1, 1) + PDQ(0, 1, 1))) \%>\% 24 | tidy() 25 | } 26 | -------------------------------------------------------------------------------- /man/tidy.model_mean.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/mean.R 3 | \name{tidy.model_mean} 4 | \alias{tidy.model_mean} 5 | \title{Tidy a fable model} 6 | \usage{ 7 | \method{tidy}{model_mean}(x, ...) 8 | } 9 | \arguments{ 10 | \item{x}{An object to be converted into a tidy \code{\link[tibble:tibble]{tibble::tibble()}}.} 11 | 12 | \item{...}{Additional arguments to tidying method.} 13 | } 14 | \value{ 15 | The model's coefficients in a \code{tibble}. 16 | } 17 | \description{ 18 | Returns the coefficients from the model in a \code{tibble} format. 19 | } 20 | \examples{ 21 | library(tsibbledata) 22 | vic_elec \%>\% 23 | model(avg = MEAN(Demand)) \%>\% 24 | tidy() 25 | } 26 | -------------------------------------------------------------------------------- /man/unitroot_options.Rd: -------------------------------------------------------------------------------- 1 | % Generated by roxygen2: do not edit by hand 2 | % Please edit documentation in R/arima.R 3 | \name{unitroot_options} 4 | \alias{unitroot_options} 5 | \title{Options for the unit root tests for order of integration} 6 | \usage{ 7 | unitroot_options( 8 | ndiffs_alpha = 0.05, 9 | nsdiffs_alpha = 0.05, 10 | ndiffs_pvalue = ~feasts::unitroot_kpss(.)["kpss_pvalue"], 11 | nsdiffs_pvalue = ur_seasonal_strength(0.64) 12 | ) 13 | } 14 | \arguments{ 15 | \item{ndiffs_alpha, nsdiffs_alpha}{The level for the test specified in the \code{pval} functions. As long as \code{pval < alpha}, differences will be added.} 16 | 17 | \item{ndiffs_pvalue, nsdiffs_pvalue}{A function (or lambda expression) that provides a p-value for the unit root test. As long as \code{pval < alpha}, differences will be added. 
18 | 19 | For the function for the seasonal p-value, the seasonal period will be provided as the \code{.period} argument to this function. 20 | A vector of data to test is available as \code{.} or \code{.x}.} 21 | } 22 | \value{ 23 | A list of parameters 24 | } 25 | \description{ 26 | By default, a kpss test (via \code{\link[feasts:unitroot]{feasts::unitroot_kpss()}}) will be performed 27 | for testing the required first order differences, and a test of the seasonal 28 | strength (via \code{\link[feasts:feat_stl]{feasts::feat_stl()}} seasonal_strength) being above the 0.64 29 | threshold is used for determining seasonal required differences. 30 | } 31 | -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon-120x120.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon-120x120.png -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon-152x152.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon-152x152.png -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon-180x180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon-180x180.png -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon-60x60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon-60x60.png -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon-76x76.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon-76x76.png -------------------------------------------------------------------------------- /pkgdown/favicon/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/apple-touch-icon.png -------------------------------------------------------------------------------- /pkgdown/favicon/favicon-16x16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/favicon-16x16.png -------------------------------------------------------------------------------- /pkgdown/favicon/favicon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/favicon-32x32.png -------------------------------------------------------------------------------- /pkgdown/favicon/favicon.ico: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/tidyverts/fable/4411495bf79840a777dac398f7b9ed4ace92643c/pkgdown/favicon/favicon.ico -------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | *.o 2 | *.so 3 | *.dll 4 | -------------------------------------------------------------------------------- /src/Makevars: -------------------------------------------------------------------------------- 1 | PKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS) -DR_NO_REMAP 2 | PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) 3 | -------------------------------------------------------------------------------- /src/Makevars.win: -------------------------------------------------------------------------------- 1 | PKG_CXXFLAGS = $(SHLIB_OPENMP_CXXFLAGS) -DR_NO_REMAP 2 | PKG_LIBS = $(SHLIB_OPENMP_CXXFLAGS) $(LAPACK_LIBS) $(BLAS_LIBS) $(FLIBS) 3 | -------------------------------------------------------------------------------- /src/etsTargetFunction.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | #include 4 | 5 | extern "C" { 6 | 7 | void etscalc(double *, int *, double *, int *, int *, int *, int *, 8 | double *, double *, double *, double *, double *, double *, double *, int *); 9 | 10 | void cpolyroot(double *opr, double *opi, int *degree, 11 | double *zeror, double *zeroi, Rboolean *fail); 12 | } 13 | 14 | class EtsTargetFunction { 15 | 16 | public: 17 | 18 | void eval(const double* p_var, int p_var_length); 19 | void init(std::vector & p_y, int p_nstate, int p_errortype, 20 | int p_trendtype, int p_seasontype, bool p_damped, 21 | std::vector & p_lower, std::vector & p_upper, std::string p_opt_crit, 22 | int p_nmse, std::string p_bounds, int p_m, 23 | bool p_optAlpha, bool p_optBeta, bool p_optGamma, bool p_optPhi, 24 | bool p_givenAlpha, bool p_givenBeta, bool p_givenGamma, bool p_givenPhi, 25 | double alpha, double beta, double gamma, double phi); 26 | 27 | double getObjVal() { return(objval); }; 28 | 29 | 30 | private: 31 | 32 | bool check_params(); 33 | bool admissible(); 34 | 35 | std::vector par; 36 | std::vector y; 37 | 38 | int nstate; 39 | int errortype; 40 | int trendtype; 41 | int seasontype; 42 | bool damped; 43 | std::vector par_noopt; 44 | std::vector lower; 45 | std::vector upper; 46 | std::string opt_crit; 47 | int nmse; 48 | std::string bounds; 49 | int m; 50 | int n; 51 | 52 | std::vector state; 53 | double alpha, beta, gamma, phi; 54 | 55 | std::vector e; 56 | std::vector amse; 57 | 58 | double lik, objval; 59 | 60 | bool optAlpha, optBeta, optGamma, optPhi, givenAlpha, givenBeta, givenGamma, givenPhi; 61 | 62 | }; 63 | -------------------------------------------------------------------------------- /src/etsTargetFunctionWrapper.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include 3 | #include 4 | 5 | #include 6 | 7 | //For R's Nelder-Mead solver 8 | #include 9 | 10 | #include 11 | 12 | #include "etsTargetFunction.h" 13 | 14 | // This function initializes all the parameters, constructs an 15 | // object of type EtsTargetFunction and adds an external pointer 16 | // to this object with name "ets.xptr" 17 | // to the environment submitted as p_rho 18 | // 19 | RcppExport SEXP etsTargetFunctionInit(SEXP p_y, SEXP p_nstate, SEXP p_errortype, SEXP p_trendtype, 20 | SEXP p_seasontype, SEXP p_damped, SEXP p_lower, SEXP p_upper, 21 | SEXP p_opt_crit, SEXP p_nmse, SEXP 
p_bounds, SEXP p_m, 22 | SEXP p_optAlpha, SEXP p_optBeta, SEXP p_optGamma, SEXP p_optPhi, 23 | SEXP p_givenAlpha, SEXP p_givenBeta, SEXP p_givenGamma, SEXP p_givenPhi, 24 | SEXP p_alpha, SEXP p_beta, SEXP p_gamma, SEXP p_phi, SEXP p_rho) { 25 | 26 | BEGIN_RCPP; 27 | 28 | EtsTargetFunction* sp = new EtsTargetFunction(); 29 | 30 | std::vector y = Rcpp::as< std::vector >(p_y); 31 | 32 | int nstate = Rcpp::as(p_nstate); 33 | 34 | int errortype = Rcpp::as(p_errortype); 35 | int trendtype = Rcpp::as(p_trendtype); 36 | int seasontype = Rcpp::as(p_seasontype); 37 | 38 | bool damped = Rcpp::as(p_damped); 39 | 40 | std::vector lower = Rcpp::as< std::vector >(p_lower); 41 | std::vector upper = Rcpp::as< std::vector >(p_upper); 42 | 43 | std::string opt_crit = Rcpp::as(p_opt_crit); 44 | int nmse = Rcpp::as(p_nmse); 45 | 46 | std::string bounds = Rcpp::as< std::string >(p_bounds); 47 | int m = Rcpp::as(p_m); 48 | 49 | bool optAlpha = Rcpp::as(p_optAlpha); 50 | bool optBeta = Rcpp::as(p_optBeta); 51 | bool optGamma = Rcpp::as(p_optGamma); 52 | bool optPhi = Rcpp::as(p_optPhi); 53 | 54 | bool givenAlpha = Rcpp::as(p_givenAlpha); 55 | bool givenBeta = Rcpp::as(p_givenBeta); 56 | bool givenGamma = Rcpp::as(p_givenGamma); 57 | bool givenPhi = Rcpp::as(p_givenPhi); 58 | 59 | double alpha = Rcpp::as(p_alpha); 60 | double beta = Rcpp::as(p_beta); 61 | double gamma = Rcpp::as(p_gamma); 62 | double phi = Rcpp::as(p_phi); 63 | 64 | 65 | sp->init(y, nstate, errortype, trendtype, seasontype, damped, lower, upper, opt_crit, 66 | nmse, bounds, m, optAlpha, optBeta, optGamma, optPhi, 67 | givenAlpha, givenBeta, givenGamma, givenPhi, 68 | alpha, beta, gamma, phi); 69 | 70 | Rcpp::Environment e(p_rho); 71 | e["ets.xptr"] = Rcpp::XPtr( sp, true ); 72 | 73 | return Rcpp::wrap(e); 74 | 75 | END_RCPP; 76 | } 77 | 78 | // RcppExport double targetFunctionRmalschains(SEXP p_par, SEXP p_env) 79 | // { 80 | // Rcpp::NumericVector par(p_par); 81 | 82 | // Rcpp::Environment e(p_env); 83 | // Rcpp::XPtr sp(e.get("ets.xptr")); 84 | 85 | // sp->eval(par.begin(), par.size()); 86 | 87 | // //return Rcpp::wrap(sp->getObjVal()); 88 | // return sp->getObjVal(); 89 | 90 | // } 91 | 92 | // RcppExport SEXP etsGetTargetFunctionRmalschainsPtr() { 93 | 94 | // typedef double (*funcPtr)(SEXP, SEXP); 95 | // return (Rcpp::XPtr(new funcPtr(&targetFunctionRmalschains))); 96 | // } 97 | 98 | /* 99 | RcppExport SEXP targetFunctionRdonlp2(SEXP p_var, SEXP p_env) 100 | { 101 | 102 | Rcpp::Environment e(p_env); 103 | Rcpp::XPtr sp(e.get("ets.xptr")); 104 | 105 | Rcpp::NumericVector var(p_var); 106 | 107 | int mode = var[0]; 108 | int fun_id = var[1]; 109 | 110 | sp->eval(var.begin()+2, var.size()-2); 111 | 112 | if(mode == 0) { 113 | if(fun_id == 0) { 114 | return Rcpp::wrap(sp->getObjVal()); 115 | } else { 116 | return Rcpp::wrap(0); 117 | //return Rcpp::wrap(sp->restrictions[fun_id-1]); 118 | } 119 | } else if(mode==1) { 120 | // error("Gradients are not implemented, exiting."); 121 | }; 122 | 123 | return R_NilValue; 124 | } 125 | 126 | RcppExport SEXP etsGetTargetFunctionRdonlp2Ptr() { 127 | 128 | typedef SEXP (*funcPtr)(SEXP, SEXP); 129 | return (Rcpp::XPtr(new funcPtr(&targetFunctionRdonlp2))); 130 | } 131 | */ 132 | 133 | 134 | double targetFunctionEtsNelderMead(int n, double *par, void *ex) 135 | { 136 | EtsTargetFunction* sp = (EtsTargetFunction*) ex; 137 | 138 | sp->eval(par, n); 139 | return sp->getObjVal(); 140 | 141 | } 142 | 143 | 144 | RcppExport SEXP etsNelderMead(SEXP p_var, SEXP p_env, SEXP p_abstol, 145 | SEXP p_intol, SEXP p_alpha, 
SEXP p_beta, SEXP p_gamma, 146 | SEXP p_trace, SEXP p_maxit) 147 | { 148 | 149 | double abstol = Rcpp::as(p_abstol); 150 | double intol = Rcpp::as(p_intol); 151 | double alpha = Rcpp::as(p_alpha); 152 | double beta= Rcpp::as(p_beta); 153 | double gamma= Rcpp::as(p_gamma); 154 | 155 | int trace = Rcpp::as(p_trace); 156 | int maxit = Rcpp::as(p_maxit); 157 | 158 | int fncount = 0, fail=0; 159 | double Fmin = 0.0; 160 | 161 | Rcpp::NumericVector dpar(p_var); 162 | Rcpp::NumericVector opar(dpar.size()); 163 | 164 | Rcpp::Environment e(p_env); 165 | Rcpp::XPtr sp(e.get("ets.xptr")); 166 | 167 | double (*funcPtr)(int n, double *par, void *ex) = targetFunctionEtsNelderMead; 168 | 169 | nmmin(dpar.size(), dpar.begin(), opar.begin(), &Fmin, funcPtr, 170 | &fail, abstol, intol, sp, alpha, beta, gamma, trace, &fncount, maxit); 171 | 172 | return Rcpp::List::create(Rcpp::Named("value") = Fmin, 173 | Rcpp::Named("par") = opar, 174 | Rcpp::Named("fail") = fail, 175 | Rcpp::Named("fncount") = fncount); 176 | 177 | } 178 | 179 | 180 | 181 | -------------------------------------------------------------------------------- /src/registerDynamicSymbol.c: -------------------------------------------------------------------------------- 1 | // RegisteringDynamic Symbols 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | void R_init_markovchain(DllInfo* info) { 8 | R_registerRoutines(info, NULL, NULL, NULL, NULL); 9 | R_useDynamicSymbols(info, TRUE); 10 | } 11 | -------------------------------------------------------------------------------- /tests/spelling.R: -------------------------------------------------------------------------------- 1 | if(requireNamespace('spelling', quietly = TRUE)) 2 | spelling::spell_check_test(vignettes = TRUE, error = FALSE, 3 | skip_on_cran = TRUE) 4 | -------------------------------------------------------------------------------- /tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(fable) 3 | 4 | test_check("fable") 5 | -------------------------------------------------------------------------------- /tests/testthat/setup-data.R: -------------------------------------------------------------------------------- 1 | context("setup-data.R") 2 | 3 | USAccDeaths_tbl <- as_tsibble(USAccDeaths) 4 | UKLungDeaths <- as_tsibble(cbind(mdeaths, fdeaths), pivot_longer = FALSE) 5 | 6 | fc_mean <- function(x){ 7 | if(inherits(x, "distribution")) mean(x) else x 8 | } 9 | -------------------------------------------------------------------------------- /tests/testthat/test-arima.R: -------------------------------------------------------------------------------- 1 | context("test-arima.R") 2 | 3 | stats_fit <- arima(USAccDeaths, c(0, 1, 1), list(order = c(0, 1, 1), 12)) 4 | test_that("Automatic ARIMA selection", { 5 | skip_if_not_installed("feasts") 6 | # Automatic model selection 7 | fable_fit <- USAccDeaths_tbl %>% 8 | model(arima = ARIMA(value ~ pdq(0:1, 0:1, 0:1) + PDQ(0:1, 0:1, 0:1))) 9 | 10 | expect_identical( 11 | coef(fable_fit$arima[[1]]$fit$model), 12 | coef(stats_fit) 13 | ) 14 | 15 | # Automatic (approximate) model selection 16 | fable_fit_approx <- USAccDeaths_tbl %>% 17 | model(arima = ARIMA(value ~ pdq(0:1, 0:1, 0:1) + PDQ(0:1, 0:1, 0:1), approximation = TRUE)) 18 | 19 | expect_output( 20 | report(fable_fit_approx), 21 | "ARIMA\\(0,1,1\\)\\(1,1,0\\)\\[12\\]" 22 | ) 23 | 24 | # Automatic d/D selection 25 | fit <- USAccDeaths_tbl %>% model(ARIMA(value ~ pdq(p = 0, q = 1) + PDQ(P = 0, Q = 1))) 26 | 
expect_identical( 27 | model_sum(fit[[1]][[1]]), 28 | "ARIMA(0,1,1)(0,1,1)[12]" 29 | ) 30 | }) 31 | 32 | test_that("Manual ARIMA selection", { 33 | # Manual model selection 34 | fable_fit <- USAccDeaths_tbl %>% model(model = ARIMA(value ~ 0 + pdq(0, 1, 1) + PDQ(0, 1, 1))) 35 | 36 | expect_identical( 37 | coef(fable_fit$model[[1]]$fit$model), 38 | coef(stats_fit) 39 | ) 40 | 41 | expect_identical( 42 | model_sum(fable_fit$model[[1]]), 43 | "ARIMA(0,1,1)(0,1,1)[12]" 44 | ) 45 | 46 | fable_fc <- fable_fit %>% forecast() 47 | stats_fc <- stats_fit %>% predict(24) 48 | 49 | expect_equivalent( 50 | fc_mean(fable_fc$value), 51 | unclass(stats_fc$pred) 52 | ) 53 | 54 | expect_equivalent( 55 | tidy(fable_fit)$estimate, coef(stats_fit) 56 | ) 57 | 58 | expect_equivalent( 59 | fitted(fable_fit)$.fitted, unclass(USAccDeaths - residuals(stats_fit)) 60 | ) 61 | 62 | expect_equivalent( 63 | residuals(fable_fit)$.resid, unclass(residuals(stats_fit)) 64 | ) 65 | 66 | expect_equivalent( 67 | fitted(fable_fit), 68 | fitted(refit(fable_fit, USAccDeaths_tbl)) 69 | ) 70 | 71 | expect_output( 72 | report(fable_fit), 73 | "log likelihood=-425.44" 74 | ) 75 | 76 | USAccDeaths_miss <- USAccDeaths_tbl 77 | USAccDeaths_miss$value[c(10, 14, 15)] <- NA 78 | USAccDeaths_miss <- fable_fit %>% 79 | interpolate(USAccDeaths_miss) 80 | expect_false( 81 | any(is.na(USAccDeaths_miss$value)) 82 | ) 83 | expect_equal( 84 | USAccDeaths_tbl$value[-c(10, 14, 15)], 85 | USAccDeaths_miss$value[-c(10, 14, 15)] 86 | ) 87 | }) 88 | 89 | test_that("Fixed ARIMA coefficients", { 90 | # Manual model selection 91 | fable_fit <- USAccDeaths_tbl %>% model(model = ARIMA(value ~ xreg(1, fixed = list(constant = 20)) + pdq(0, 0, 1, fixed = list(ma1 = 0.3)) + PDQ(0, 1, 1, fixed = list(sma1 = 3)))) 92 | 93 | expect_identical( 94 | tidy(fable_fit)$estimate, 95 | c(0.3, 3, 20) 96 | ) 97 | }) 98 | 99 | test_that("ARIMA with bad inputs", { 100 | expect_warning( 101 | UKLungDeaths %>% 102 | model(ARIMA(mdeaths ~ 1 + pdq(2, 0, 0) + pdq(d = 1) + PDQ(2, 1, 0))), 103 | "Only one special for `pdq\\(\\)` and `PDQ\\(\\)` is allowed" 104 | ) 105 | 106 | expect_warning( 107 | UKLungDeaths %>% 108 | model(ARIMA(vars(mdeaths, fdeaths))), 109 | "Only univariate responses are supported by ARIMA" 110 | ) 111 | 112 | expect_warning( 113 | UKLungDeaths %>% 114 | model(ARIMA(mdeaths ~ 0 + pdq(2, 2, 0) + PDQ(2, 1, 0))), 115 | "Having 3 or more differencing operations is not recommended" 116 | ) 117 | 118 | expect_warning( 119 | UKLungDeaths %>% 120 | model(ARIMA(mdeaths ~ pdq(2, 0, 0) + PDQ(2, 2, 0))), 121 | "Having more than one seasonal difference is not recommended" 122 | ) 123 | 124 | expect_warning( 125 | fit <- UKLungDeaths %>% 126 | model(poly = ARIMA(mdeaths ~ 1 + pdq(2, 1, 0) + PDQ(2, 1, 0))), 127 | "Model specification induces a quadratic or higher order polynomial trend" 128 | ) 129 | 130 | expect_identical( 131 | model_sum(fit$poly[[1]]), 132 | "ARIMA(2,1,0)(2,1,0)[12] w/ poly" 133 | ) 134 | }) 135 | 136 | test_that("ARIMA with xregs", { 137 | skip_if_not_installed("feasts") 138 | tr <- UKLungDeaths %>% head(-12) 139 | ts <- UKLungDeaths %>% tail(12) 140 | fable_fit <- tr %>% model(model = ARIMA(mdeaths ~ 1 + fdeaths + PDQ(P = 0, Q = 0))) 141 | stats_fit <- arima(head(mdeaths, -12), c(1, 1, 1), 142 | xreg = data.frame(fdeaths = head(fdeaths, -12), intercept = seq_len(60)) 143 | ) 144 | 145 | expect_equivalent( 146 | coef(fable_fit$model[[1]]$fit$model), 147 | coef(stats_fit) 148 | ) 149 | 150 | fable_fc <- fable_fit %>% forecast(ts) 151 | stats_fc <- stats_fit 
%>% predict(12, 152 | newxreg = data.frame(fdeaths = tail(fdeaths, 12), intercept = 61:72) 153 | ) 154 | 155 | expect_equivalent( 156 | fc_mean(fable_fc$mdeaths), 157 | unclass(stats_fc$pred) 158 | ) 159 | 160 | expect_identical( 161 | model_sum(fable_fit$model[[1]]), 162 | "LM w/ ARIMA(1,1,1) errors" 163 | ) 164 | 165 | fable_fit <- tr %>% 166 | model(model = ARIMA(mdeaths ~ 1 + lag(fdeaths) + PDQ(P = 0, Q = 0))) 167 | expect_equal( 168 | model_sum(fable_fit$model[[1]]), 169 | "LM w/ ARIMA(2,0,1) errors" 170 | ) 171 | fable_fc <- fable_fit %>% 172 | forecast(ts) 173 | expect_true( 174 | !any(is.na(fable_fc$mdeaths)) 175 | ) 176 | }) 177 | -------------------------------------------------------------------------------- /tests/testthat/test-ets.R: -------------------------------------------------------------------------------- 1 | context("test-ets.R") 2 | skip_if_not_installed("forecast") 3 | 4 | forecast_fit <- USAccDeaths %>% forecast::ets() 5 | test_that("Automatic ETS selection", { 6 | # Automatic model selection 7 | fable_fit <- USAccDeaths_tbl %>% model(ets = ETS(value)) 8 | 9 | expect_equivalent( 10 | tidy(fable_fit$ets[[1]]$fit)$estimate, 11 | c(coef(forecast_fit), -sum(coef(forecast_fit)[-(1:3)])) 12 | ) 13 | 14 | # Short series 15 | expect_equal( 16 | tidy(model(UKLungDeaths[1:24, ], ETS(mdeaths)))$estimate, 17 | c(1, 2134), 18 | tolerance = 0.5 19 | ) 20 | }) 21 | 22 | test_that("Manual ETS selection", { 23 | # Manual model selection 24 | fable_fit <- USAccDeaths_tbl %>% model(ets = ETS(value ~ error("A") + trend("N") + season("A"))) 25 | 26 | expect_equivalent( 27 | tidy(fable_fit$ets[[1]]$fit)$estimate, 28 | c(coef(forecast_fit), -sum(coef(forecast_fit)[-(1:3)])) 29 | ) 30 | 31 | expect_identical( 32 | model_sum(fable_fit$ets[[1]]), 33 | "ETS(A,N,A)" 34 | ) 35 | 36 | fable_fc <- fable_fit %>% forecast() 37 | forecast_fc <- forecast_fit %>% forecast::forecast() 38 | 39 | expect_equivalent( 40 | fc_mean(fable_fc$value), 41 | unclass(forecast_fc$mean) 42 | ) 43 | 44 | # Test simulation 45 | fable_fit %>% 46 | generate(USAccDeaths_tbl) 47 | fable_fit %>% 48 | generate(USAccDeaths_tbl %>% 49 | dplyr::mutate(index = index + 72)) 50 | 51 | # Test refit 52 | expect_identical( 53 | tidy(refit(fable_fit, USAccDeaths_tbl))$estimate == tidy(fable_fit)$estimate, 54 | c(rep(TRUE, 2), rep(FALSE, 13)) 55 | ) 56 | expect_identical( 57 | tidy(refit(fable_fit, USAccDeaths_tbl, reinitialise = FALSE))$estimate, 58 | tidy(fable_fit)$estimate 59 | ) 60 | 61 | # Test components 62 | cmp <- components(fable_fit) 63 | expect_identical( 64 | tidy(fable_fit)$estimate[3:14], 65 | c(cmp$level[12], cmp$season[12:2]) 66 | ) 67 | expect_s3_class( 68 | cmp, "dcmp_ts" 69 | ) 70 | 71 | # Test report 72 | expect_output( 73 | report(fable_fit), 74 | "sigma\\^2: 85667.86" 75 | ) 76 | 77 | aug <- augment(fable_fit) 78 | expect_equal( 79 | aug$value, 80 | aug$.fitted + aug$.resid 81 | ) 82 | 83 | # Test specification of smoothing params 84 | coef <- USAccDeaths_tbl %>% 85 | model(ETS(value ~ error("A") + season("A", gamma = 0.0001) + 86 | trend("Ad", alpha = 0.5, beta = 0.006, phi = 0.975))) %>% 87 | tidy() 88 | expect_identical( 89 | coef$estimate[1:4], 90 | c(0.5, 0.006, 0.0001, 0.975) 91 | ) 92 | expect_identical( 93 | coef$term, 94 | c("alpha", "beta", "gamma", "phi", "l[0]", "b[0]", sprintf("s[%i]", 0:-11)) 95 | ) 96 | }) 97 | 98 | 99 | test_that("ETS with bad inputs", { 100 | # Test for multiple specials defined 101 | expect_warning( 102 | USAccDeaths_tbl %>% model(ETS(value ~ error("A") + error("A"))), 103 | 
"Only one special of each type is allowed for ETS" 104 | ) 105 | 106 | expect_warning( 107 | USAccDeaths_tbl %>% model(ETS(value ~ trend(alpha = 1.5))), 108 | "Inconsistent parameter boundaries" 109 | ) 110 | 111 | expect_warning( 112 | USAccDeaths_tbl %>% model(ETS(value ~ error("A") + trend("A", alpha = 0.2, beta = 0.5) + season("N"))), 113 | "Parameters out of range" 114 | ) 115 | 116 | expect_warning( 117 | UKLungDeaths %>% 118 | model(ETS(vars(mdeaths, fdeaths))), 119 | "Only univariate responses are supported by ETS" 120 | ) 121 | 122 | UK_missing <- UKLungDeaths 123 | UK_missing[["mdeaths"]][3:5] <- NA 124 | expect_warning( 125 | UK_missing %>% 126 | model(ETS(mdeaths)), 127 | "ETS does not support missing values" 128 | ) 129 | 130 | expect_warning( 131 | UKLungDeaths %>% 132 | model(ETS(mdeaths ~ trend("M") + season("A"))), 133 | "No valid ETS models have been allowed" 134 | ) 135 | 136 | expect_warning( 137 | UKLungDeaths[1:2, ] %>% 138 | model(ETS(mdeaths)), 139 | "Not enough data to estimate this ETS model" 140 | ) 141 | }) 142 | 143 | 144 | test_that("Multiplicative ETS models", { 145 | fable_fit <- USAccDeaths_tbl %>% 146 | model(ets = ETS(value ~ error("M") + trend("N") + season("N"))) 147 | expect_true( 148 | is.constant(fc_mean(forecast(fable_fit)$value)) 149 | ) 150 | 151 | expect_s3_class( 152 | USAccDeaths_tbl %>% 153 | model(ets = ETS(value ~ error("M") + trend("A") + season("M"))) %>% 154 | forecast(), 155 | "fbl_ts" 156 | ) 157 | 158 | 159 | expect_s3_class( 160 | USAccDeaths_tbl %>% 161 | model(ets = ETS(value ~ error("M") + trend("M") + season("M"))) %>% 162 | forecast(times = 5), 163 | "fbl_ts" 164 | ) 165 | }) 166 | 167 | test_that("Automatic ETS selection bug (#425)", { 168 | train <- tsibble( 169 | YM = yearmonth("2022 Jan") + 0:35, 170 | value = rep(c(rep(-78040, 11), -78061), 3), 171 | index = YM 172 | ) 173 | expect_identical( 174 | model_sum(model(train, ets=ETS(value))$ets[[1]]), 175 | "ETS(A,N,N)" 176 | ) 177 | }) -------------------------------------------------------------------------------- /tests/testthat/test-lm.R: -------------------------------------------------------------------------------- 1 | context("test-lm.R") 2 | 3 | test_that("LM", { 4 | skip_if_not_installed("forecast") 5 | 6 | # NULL model selection 7 | fable_fit <- USAccDeaths_tbl %>% model(lm = TSLM(value)) 8 | forecast_fit <- lm(USAccDeaths ~ 1) 9 | 10 | expect_equivalent( 11 | coef(fable_fit)$estimate, 12 | coef(forecast_fit) 13 | ) 14 | 15 | # Trend + Season 16 | fable_fit <- USAccDeaths_tbl %>% model(lm = TSLM(value ~ trend() + season())) 17 | forecast_fit <- forecast::tslm(USAccDeaths ~ trend + season) 18 | 19 | expect_equivalent( 20 | unclass(fitted(fable_fit)[[".fitted"]]), 21 | unclass(fitted(forecast_fit)) 22 | ) 23 | 24 | # Model coefs 25 | expect_equivalent( 26 | tidy(fable_fit) %>% dplyr::filter(term == "trend()") %>% dplyr::pull(estimate), 27 | coef(forecast_fit)["trend"] 28 | ) 29 | 30 | # Forecast 31 | fable_fc <- fable_fit %>% forecast(h = 12) 32 | fable_fc_short <- fable_fit %>% forecast(h = 1) 33 | forecast_fc <- forecast_fit %>% forecast::forecast(h = 12) 34 | expect_equivalent( 35 | fc_mean(fable_fc$value), 36 | unclass(forecast_fc$mean) 37 | ) 38 | expect_equivalent( 39 | fable_fc$value[1], 40 | fable_fc_short$value 41 | ) 42 | 43 | fable_fc_sim <- fable_fit %>% forecast(h = 12, bootstrap = TRUE, times = 5) 44 | # expect_equal( 45 | # fc_mean(fable_fc$value), 46 | # fc_mean(fable_fc_sim$value) 47 | # ) 48 | 49 | # Fourier 50 | fable_fit <- USAccDeaths_tbl %>% 
model(lm = TSLM(value ~ trend() + fourier(K = 5))) 51 | forecast_fit <- forecast::tslm(USAccDeaths ~ trend + forecast::fourier(USAccDeaths, K = 5)) 52 | 53 | expect_equivalent( 54 | unclass(fitted(fable_fit)[[".fitted"]]), 55 | unclass(fitted(forecast_fit)) 56 | ) 57 | 58 | # Model summary 59 | expect_identical( 60 | model_sum(fable_fit$lm[[1]]), 61 | "TSLM" 62 | ) 63 | 64 | # Model report 65 | expect_output( 66 | report(fable_fit), 67 | "Residual standard error: 442.5" 68 | ) 69 | 70 | # Model glance 71 | expect_equal( 72 | with(glance(fable_fit), df + df.residual), 73 | NROW(USAccDeaths_tbl) 74 | ) 75 | 76 | # Refit 77 | expect_identical( 78 | tidy(fable_fit)$estimate, 79 | tidy(refit(fable_fit, USAccDeaths_tbl))$estimate 80 | ) 81 | 82 | # Interpolate 83 | USAccDeaths_tbl[["value"]][10] <- NA 84 | expect_equal( 85 | interpolate(fable_fit, USAccDeaths_tbl)[["value"]][10], 86 | fitted(fable_fit)[[".fitted"]][10] 87 | ) 88 | }) 89 | -------------------------------------------------------------------------------- /tests/testthat/test-mean.R: -------------------------------------------------------------------------------- 1 | context("test-mean") 2 | 3 | test_that("MEAN", { 4 | fit <- USAccDeaths_tbl %>% model(mean = MEAN(value)) 5 | expect_identical( 6 | fitted(fit) %>% select(index, .fitted), 7 | USAccDeaths_tbl %>% transmute(.fitted = mean(value)) 8 | ) 9 | 10 | expect_equivalent( 11 | glance(fit)$sigma2, var(scale(USAccDeaths_tbl$value, scale = FALSE)) 12 | ) 13 | 14 | expect_identical( 15 | residuals(fit)[[".resid"]], USAccDeaths_tbl$value - mean(USAccDeaths_tbl$value) 16 | ) 17 | 18 | expect_identical( 19 | tidy(fit)$estimate, mean(USAccDeaths_tbl$value) 20 | ) 21 | 22 | expect_output(report(fit), "Mean: 8788\\.7917") 23 | 24 | fc <- fit %>% forecast(h = 3) 25 | 26 | expect_identical( 27 | fc_mean(fc$value), 28 | rep(mean(USAccDeaths_tbl$value), 3) 29 | ) 30 | 31 | fc_sim <- fit %>% forecast(h = 3, bootstrap = TRUE, times = 5) 32 | 33 | # expect_identical( 34 | # fc$value, fc_sim$value 35 | # ) 36 | }) 37 | -------------------------------------------------------------------------------- /tests/testthat/test-nnetar.R: -------------------------------------------------------------------------------- 1 | context("test-nnetar") 2 | skip_if_not_installed("nnet") 3 | 4 | airmiles <- as_tsibble(airmiles) 5 | test_that("Automatic NNETAR selection", { 6 | air_fit <- airmiles %>% model(NNETAR(box_cox(value, 0.15))) 7 | expect_equal(model_sum(air_fit[[1]][[1]]), "NNAR(1,1)") 8 | 9 | # Test with xreg 10 | air_fit <- airmiles %>% model(NNETAR(box_cox(value, 0.15) ~ trend() + rnorm(length(index)))) 11 | air_fit <- airmiles %>% model(NNETAR(box_cox(value, 0.15) ~ trend())) 12 | 13 | # Test simulations 14 | air_fit %>% 15 | generate(h = 10, times = 5) 16 | 17 | # Test forecasts 18 | fc_sim <- air_fit %>% 19 | forecast(h = 10, times = 5) 20 | fc_boot <- air_fit %>% 21 | forecast(h = 10, times = 5, bootstrap = TRUE) 22 | expect_equal( 23 | mean(fc_sim$value), 24 | mean(fc_boot$value), 25 | tolerance = 100 26 | ) 27 | 28 | # Short series 29 | expect_output( 30 | UKLungDeaths[1:24, ] %>% 31 | model(NNETAR(mdeaths)) %>% 32 | report(), 33 | "NNAR\\(4,1,3\\)\\[12\\]" 34 | ) 35 | }) 36 | 37 | test_that("Manual NNETAR selection", { 38 | fit <- UKLungDeaths %>% 39 | model(NNETAR(mdeaths ~ AR(p = 3, P = 2))) 40 | expect_equal(model_sum(fit[[1]][[1]]), "NNAR(3,2,3)[12]") 41 | 42 | expect_equal( 43 | with(augment(fit), .fitted + .resid)[-(1:24)], 44 | UKLungDeaths$mdeaths[-(1:24)] 45 | ) 46 | 47 | # Short series 48 
| expect_warning( 49 | airmiles[1:5, ] %>% 50 | model(NNETAR(value ~ AR(10))), 51 | "Reducing number of lagged inputs due to short series" 52 | ) 53 | }) 54 | 55 | 56 | test_that("NNETAR with bad inputs", { 57 | expect_warning( 58 | airmiles[1:2, ] %>% 59 | model(NNETAR(value)), 60 | "Not enough data to fit a model" 61 | ) 62 | 63 | expect_warning( 64 | airmiles %>% 65 | model(NNETAR(resp(rep_along(value, NA)))), 66 | "All observations are missing, a model cannot be estimated without data" 67 | ) 68 | 69 | expect_warning( 70 | airmiles %>% 71 | model(NNETAR(resp(rep_along(value, 1)))), 72 | "Constant data, setting `AR\\(p=1, P=0\\)`, and `scale_inputs=FALSE`" 73 | ) 74 | 75 | expect_warning( 76 | airmiles %>% 77 | model(NNETAR(value ~ rep_along(value, 1))), 78 | "Constant xreg column, setting `scale_inputs=FALSE`" 79 | ) 80 | }) 81 | -------------------------------------------------------------------------------- /tests/testthat/test-rw.R: -------------------------------------------------------------------------------- 1 | context("test-rw.R") 2 | test_that("NAIVE", { 3 | skip_if_not_installed("forecast") 4 | fable_fit <- USAccDeaths_tbl %>% model(naive = NAIVE(value)) 5 | forecast_fc <- forecast::naive(USAccDeaths, h = 12) 6 | 7 | expect_equivalent( 8 | fitted(fable_fit)[[".fitted"]], 9 | unclass(fitted(forecast_fc)) 10 | ) 11 | 12 | fable_fc <- fable_fit %>% forecast(h = 12) 13 | 14 | expect_equivalent( 15 | fc_mean(fable_fc$value), 16 | unclass(forecast_fc$mean) 17 | ) 18 | expect_identical( 19 | model_sum(fable_fit$naive[[1]]), 20 | "NAIVE" 21 | ) 22 | }) 23 | 24 | test_that("RW w/ drift", { 25 | skip_if_not_installed("forecast") 26 | fable_fit <- USAccDeaths_tbl %>% model(rw = RW(value ~ drift())) 27 | forecast_fc <- forecast::rwf(USAccDeaths, drift = TRUE, h = 12) 28 | 29 | expect_equivalent( 30 | fitted(fable_fit)[[".fitted"]], 31 | unclass(fitted(forecast_fc)) 32 | ) 33 | 34 | fable_fc <- fable_fit %>% forecast(h = 12) 35 | 36 | expect_equivalent( 37 | fc_mean(fable_fc$value), 38 | unclass(forecast_fc$mean) 39 | ) 40 | 41 | if(packageVersion("forecast") > "8.17.0"){ 42 | expect_equivalent( 43 | unclass(hilo(fable_fc)$`80%`)$upper, 44 | unclass(forecast_fc$upper[, 1]) 45 | ) 46 | } 47 | 48 | expect_identical( 49 | model_sum(fable_fit$rw[[1]]), 50 | "RW w/ drift" 51 | ) 52 | 53 | expect_output( 54 | report(fable_fit), 55 | "Drift: 3\\.2817 \\(se: 87\\.2696\\)" 56 | ) 57 | 58 | expect_equal( 59 | tidy(fable_fit)$estimate, 60 | forecast_fc$model$par$drift 61 | ) 62 | 63 | expect_equal( 64 | glance(fable_fit)$sigma2, 65 | forecast_fc$model$sigma2 66 | ) 67 | 68 | expect_equivalent( 69 | residuals(fable_fit)[[".resid"]], 70 | unclass(residuals(forecast_fc)) 71 | ) 72 | }) 73 | 74 | test_that("SNAIVE", { 75 | skip_if_not_installed("forecast") 76 | fable_fit <- USAccDeaths_tbl %>% model(snaive = SNAIVE(value)) 77 | forecast_fc <- forecast::snaive(USAccDeaths, h = 12) 78 | 79 | expect_equivalent( 80 | fitted(fable_fit)[[".fitted"]], 81 | unclass(fitted(forecast_fc)) 82 | ) 83 | 84 | fable_fc <- fable_fit %>% forecast(h = 12) 85 | 86 | expect_equivalent( 87 | fc_mean(fable_fc$value), 88 | unclass(forecast_fc$mean) 89 | ) 90 | 91 | expect_identical( 92 | model_sum(fable_fit$snaive[[1]]), 93 | "SNAIVE" 94 | ) 95 | 96 | fable_fc_sim <- fable_fit %>% 97 | forecast(h = 12, bootstrap = TRUE, times = 5) 98 | # expect_equal( 99 | # fable_fc$value, 100 | # fable_fc_sim$value 101 | # ) 102 | 103 | skip_if_not_installed("tsibbledata") 104 | expect_warning( 105 | tsibbledata::vic_elec %>% 106 | 
tsibble::index_by(date = as.Date(Time)) %>% 107 | dplyr::summarise(demand = mean(Demand)) %>% 108 | model(SNAIVE(demand ~ lag("year"))), 109 | "Non-integer lag orders for random walk models are not supported" 110 | ) 111 | }) 112 | 113 | test_that("RW short", { 114 | library(tsibble) 115 | fc <- suppressWarnings(tsibble(year = 2010:2012, y = 1:3, index = year) %>% 116 | model(SNAIVE(y ~ lag(4))) %>% 117 | forecast(h = 4)) 118 | 119 | expect_equal( 120 | fc_mean(fc$y), 121 | c(NA, 1, 2, 3) 122 | ) 123 | }) 124 | 125 | test_that("lagwalk with bad inputs", { 126 | expect_warning( 127 | UKLungDeaths %>% 128 | model(SNAIVE(vars(mdeaths, fdeaths))), 129 | "Only univariate responses are supported by lagwalks" 130 | ) 131 | 132 | expect_warning( 133 | UKLungDeaths %>% 134 | model(SNAIVE(resp(rlang::rep_along(mdeaths, NA)))), 135 | "All observations are missing" 136 | ) 137 | 138 | expect_warning( 139 | UKLungDeaths %>% 140 | model(SNAIVE(mdeaths ~ lag(1))), 141 | "Non-seasonal model specification provided" 142 | ) 143 | }) 144 | -------------------------------------------------------------------------------- /tests/testthat/test-var.R: -------------------------------------------------------------------------------- 1 | context("test-var") 2 | 3 | test_that("Automatic VAR selection", { 4 | fit <- UKLungDeaths %>% 5 | model(VAR(vars(mdeaths, fdeaths) ~ AR(0:2))) 6 | 7 | expect_output( 8 | report(fit), 9 | "Model: VAR\\(2\\) w/ mean" 10 | ) 11 | 12 | expect_equal(NROW(tidy(fit)), 10) 13 | 14 | expect_identical( 15 | dim(glance(fit)$sigma2[[1]]), 16 | rep(2L, 2) 17 | ) 18 | 19 | expect_equal( 20 | matrix(with(augment(fit), .fitted + .resid), ncol = 2)[3:72, ], 21 | cbind(UKLungDeaths$mdeaths[3:72], UKLungDeaths$fdeaths[3:72]) 22 | ) 23 | }) 24 | 25 | test_that("Univariate VAR", { 26 | fit <- UKLungDeaths %>% 27 | model(VAR(fdeaths ~ 0)) 28 | expect_s3_class( 29 | forecast(fit), 30 | "fbl_ts" 31 | ) 32 | }) 33 | 34 | test_that("VAR with xregs", { 35 | fit <- UKLungDeaths %>% 36 | model(VAR(vars(mdeaths, fdeaths) ~ AR(2) + fourier(K = 5))) 37 | 38 | expect_equal( 39 | NROW(tidy(fit)), 40 | 30 41 | ) 42 | 43 | expect_s3_class( 44 | forecast(fit), 45 | "fbl_ts" 46 | ) 47 | }) 48 | -------------------------------------------------------------------------------- /vignettes/.gitignore: -------------------------------------------------------------------------------- 1 | *.html 2 | *.R 3 | -------------------------------------------------------------------------------- /vignettes/fable.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Introduction to fable" 3 | output: rmarkdown::html_vignette 4 | vignette: > 5 | %\VignetteIndexEntry{Introduction to fable} 6 | %\VignetteEngine{knitr::rmarkdown} 7 | %\VignetteEncoding{UTF-8} 8 | --- 9 | 10 | ```{r, include = FALSE} 11 | knitr::opts_chunk$set( 12 | collapse = TRUE, 13 | comment = "#>", 14 | fig.height = 4, 15 | fig.width = 7 16 | ) 17 | ``` 18 | 19 | ```{r setup, message=FALSE} 20 | library(fable) 21 | library(tsibble) 22 | library(dplyr) 23 | ``` 24 | 25 | The fable package provides some commonly used univariate and multivariate time series forecasting models which can be used with tidy temporal data in the tsibble format. These models are used within a consistent and tidy modelling framework, allowing several models to be estimated, compared, combined, forecasted and otherwise worked with across many time series. 
26 | 27 | Suppose we wanted to forecast the number of domestic travellers to Melbourne, Australia. In the `tsibble::tourism` data set, this can be further broken down into 4 reasons of travel: "business", "holiday", "visiting friends and relatives" and "other reasons". The first observation from each series is shown below. 28 | 29 | ```{r data} 30 | tourism_melb <- tourism %>% 31 | filter(Region == "Melbourne") 32 | tourism_melb %>% 33 | group_by(Purpose) %>% 34 | slice(1) 35 | ``` 36 | 37 | The variable that we'd like to estimate is the number of overnight trips (in thousands) represented by the `Trips` variable. A plot of the data reveals that some trends and weak seasonality are apparent. 38 | 39 | ```{r plot} 40 | tourism_melb %>% 41 | autoplot(Trips) 42 | ``` 43 | 44 | Two widely used models available in this package are ETS and ARIMA. These models are specified using a compact formula representation (much like cross-sectional linear models using `lm()`). The response variable (`Trips`) and any transformations are included on the left, while the model specification is on the right of the formula. When a model is not fully specified (or if the formula's right side is missing completely), the unspecified components will be chosen automatically. 45 | 46 | Suppose we think that the ETS model must have an additive trend, and want the other elements to be chosen automatically. This model would be specified using `ETS(Trips ~ trend("A"))`. Similarly, a completely automatic ARIMA model (much like `auto.arima()` from the `forecast` package) can be specified using `ARIMA(Trips)`. The `model()` function is used to estimate these model specifications on a particular dataset, and will return a "mable" (model table). 47 | 48 | ```{r mdl} 49 | fit <- tourism_melb %>% 50 | model( 51 | ets = ETS(Trips ~ trend("A")), 52 | arima = ARIMA(Trips) 53 | ) 54 | fit 55 | ``` 56 | 57 | A mable contains a row for each time series (uniquely identified by the key variables), and a column for each model specification. A model is contained within the cells of each model column. In the example above we can see that all four ETS models have an additive trend, and the error and seasonality have been chosen automatically. Similarly, the ARIMA model varies between time series as it has been automatically selected. 58 | 59 | The `coef()` or `tidy()` function is used to extract the coefficients from the models. It's possible to use `select()` and other verbs to focus on the coefficients from a particular model. 60 | 61 | ```{r coef} 62 | fit %>% 63 | select(Region, State, Purpose, arima) %>% 64 | coef() 65 | ``` 66 | 67 | The `glance()` function provides a one-row summary of each model, and commonly includes descriptions of the model's fit such as the residual variance and information criteria. Be wary though, as information criteria (AIC, AICc, BIC) are only comparable between the same model class and only if those models share the same response (after transformations and differencing). 68 | 69 | ```{r glance} 70 | fit %>% 71 | glance() 72 | ``` 73 | 74 | If you're working with a single model (or want to look at one model in particular), the `report()` function gives a familiar and nicely formatted model-specific display. 75 | 76 | ```{r report} 77 | fit %>% 78 | filter(Purpose == "Holiday") %>% 79 | select(ets) %>% 80 | report() 81 | ``` 82 | 83 | The fitted values and residuals from a model can be obtained using `fitted()` and `residuals()` respectively. 
Additionally, the `augment()` function may be more convenient, as it provides the original data along with both fitted values and their residuals. 84 | 85 | ```{r augment} 86 | fit %>% 87 | augment() 88 | ``` 89 | 90 | To compare how well the models fit the data, we can consider some common accuracy measures. It seems that on the training set the ETS model out-performs ARIMA for the series where travellers are on holiday, business, and visiting friends and relatives. The [*Evaluating modelling accuracy*](https://otexts.com/fpp3/accuracy.html) chapter from the [*Forecasting: Principles and Practice (3rd Ed.)*](https://otexts.com/fpp3/) textbook provides more detail on how modelling and forecasting accuracy is evaluated. 91 | 92 | ```{r accuracy} 93 | fit %>% 94 | accuracy() %>% 95 | arrange(MASE) 96 | ``` 97 | 98 | Forecasts from these models can be produced directly as our specified models do not require any additional data. 99 | 100 | ```{r fc} 101 | fc <- fit %>% 102 | forecast(h = "5 years") 103 | fc 104 | ``` 105 | 106 | The resulting forecasts are contained in a "fable" (forecast table), and both point forecasts and forecast distributions are available in the table for the next five years. Prediction intervals can be extracted from the distribution using the `hilo()` function. 107 | 108 | 109 | 110 | 111 | 112 | 113 | The `hilo()` function can also be used on fable objects, which allows you to extract multiple intervals at once. 114 | 115 | ```{r fc-hilo} 116 | fc %>% 117 | hilo(level = c(80, 95)) 118 | ``` 119 | 120 | 121 | 131 | 132 | You can also see a plot of the forecasts using `autoplot()`. To see the historical data along with the forecasts you can provide it as the first argument to the function. 133 | 134 | ```{r fc-plot, fig.height=10} 135 | fc %>% 136 | autoplot(tourism_melb) 137 | ``` 138 | 139 | More model methods may be supported by particular models, including the ability to `refit()` the model to new data, `stream()` in new data to extend the fit, `generate()` simulated paths from a model, `interpolate()` missing values, extract `components()` from the fitted model, and display the model's `equation()`. 140 | 141 | More information about modelling time series and using the fable package can be found in [*Forecasting: Principles and Practice (3rd Ed.)*](https://otexts.com/fpp3/) and in the [*pkgdown site*](https://fable.tidyverts.org/). -------------------------------------------------------------------------------- /vignettes/transformations.Rmd: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Forecasting with transformations" 3 | author: "Mitchell O'Hara-Wild" 4 | output: rmarkdown::html_vignette 5 | vignette: > 6 | %\VignetteIndexEntry{Forecasting with transformations} 7 | %\VignetteEngine{knitr::rmarkdown} 8 | %\VignetteEncoding{UTF-8} 9 | --- 10 | 11 | ```{r setup, include = FALSE} 12 | knitr::opts_chunk$set( 13 | collapse = TRUE, 14 | comment = "#>" 15 | ) 16 | ``` 17 | 18 | All fable models with formula-based model specification support a highly flexible specification of transformations. Specified transformations are automatically back-transformed and bias adjusted to produce forecast means and fitted values on the original scale of the data. 19 | 20 | The transformation used for the model is defined on the left of the tilde (`~`) in the formula. For example, when forecasting Melbourne `Trips` from the `tsibble::tourism` dataset, a square root transformation can be applied using `sqrt(Trips)`. 
21 | 22 | ```{r sqrt} 23 | library(fable) 24 | library(tsibble) 25 | library(dplyr) 26 | tourism %>% 27 | filter(Region == "Melbourne") %>% 28 | model(ETS(sqrt(Trips))) 29 | ``` 30 | 31 | ## Combining transformations 32 | Multiple transformations can be combined using this interface, allowing more complicated transformations to be used. A simple example of a combined transformation is $f(x) = log(x+1)$, as it involves both a `log` transformation, and a `+1` transformation. This transformation is commonly used to overcome a limitation of using log transformations to preserve non-negativity on data which contains zeroes. 33 | 34 | Simple combined transformations and backtransformations can be constructed automatically. 35 | ```{r combine} 36 | library(tsibble) 37 | tourism %>% 38 | filter(Region == "Melbourne") %>% 39 | model(ETS(log(Trips + 1))) 40 | ``` 41 | 42 | 43 | ## Custom transformations 44 | It is possible to extend the supported transformations by defining your own transformation with an appropriate back-transformation function. It is assumed that the first argument of your function is your data which is being transformed. 45 | 46 | A useful transformation which is not readily supported by fable is the scaled logit, which allows the forecasts to be bounded by a given interval ([*forecasting within limits*](https://robjhyndman.com/hyndsight/forecasting-within-limits/)). The appropriate transformation to ensure the forecasted values are between $a$ and $b$ (where $a<b$) is given by: 47 | 48 | $$f(x) = \log\left(\dfrac{x-a}{b-x}\right)$$ 49 | 50 | Inverting this transformation gives the appropriate back-transformation of: 51 | 52 | $$f^{-1}(x) = \dfrac{(b-a)e^x}{1 + e^x} + a$$ 53 | 54 | To use this transformation for modelling, we define the transformation and its inverse, and pair them together using `fabletools::new_transformation()`. The resulting function (here `my_scaled_logit()`) can then be used on the left of a model formula like any built-in transformation. 55 | 56 | ```{r custom} 57 | scaled_logit <- function(x, lower = 0, upper = 1) { 58 | log((x - lower) / (upper - x)) 59 | } 60 | inv_scaled_logit <- function(x, lower = 0, upper = 1) { 61 | (upper - lower) * exp(x) / (1 + exp(x)) + lower 62 | } 63 | my_scaled_logit <- new_transformation(scaled_logit, inv_scaled_logit) 64 | 65 | # Fit an ETS model to male lung deaths, constrained to lie between 750 and 3000 66 | cbind(mdeaths, fdeaths) %>% 67 | as_tsibble(pivot_longer = FALSE) %>% 68 | model(ETS(my_scaled_logit(mdeaths, 750, 3000) ~ 69 | error("A") + trend("N") + season("A"))) %>% 70 | report() 71 | ``` 72 | 73 | ## Forecast means and medians 74 | When forecasting with transformations, the model is fitted and forecasted using the transformed data. To produce forecasts of the original data, the predicted values must be back-transformed. However, this process of predicting transformed data and backtransforming predictions usually results in producing forecast medians. To convert the forecast medians into forecast means, a transformation bias adjustment is required: 75 | 76 | $$\hat{y} = f^{-1}(\tilde{y}) + \dfrac{1}{2}\sigma^2\dfrac{\partial^2}{\partial \tilde{y}^2}f^{-1}(\tilde{y})$$ 77 | Note that the forecast medians are given by $f^{-1}(\tilde{y})$, and the adjustment needed to produce forecast means ($\hat{y}$) is $\dfrac{1}{2}\sigma^2\dfrac{\partial^2}{\partial \tilde{y}^2}f^{-1}(\tilde{y})$. 78 | 79 | The fable package automatically produces forecast means (by back-transforming and adjusting the transformed forecasts). The forecast medians can be obtained via the forecast intervals when `level=0`. 80 | 81 | More information about adjusting forecasts to compute forecast means can be found at [*the forecast mean after back-transformation*](https://robjhyndman.com/hyndsight/backtransforming/). --------------------------------------------------------------------------------
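As a quick illustration of the last point, here is a minimal sketch (not part of the package sources, and assuming only the fable, tsibble and dplyr packages together with the `USAccDeaths` series already used throughout these examples) comparing the bias-adjusted forecast means with the forecast medians:

```r
library(fable)
library(tsibble)
library(dplyr)

# Fit an ETS model to log-transformed data and forecast two years ahead.
fc <- as_tsibble(USAccDeaths) %>%
  model(ETS(log(value))) %>%
  forecast(h = "2 years")

# The point forecasts reported in the fable are back-transformed,
# bias-adjusted means on the original scale.
fc

# Setting `level = 0` collapses the interval to the forecast median,
# following the `level=0` approach described above.
fc %>% hilo(level = 0)

# Alternatively, take the median directly from the forecast distribution column.
fc %>% mutate(.median = median(value))
```

The gap between the reported means and these medians corresponds to the bias adjustment term in the formula above.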