├── .Rbuildignore
├── .github
│   ├── .gitignore
│   └── workflows
│       ├── R-CMD-check.yaml
│       ├── pkgdown.yaml
│       ├── pr-commands.yaml
│       └── recheck.yml
├── .gitignore
├── .lintr
├── DESCRIPTION
├── NAMESPACE
├── NEWS.md
├── R
│   ├── accessors.R
│   ├── accuracy.R
│   ├── aggregate.R
│   ├── box_cox.R
│   ├── broom.R
│   ├── compat-purrr.R
│   ├── components.R
│   ├── dable.R
│   ├── definitions.R
│   ├── dplyr-dable.R
│   ├── dplyr-fable.R
│   ├── dplyr-mable.R
│   ├── equation.R
│   ├── estimate.R
│   ├── fable.R
│   ├── fabletools-package.R
│   ├── features.R
│   ├── fitted.R
│   ├── forecast.R
│   ├── frequency.R
│   ├── generate.R
│   ├── guess.R
│   ├── hilo.R
│   ├── hypothesise.R
│   ├── interpolate.R
│   ├── irf.R
│   ├── lst_mdl.R
│   ├── mable.R
│   ├── mdl_ts.R
│   ├── model.R
│   ├── model_combination.R
│   ├── model_decomposition.R
│   ├── model_null.R
│   ├── outliers.R
│   ├── parse.R
│   ├── plot.R
│   ├── reconciliation.R
│   ├── reexports.R
│   ├── refit.R
│   ├── report.R
│   ├── residuals.R
│   ├── response.R
│   ├── specials.R
│   ├── stream.R
│   ├── tbl_utils.R
│   ├── temporal_aggregation.R
│   ├── transform.R
│   ├── traverse.R
│   ├── utils.R
│   ├── vctrs-dable.R
│   ├── vctrs-fable.R
│   ├── vctrs-mable.R
│   ├── xregs.R
│   └── zzz.R
├── README.Rmd
├── README.md
├── _pkgdown.yml
├── codecov.yml
├── cran-comments.md
├── fabletools.Rproj
├── inst
│   └── WORDLIST
├── man
│   ├── IRF.Rd
│   ├── MAAPE.Rd
│   ├── accuracy.Rd
│   ├── agg_vec.Rd
│   ├── aggregate_index.Rd
│   ├── aggregate_key.Rd
│   ├── aggregation-vctrs.Rd
│   ├── as-dable.Rd
│   ├── as-fable.Rd
│   ├── as_mable.Rd
│   ├── augment.Rd
│   ├── autoplot.dcmp_ts.Rd
│   ├── autoplot.fbl_ts.Rd
│   ├── autoplot.tbl_ts.Rd
│   ├── bias_adjust.Rd
│   ├── bottom_up.Rd
│   ├── box_cox.Rd
│   ├── combination_ensemble.Rd
│   ├── combination_model.Rd
│   ├── combination_weighted.Rd
│   ├── common_xregs.Rd
│   ├── components.Rd
│   ├── construct_fc.Rd
│   ├── dable-vctrs.Rd
│   ├── dable.Rd
│   ├── decomposition_model.Rd
│   ├── directional_accuracy_measures.Rd
│   ├── distribution_accuracy_measures.Rd
│   ├── distribution_var.Rd
│   ├── estimate.Rd
│   ├── fable-vctrs.Rd
│   ├── fable.Rd
│   ├── fabletools-package.Rd
│   ├── feature_set.Rd
│   ├── features.Rd
│   ├── features_by_pkg.Rd
│   ├── features_by_tag.Rd
│   ├── figures
│   │   ├── README-example-1.png
│   │   ├── README-pressure-1.png
│   │   ├── lifecycle-archived.svg
│   │   ├── lifecycle-defunct.svg
│   │   ├── lifecycle-deprecated.svg
│   │   ├── lifecycle-experimental.svg
│   │   ├── lifecycle-maturing.svg
│   │   ├── lifecycle-questioning.svg
│   │   ├── lifecycle-retired.svg
│   │   ├── lifecycle-soft-deprecated.svg
│   │   ├── lifecycle-stable.svg
│   │   └── lifecycle-superseded.svg
│   ├── fitted.mdl_df.Rd
│   ├── forecast.Rd
│   ├── freq_tools.Rd
│   ├── generate.mdl_df.Rd
│   ├── glance.Rd
│   ├── hypothesize.mdl_df.Rd
│   ├── interpolate.Rd
│   ├── interval_accuracy_measures.Rd
│   ├── is_aggregated.Rd
│   ├── is_dable.Rd
│   ├── is_fable.Rd
│   ├── is_mable.Rd
│   ├── is_model.Rd
│   ├── mable-vctrs.Rd
│   ├── mable.Rd
│   ├── mable_vars.Rd
│   ├── middle_out.Rd
│   ├── min_trace.Rd
│   ├── model.Rd
│   ├── model_lhs.Rd
│   ├── model_rhs.Rd
│   ├── model_sum.Rd
│   ├── new-model-class.Rd
│   ├── new_specials.Rd
│   ├── new_transformation.Rd
│   ├── null_model.Rd
│   ├── outliers.Rd
│   ├── parse_model.Rd
│   ├── parse_model_lhs.Rd
│   ├── parse_model_rhs.Rd
│   ├── point_accuracy_measures.Rd
│   ├── reconcile.Rd
│   ├── reexports.Rd
│   ├── refit.Rd
│   ├── register_feature.Rd
│   ├── report.Rd
│   ├── residuals.mdl_df.Rd
│   ├── response.Rd
│   ├── response_vars.Rd
│   ├── scenarios.Rd
│   ├── skill_score.Rd
│   ├── special_xreg.Rd
│   ├── stream.Rd
│   ├── tidy.Rd
│   ├── top_down.Rd
│   ├── traverse.Rd
│   ├── unpack_hilo.Rd
│   └── validate_formula.Rd
├── tests
│   ├── testthat.R
│   └── testthat
│       ├── setup-data.R
│       ├── setup-models.R
│       ├── test-accuracy.R
│       ├── test-broom.R
│       ├── test-combination.R
│       ├── test-decomposition-model.R
│       ├── test-fable.R
│       ├── test-features.R
│       ├── test-generate.R
│       ├── test-graphics.R
│       ├── test-hilo.R
│       ├── test-interpolate.R
│       ├── test-mable.R
│       ├── test-multivariate.R
│       ├── test-parser.R
│       ├── test-reconciliation.R
│       ├── test-spelling.R
│       ├── test-transformations.R
│       └── test-validate_model.R
└── vignettes
    ├── .gitignore
    └── extension_models.Rmd
/.Rbuildignore:
--------------------------------------------------------------------------------
1 | ^README\.Rmd$
2 | Makefile
3 | ^fabletools\.Rproj$
4 | ^\.Rproj\.user$
5 | ^\.travis\.yml$
6 | ^data-raw$
7 | ^codecov\.yml$
8 | ^_pkgdown\.yml$
9 | ^docs$
10 | ^pkgdown$
11 | ^tic\.R$
12 | ^appveyor\.yml$
13 | ^cran-comments\.md$
14 | ^revdep$
15 | ^\.lintr$
16 | ^\.github$
17 | ^CRAN-RELEASE$
18 |
--------------------------------------------------------------------------------
/.github/.gitignore:
--------------------------------------------------------------------------------
1 | *.html
2 |
--------------------------------------------------------------------------------
/.github/workflows/R-CMD-check.yaml:
--------------------------------------------------------------------------------
1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples
2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
3 | on:
4 | push:
5 | branches: [main, master]
6 | pull_request:
7 | branches: [main, master]
8 |
9 | name: R-CMD-check
10 |
11 | jobs:
12 | R-CMD-check:
13 | runs-on: ${{ matrix.config.os }}
14 |
15 | name: ${{ matrix.config.os }} (${{ matrix.config.r }})
16 |
17 | strategy:
18 | fail-fast: false
19 | matrix:
20 | config:
21 | - {os: macOS-latest, r: 'release'}
22 | - {os: windows-latest, r: 'release'}
23 | - {os: ubuntu-latest, r: 'devel', http-user-agent: 'release'}
24 | - {os: ubuntu-latest, r: 'release'}
25 | - {os: ubuntu-latest, r: 'oldrel-1'}
26 |
27 | env:
28 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
29 | R_KEEP_PKG_SOURCE: yes
30 |
31 | steps:
32 | - uses: actions/checkout@v2
33 |
34 | - uses: r-lib/actions/setup-pandoc@v2
35 |
36 | - uses: r-lib/actions/setup-r@v2
37 | with:
38 | r-version: ${{ matrix.config.r }}
39 | http-user-agent: ${{ matrix.config.http-user-agent }}
40 | use-public-rspm: true
41 |
42 | - uses: r-lib/actions/setup-r-dependencies@v2
43 | with:
44 | extra-packages: any::rcmdcheck
45 | needs: check
46 |
47 | - uses: r-lib/actions/check-r-package@v2
48 | with:
49 | upload-snapshots: true
50 |
--------------------------------------------------------------------------------
/.github/workflows/pkgdown.yaml:
--------------------------------------------------------------------------------
1 | # Workflow derived from https://github.com/r-lib/actions/tree/v2/examples
2 | # Need help debugging build failures? Start at https://github.com/r-lib/actions#where-to-find-help
3 | on:
4 | push:
5 | branches: [main, master]
6 | pull_request:
7 | branches: [main, master]
8 | release:
9 | types: [published]
10 | workflow_dispatch:
11 |
12 | name: pkgdown
13 |
14 | jobs:
15 | pkgdown:
16 | runs-on: ubuntu-latest
17 | # Only restrict concurrency for non-PR jobs
18 | concurrency:
19 | group: pkgdown-${{ github.event_name != 'pull_request' || github.run_id }}
20 | env:
21 | GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
22 | steps:
23 | - uses: actions/checkout@v2
24 |
25 | - uses: r-lib/actions/setup-pandoc@v2
26 |
27 | - uses: r-lib/actions/setup-r@v2
28 | with:
29 | use-public-rspm: true
30 |
31 | - uses: r-lib/actions/setup-r-dependencies@v2
32 | with:
33 | extra-packages: any::pkgdown, local::.
34 | needs: website
35 |
36 | - name: Build site
37 | run: pkgdown::build_site_github_pages(new_process = FALSE, install = FALSE)
38 | shell: Rscript {0}
39 |
40 | - name: Deploy to GitHub pages 🚀
41 | if: github.event_name != 'pull_request'
42 | uses: JamesIves/github-pages-deploy-action@4.1.4
43 | with:
44 | clean: false
45 | branch: gh-pages
46 | folder: docs
47 |
--------------------------------------------------------------------------------
/.github/workflows/pr-commands.yaml:
--------------------------------------------------------------------------------
1 | on:
2 | issue_comment:
3 | types: [created]
4 | name: Commands
5 | jobs:
6 | document:
7 | if: startsWith(github.event.comment.body, '/document')
8 | name: document
9 | runs-on: macOS-latest
10 | steps:
11 | - uses: actions/checkout@v1
12 | - uses: r-lib/actions/pr-fetch@master
13 | with:
14 | repo-token: ${{ secrets.GH_PAT }}
15 | - uses: r-lib/actions/setup-r@master
16 | - name: Install dependencies
17 | run: Rscript -e 'install.packages(c("remotes", "roxygen2"))' -e 'remotes::install_deps(dependencies = TRUE)'
18 | - name: Document
19 | run: Rscript -e 'roxygen2::roxygenise()'
20 | - name: commit
21 | run: |
22 | git add man/\* NAMESPACE
23 | git commit -m 'Document'
24 | - uses: r-lib/actions/pr-push@master
25 | with:
26 | repo-token: ${{ secrets.GH_PAT }}
27 | style:
28 | if: startsWith(github.event.comment.body, '/style')
29 | name: style
30 | runs-on: macOS-latest
31 | steps:
32 | - uses: actions/checkout@master
33 | - uses: r-lib/actions/pr-fetch@master
34 | with:
35 | repo-token: ${{ secrets.GH_PAT }}
36 | - uses: r-lib/actions/setup-r@master
37 | - name: Install dependencies
38 | run: Rscript -e 'install.packages("styler")'
39 | - name: style
40 | run: Rscript -e 'styler::style_pkg()'
41 | - name: commit
42 | run: |
43 | git add \*.R
44 | git commit -m 'style'
45 | - uses: r-lib/actions/pr-push@master
46 | with:
47 | repo-token: ${{ secrets.GH_PAT }}
48 |
--------------------------------------------------------------------------------
/.github/workflows/recheck.yml:
--------------------------------------------------------------------------------
1 | on:
2 | workflow_dispatch:
3 | inputs:
4 | which:
5 | type: choice
6 | description: Which dependents to check
7 | options:
8 | - strong
9 | - most
10 |
11 | name: Reverse dependency check
12 |
13 | jobs:
14 | revdep_check:
15 | name: Reverse check ${{ inputs.which }} dependents
16 | uses: r-devel/recheck/.github/workflows/recheck.yml@v1
17 | with:
18 | which: ${{ inputs.which }}
19 | subdirectory: '' #if your package is in a git subdir
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .Rproj.user
2 | .Rhistory
3 | .RData
4 | .Ruserdata
5 | Makefile
6 | inst/doc
7 | tests/testthat/Rplots.pdf
8 | revdep
9 | docs
--------------------------------------------------------------------------------
/.lintr:
--------------------------------------------------------------------------------
1 | linters: with_defaults(
2 | trailing_whitespace_linter = NULL,
3 | line_length_linter = NULL,
4 | paren_brace_linter = NULL,
5 | commas_linter = NULL,
6 | object_name_linter = NULL,
7 | cyclocomp_linter = NULL,
8 | camel_case_linter = NULL,
9 | spaces_left_parentheses_linter = NULL,
10 | object_usage_linter = NULL,
11 | infix_spaces_linter = NULL,
12 | open_curly_linter = NULL
13 | )
14 |
--------------------------------------------------------------------------------
/DESCRIPTION:
--------------------------------------------------------------------------------
1 | Package: fabletools
2 | Title: Core Tools for Packages in the 'fable' Framework
3 | Version: 0.5.0.9000
4 | Authors@R:
5 | c(person(given = "Mitchell",
6 | family = "O'Hara-Wild",
7 | role = c("aut", "cre"),
8 | email = "mail@mitchelloharawild.com",
9 | comment = c(ORCID = "0000-0001-6729-7695")),
10 | person(given = "Rob",
11 | family = "Hyndman",
12 | role = "aut"),
13 | person(given = "Earo",
14 | family = "Wang",
15 | role = "aut",
16 | comment = c(ORCID = "0000-0001-6448-5260")),
17 | person(given = "Di",
18 | family = "Cook",
19 | role = "ctb"),
20 | person(given = "George",
21 | family = "Athanasopoulos",
22 | role = "ctb"),
23 | person(given = "David",
24 | family = "Holt",
25 | role = "ctb"))
26 | Description: Provides tools, helpers and data structures for
27 | developing models and time series functions for 'fable' and extension
28 | packages. These tools support a consistent and tidy interface for time
29 | series modelling and analysis.
30 | License: GPL-3
31 | URL: https://fabletools.tidyverts.org/,
32 | https://github.com/tidyverts/fabletools
33 | BugReports: https://github.com/tidyverts/fabletools/issues
34 | Depends:
35 | R (>= 3.1.3)
36 | Imports:
37 | tsibble (>= 0.9.0),
38 | tibble (>= 1.4.1),
39 | ggplot2 (>= 3.0.0),
40 | tidyselect,
41 | rlang (>= 0.4.5),
42 | stats,
43 | dplyr (>= 1.0.0),
44 | tidyr (>= 1.1.0),
45 | generics,
46 | R6,
47 | utils,
48 | vctrs (>= 0.2.2),
49 | distributional (>= 0.3.0.9000),
50 | progressr,
51 | lifecycle,
52 | ggdist,
53 | scales
54 | Suggests:
55 | covr,
56 | crayon,
57 | fable (>= 0.2.0),
58 | future,
59 | future.apply,
60 | knitr,
61 | pillar (>= 1.0.1),
62 | feasts (>= 0.1.2),
63 | rmarkdown,
64 | spelling,
65 | testthat,
66 | tsibbledata (>= 0.2.0),
67 | lubridate,
68 | urca,
69 | mvtnorm,
70 | Matrix
71 | VignetteBuilder:
72 | knitr
73 | RdMacros:
74 | lifecycle
75 | ByteCompile: true
76 | Encoding: UTF-8
77 | Language: en-GB
78 | Roxygen: list(markdown = TRUE, roclets=c('rd', 'collate',
79 | 'namespace'))
80 | RoxygenNote: 7.3.2
81 |
--------------------------------------------------------------------------------
/R/accessors.R:
--------------------------------------------------------------------------------
1 | #' Return response variables
2 | #'
3 | #' `response_vars()` returns a character vector of the response variables in the
4 | #' object.
5 | #'
6 | #' @param x A dataset containing a response variable (such as a mable, fable, or dable).
7 | #' @export
8 | response_vars <- function(x){
9 | UseMethod("response_vars")
10 | }
11 |
12 | #' @export
13 | response_vars.fbl_ts <- function(x){
14 | x%@%"response"
15 | }
16 | #' @export
17 | response_vars.mdl_df <- function(x){
18 | x%@%"response"
19 | }
20 | #' @export
21 | response_vars.mdl_ts <- function(x){
22 | vapply(x$response, rlang::as_label, FUN.VALUE = character(1L))
23 | }
24 | #' @export
25 | response_vars.dcmp_ts <- function(x){
26 | x%@%"response"
27 | }
28 |
29 | #' Return distribution variable
30 | #'
31 | #' `distribution_var()` returns a character vector of the distribution variable
32 | #' in the data.
33 | #'
34 | #' @param x A dataset containing a distribution variable (such as a fable).
35 | #' @export
36 | distribution_var <- function(x){
37 | UseMethod("distribution_var")
38 | }
39 | #' @export
40 | distribution_var.fbl_ts <- function(x){
41 | x%@%"dist"
42 | }
43 |
44 | #' Return model column variables
45 | #'
46 | #' `mable_vars()` returns a character vector of the model variables in the
47 | #' object.
48 | #'
49 | #' @param x A dataset containing models (such as a mable).
50 | #' @export
51 | mable_vars <- function(x){
52 | UseMethod("mable_vars")
53 | }
54 | #' @export
55 | mable_vars.mdl_df <- function(x){
56 | x%@%"model"
57 | }
--------------------------------------------------------------------------------
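
A brief usage sketch of the accessors defined above, assuming the fable package is installed (the model name `snaive` is arbitrary):

    library(fable)

    mbl <- as_tsibble(mdeaths) %>% model(snaive = SNAIVE(value))
    mable_vars(mbl)        # "snaive"

    fbl <- forecast(mbl, h = "1 year")
    response_vars(fbl)     # "value"
    distribution_var(fbl)  # "value" (the column holding the forecast distribution)
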
/R/box_cox.R:
--------------------------------------------------------------------------------
1 | #' Box Cox Transformation
2 | #'
3 | #' `box_cox()` returns a transformation of the input variable using a Box-Cox
4 | #' transformation. `inv_box_cox()` reverses the transformation.
5 | #'
6 | #' The Box-Cox transformation is given by \deqn{f_\lambda(x) =\frac{x^\lambda -
7 | #' 1}{\lambda}}{f(x;lambda)=(x^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda
8 | #' is not equal to 0}. For \eqn{\lambda=0}{lambda=0},
9 | #' \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}.
10 | #'
11 | #' @param x a numeric vector.
12 | #' @param lambda a numeric value for the transformation parameter.
13 | #' @return a transformed numeric vector of the same length as x.
14 | #' @author Rob J Hyndman & Mitchell O'Hara-Wild
15 | #'
16 | #' @references Box, G. E. P. and Cox, D. R. (1964) An analysis of
17 | #' transformations. \emph{JRSS B} \bold{26} 211--246.
18 | #'
19 | #' @examples
20 | #' library(tsibble)
21 | #' library(dplyr)
22 | #' airmiles %>%
23 | #' as_tsibble() %>%
24 | #' mutate(box_cox = box_cox(value, lambda = 0.3))
25 | #'
26 | #' @export
27 | box_cox <- function(x, lambda) {
28 | lambda <- vec_recycle(lambda, vec_size(x))
29 | x[lambda < 0 & x < 0] <- NA
30 | lambda_0 <- lambda == 0
31 | x[lambda_0] <- log(x[lambda_0])
32 | x[!lambda_0] <- (sign(x[!lambda_0]) * abs(x[!lambda_0]) ^ lambda[!lambda_0] - 1) / lambda[!lambda_0]
33 | x
34 | }
35 |
36 | #' @rdname box_cox
37 | #' @export
38 | inv_box_cox <- function(x, lambda) {
39 | lambda <- vec_recycle(lambda, vec_size(x))
40 | x[lambda < 0 & (x > -1 / lambda)] <- NA
41 | lambda_0 <- lambda == 0
42 | x[lambda_0] <- exp(x[lambda_0])
43 | z <- x[!lambda_0] * lambda[!lambda_0] + 1
44 | x[!lambda_0] <- sign(z) * abs(z) ^ (1 / lambda[!lambda_0])
45 | x
46 | }
--------------------------------------------------------------------------------
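
As a quick illustrative check of the pair above, `inv_box_cox()` should undo `box_cox()` for any fixed lambda:

    x <- c(1, 5, 10)
    all.equal(inv_box_cox(box_cox(x, lambda = 0.3), lambda = 0.3), x)  # TRUE
    all.equal(inv_box_cox(box_cox(x, lambda = 0), lambda = 0), x)      # TRUE (log/exp case)
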
/R/components.R:
--------------------------------------------------------------------------------
1 | #' Extract components from a fitted model
2 | #'
3 | #' Allows you to extract elements of interest from the model which can be
4 | #' useful in understanding how they contribute towards the overall fitted values.
5 | #'
6 | #' A dable will be returned, which will allow you to easily plot the components
7 | #' and see the way in which components are combined to give forecasts.
8 | #'
9 | #' @param object A mable.
10 | #' @param ... Other arguments passed to methods.
11 | #'
12 | #' @examplesIf requireNamespace("fable", quietly = TRUE)
13 | #' library(fable)
14 | #' library(tsibbledata)
15 | #'
16 | #' # Forecasting with an ETS(M,Ad,A) model to Australian beer production
17 | #' aus_production %>%
18 | #' model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A"))) %>%
19 | #' components() %>%
20 | #' autoplot()
21 | #'
22 | #' @rdname components
23 | #' @export
24 | components.mdl_df <- function(object, ...){
25 | object <- tidyr::pivot_longer(object, all_of(mable_vars(object)),
26 | names_to = ".model", values_to = ".fit")
27 | kv <- key_vars(object)
28 | object <- transmute(as_tibble(object),
29 | !!!syms(kv), !!sym(".model"),
30 | cmp = map(!!sym(".fit"), components))
31 | attrs <- combine_dcmp_attr(object[["cmp"]])
32 | object <- unnest_tsbl(object, "cmp", parent_key = kv)
33 | as_dable(object, method = attrs[["method"]], resp = !!attrs[["response"]],
34 | seasons = attrs[["seasons"]], aliases = attrs[["aliases"]])
35 | }
36 |
37 | #' @rdname components
38 | #' @export
39 | components.mdl_ts <- function(object, ...){
40 | components(object$fit, ...)
41 | }
--------------------------------------------------------------------------------
/R/dable.R:
--------------------------------------------------------------------------------
1 | #' Create a dable object
2 | #'
3 | #' A dable (decomposition table) data class (`dcmp_ts`) which is a tsibble-like
4 | #' data structure for representing decompositions. This data class is useful for
5 | #' representing decompositions, as its print method describes how its columns
6 | #' can be combined to produce the original data, and has a more appropriate
7 | #' `autoplot()` method for displaying decompositions. Beyond this, a dable
8 | #' (`dcmp_ts`) behaves very similarly to a tsibble (`tbl_ts`).
9 | #'
10 | #' @param ... Arguments passed to [tsibble::tsibble()].
11 | #' @param response The name of the response variable column.
12 | #' @param method The name of the decomposition method.
13 | #' @param seasons A named list describing the structure of seasonal components
14 | #' (such as `period`, and `base`).
15 | #' @param aliases A named list of calls describing common aliases computed from
16 | #' components.
17 | #'
18 | #' @export
19 | dable <- function(..., response, method = NULL, seasons = list(), aliases = list()){
20 | build_dable(tsibble(...), method = method, response = !!enquo(response),
21 | seasons = seasons, aliases = aliases)
22 | }
23 |
24 | #' Is the object a dable
25 | #'
26 | #' @param x An object.
27 | #'
28 | #' @export
29 | is_dable <- function(x){
30 | inherits(x, "dcmp_ts")
31 | }
32 |
33 | #' Coerce to a dable object
34 | #'
35 | #' @inheritParams as_fable
36 | #' @param x Object to be coerced to a dable (`dcmp_ts`)
37 | #'
38 | #' @rdname as-dable
39 | #' @export
40 | as_dable <- function(x, ...){
41 | UseMethod("as_dable")
42 | }
43 |
44 | #' @rdname as-dable
45 | #' @export
46 | as_dable.tbl_df <- function(x, response, method = NULL, seasons = list(), aliases = list(), ...){
47 | build_dable(x, method = method, response = !!enquo(response),
48 | seasons = seasons, aliases = aliases)
49 | }
50 |
51 | #' @rdname as-dable
52 | #'
53 | #' @inheritParams dable
54 | #'
55 | #' @export
56 | as_dable.tbl_ts <- function(x, response, method = NULL, seasons = list(), aliases = list(), ...){
57 | build_dable(x, method = method, response = !!enquo(response),
58 | seasons = seasons, aliases = aliases)
59 | }
60 |
61 | build_dable <- function (x, response, method = NULL, seasons = list(), aliases = list()) {
62 | response <- names(x)[tidyselect::eval_select(enquo(response), x)]
63 | new_tsibble(x, method = method, response = response,
64 | seasons = seasons, aliases = aliases, class = "dcmp_ts")
65 | }
66 |
67 | #' @export
68 | as_tsibble.dcmp_ts <- function(x, ...){
69 | new_tsibble(x)
70 | }
71 |
72 | #' @export
73 | `[.dcmp_ts` <- function (x, i, j, drop = FALSE){
74 | out <- NextMethod()
75 | # Drop dable if tsibble is dropped
76 |
77 | cn <- colnames(out)
78 | not_dable <- !(response_vars(x) %in% cn) || !is_tsibble(out)
79 |
80 | if(not_dable)
81 | return(out)
82 | else
83 | as_dable(out, response = response_vars(x), method = x%@%"method",
84 | seasons = x%@%"seasons", aliases = x%@%"aliases")
85 | }
86 |
87 | tbl_sum.dcmp_ts <- function(x){
88 | response <- response_vars(x)
89 | method <- expr_text((x%@%"aliases")[[response]])
90 | out <- NextMethod()
91 | names(out)[1] <- "A dable"
92 | if(!is.null(method)) {
93 | out[[length(out) + 1]] <- set_names(paste(response, method, sep = " = "),
94 | paste(x%@%"method", "Decomposition"))
95 | }
96 | out
97 | }
98 |
99 | #' @export
100 | rbind.dcmp_ts <- function(...){
101 | deprecate_warn("0.2.0", "rbind.fbl_ts()", "bind_rows()")
102 | dots <- dots_list(...)
103 |
104 | attrs <- combine_dcmp_attr(dots)
105 |
106 | as_dable(invoke("rbind", map(dots, as_tsibble)),
107 | method = attrs[["method"]], response = !!attrs[["response"]],
108 | seasons = attrs[["seasons"]], aliases = attrs[["aliases"]])
109 | }
110 |
111 | combine_dcmp_attr <- function(lst_dcmp){
112 | resp <- map(lst_dcmp, response_vars)
113 | method <- map(lst_dcmp, function(x) x%@%"method")
114 | strc <- map(lst_dcmp, function(x) x%@%"seasons")
115 | aliases <- map(lst_dcmp, function(x) x%@%"aliases")
116 | if(length(resp <- unique(resp)) > 1){
117 | abort("Decomposition response variables must be the same for all models.")
118 | }
119 |
120 | strc <- unlist(unique(strc), recursive = FALSE)
121 | strc <- strc[!duplicated(names(strc))]
122 |
123 | aliases <- unlist(unique(aliases), recursive = FALSE)
124 | aliases <- split(aliases, names(aliases)) %>%
125 | map(function(x){
126 | vars <- map(x, all.vars)
127 | x[[which.max(map_dbl(vars, length))]]
128 | })
129 |
130 | list(response = resp[[1]], method = paste0(unique(method), collapse = " & "),
131 | seasons = strc, aliases = aliases)
132 | }
--------------------------------------------------------------------------------
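
For illustration, decomposition methods such as STL from the feasts package return a dable when `components()` is called on the fitted model (a sketch assuming feasts is installed):

    library(feasts)

    dcmp <- as_tsibble(USAccDeaths) %>%
      model(stl = STL(value)) %>%
      components()
    is_dable(dcmp)       # TRUE
    response_vars(dcmp)  # "value"
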
/R/dplyr-dable.R:
--------------------------------------------------------------------------------
1 | #' @export
2 | dplyr_row_slice.dcmp_ts <- function(data, i, ..., preserve = FALSE) {
3 | res <- NextMethod()
4 | build_dable(res, response = response_vars(data), method = data%@%"method",
5 | seasons = data%@%"seasons", aliases = data%@%"aliases")
6 | }
7 |
8 | #' @export
9 | dplyr_col_modify.dcmp_ts <- function(data, cols) {
10 | res <- NextMethod()
11 | build_dable(res, response = response_vars(data), method = data%@%"method",
12 | seasons = data%@%"seasons", aliases = data%@%"aliases")
13 | }
14 |
15 | #' @export
16 | dplyr_reconstruct.dcmp_ts <- function(data, template) {
17 | res <- NextMethod()
18 | build_dable(res, response = response_vars(template), method = template%@%"method",
19 | seasons = template%@%"seasons", aliases = template%@%"aliases")
20 | }
21 |
--------------------------------------------------------------------------------
/R/dplyr-fable.R:
--------------------------------------------------------------------------------
1 | #' @export
2 | dplyr_row_slice.fbl_ts <- function(data, i, ..., preserve = FALSE) {
3 | res <- NextMethod()
4 | build_fable(res, response = response_vars(data), distribution = distribution_var(data))
5 | }
6 |
7 | #' @export
8 | dplyr_row_slice.grouped_fbl <- dplyr_row_slice.fbl_ts
9 |
10 | #' @export
11 | dplyr_col_modify.fbl_ts <- function(data, cols) {
12 | res <- NextMethod()
13 | build_fable(res, response = response_vars(data), distribution = distribution_var(data))
14 | }
15 |
16 | #' @export
17 | dplyr_col_modify.grouped_fbl <- dplyr_col_modify.fbl_ts
18 |
19 | #' @export
20 | dplyr_reconstruct.fbl_ts <- function(data, template) {
21 | res <- NextMethod()
22 | dist <- distribution_var(template)
23 | if(dist %in% names(res)) {
24 | build_fable(res, response = response_vars(template), distribution = dist)
25 | } else {
26 | res
27 | }
28 | }
29 |
30 | #' @export
31 | dplyr_reconstruct.grouped_fbl <- dplyr_reconstruct.fbl_ts
32 |
33 | #' @export
34 | summarise.fbl_ts <- function(.data, ..., .groups = NULL) {
35 | dist_var <- distribution_var(.data)
36 | dist_ptype <- vec_ptype(.data[[dist_var]])
37 | resp_var <- response_vars(.data)
38 | .data <- summarise(as_tsibble(.data), ..., .groups = .groups)
39 |
40 | # If the distribution is lost, return a tsibble
41 | if(!(dist_var %in% names(.data))) {
42 | if(!vec_is(.data[[dist_var]], dist_ptype)){
43 | return(.data)
44 | }
45 | }
46 |
47 | build_fable(.data, response = resp_var, distribution = dist_var)
48 | }
49 |
50 | #' @export
51 | summarise.grouped_fbl <- summarise.fbl_ts
--------------------------------------------------------------------------------
/R/dplyr-mable.R:
--------------------------------------------------------------------------------
1 | #' @export
2 | dplyr_row_slice.mdl_df <- function(data, i, ..., preserve = FALSE) {
3 | res <- dplyr_row_slice(as_tibble(data), i, ..., preserve = preserve)
4 | build_mable(res, key = !!key_vars(data), model = mable_vars(data))
5 | }
6 |
7 | #' @export
8 | dplyr_col_modify.mdl_df <- function(data, cols) {
9 | res <- dplyr_col_modify(as_tibble(data), cols)
10 | is_mdl <- map_lgl(cols, inherits, "lst_mdl")
11 | # val_key <- any(key_vars(data) %in% cols)
12 | # if (val_key) {
13 | # key_vars <- setdiff(names(res), measured_vars(data))
14 | # data <- remove_key(data, key_vars)
15 | # }
16 | build_mable(res,
17 | key = !!key_vars(data),
18 | model = union(mable_vars(data), names(which(is_mdl))))
19 | }
20 |
21 | #' @export
22 | dplyr_reconstruct.mdl_df <- function(data, template) {
23 | res <- NextMethod()
24 | mbl_vars <- names(which(vapply(data, inherits, logical(1L), "lst_mdl")))
25 | kv <- key_vars(template)
26 | if(all(kv %in% names(res))) {
27 | build_mable(data, key = !!kv, model = mbl_vars)
28 | } else {
29 | as_tibble(res)
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/R/equation.R:
--------------------------------------------------------------------------------
1 | #' @export
2 | equation.mdl_df <- function(object, ...){
3 | if(NROW(object) > 1 || length(mable_vars(object)) > 1){
4 | abort("Model equations are only supported for individual models. To see the equation for a specific model, use `select()` and `filter()` to identify a single model.")
5 | }
6 | equation(object[[(mable_vars(object))[[1]]]][[1]])
7 | }
8 |
9 | #' @export
10 | equation.mdl_ts <- function(object, ...){
11 | if(any(!map_lgl(object$transformation, compose(is.name, body)))){
12 | abort("Cannot display equations containing transformations.")
13 | }
14 | equation(object[["fit"]])
15 | }
--------------------------------------------------------------------------------
/R/estimate.R:
--------------------------------------------------------------------------------
1 | #' Estimate a model
2 | #'
3 | #' @param .data A data structure suitable for the models (such as a `tsibble`).
4 | #' @param ... Further arguments passed to methods.
5 | #'
6 | #' @rdname estimate
7 | #'
8 | #' @export
9 | estimate <- function(.data, ...){
10 | UseMethod("estimate")
11 | }
12 |
13 | #' @param .model Definition for the model to be used.
14 | #'
15 | #' @rdname estimate
16 | #' @export
17 | estimate.tbl_ts <- function(.data, .model, ...){
18 | if(!inherits(.model, "mdl_defn")){
19 | abort("Model definition incorrectly created. Check that specified model(s) are model definitions.")
20 | }
21 | .model$stage <- "estimate"
22 | .model$add_data(.data)
23 | validate_formula(.model, .data)
24 | parsed <- parse_model(.model)
25 |
26 | # Compute response data (as attributes shouldn't change, using this approach should be much faster)
27 | .dt_attr <- attributes(.data)
28 | resp <- map(parsed$expressions, eval_tidy, data = .data, env = .model$specials)
29 | .data <- unclass(.data)[index_var(.data)]
30 | .data[map_chr(parsed$expressions, expr_name)] <- resp
31 | attributes(.data) <- c(attributes(.data), .dt_attr[setdiff(names(.dt_attr), names(attributes(.data)))])
32 |
33 | fit <- eval_tidy(
34 | expr(.model$train(.data = .data, specials = parsed$specials, !!!.model$extra))
35 | )
36 | .model$remove_data()
37 | .model$stage <- NULL
38 | new_model(fit, .model, .data, parsed$response, parsed$transformation)
39 | }
40 |
--------------------------------------------------------------------------------
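
A minimal sketch of calling `estimate()` directly with a model definition (assuming fable is installed); `model()` is the usual higher-level interface and calls this internally:

    library(fable)

    fit <- estimate(as_tsibble(mdeaths), ETS(value))
    is_model(fit)  # TRUE
    report(fit)
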
/R/fabletools-package.R:
--------------------------------------------------------------------------------
1 | #' @docType package
2 | #' @keywords package
3 | "_PACKAGE"
4 |
5 | globalVariables(".")
6 |
7 | ## usethis namespace: start
8 | #' @import rlang
9 | #' @import vctrs
10 | #' @import tsibble
11 | #' @importFrom dplyr mutate transmute summarise filter select rename group_by ungroup groups group_data
12 | #' @importFrom dplyr full_join anti_join left_join semi_join
13 | #' @importFrom dplyr dplyr_row_slice dplyr_col_modify dplyr_reconstruct
14 | #' @importFrom dplyr bind_rows bind_cols
15 | #' @importFrom tidyr nest unnest gather spread
16 | #' @importFrom tidyselect all_of
17 | #' @importFrom lifecycle deprecate_warn deprecated
18 | ## usethis namespace: end
19 | NULL
--------------------------------------------------------------------------------
/R/fitted.R:
--------------------------------------------------------------------------------
1 | #' Extract fitted values from models
2 | #'
3 | #' Extracts the fitted values from each of the models in a mable. A tsibble will
4 | #' be returned containing these fitted values. Fitted values will be
5 | #' automatically back-transformed if a transformation was specified.
6 | #'
7 | #' @aliases hfitted
8 | #'
9 | #' @param object A mable or time series model.
10 | #' @param ... Other arguments passed to the model method for `fitted()`
11 | #'
12 | #' @importFrom stats fitted
13 | #' @export
14 | fitted.mdl_df <- function(object, ...){
15 | mbl_vars <- mable_vars(object)
16 | kv <- key_vars(object)
17 | object <- mutate(as_tibble(object),
18 | dplyr::across(all_of(mbl_vars), function(x) lapply(x, fitted, ...)))
19 | object <- pivot_longer(object, all_of(mbl_vars), names_to = ".model", values_to = ".fitted")
20 | unnest_tsbl(object, ".fitted", parent_key = c(kv, ".model"))
21 | }
22 |
23 | #' @rdname fitted.mdl_df
24 | #'
25 | #' @param h The number of steps ahead that these fitted values are computed from.
26 | #'
27 | #' @export
28 | fitted.mdl_ts <- function(object, h = 1, ...){
29 | bt <- map(object$transformation, invert_transformation)
30 |
31 | fits <- if(h==1) fitted(object$fit, ...) else hfitted(object, h = h, ...)
32 | if(h == 1){
33 | fits <- as.matrix(fits)
34 | # Backtransformation is required for fitted, but forecast() handles it already.
35 | fits <- map2(bt, split(fits, col(fits)), function(bt, fit) bt(fit))
36 | }
37 |
38 | nm <- if(length(fits) == 1) ".fitted" else map_chr(object$response, expr_name)
39 |
40 | out <- object$data[index_var(object$data)]
41 | out[nm] <- fits
42 | out
43 | }
44 |
45 | #' @export
46 | hfitted <- function(object, ...) {
47 | UseMethod("hfitted")
48 | }
49 |
50 | #' @export
51 | hfitted.mdl_ts <- function(object, h, ...) {
52 | fn <- tryCatch(utils::getS3method("hfitted", class(object[["fit"]])),
53 | error = function(e) NULL)
54 | if(is.null(fn)) {
55 | dt <- object$data
56 | resp <- response_vars(object)
57 |
58 | # Undo transformations
59 | bt <- lapply(object$transformation, invert_transformation)
60 | mv <- match(measured_vars(dt), names(dt))
61 | dt[mv] <- mapply(calc, bt, dt[measured_vars(dt)], SIMPLIFY = FALSE)
62 | names(dt)[mv] <- resp
63 |
64 | n <- nrow(dt)
65 | fits <- rep(NA_real_, n)
66 |
67 | for (i in seq_len(n-h)) {
68 | mdl <- tryCatch(refit(object, vec_slice(dt, seq_len(i))),
69 | error = function(e) NULL)
70 | if(is.null(mdl)) next
71 | fits[i + h] <- mean(forecast(mdl, h = h, point_forecast = NULL)[[resp]][h])
72 | }
73 | fits <- list(fits)
74 | } else {
75 | fits <- as.matrix(fn(object[["fit"]], h=h, ...))
76 | # Backtransform fits from model method
77 | bt <- map(object$transformation, invert_transformation)
78 | fits <- map2(bt, split(fits, col(fits)), function(bt, fit) bt(fit))
79 | }
80 | fits
81 | }
--------------------------------------------------------------------------------
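
A short sketch of extracting fitted values from a mable (assuming fable is installed); because the model uses a log transformation, the fitted values are back-transformed onto the original scale:

    library(fable)

    as_tsibble(mdeaths) %>%
      model(ets = ETS(log(value))) %>%
      fitted()
    # A tsibble of back-transformed fitted values with a .model column
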
/R/frequency.R:
--------------------------------------------------------------------------------
1 | #' Extract frequencies for common seasonal periods
2 | #'
3 | #' @param x An object containing temporal data (such as a `tsibble`, `interval`, `datetime` and others.)
4 | #'
5 | #' @return A named vector of frequencies appropriate for the provided data.
6 | #'
7 | #' @references
8 | #'
9 | #' @rdname freq_tools
10 | #'
11 | #' @examples
12 | #' common_periods(tsibble::pedestrian)
13 | #'
14 | #' @export
15 | common_periods <- function(x){
16 | UseMethod("common_periods")
17 | }
18 |
19 | #' @rdname freq_tools
20 | #' @export
21 | common_periods.default <- function(x){
22 | common_periods(interval_pull(x))
23 | }
24 |
25 | #' @rdname freq_tools
26 | #' @export
27 | common_periods.tbl_ts <- function(x){
28 | common_periods(tsibble::interval(x))
29 | }
30 |
31 | #' @rdname freq_tools
32 | #' @export
33 | common_periods.interval <- function(x){
34 | if(inherits(x, "vctrs_vctr")){
35 | x <- vctrs::vec_data(x)
36 | }
37 | freq_sec <- c(year = 31557600, week = 604800, day = 86400, hour = 3600, minute = 60, second = 1,
38 | millisecond = 1e-3, microsecond = 1e-6, nanosecond = 1e-9)
39 | nm <- names(x)[x!=0]
40 | if(is_empty(x)) return(NULL)
41 | switch(paste(nm, collapse = ""),
42 | "unit" = c("none" = 1),
43 | "year" = c("year" = 1),
44 | "quarter" = c("year" = 4/x[["quarter"]]),
45 | "month" = c("year" = 12/x[["month"]]),
46 | "week" = c("year" = 52/x[["week"]]),
47 | "day" = c("year" = 365.25, "week" = 7)/x[["day"]],
48 | with(list(secs = freq_sec/sum(as.numeric(x)*freq_sec[nm])), secs[secs>1])
49 | )
50 | }
51 |
52 | #' @rdname freq_tools
53 | #' @param period Specification of the time-series period
54 | #' @param ... Other arguments to be passed on to methods
55 | #' @export
56 | get_frequencies <- function(period, ...){
57 | UseMethod("get_frequencies")
58 | }
59 |
60 | #' @rdname freq_tools
61 | #' @export
62 | get_frequencies.numeric <- function(period, ...){
63 | period
64 | }
65 |
66 | #' @rdname freq_tools
67 | #' @param data A tsibble
68 | #' @param .auto The method used to automatically select the appropriate seasonal
69 | #' periods
70 | #' @export
71 | get_frequencies.NULL <- function(period, data, ...,
72 | .auto = c("smallest", "largest", "all")){
73 | .auto <- match.arg(.auto)
74 | frequencies <- Filter(function(x) x >= 1, common_periods(data))
75 | if(is_empty(frequencies)) frequencies <- 1
76 | if(.auto == "smallest") {
77 | return(frequencies[which.min(frequencies)])
78 | }
79 | else if(.auto == "largest"){
80 | return(frequencies[which.max(frequencies)])
81 | }
82 | else {
83 | return(frequencies)
84 | }
85 | }
86 |
87 | #' @rdname freq_tools
88 | #' @export
89 | get_frequencies.character <- function(period, data, ...){
90 | require_package("lubridate")
91 | m <- lubridate::as.period(period)
92 | if(is.na(m)) abort(paste("Unknown period:", period))
93 | get_frequencies(m, data, ...)
94 | }
95 |
96 | #' @rdname freq_tools
97 | #' @export
98 | get_frequencies.Period <- function(period, data, ...){
99 | require_package("lubridate")
100 |
101 | interval <- tsibble::interval(data)
102 |
103 | interval <- with(interval, lubridate::years(year) +
104 | lubridate::period(3*quarter + month, units = "month") + lubridate::weeks(week) +
105 | lubridate::days(day) + lubridate::hours(hour) + lubridate::minutes(minute) +
106 | lubridate::seconds(second) + lubridate::milliseconds(millisecond) +
107 | lubridate::microseconds(microsecond) + lubridate::nanoseconds(nanosecond))
108 |
109 | suppressMessages(period / interval)
110 | }
--------------------------------------------------------------------------------
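
A brief sketch with the hourly `tsibble::pedestrian` data (values are approximate):

    common_periods(tsibble::pedestrian)
    # year 8766, week 168, day 24

    get_frequencies("1 week", tsibble::pedestrian)                   # 168
    get_frequencies(NULL, tsibble::pedestrian, .auto = "smallest")   # day = 24
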
/R/guess.R:
--------------------------------------------------------------------------------
1 | guess_response <- function(.data){
2 | all_vars <- custom_error(
3 | measured_vars,
4 | "This model function does not support automatic selection of response variables. Please specify this in the model formula."
5 | )(.data)
6 |
7 | if(length(all_vars)!=1){
8 | abort("Could not automatically determine the response variable, please provide the response variable in the model specification")
9 | }
10 |
11 | out <- sym(all_vars[[1]])
12 | inform(sprintf(
13 | "Model not specified, defaulting to automatic modelling of the `%s` variable. Override this using the model formula.",
14 | expr_name(out)
15 | ))
16 | out
17 | }
--------------------------------------------------------------------------------
/R/hilo.R:
--------------------------------------------------------------------------------
1 | #' Unpack a hilo column
2 | #'
3 | #' @description
4 | #' `r lifecycle::badge('superseded')`
5 | #'
6 | #' This function is superseded. It is recommended that you use the functionality
7 | #' from the [distributional](https://pkg.mitchelloharawild.com/distributional/)
8 | #' package to extract elements from a `<hilo>` object. For example, you can access
9 | #' the lower bound with `$lower`.
10 | #'
11 | #' Allows a hilo column to be unpacked into its component columns: "lower",
12 | #' "upper", and "level".
13 | #'
14 | #' @inheritParams tidyr::pack
15 | #' @param cols Name of hilo columns to unpack.
16 | #'
17 | #' @seealso [`tidyr::unpack()`]
18 | #'
19 | #' @keywords internal
20 | #' @export
21 | unpack_hilo <- function(data, cols, names_sep = "_", names_repair = "check_unique"){
22 | orig <- data
23 | cols <- tidyselect::eval_select(enexpr(cols), data)
24 | if(any(bad_col <- !map_lgl(data[cols], inherits, "hilo"))){
25 | abort(sprintf(
26 | "Not all unpacking columns are hilo objects (%s). All unpacking columns of unpack_hilo() must be hilo vectors.",
27 | paste(names(bad_col)[bad_col], collapse = ", ")
28 | ))
29 | }
30 | data[cols] <- map(data[cols], function(x) vctrs::vec_proxy(x)[c("lower", "upper")])
31 | data <- tidyr::unpack(data, names(cols), names_sep = names_sep, names_repair = names_repair)
32 | vctrs::vec_restore(data, orig)
33 | }
34 |
--------------------------------------------------------------------------------
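
The recommended replacement is to extract the bounds from the `<hilo>` column directly, for example (a sketch assuming fable is installed):

    library(fable)

    fc <- as_tsibble(mdeaths) %>% model(SNAIVE(value)) %>% forecast(h = 4)
    intervals <- hilo(fc, level = 95)
    intervals$`95%`$lower  # lower bounds of the 95% interval
    intervals$`95%`$upper  # upper bounds
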
/R/hypothesise.R:
--------------------------------------------------------------------------------
1 | #' Run a hypothesis test from a mable
2 | #'
3 | #' This function will return the results of a hypothesis test for each model in
4 | #' the mable.
5 | #'
6 | #' @param x A mable.
7 | #' @param ... Arguments for model methods.
8 | #'
9 | #' @examplesIf requireNamespace("fable", quietly = TRUE)
10 | #' library(fable)
11 | #' library(tsibbledata)
12 | #'
13 | #' olympic_running %>%
14 | #' model(lm = TSLM(log(Time) ~ trend())) %>%
15 | #' hypothesize()
16 | #'
17 | #' @rdname hypothesize.mdl_df
18 | #' @importFrom generics hypothesize
19 | #' @export
20 | hypothesize.mdl_df <- function(x, ...){
21 | mbl_vars <- mable_vars(x)
22 | x <- mutate(as_tibble(x),
23 | dplyr::across(all_of(mbl_vars), function(x) lapply(x, hypothesize, ...)))
24 | x <- pivot_longer(x, all_of(mbl_vars), names_to = ".model", values_to = ".hypothesis")
25 | unnest(x, ".hypothesis")
26 | }
27 |
28 | #' @param tests a list of test functions to perform on the model
29 | #' @rdname hypothesize.mdl_df
30 | #' @export
31 | hypothesize.mdl_ts <- function(x, tests = list(), ...){
32 | if(is_function(tests)){
33 | tests <- list(tests)
34 | }
35 | vctrs::vec_rbind(
36 | !!!map(tests, calc, x$fit, ...),
37 | .names_to = ".test"
38 | )
39 | }
--------------------------------------------------------------------------------
/R/interpolate.R:
--------------------------------------------------------------------------------
1 | #' Interpolate missing values
2 | #'
3 | #' Uses a fitted model to interpolate missing values from a dataset.
4 | #'
5 | #' @param object A mable containing a single model column.
6 | #' @param new_data A dataset with the same structure as the data used to fit the model.
7 | #' @param ... Other arguments passed to interpolate methods.
8 | #'
9 | #' @examplesIf requireNamespace("fable", quietly = TRUE)
10 | #' library(fable)
11 | #' library(tsibbledata)
12 | #'
13 | #' # The fastest running times for the olympics are missing for years during
14 | #' # world wars as the olympics were not held.
15 | #' olympic_running
16 | #'
17 | #' olympic_running %>%
18 | #' model(TSLM(Time ~ trend())) %>%
19 | #' interpolate(olympic_running)
20 | #'
21 | #' @rdname interpolate
22 | #' @export
23 | interpolate.mdl_df <- function(object, new_data, ...){
24 | if(length(mable_vars(object)) > 1){
25 | abort("Interpolation can only be done using one model.
26 | Please use select() to choose the model to interpolate with.")
27 | }
28 |
29 | object <- bind_new_data(object, new_data)
30 | kv <- key_vars(object)
31 | object <- transmute(as_tibble(object),
32 | !!!syms(kv),
33 | interpolated = map2(!!sym(mable_vars(object)), new_data, interpolate, ...)
34 | )
35 | unnest_tsbl(object, "interpolated", parent_key = kv)
36 | }
37 |
38 | #' @rdname interpolate
39 | #' @export
40 | interpolate.mdl_ts <- function(object, new_data, ...){
41 | # Compute specials with new_data
42 | object$model$stage <- "interpolate"
43 | object$model$add_data(new_data)
44 | specials <- tryCatch(parse_model_rhs(object$model),
45 | error = function(e){
46 | abort(sprintf(
47 | "%s
48 | Unable to compute required variables from provided `new_data`.
49 | Does your interpolation data include all variables required by the model?", e$message))
50 | }, interrupt = function(e) {
51 | stop("Terminated by user", call. = FALSE)
52 | })
53 |
54 | object$model$remove_data()
55 | object$model$stage <- NULL
56 |
57 | resp <- map2(seq_along(object$response), object$response, function(i, resp){
58 | expr(object$transformation[[!!i]](!!resp))
59 | }) %>%
60 | set_names(map_chr(object$response, as_string))
61 |
62 | new_data <- transmute(new_data, !!!resp)
63 | new_data <- interpolate(object[["fit"]], new_data = new_data, specials = specials, ...)
64 | new_data[names(resp)] <- map2(new_data[names(resp)], object$transformation,
65 | function(x, f) invert_transformation(f)(x))
66 | new_data
67 | }
--------------------------------------------------------------------------------
/R/irf.R:
--------------------------------------------------------------------------------
1 | #' Compute Impulse Response Function (IRF)
2 | #'
3 | #' This function calculates the impulse response function (IRF) of a time series model.
4 | #' The IRF describes how a model's variables react to external shocks over time.
5 | #'
6 | #' If `new_data` contains the `.impulse` column, those values will be
7 | #' treated as impulses for the calculated impulse responses.
8 | #'
9 | #' @param x A fitted model object, such as from a VAR or ARIMA model. This model is used to compute the impulse response.
10 | #' @param ... Additional arguments to be passed to lower-level functions.
11 | #'
12 | #' @details
13 | #' The impulse response function provides insight into the dynamic behaviour of a system in
14 | #' response to external shocks. It traces the effect of a one-unit change in the impulse
15 | #' variable on the response variable over a specified number of periods.
16 | #'
17 | #' @export
18 | IRF <- function(x, ...) {
19 | UseMethod("IRF")
20 | }
21 |
22 | #' @export
23 | IRF.mdl_df <- function(x, ...){
24 | mdl_df_apply(x, IRF, ...)
25 | }
26 |
27 | #' @export
28 | IRF.mdl_ts <- function(x, new_data = NULL, h = NULL, ...) {
29 | if(is.null(new_data)){
30 | new_data <- make_future_data(x$data, h)
31 | }
32 |
33 | # Compute specials with new_data
34 | x$model$stage <- "generate"
35 | x$model$add_data(new_data)
36 | specials <- tryCatch(parse_model_rhs(x$model),
37 | error = function(e){
38 | abort(sprintf(
39 | "%s
40 | Unable to compute required variables from provided `new_data`.
41 | Does your model require extra variables to produce simulations?", e$message))
42 | }, interrupt = function(e) {
43 | stop("Terminated by user", call. = FALSE)
44 | })
45 | x$model$remove_data()
46 | x$model$stage <- NULL
47 |
48 | IRF(x$fit, new_data, specials, ...)
49 | }
--------------------------------------------------------------------------------
/R/lst_mdl.R:
--------------------------------------------------------------------------------
1 | list_of_models <- function(x = list()){
2 | vctrs::new_vctr(x, class = "lst_mdl")
3 | }
4 |
5 | type_sum.lst_mdl <- function(x){
6 | "model"
7 | }
8 |
9 | #' @export
10 | format.lst_mdl <- function(x, ...){
11 | map_chr(x, function(x) paste0("<", model_sum(x), ">"))
12 | }
13 |
14 | #' @export
15 | vec_cast.character.lst_mdl <- function(x, to, ...) format(x)
16 |
17 | #' @export
18 | vec_ptype2.lst_mdl.lst_mdl <- function(x, y, ...){
19 | list_of_models()
20 | }
21 |
22 | #' @export
23 | vec_cast.lst_mdl.lst_mdl <- function(x, to, ...){
24 | if(!identical(class(x), class(to))){
25 | abort("Cannot combine model lists with different reconciliation strategies.")
26 | }
27 | x
28 | }
--------------------------------------------------------------------------------
/R/mdl_ts.R:
--------------------------------------------------------------------------------
1 | new_model <- function(fit = NULL, model, data, response, transformation){
2 | structure(list(fit = fit, model = model, data = data,
3 | response = response, transformation = transformation),
4 | class = "mdl_ts")
5 | }
6 |
7 | #' @export
8 | format.mdl_ts <- function(x, ...){
9 | model_sum(x)
10 | }
11 |
12 | type_sum.mdl_ts <- function(x){
13 | model_sum(x[["fit"]])
14 | }
15 |
16 | #' Is the object a model
17 | #'
18 | #' @param x An object.
19 | #'
20 | #' @export
21 | is_model <- function(x){
22 | inherits(x, "mdl_ts")
23 | }
24 |
25 | #' Provide a succinct summary of a model
26 | #'
27 | #' Similarly to pillar's type_sum and obj_sum, model_sum is used to provide brief model summaries.
28 | #'
29 | #' @param x The model to summarise
30 | #'
31 | #' @export
32 | model_sum <- function(x){
33 | UseMethod("model_sum")
34 | }
35 |
36 | #' @export
37 | model_sum.default <- function(x){
38 | tibble::type_sum(x)
39 | }
40 |
41 | #' @export
42 | model_sum.mdl_ts <- function(x){
43 | model_sum(x[["fit"]])
44 | }
--------------------------------------------------------------------------------
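
Extension packages typically provide a `model_sum()` method so that their models print compactly inside a mable; a hypothetical sketch (the `my_ar` class and its `order` field are made up for illustration):

    #' @export
    model_sum.my_ar <- function(x) {
      # Shown in mable columns and type summaries, e.g. <AR(2)>
      sprintf("AR(%i)", x$order)
    }
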
/R/model_null.R:
--------------------------------------------------------------------------------
1 | train_null_mdl <- function(.data, ...){
2 | structure(list(n = NROW(.data), vars = measured_vars(.data)), class = "null_mdl")
3 | }
4 |
5 | #' NULL model
6 | #'
7 | #' Create a NULL model definition. This model produces NA forecasts and does not
8 | #' require any estimation of the data. It is generally used as a placeholder for
9 | #' models which have encountered an error (see `.safely` in [`model()`]).
10 | #'
11 | #' @param formula Model specification (response variable)
12 | #' @param ... Unused
13 | #'
14 | #' @keywords internal
15 | #' @export
16 | null_model <- function(formula, ...){
17 | null_model <- new_model_class("null_mdl", train = train_null_mdl,
18 | specials = new_specials(xreg = function(...) NULL))
19 | new_model_definition(null_model, formula = !!enquo(formula), ...)
20 | }
21 |
22 | #' @rdname null_model
23 | #' @param x The object to be tested.
24 | #' @export
25 | is_null_model <- function(x){
26 | if(is_model(x)) return(is_null_model(x[["fit"]]))
27 | if(inherits(x, "lst_mdl")) return(map_lgl(x, is_null_model))
28 | is.null(x) || inherits(x, "null_mdl")
29 | }
30 |
31 | #' @export
32 | forecast.null_mdl <- function(object, new_data, ...){
33 | h <- NROW(new_data)
34 | vec_cast(rep(NA_real_, h), distributional::new_dist())
35 | }
36 |
37 | #' @export
38 | forecast.NULL <- forecast.null_mdl
39 |
40 | #' @export
41 | generate.null_mdl <- function(x, new_data, ...){
42 | mutate(new_data, .sim = NA_real_)
43 | }
44 | #' @export
45 | generate.NULL <- generate.null_mdl
46 |
47 | #' @export
48 | stream.null_mdl <- function(object, new_data, ...){
49 | object$n <- object$n + NROW(new_data)
50 | object
51 | }
52 | #' @export
53 | stream.NULL <- function(object, new_data, ...) {
54 | NULL
55 | }
56 |
57 | #' @export
58 | refit.null_mdl <- function(object, new_data, ...){
59 | object$n <- NROW(new_data)
60 | object
61 | }
62 | #' @export
63 | refit.NULL <- function(object, new_data, ...) {
64 | NULL
65 | }
66 |
67 | #' @export
68 | residuals.null_mdl <- function(object, ...){
69 | matrix(NA_real_, nrow = object$n, ncol = length(object$vars),
70 | dimnames = list(NULL, object$vars))
71 | }
72 | #' @export
73 | residuals.NULL <- function(object, new_data, ...) {
74 | NA_real_
75 | }
76 |
77 | #' @export
78 | fitted.null_mdl <- function(object, ...){
79 | matrix(NA_real_, nrow = object$n, ncol = length(object$vars),
80 | dimnames = list(NULL, object$vars))
81 | }
82 | #' @export
83 | fitted.NULL <- function(object, new_data, ...) {
84 | NA_real_
85 | }
86 |
87 | #' @export
88 | glance.null_mdl <- function(x, ...){
89 | tibble()
90 | }
91 |
92 | #' @export
93 | tidy.null_mdl <- function(x, ...){
94 | tibble(term = character(), estimate = numeric())
95 | }
96 |
97 | #' @export
98 | report.null_mdl <- function(object, ...){
99 | cat("NULL model")
100 | }
101 | #' @export
102 | report.NULL <- report.null_mdl
103 |
104 | #' @export
105 | model_sum.null_mdl <- function(x){
106 | "NULL model"
107 | }
--------------------------------------------------------------------------------
/R/outliers.R:
--------------------------------------------------------------------------------
1 | #' Identify outliers
2 | #'
3 | #' Return a table of outlying observations using a fitted model.
4 | #'
5 | #' @param object An object which can identify outliers.
6 | #' @param ... Arguments for further methods.
7 | #'
8 | #' @rdname outliers
9 | #' @export
10 | outliers <- function(object, ...){
11 | UseMethod("outliers")
12 | }
13 |
14 | #' @rdname outliers
15 | #' @export
16 | outliers.mdl_df <- function(object, ...){
17 | mbl_vars <- mable_vars(object)
18 | kv <- key_vars(object)
19 | object <- mutate(as_tibble(object),
20 | dplyr::across(all_of(mbl_vars), function(x) lapply(x, outliers, ...)))
21 | object <- pivot_longer(object, all_of(mbl_vars), names_to = ".model", values_to = ".outliers")
22 | unnest_tsbl(object, ".outliers", parent_key = c(kv, ".model"))
23 | }
24 |
25 | #' @rdname outliers
26 | #' @export
27 | outliers.mdl_ts <- function(object, ...){
28 | object$data[outliers(object$fit, ...),]
29 | }
--------------------------------------------------------------------------------
/R/reexports.R:
--------------------------------------------------------------------------------
1 | #' @importFrom dplyr %>%
2 | #' @export
3 | dplyr::`%>%`
4 |
5 | #' @export
6 | tsibble::as_tsibble
7 |
8 | #' @importFrom dplyr vars
9 | #' @export
10 | dplyr::vars
11 |
12 | #' @importFrom ggplot2 autoplot
13 | #' @export
14 | ggplot2::autoplot
15 |
16 | #' @importFrom ggplot2 autolayer
17 | #' @export
18 | ggplot2::autolayer
19 |
20 | #' @importFrom generics accuracy
21 | #' @export
22 | generics::accuracy
23 |
24 | #' @importFrom generics equation
25 | #' @export
26 | generics::equation
27 |
28 | #' @importFrom generics interpolate
29 | #' @export
30 | generics::interpolate
31 |
32 | #' @importFrom generics components
33 | #' @export
34 | generics::components
35 |
36 | #' @importFrom generics augment
37 | #' @export
38 | generics::augment
39 |
40 | #' @importFrom generics glance
41 | #' @export
42 | generics::glance
43 |
44 | #' @importFrom generics tidy
45 | #' @export
46 | generics::tidy
47 |
48 | #' @importFrom generics hypothesize
49 | #' @export
50 | generics::hypothesize
51 |
52 | #' @importFrom generics generate
53 | #' @export
54 | generics::generate
55 |
56 | #' @importFrom generics refit
57 | #' @export
58 | generics::refit
59 |
60 | #' @importFrom generics forecast
61 | #' @export
62 | generics::forecast
63 |
64 | #' @importFrom distributional hilo
65 | #' @export
66 | distributional::hilo
67 |
--------------------------------------------------------------------------------
/R/refit.R:
--------------------------------------------------------------------------------
1 | #' Refit a mable to a new dataset
2 | #'
3 | #' Applies a fitted model to a new dataset. For most methods this can be done
4 | #' with or without re-estimation of the parameters.
5 | #'
6 | #' @param object A mable.
7 | #' @param new_data A tsibble dataset used to refit the model.
8 | #' @param ... Additional optional arguments for refit methods.
9 | #'
10 | #' @examplesIf requireNamespace("fable", quietly = TRUE)
11 | #' library(fable)
12 | #'
13 | #' fit <- as_tsibble(mdeaths) %>%
14 | #' model(ETS(value ~ error("M") + trend("A") + season("A")))
15 | #' fit %>% report()
16 | #'
17 | #' fit %>%
18 | #' refit(as_tsibble(fdeaths)) %>%
19 | #' report(reinitialise = TRUE)
20 | #'
21 | #' @rdname refit
22 | #' @export
23 | refit.mdl_df <- function(object, new_data, ...){
24 | mdls <- mable_vars(object)
25 | object <- bind_new_data(object, new_data)
26 | new_data <- object[["new_data"]]
27 | object[["new_data"]] <- NULL
28 | dplyr::mutate_at(object, vars(!!!mdls), refit, new_data, ...)
29 | }
30 |
31 | #' @export
32 | refit.lst_mdl <- function(object, new_data, ...){
33 | attrb <- attributes(object)
34 | object <- map2(object, new_data, refit, ...)
35 | attributes(object) <- attrb
36 | object
37 | }
38 |
39 | #' @rdname refit
40 | #' @export
41 | refit.mdl_ts <- function(object, new_data, ...){
42 | # Compute specials with new_data
43 | object$model$stage <- "refit"
44 | object$model$add_data(new_data)
45 | specials <- parse_model_rhs(object$model)
46 | object$model$remove_data()
47 | object$model$stage <- NULL
48 |
49 | resp <- map2(seq_along(object$response), object$response, function(i, resp){
50 | expr(object$transformation[[!!i]](!!resp))
51 | }) %>%
52 | set_names(map_chr(object$response, as_string))
53 |
54 | new_data <- transmute(new_data, !!!resp)
55 | object$fit <- refit(object[["fit"]], new_data, specials = specials, ...)
56 | object$data <- new_data
57 | object
58 | }
--------------------------------------------------------------------------------
/R/report.R:
--------------------------------------------------------------------------------
1 | #' Report information about an object
2 | #'
3 | #' Displays the object in a suitable format for reporting.
4 | #'
5 | #' @param object The object to report
6 | #' @param ... Additional options for the reporting function
7 | #'
8 | #' @export
9 | report <- function(object, ...){
10 | UseMethod("report")
11 | }
12 |
13 | #' @export
14 | report.mdl_df <- function(object, ...){
15 | if(NROW(object) > 1 || length(mable_vars(object)) > 1){
16 | warning("Model reporting is only supported for individual models, so a glance will be shown. To see the report for a specific model, use `select()` and `filter()` to identify a single model.")
17 | return(glance(object))
18 | }
19 | else{
20 | report(object[[mable_vars(object)[[1]]]][[1]], ...)
21 | }
22 | invisible(object)
23 | }
24 |
25 | #' @export
26 | report.mdl_ts <- function(object, ...){
27 | cat(paste("Series:", paste0(map(object$response, expr_name), collapse = ", "), "\n"))
28 | cat(paste("Model:", model_sum(object), "\n"))
29 | if(!is_symbol(body(object$transformation[[1]])) && length(object$response) == 1){
30 | cat(paste("Transformation:", expr_name(body(object$transformation[[1]])), "\n"))
31 | }
32 | tryCatch(
33 | report(object[["fit"]], ...),
34 | error = function(e){
35 | cat("\nA model specific report is not available for this model class.")
36 | }
37 | )
38 | }
--------------------------------------------------------------------------------
/R/residuals.R:
--------------------------------------------------------------------------------
1 | #' Extract residuals values from models
2 | #'
3 | #' Extracts the residuals from each of the models in a mable. A tsibble will
4 | #' be returned containing these residuals.
5 | #'
6 | #' @param object A mable or time series model.
7 | #' @param ... Other arguments passed to the model method for `residuals()`
8 | #'
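#' @examplesIf requireNamespace("fable", quietly = TRUE)
#' # A short sketch (assumes the fable package): innovation residuals for each
#' # model in a mable, returned as a tsibble.
#' library(fable)
#'
#' as_tsibble(mdeaths) %>%
#'   model(ETS(value ~ error("M") + trend("A") + season("A"))) %>%
#'   residuals()
#'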
9 | #' @importFrom stats residuals
10 | #' @export
11 | residuals.mdl_df <- function(object, ...){
12 | mbl_vars <- mable_vars(object)
13 | kv <- key_vars(object)
14 | object <- mutate(as_tibble(object),
15 | dplyr::across(all_of(mbl_vars), function(x) lapply(x, residuals, ...)))
16 | object <- pivot_longer(object, all_of(mbl_vars), names_to = ".model", values_to = ".resid")
17 | unnest_tsbl(object, ".resid", parent_key = c(kv, ".model"))
18 | }
19 |
20 | #' @param type The type of residuals to compute. If `type="response"`, residuals on the back-transformed data will be computed.
21 | #' @rdname residuals.mdl_df
22 | #' @export
23 | residuals.mdl_ts <- function(object, type = "innovation", ...){
24 | if(type == "response"){
25 | .resid <- response(object)
26 | .fits <- fitted(object)
27 | .resid <- as.matrix(.resid[measured_vars(.resid)]) - as.matrix(.fits[measured_vars(.fits)])
28 | }
29 | else{
30 | .resid <- residuals(object$fit, type = type, ...)
31 | if(is.null(.resid)){
32 | if(type == "innovation") {
33 | .resid <- response(object)
34 | .resid <- map2(object$transformation, .resid[measured_vars(.resid)], calc)
35 | .fits <- fitted(object)
36 | .fits <- map2(object$transformation, .fits[measured_vars(.fits)], calc)
37 |         .resid <- do.call(cbind, .resid) - do.call(cbind, .fits)
38 | } else {
39 | warn(sprintf(
40 | 'Residuals of type `%s` are not supported for %s models.
41 | Defaulting to `type="response"`', type, model_sum(object)))
42 | return(residuals(object, type = "response", ...))
43 | }
44 | }
45 | }
46 | .resid <- as.matrix(.resid)
47 |
48 | .resid <- split(.resid, col(.resid))
49 | nm <- if(length(.resid) == 1) ".resid" else map_chr(object$response, expr_name)
50 |
51 | out <- object$data[index_var(object$data)]
52 | out[nm] <- .resid
53 | out
54 | }
--------------------------------------------------------------------------------
/R/response.R:
--------------------------------------------------------------------------------
1 | #' Extract the response variable from a model
2 | #'
3 | #' Returns a tsibble containing only the response variable used in the fitting
4 | #' of a model.
5 | #'
6 | #' @param object The object containing response data
7 | #' @param ... Additional parameters passed on to other methods
8 | #'
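#' @examplesIf requireNamespace("fable", quietly = TRUE)
#' # A short sketch (assumes the fable package): the response is returned on the
#' # original scale, even when the model was fitted to a transformation.
#' library(fable)
#'
#' as_tsibble(mdeaths) %>%
#'   model(ETS(log(value))) %>%
#'   response()
#'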
9 | #' @export
10 | response <- function(object, ...){
11 | UseMethod("response")
12 | }
13 |
14 | #' @export
15 | response.mdl_df <- function(object, ...){
16 | object <- tidyr::pivot_longer(object, all_of(mable_vars(object)),
17 | names_to = ".model", values_to = ".fit")
18 | kv <- c(key_vars(object), ".model")
19 | object <- transmute(as_tibble(object),
20 | !!!syms(kv),
21 | !!sym(".model"),
22 | response = map(!!sym(".fit"), response)
23 | )
24 | unnest_tsbl(object, "response", parent_key = kv)
25 | }
26 |
27 | #' @export
28 | response.mdl_ts <- function(object, ...){
29 | # Extract response
30 | mv <- measured_vars(object$data)
31 | resp <- as.list(object$data)[mv]
32 |
33 | # Back transform response
34 | bt <- map(object$transformation, invert_transformation)
35 | resp <- map2(bt, resp, function(bt, fit) bt(fit))
36 |
37 | # Create object
38 | out <- object$data[index_var(object$data)]
39 | out[if(length(resp) == 1) ".response" else mv] <- resp
40 | out
41 | }
--------------------------------------------------------------------------------
/R/specials.R:
--------------------------------------------------------------------------------
1 | #' Create evaluation environment for specials
2 | #'
3 | #' Allows extension packages to make use of the formula parsing of specials.
4 | #'
5 | #' @param ... A named set of functions which are used to parse formula inputs
6 | #' @param .required_specials The names of specials which must be provided (and if not, are included with no inputs).
7 | #' @param .xreg_specials The names of specials which will be only used as inputs to other specials (most commonly `xreg`).
8 | #'
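#' @examples
#' # An illustrative sketch of defining specials for a new model class; the
#' # special names used here (`season`, `xreg`) are examples only.
#' my_specials <- new_specials(
#'   season = function(period = NULL) period,
#'   xreg = special_xreg(),
#'   .required_specials = "season"
#' )
#'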
9 | #' @export
10 | new_specials <- function(..., .required_specials = NULL, .xreg_specials = NULL){
11 | specials <- squash(list2(...))
12 | if(is.null(specials$xreg)){
13 | specials$xreg <- function(...) abort(sprintf("Exogenous regressors are not supported for %s.", self$model))
14 | }
15 | structure(specials,
16 | required_specials = .required_specials,
17 | xreg_specials = .xreg_specials,
18 | class="fable_specials")
19 | }
20 |
21 | #' Helper special for producing a model matrix of exogenous regressors
22 | #'
23 | #' @param ... Arguments for `fable_xreg_matrix` (see Details)
24 | #'
25 | #' @details
26 | #'
27 | #' Currently the `fable_xreg_matrix` helper supports a single argument named
28 | #' `default_intercept`. If this argument is `TRUE` (passed via `...` above), an
29 | #' intercept column is included in the matrix unless it is explicitly removed in
30 | #' the formula (much like the behaviour of `lm()`). If `FALSE`, the intercept is
31 | #' only included when explicitly requested via `1` in the formula.
32 | #'
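#' @examples
#' # A brief sketch of the behaviour described above: an `xreg` special for use
#' # within `new_specials()`, which only includes an intercept when `1` is
#' # explicitly given in the model formula.
#' my_specials <- new_specials(
#'   xreg = special_xreg(default_intercept = FALSE)
#' )
#'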
33 | #' @importFrom stats model.frame model.matrix terms
34 | #' @export
35 | special_xreg <- function(...) {
36 | new_function(
37 | args = pairlist2(...=),
38 | body = call2(call2(":::", sym("fabletools"), sym("fable_xreg_matrix")),
39 | sym("..."), ..., .data = parse_expr("self$data")),
40 | env = base_env()
41 | )
42 | }
43 |
44 | fable_xreg_matrix <- function(..., .data, default_intercept = TRUE) {
45 | dots <- enexprs(...)
46 | # Remove default intercept if needed.
47 | if(!default_intercept) {
48 | constants <- map_lgl(dots, inherits, "numeric")
49 | constant_specified <- any(map_lgl(dots[constants], `%in%`, c(-1, 0, 1)))
50 |     # If no constant is specified explicitly, add `0` to suppress the default intercept.
51 | if(!constant_specified) dots <- c(dots, list(0))
52 | }
53 | # Remove index and keys from .
54 | is_dot <- vapply(dots, function(x) expr_text(x) == ".", logical(1L))
55 | if(any(is_dot)) {
56 | new_dot <- reduce(syms(c(".", index_var(.data), key_vars(.data))), call2, .fn = "-")
57 | dots <- c(new_dot, dots[!is_dot])
58 | }
59 |
60 | # Combine `...` into a model formula, then evaluate terms() to substitute `.`
61 | model_formula <- new_formula(
62 | lhs = NULL,
63 | rhs = reduce(dots, function(.x, .y) call2("+", .x, .y))
64 | )
65 | model_formula <- terms(model_formula, data = .data)
66 |
67 | # Produce appropriate evaluation environment with specials
68 | env <- map(enquos(...), get_env)
69 | env[map_lgl(env, compose(is_empty, env_parents))] <- NULL
70 | env <- if (!is_empty(env)) get_env(env[[1]]) else base_env()
71 |
72 | # Produce xreg matrix
73 | xreg <- model.frame(model_formula, data = env, na.action = stats::na.pass)
74 | mm <- model.matrix(terms(xreg), xreg)
75 | if (NROW(mm) == 0 && identical(colnames(mm), "(Intercept)")) {
76 | return(matrix(data = 1, nrow = NROW(.data), dimnames = list(NULL, "(Intercept)")))
77 | }
78 | mm
79 | }
--------------------------------------------------------------------------------
/R/stream.R:
--------------------------------------------------------------------------------
1 | #' Extend a fitted model with new data
2 | #'
3 | #' Extend the length of data used to fit a model and update the parameters to
4 | #' suit this new data.
5 | #'
6 | #' @param object An object (such as a model) which can be extended with additional data.
7 | #' @param ... Additional arguments passed on to stream methods.
8 | #'
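#' @examples
#' \dontrun{
#' # Sketch only (assumes the fable package and a model class that provides a
#' # stream() method): fit to part of a series, then extend the fit with the
#' # newly observed remainder.
#' library(fable)
#' library(dplyr)
#'
#' deaths <- as_tsibble(mdeaths)
#' fit <- deaths %>%
#'   slice(1:60) %>%
#'   model(ETS(value))
#' fit %>%
#'   stream(slice(deaths, 61:72))
#' }
#'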
9 | #' @rdname stream
10 | #' @export
11 | stream <- function(object, ...){
12 | UseMethod("stream")
13 | }
14 |
15 | #' @param new_data A dataset of the same structure as was used to fit the model.
16 | #'
17 | #' @rdname stream
18 | #' @export
19 | stream.mdl_df <- function(object, new_data, ...){
20 | mdls <- mable_vars(object)
21 | object <- bind_new_data(object, new_data)
22 | new_data <- object[["new_data"]]
23 | object[["new_data"]] <- NULL
24 | dplyr::mutate_at(object, vars(!!!mdls), stream, new_data, ...)
25 | }
26 |
27 | #' @export
28 | stream.lst_mdl <- function(object, new_data, ...){
29 | add_class(map2(object, new_data, stream, ...), class(object))
30 | }
31 |
32 | #' @export
33 | stream.mdl_ts <- function(object, new_data, ...){
34 | # Compute specials with new_data
35 | object$model$add_data(new_data)
36 | specials <- parse_model_rhs(object$model)
37 | object$model$remove_data()
38 |
39 | resp <- map2(object$response, object$transformation,
40 | function(y, t){
41 | eval_tidy(expr(t(!!y)), new_data)
42 | }
43 | )
44 | new_data <- new_data[index_var(new_data)]
45 | new_data[measured_vars(object$data)] <- resp
46 |
47 | object$fit <- stream(object[["fit"]], new_data, specials = specials, ...)
48 | object$data <- bind_rows(object$data, select(new_data, !!!syms(colnames(object$data))))
49 | object
50 | }
--------------------------------------------------------------------------------
/R/tbl_utils.R:
--------------------------------------------------------------------------------
1 | big_mark <- function (x, ...){
2 | mark <- if (identical(getOption("OutDec"), ","))
3 | "."
4 | else ","
5 | formatC(x, big.mark = mark, ...)
6 | }
7 |
8 | dim_tbl <- function (x){
9 | dim_x <- dim(x)
10 | format_dim <- map_chr(dim_x, big_mark)
11 | paste(format_dim, collapse = " x ")
12 | }
--------------------------------------------------------------------------------
/R/traverse.R:
--------------------------------------------------------------------------------
1 | #' Recursively traverse an object
2 | #'
3 | #' @param x The object to traverse
4 | #' @param .f A function for combining the recursed components
5 | #' @param .g A function applied to the object before recursion
6 | #' @param .h A function applied to the base case
7 | #' @param base The base case for the recursion
8 | #'
9 | #' @keywords internal
10 | traverse <- function(x, .f = list, .g = identity, .h = identity, base = function(.x) is_syntactic_literal(.x) || is_symbol(.x)){
11 | # base case
12 | if(base(x))
13 | return(.h(x))
14 | # recursive case
15 | .f(lapply(.g(x), traverse, .f=.f, .g=.g, .h=.h, base=base), .h(x))
16 | }
17 |
18 | traverse_list <- function(x,
19 | .f = function(.x, .y) as.list(.x),
20 | .g = identity,
21 | .h = identity,
22 | base = function(.x) !is.list(.x)){
23 | traverse(x, .f=.f, .g=.g, .h=.h, base=base)
24 | }
25 |
26 | traverse_call <- function(x,
27 | .f = function(.x, .y) map(.x, quo_get_expr) %>% as.call %>% new_quosure(env = get_env(.x[[1]])), # (quo_listcall_to_call)
28 | .g = function(.x) .x %>% get_expr %>% as.list %>% map(new_quosure, env = get_env(.x)), # (quo_call_to_listcall)
29 | .h = identity,
30 | base = function(.x) !quo_is_call(.x)){
31 | x <- enquo(x)
32 | traverse(x, .f=.f, .g=.g, .h=.h, base=base)
33 | }
34 |
--------------------------------------------------------------------------------
/R/vctrs-dable.R:
--------------------------------------------------------------------------------
1 | #' Internal vctrs methods
2 | #'
3 | #' These methods are the extensions that allow dable objects to
4 | #' work with vctrs.
5 | #'
6 | #' @keywords internal
7 | #' @name dable-vctrs
8 | NULL
9 |
10 | #' @rdname dable-vctrs
11 | #' @method vec_ptype2 dcmp_ts
12 | #' @export
13 | vec_ptype2.dcmp_ts <- function(x, y, ...) {
14 | UseMethod("vec_ptype2.dcmp_ts", y)
15 | }
16 |
17 | #' @export
18 | vec_ptype2.dcmp_ts.dcmp_ts <- function(x, y, ...) {
19 | dable_ptype2(x, y, ...)
20 | }
21 |
22 | #' @export
23 | vec_ptype2.data.frame.dcmp_ts <- function(x, y, ...) {
24 | dable_ptype2(y, x, ...)
25 | }
26 |
27 | #' @export
28 | vec_ptype2.dcmp_ts.data.frame <- vec_ptype2.dcmp_ts.dcmp_ts
29 |
30 | #' @export
31 | vec_ptype2.tbl_df.dcmp_ts <- vec_ptype2.data.frame.dcmp_ts
32 |
33 | #' @export
34 | vec_ptype2.dcmp_ts.tbl_df <- vec_ptype2.dcmp_ts.dcmp_ts
35 |
36 | dable_ptype2 <- function(x, y, ...) {
37 | idx_x <- index_var(x)
38 | key_x <- key_vars(x)
39 | seas_x <- x%@%"seasons"
40 | alias_x <- x%@%"aliases"
41 | method_x <- x%@%"method"
42 | resp_x <- response_vars(x)
43 | if (is_dable(y)) {
44 | if (idx_x != index_var(y)) {
45 | abort("No common index variable for `x` and `y`.")
46 | }
47 | if (!identical(resp_x, response_vars(y))) {
48 | abort("Objects with different response variables cannot be combined.")
49 | }
50 | if(method_x != y%@%"method") method_x <- NULL
51 | seas_x <- vec_unique(c(seas_x, y%@%"seasons"))
52 | alias_x <- vec_unique(c(alias_x, y%@%"aliases"))
53 | key_x <- union(key_x, key_vars(y))
54 | }
55 | out <- df_ptype2(x, y, ...)
56 | tsbl <- build_tsibble_meta(
57 | out, key_data = tibble(!!!x[key_x], !!".rows" := list_of(.ptype = integer())),
58 | index = idx_x, index2 = idx_x, ordered = TRUE,
59 | interval = new_interval())
60 | build_dable(tsbl, response = resp_x, method = method_x,
61 | seasons = seas_x, aliases = alias_x)
62 | }
63 |
64 | #' @rdname dable-vctrs
65 | #' @method vec_cast dcmp_ts
66 | #' @export
67 | vec_cast.dcmp_ts <- function(x, to, ...) {
68 | UseMethod("vec_cast.dcmp_ts")
69 | }
70 |
71 | #' @export
72 | vec_cast.dcmp_ts.dcmp_ts <- function(x, to, ...) {
73 | is_identical <- identical(x, to)
74 | tbl <- tib_cast(x, to, ...)
75 | tsbl <- build_tsibble(
76 | tbl, key = key_vars(to),
77 | key_data = if (is_identical) key_data(x) else NULL,
78 | index = index_var(to), index2 = index2_var(to),
79 | ordered = is_ordered(to),
80 | validate = FALSE, .drop = key_drop_default(to))
81 | build_dable(tsbl, response = response_vars(to), method = to%@%"method",
82 | seasons = to%@%"seasons", aliases = to%@%"aliases")
83 | }
84 |
85 | #' @export
86 | vec_cast.dcmp_ts.tbl_df <- function(x, to, ...) {
87 | tbl <- tib_cast(x, to, ...)
88 | tsbl <- build_tsibble(
89 | tbl, key = key_vars(to), index = index_var(to), index2 = index2_var(to),
90 | ordered = TRUE, validate = TRUE, .drop = key_drop_default(to))
91 | build_dable(tsbl, response = response_vars(to), method = to%@%"method",
92 | seasons = to%@%"seasons", aliases = to%@%"aliases")
93 | }
94 |
95 | #' @export
96 | vec_cast.dcmp_ts.data.frame <- vec_cast.dcmp_ts.tbl_df
97 |
98 | #' @export
99 | vec_cast.tbl_df.dcmp_ts <- function(x, to, ...) {
100 | tib_cast(x, to, ...)
101 | }
102 |
103 | #' @export
104 | vec_cast.data.frame.dcmp_ts <- function(x, to, ...) {
105 | df_cast(x, to, ...)
106 | }
107 |
108 | #' @export
109 | vec_cast.tbl_ts.dcmp_ts <- function(x, to, ...) {
110 | tbl <- tib_cast(x, to, ...)
111 | build_tsibble(
112 | tbl, key = key_vars(to), index = index_var(to), index2 = index2_var(to),
113 | ordered = TRUE, validate = TRUE, .drop = key_drop_default(to)
114 | )
115 | }
--------------------------------------------------------------------------------
/R/vctrs-fable.R:
--------------------------------------------------------------------------------
1 | #' Internal vctrs methods
2 | #'
3 | #' These methods are the extensions that allow fable objects to
4 | #' work with vctrs.
5 | #'
6 | #' @keywords internal
7 | #' @name fable-vctrs
8 | NULL
9 |
10 | #' @rdname fable-vctrs
11 | #' @method vec_ptype2 fbl_ts
12 | #' @export
13 | vec_ptype2.fbl_ts <- function(x, y, ...) {
14 | UseMethod("vec_ptype2.fbl_ts", y)
15 | }
16 |
17 | #' @export
18 | vec_ptype2.fbl_ts.fbl_ts <- function(x, y, ...) {
19 | fable_ptype2(x, y, ...)
20 | }
21 |
22 | #' @export
23 | vec_ptype2.data.frame.fbl_ts <- function(x, y, ...) {
24 | fable_ptype2(y, x, ...)
25 | }
26 |
27 | #' @export
28 | vec_ptype2.fbl_ts.data.frame <- vec_ptype2.fbl_ts.fbl_ts
29 |
30 | #' @export
31 | vec_ptype2.tbl_df.fbl_ts <- vec_ptype2.data.frame.fbl_ts
32 |
33 | #' @export
34 | vec_ptype2.fbl_ts.tbl_df <- vec_ptype2.fbl_ts.fbl_ts
35 |
36 | fable_ptype2 <- function(x, y, ...) {
37 | idx_x <- index_var(x)
38 | key_x <- key_vars(x)
39 | dist_x <- distribution_var(x)
40 | resp_x <- response_vars(x)
41 | if (is_fable(y)) {
42 | if (idx_x != index_var(y)) {
43 | abort("No common index variable for `x` and `y`.")
44 | }
45 | if (dist_x != distribution_var(y)) {
46 | abort("No common distribution variable for `x` and `y`.")
47 | }
48 | if (!identical(resp_x, response_vars(y))) {
49 | abort("Objects with different response variables cannot be combined.")
50 | }
51 | key_x <- union(key_x, key_vars(y))
52 | }
53 | out <- df_ptype2(x, y, ...)
54 | tsbl <- build_tsibble_meta(
55 | out, key_data = tibble(!!!x[key_x], !!".rows" := list_of(.ptype = integer())),
56 | index = idx_x, index2 = idx_x, ordered = TRUE,
57 | interval = new_interval())
58 | build_fable(tsbl, response = resp_x, distribution = dist_x)
59 | }
60 |
61 | #' @rdname fable-vctrs
62 | #' @method vec_cast fbl_ts
63 | #' @export
64 | vec_cast.fbl_ts <- function(x, to, ...) {
65 | UseMethod("vec_cast.fbl_ts")
66 | }
67 |
68 | #' @export
69 | vec_cast.fbl_ts.fbl_ts <- function(x, to, ...) {
70 | is_identical <- identical(x, to)
71 | tbl <- tib_cast(x, to, ...)
72 | tsbl <- build_tsibble(
73 | tbl, key = key_vars(to),
74 | key_data = if (is_identical) key_data(x) else NULL,
75 | index = index_var(to), index2 = index2_var(to),
76 | ordered = is_ordered(to),
77 | validate = FALSE, .drop = key_drop_default(to))
78 | build_fable(tsbl, response = response_vars(to), distribution = distribution_var(to))
79 | }
80 |
81 | #' @export
82 | vec_cast.fbl_ts.tbl_df <- function(x, to, ...) {
83 | tbl <- tib_cast(x, to, ...)
84 | tsbl <- build_tsibble(
85 | tbl, key = key_vars(to), index = index_var(to), index2 = index2_var(to),
86 | ordered = TRUE, validate = TRUE, .drop = key_drop_default(to))
87 | build_fable(tsbl, response = response_vars(to), distribution = distribution_var(to))
88 | }
89 |
90 | #' @export
91 | vec_cast.tbl_ts.fbl_ts <- function(x, to, ...) {
92 | tbl <- tib_cast(x, to, ...)
93 | build_tsibble(
94 | tbl, key = key_vars(to), index = index_var(to), index2 = index2_var(to),
95 | ordered = TRUE, validate = TRUE, .drop = key_drop_default(to)
96 | )
97 | }
98 |
99 | #' @export
100 | vec_cast.fbl_ts.data.frame <- vec_cast.fbl_ts.tbl_df
101 |
102 | #' @export
103 | vec_cast.tbl_df.fbl_ts <- function(x, to, ...) {
104 | tib_cast(x, to, ...)
105 | }
106 |
107 | #' @export
108 | vec_cast.data.frame.fbl_ts <- function(x, to, ...) {
109 | df_cast(x, to, ...)
110 | }
111 |
112 | #' @export
113 | vec_restore.fbl_ts <- function(x, to, ..., n = NULL) {
114 | if(!is_tsibble(x)){
115 | x <- build_tsibble(
116 | x, key = key_vars(to), index = index_var(to), index2 = index2_var(to),
117 | ordered = TRUE, validate = TRUE, .drop = key_drop_default(to))
118 | }
119 | build_fable(x, response = response_vars(to), distribution = distribution_var(to))
120 | }
--------------------------------------------------------------------------------
/R/vctrs-mable.R:
--------------------------------------------------------------------------------
1 | #' Internal vctrs methods
2 | #'
3 | #' These methods are the extensions that allow mable objects to
4 | #' work with vctrs.
5 | #'
6 | #' @keywords internal
7 | #' @name mable-vctrs
8 | NULL
9 |
10 | #' @rdname mable-vctrs
11 | #' @method vec_ptype2 mdl_df
12 | #' @export
13 | vec_ptype2.mdl_df <- function(x, y, ...) {
14 | UseMethod("vec_ptype2.mdl_df", y)
15 | }
16 |
17 | #' @export
18 | vec_ptype2.mdl_df.mdl_df <- function(x, y, ...) {
19 | mable_ptype2(x, y, ...)
20 | }
21 |
22 | #' @export
23 | vec_ptype2.data.frame.mdl_df <- function(x, y, ...) {
24 | mable_ptype2(y, x, ...)
25 | }
26 |
27 | #' @export
28 | vec_ptype2.mdl_df.data.frame <- vec_ptype2.mdl_df.mdl_df
29 |
30 | #' @export
31 | vec_ptype2.tbl_df.mdl_df <- vec_ptype2.data.frame.mdl_df
32 |
33 | #' @export
34 | vec_ptype2.mdl_df.tbl_df <- vec_ptype2.mdl_df.mdl_df
35 |
36 | mable_ptype2 <- function(x, y, ...) {
37 | key_x <- key_vars(x)
38 | mdl_x <- mable_vars(x)
39 | resp_x <- response_vars(x)
40 | if (is_mable(y)) {
41 | if (!identical(resp_x, response_vars(y))) {
42 | abort("Objects with different response variables cannot be combined.")
43 | }
44 | key_x <- union(key_x, key_vars(y))
45 | mdl_x <- union(mdl_x, mable_vars(y))
46 | }
47 | out <- df_ptype2(x, y, ...)
48 | build_mable_meta(out, key_data = group_data(group_by(out, !!!syms(key_x))),
49 | model = mdl_x, response = resp_x)
50 | }
51 |
52 | #' @rdname mable-vctrs
53 | #' @method vec_cast mdl_df
54 | #' @export
55 | vec_cast.mdl_df <- function(x, to, ...) {
56 | UseMethod("vec_cast.mdl_df")
57 | }
58 |
59 | #' @export
60 | vec_cast.mdl_df.mdl_df <- function(x, to, ...) {
61 | is_identical <- identical(x, to)
62 | tbl <- tib_cast(x, to, ...)
63 | build_mable(tbl,
64 | key = !!key_vars(to),
65 | key_data = if (is_identical) key_data(x) else NULL,
66 | model = mable_vars(to))
67 | }
68 |
69 | #' @export
70 | vec_cast.mdl_df.tbl_df <- function(x, to, ...) {
71 | tbl <- tib_cast(x, to, ...)
72 | build_mable(tbl,
73 | key = !!key_vars(to),
74 | key_data = NULL,
75 | model = mable_vars(to))
76 | }
77 |
78 | #' @export
79 | vec_cast.mdl_df.data.frame <- vec_cast.mdl_df.tbl_df
80 |
81 | #' @export
82 | vec_cast.tbl_df.mdl_df <- function(x, to, ...) {
83 | tib_cast(x, to, ...)
84 | }
85 |
86 | #' @export
87 | vec_cast.data.frame.mdl_df <- function(x, to, ...) {
88 | df_cast(x, to, ...)
89 | }
90 |
--------------------------------------------------------------------------------
/R/zzz.R:
--------------------------------------------------------------------------------
1 | # nocov start
2 | .onLoad <- function(...) {
3 | register_s3_method("pillar", "type_sum", "mdl_ts")
4 | register_s3_method("pillar", "type_sum", "lst_mdl")
5 | register_s3_method("pillar", "type_sum", "fbl_ts")
6 |
7 | register_s3_method("pillar", "pillar_shaft", "agg_vec")
8 |
9 | register_s3_method("tibble", "tbl_sum", "dcmp_ts")
10 | register_s3_method("tibble", "tbl_sum", "mdl_df")
11 | register_s3_method("tibble", "tbl_sum", "fbl_ts")
12 |
13 | register_s3_method("ggplot2", "scale_type", "agg_vec")
14 |
15 | op <- options()
16 | op.fable <- list(
17 | fable.show_progress = TRUE
18 | )
19 | toset <- !(names(op.fable) %in% names(op))
20 | if (any(toset)) options(op.fable[toset])
21 |
22 | invisible()
23 | }
24 |
25 | register_s3_method <- function(pkg, generic, class, fun = NULL) {
26 | stopifnot(is.character(pkg), length(pkg) == 1)
27 | stopifnot(is.character(generic), length(generic) == 1)
28 | stopifnot(is.character(class), length(class) == 1)
29 |
30 | if (is.null(fun)) {
31 | fun <- get(paste0(generic, ".", class), envir = parent.frame())
32 | } else {
33 | stopifnot(is.function(fun))
34 | }
35 |
36 | if (pkg %in% loadedNamespaces()) {
37 | registerS3method(generic, class, fun, envir = asNamespace(pkg))
38 | }
39 |
40 | # Always register hook in case package is later unloaded & reloaded
41 | setHook(
42 | packageEvent(pkg, "onLoad"),
43 | function(...) {
44 | registerS3method(generic, class, fun, envir = asNamespace(pkg))
45 | }
46 | )
47 | }
48 | # nocov end
--------------------------------------------------------------------------------
/README.Rmd:
--------------------------------------------------------------------------------
1 | ---
2 | output: github_document
3 | ---
4 |
5 |
6 |
7 | ```{r setup, include = FALSE}
8 | knitr::opts_chunk$set(
9 | collapse = TRUE,
10 | comment = "#>",
11 | fig.path = "man/figures/README-",
12 | out.width = "100%"
13 | )
14 | ```
15 |
16 | # fabletools
17 |
18 | [R-CMD-check](https://github.com/tidyverts/fabletools/actions/workflows/R-CMD-check.yaml)
19 | [Coverage](https://app.codecov.io/gh/tidyverts/fabletools?branch=master)
20 | [Lifecycle](https://lifecycle.r-lib.org/articles/stages.html)
21 | [License: GPL-3](https://www.gnu.org/licenses/gpl-3.0.en.html)
22 | [CRAN](https://CRAN.R-project.org/package=fabletools)
23 |
24 |
25 | The R package *fabletools* provides tools for building modelling packages, with a focus on time series forecasting. This package allows package developers to extend *fable* with additional models, without needing to depend on the models supported by *fable*.
26 |
27 | ## Installation
28 |
29 | You can install the **stable** version from [CRAN](https://cran.r-project.org/package=fabletools):
30 |
31 | ```{r, eval = FALSE}
32 | install.packages("fabletools")
33 | ```
34 |
35 | You can install the **development** version from
36 | [GitHub](https://github.com/tidyverts/fabletools):
37 |
38 | ```{r gh-installation, eval = FALSE}
39 | # install.packages("remotes")
40 | remotes::install_github("tidyverts/fabletools")
41 | ```
42 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | # fabletools
5 |
6 |
7 |
8 | [R-CMD-check](https://github.com/tidyverts/fabletools/actions/workflows/R-CMD-check.yaml)
10 | [Coverage](https://app.codecov.io/gh/tidyverts/fabletools?branch=master)
12 | [Lifecycle](https://lifecycle.r-lib.org/articles/stages.html)
13 | [License: GPL-3](https://www.gnu.org/licenses/gpl-3.0.en.html)
14 | [CRAN](https://CRAN.R-project.org/package=fabletools)
16 |
17 |
18 | The R package *fabletools* provides tools for building modelling
19 | packages, with a focus on time series forecasting. This package allows
20 | package developers to extend *fable* with additional models, without
21 | needing to depend on the models supported by *fable*.
22 |
23 | ## Installation
24 |
25 | You can install the **stable** version from
26 | [CRAN](https://cran.r-project.org/package=fabletools):
27 |
28 | ``` r
29 | install.packages("fabletools")
30 | ```
31 |
32 | You can install the **development** version from
33 | [GitHub](https://github.com/tidyverts/fabletools):
34 |
35 | ``` r
36 | # install.packages("remotes")
37 | remotes::install_github("tidyverts/fabletools")
38 | ```
39 |
--------------------------------------------------------------------------------
/_pkgdown.yml:
--------------------------------------------------------------------------------
1 | url: https://fabletools.tidyverts.org
2 |
3 | template:
4 | params:
5 | bootswatch: cosmo
6 | includes:
7 | in_header: |
8 |
9 |
10 |
11 | development:
12 | mode: auto
13 |
14 | authors:
15 | Rob Hyndman:
16 | href: http://robjhyndman.com
17 | Mitchell O'Hara-Wild:
18 | href: https://mitchelloharawild.com
19 | Earo Wang:
20 | href: https://earo.me
21 |
22 | reference:
23 | - title: Overview
24 | desc: >
25 | Describes the package
26 | contents:
27 | - fabletools-package
28 | - title: Data structures
29 | desc: >
30 | Data classes for models, forecasts and decompositions.
31 | contents:
32 | - mable
33 | - as_mable
34 | - is_mable
35 | - fable
36 | - as_fable
37 | - is_fable
38 | - dable
39 | - as_dable
40 | - is_dable
41 | - mable_vars
42 | - response_vars
43 | - distribution_var
44 | - title: Models
45 | desc: >
46 | Models which make use of models from other packages, or are useful for programming.
47 | contents:
48 | - decomposition_model
49 | - combination_model
50 | - combination_ensemble
51 | - combination_weighted
52 | - null_model
53 | - title: Forecast reconciliation
54 | desc: >
55 | Use reconciliation techniques to ensure that forecasts are coherent with the hierarchical structure of data.
56 | contents:
57 | - reconcile
58 | - min_trace
59 | - bottom_up
60 | - middle_out
61 | - top_down
62 | - aggregate_key
63 | - aggregate_index
64 | - agg_vec
65 | - is_aggregated
66 | - title: Accuracy evaluation
67 | desc: >
68 | Functionality for evaluating model performance
69 | contents:
70 | - accuracy.mdl_df
71 | - ME
72 | - MSE
73 | - RMSE
74 | - MAE
75 | - MPE
76 | - MAPE
77 | - MAAPE
78 | - MASE
79 | - ACF1
80 | - winkler_score
81 | - percentile_score
82 | - MDA
83 | - MDV
84 | - MDPV
85 | - skill_score
86 | - title: Methods
87 | desc: >
88 | The fabletools package facilitates the handling of key structures for these generics.
89 | contents:
90 | - forecast
91 | - model
92 | - report
93 | - stream
94 | - outliers
95 | - model_sum
96 | - forecast.mdl_df
97 | - generate.mdl_df
98 | - interpolate.mdl_df
99 | - refit.mdl_df
100 | - augment.mdl_df
101 | - glance.mdl_df
102 | - tidy.mdl_df
103 | - components.mdl_df
104 | - fitted.mdl_df
105 | - residuals.mdl_df
106 | - estimate
107 | - response
108 | - scenarios
109 | - IRF
110 | - hypothesize.mdl_df
111 | - is_model
112 | - title: Features
113 | desc: >
114 | Functions for using and defining features across a dataset.
115 | contents:
116 | - features
117 | - feature_set
118 | - register_feature
119 | - title: Transformations
120 | desc: >
121 | Commonly used transformation functions.
122 | contents:
123 | - box_cox
124 | - inv_box_cox
125 | - new_transformation
126 | - title: Graphics
127 | desc: >
128 | Some `autoplot()` and `autolayer()` methods are defined for classes commonly used within fabletools.
129 | contents:
130 | - autoplot.tbl_ts
131 | - autolayer.tbl_ts
132 | - autoplot.fbl_ts
133 | - autolayer.fbl_ts
134 | - autoplot.dcmp_ts
135 | - title: Extension package helpers
136 | desc: >
137 | Functions provided to help develop extension packages.
138 | contents:
139 | - special_xreg
140 | - new_specials
141 | - new_model_class
142 | - model_lhs
143 | - model_rhs
144 | - common_periods
145 | - common_xregs
146 |
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | comment: false
2 |
3 | coverage:
4 | status:
5 | project:
6 | default:
7 | target: auto
8 | threshold: 1%
9 | patch:
10 | default:
11 | target: auto
12 | threshold: 1%
13 |
--------------------------------------------------------------------------------
/cran-comments.md:
--------------------------------------------------------------------------------
1 | ## Test environments
2 | * local kubuntu 20.04 install, R 4.1.2
3 | * ubuntu 16.04 (on GitHub actions), R-devel, R 4.0.0, R 3.6.3, R 3.5.3
4 | * macOS (on GitHub actions), R 4.0.0
5 | * windows (on GitHub actions), R 4.0.0
6 | * win-builder, R-devel, R-release, R-oldrelease
7 |
8 | ## R CMD check results
9 |
10 | 0 errors | 0 warnings | 0 notes
11 |
12 | ## revdep checks
13 |
14 | All revdeps have been checked. No changes to worse were found.
--------------------------------------------------------------------------------
/fabletools.Rproj:
--------------------------------------------------------------------------------
1 | Version: 1.0
2 |
3 | RestoreWorkspace: No
4 | SaveWorkspace: No
5 | AlwaysSaveHistory: No
6 |
7 | EnableCodeIndexing: Yes
8 | UseSpacesForTab: Yes
9 | NumSpacesForTab: 2
10 | Encoding: UTF-8
11 |
12 | RnwWeave: knitr
13 | LaTeX: pdfLaTeX
14 |
15 | BuildType: Package
16 | PackageUseDevtools: Yes
17 | PackageInstallArgs: --no-multiarch --with-keep.source
18 | PackageRoxygenize: rd,collate,namespace,vignette
19 |
--------------------------------------------------------------------------------
/inst/WORDLIST:
--------------------------------------------------------------------------------
1 | Backtransformations
2 | Blaskowitz
3 | CRPS
4 | Heeyoung
5 | Herwartz
6 | IRF
7 | JRSS
8 | MAAPE
9 | MASE
10 | MatrixM
11 | MinT
12 | ORCID
13 | Sungil
14 | VAR
15 | Wickramasuriya
16 | backtransform
17 | dable
18 | dables
19 | doi
20 | dplyr
21 | emperical
22 | erroring
23 | etc
24 | forecast's
25 | formals
26 | fourier
27 | ggdist
28 | hilo
29 | horison
30 | lifecycle
31 | mable
32 | mables
33 | obj
34 | org
35 | progressr
36 | purrr
37 | quosure
38 | seasonalities
39 | superceded
40 | tibble
41 | tidyr
42 | tidyselect
43 | tidyverse
44 | tidyverts
45 | tsibble
46 | unnested
47 | vctrs
48 |
--------------------------------------------------------------------------------
/man/IRF.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/irf.R
3 | \name{IRF}
4 | \alias{IRF}
5 | \title{Compute Impulse Response Function (IRF)}
6 | \usage{
7 | IRF(x, ...)
8 | }
9 | \arguments{
10 | \item{x}{A fitted model object, such as from a VAR or ARIMA model. This model is used to compute the impulse response.}
11 |
12 | \item{...}{Additional arguments to be passed to lower-level functions.}
13 | }
14 | \description{
15 | This function calculates the impulse response function (IRF) of a time series model.
16 | The IRF describes how a model's variables react to external shocks over time.
17 | }
18 | \details{
19 | If \code{new_data} contains the \code{.impulse} column, those values will be
20 | treated as impulses for the calculated impulse responses.
21 |
22 | The impulse response function provides insight into the dynamic behaviour of a system in
23 | response to external shocks. It traces the effect of a one-unit change in the impulse
24 | variable on the response variable over a specified number of periods.
25 | }
26 |
--------------------------------------------------------------------------------
/man/MAAPE.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \name{MAAPE}
4 | \alias{MAAPE}
5 | \title{Mean Arctangent Absolute Percentage Error}
6 | \usage{
7 | MAAPE(.resid, .actual, na.rm = TRUE, ...)
8 | }
9 | \arguments{
10 | \item{.resid}{A vector of residuals from either the training (model accuracy)
11 | or test (forecast accuracy) data.}
12 |
13 | \item{.actual}{A vector of responses matching the fitted values
14 | (for forecast accuracy, \code{new_data} must be provided).}
15 |
16 | \item{na.rm}{Remove the missing values before calculating the accuracy measure}
17 |
18 | \item{...}{Additional arguments for each measure.}
19 | }
20 | \description{
21 | Mean Arctangent Absolute Percentage Error
22 | }
23 | \references{
24 | Kim, Sungil and Heeyoung Kim (2016) "A new metric of absolute percentage error
25 | for intermittent demand forecasts". \emph{International Journal of Forecasting},
26 | \bold{32}(3), 669-679.
27 | }
28 |
--------------------------------------------------------------------------------
/man/accuracy.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \name{accuracy.mdl_df}
4 | \alias{accuracy.mdl_df}
5 | \alias{accuracy.mdl_ts}
6 | \alias{accuracy.fbl_ts}
7 | \title{Evaluate accuracy of a forecast or model}
8 | \usage{
9 | \method{accuracy}{mdl_df}(object, measures = point_accuracy_measures, ...)
10 |
11 | \method{accuracy}{mdl_ts}(object, measures = point_accuracy_measures, ...)
12 |
13 | \method{accuracy}{fbl_ts}(object, data, measures = point_accuracy_measures, ..., by = NULL)
14 | }
15 | \arguments{
16 | \item{object}{A model or forecast object}
17 |
18 | \item{measures}{A list of accuracy measure functions to compute (such as \code{\link{point_accuracy_measures}}, \code{\link{interval_accuracy_measures}}, or \code{\link{distribution_accuracy_measures}})}
19 |
20 | \item{...}{Additional arguments to be passed to measures that use it.}
21 |
22 | \item{data}{A dataset containing the complete model dataset (both training and test data). The training portion of the data will be used in the computation of some accuracy measures, and the test data is used to compute the forecast errors.}
23 |
24 | \item{by}{Variables over which the accuracy is computed (useful for computing across forecast horizons in cross-validation). If \code{by} is \code{NULL}, groups will be chosen automatically from the key structure.}
25 | }
26 | \description{
27 | Summarise the performance of the model using accuracy measures. Accuracy
28 | measures can be computed directly from models as the one-step-ahead fitted
29 | residuals are available. When evaluating accuracy on forecasts, you will
30 | need to provide a complete dataset that includes the future data and data
31 | used to train the model.
32 | }
33 | \examples{
34 | \dontshow{if (requireNamespace("fable", quietly = TRUE) && requireNamespace("tsibbledata", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
35 | library(fable)
36 | library(tsibble)
37 | library(tsibbledata)
38 | library(dplyr)
39 |
40 | fit <- aus_production \%>\%
41 | filter(Quarter < yearquarter("2006 Q1")) \%>\%
42 | model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A")))
43 |
44 | # In-sample training accuracy does not require extra data provided.
45 | accuracy(fit)
46 |
47 | # Out-of-sample forecast accuracy requires the future values to compare with.
48 | # All available future data will be used, and a warning will be given if some
49 | # data for the forecast window is unavailable.
50 | fc <- fit \%>\%
51 | forecast(h = "5 years")
52 | fc \%>\%
53 | accuracy(aus_production)
54 |
55 | # It is also possible to compute interval and distributional measures of
56 | # accuracy for models and forecasts which give forecast distributions.
57 | fc \%>\%
58 | accuracy(
59 | aus_production,
60 | measures = list(interval_accuracy_measures, distribution_accuracy_measures)
61 | )
62 | \dontshow{\}) # examplesIf}
63 | }
64 | \seealso{
65 | \href{https://otexts.com/fpp3/accuracy.html}{Evaluating forecast accuracy}
66 | }
67 |
--------------------------------------------------------------------------------
/man/agg_vec.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/aggregate.R
3 | \name{agg_vec}
4 | \alias{agg_vec}
5 | \title{Create an aggregation vector}
6 | \usage{
7 | agg_vec(x = character(), aggregated = logical(vec_size(x)))
8 | }
9 | \arguments{
10 | \item{x}{The vector of values.}
11 |
12 | \item{aggregated}{A logical vector to identify which values are \verb{<aggregated>}.}
13 | }
14 | \description{
15 | \lifecycle{maturing}
16 | }
17 | \details{
18 | An aggregation vector extends usual vectors by adding \verb{<aggregated>} values.
19 | These vectors are typically produced via the \code{\link[=aggregate_key]{aggregate_key()}} function;
20 | however, it can be useful to create them manually to produce more complicated
21 | hierarchies (such as unbalanced hierarchies).
22 | }
23 | \examples{
24 | agg_vec(
25 | x = c(NA, "A", "B"),
26 | aggregated = c(TRUE, FALSE, FALSE)
27 | )
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/man/aggregate_index.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/temporal_aggregation.R
3 | \name{aggregate_index}
4 | \alias{aggregate_index}
5 | \title{Expand a dataset to include temporal aggregates}
6 | \usage{
7 | aggregate_index(.data, .window, ..., .offset = "end", .bin_size = NULL)
8 | }
9 | \arguments{
10 | \item{.data}{A tsibble.}
11 |
12 | \item{.window}{Temporal aggregations to include. The default (NULL) will
13 | automatically identify appropriate temporal aggregations. This can be
14 | specified in several ways (see details).}
15 |
16 | \item{...}{<\code{\link[rlang:args_data_masking]{data-masking}}> Name-value pairs of
17 | summary functions. The name will be the name of the variable in the result.
18 |
19 | The value can be:
20 | \itemize{
21 | \item A vector of length 1, e.g. \code{min(x)}, \code{n()}, or \code{sum(is.na(y))}.
22 | \item A data frame, to add multiple columns from a single expression.
23 | }
24 |
25 | \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} Returning values with size 0 or >1 was
26 | deprecated as of 1.1.0. Please use \code{\link[dplyr:reframe]{reframe()}} for this instead.}
27 |
28 | \item{.offset}{Offset the temporal aggregation windows to align with the start
29 | or end of the data. If FALSE, no offset will be applied (giving common
30 | breakpoints for temporal bins).}
31 |
32 | \item{.bin_size}{Temporary. Define the number of observations in each temporal bucket}
33 | }
34 | \description{
35 | \lifecycle{experimental}
36 | }
37 | \details{
38 | This feature is very experimental. It currently allows for temporal
39 | aggregation of daily data as a proof of concept.
40 |
41 | The aggregation \code{.window} can be specified in several ways:
42 | \itemize{
43 | \item A character string, containing one of "day", "week", "month", "quarter" or
44 | "year". This can optionally be preceded by a (positive or negative) integer
45 | and a space, or followed by "s".
46 | \item A number, taken to be in days.
47 | \item A \code{\link{difftime}} object.
48 | }
49 | }
50 | \examples{
51 | library(tsibble)
52 | pedestrian \%>\%
53 | # Currently only supports daily data
54 | index_by(Date) \%>\%
55 | dplyr::summarise(Count = sum(Count)) \%>\%
56 | # Compute weekly aggregates
57 | fabletools:::aggregate_index("1 week", Count = sum(Count))
58 | }
59 |
--------------------------------------------------------------------------------
/man/aggregate_key.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/aggregate.R
3 | \name{aggregate_key}
4 | \alias{aggregate_key}
5 | \title{Expand a dataset to include other levels of aggregation}
6 | \usage{
7 | aggregate_key(.data, .spec, ...)
8 | }
9 | \arguments{
10 | \item{.data}{A tsibble.}
11 |
12 | \item{.spec}{The specification of aggregation structure.}
13 |
14 | \item{...}{<\code{\link[rlang:args_data_masking]{data-masking}}> Name-value pairs of
15 | summary functions. The name will be the name of the variable in the result.
16 |
17 | The value can be:
18 | \itemize{
19 | \item A vector of length 1, e.g. \code{min(x)}, \code{n()}, or \code{sum(is.na(y))}.
20 | \item A data frame, to add multiple columns from a single expression.
21 | }
22 |
23 | \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}} Returning values with size 0 or >1 was
24 | deprecated as of 1.1.0. Please use \code{\link[dplyr:reframe]{reframe()}} for this instead.}
25 | }
26 | \description{
27 | Uses the structural specification given in \code{.spec} to aggregate a time
28 | series. A grouped structure is specified using \code{grp1 * grp2}, and a nested
29 | structure is specified via \code{parent / child}. Aggregating the key structure is
30 | commonly used with forecast reconciliation to produce coherent forecasts over
31 | some hierarchy.
32 | }
33 | \details{
34 | This function is experimental, and is subject to change in the future.
35 |
36 | The way in which the measured variables are aggregated is specified in a
37 | similar way to how \verb{[dplyr::summarise()]} is used.
38 | }
39 | \examples{
40 | library(tsibble)
41 | tourism \%>\%
42 | aggregate_key(Purpose * (State / Region), Trips = sum(Trips))
43 |
44 | }
45 | \seealso{
46 | \code{\link[=reconcile]{reconcile()}}, \code{\link[=is_aggregated]{is_aggregated()}}
47 | }
48 |
--------------------------------------------------------------------------------
/man/aggregation-vctrs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/aggregate.R
3 | \name{aggregation-vctrs}
4 | \alias{aggregation-vctrs}
5 | \alias{vec_ptype2.agg_vec}
6 | \alias{vec_ptype2.agg_vec.agg_vec}
7 | \alias{vec_ptype2.agg_vec.default}
8 | \alias{vec_ptype2.agg_vec.character}
9 | \alias{vec_ptype2.character.agg_vec}
10 | \alias{vec_ptype_abbr.agg_vec}
11 | \alias{vec_cast.agg_vec}
12 | \alias{vec_cast.agg_vec.agg_vec}
13 | \alias{vec_cast.agg_vec.default}
14 | \alias{vec_cast.character.agg_vec}
15 | \alias{vec_proxy_compare.agg_vec}
16 | \title{Internal vctrs methods}
17 | \usage{
18 | \method{vec_ptype2}{agg_vec}(x, y, ...)
19 |
20 | \method{vec_ptype2}{agg_vec.agg_vec}(x, y, ...)
21 |
22 | \method{vec_ptype2}{agg_vec.default}(x, y, ...)
23 |
24 | \method{vec_ptype2}{agg_vec.character}(x, y, ...)
25 |
26 | \method{vec_ptype2}{character.agg_vec}(x, y, ...)
27 |
28 | \method{vec_ptype_abbr}{agg_vec}(x, ...)
29 |
30 | \method{vec_cast}{agg_vec}(x, to, ...)
31 |
32 | \method{vec_cast}{agg_vec.agg_vec}(x, to, ...)
33 |
34 | \method{vec_cast}{agg_vec.default}(x, to, ...)
35 |
36 | \method{vec_cast}{character.agg_vec}(x, to, ...)
37 |
38 | \method{vec_proxy_compare}{agg_vec}(x, ...)
39 | }
40 | \description{
41 | These methods are the extensions that allow aggregation vectors to work with
42 | vctrs.
43 | }
44 | \keyword{internal}
45 |
--------------------------------------------------------------------------------
/man/as-dable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/dable.R
3 | \name{as_dable}
4 | \alias{as_dable}
5 | \alias{as_dable.tbl_df}
6 | \alias{as_dable.tbl_ts}
7 | \title{Coerce to a dable object}
8 | \usage{
9 | as_dable(x, ...)
10 |
11 | \method{as_dable}{tbl_df}(x, response, method = NULL, seasons = list(), aliases = list(), ...)
12 |
13 | \method{as_dable}{tbl_ts}(x, response, method = NULL, seasons = list(), aliases = list(), ...)
14 | }
15 | \arguments{
16 | \item{x}{Object to be coerced to a dable (\code{dcmp_ts})}
17 |
18 | \item{...}{Additional arguments passed to methods}
19 |
20 | \item{response}{The character vector of response variable(s).}
21 |
22 | \item{method}{The name of the decomposition method.}
23 |
24 | \item{seasons}{A named list describing the structure of seasonal components
25 | (such as \code{period}, and \code{base}).}
26 |
27 | \item{aliases}{A named list of calls describing common aliases computed from
28 | components.}
29 | }
30 | \description{
31 | Coerce to a dable object
32 | }
33 |
--------------------------------------------------------------------------------
/man/as-fable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/fable.R
3 | \name{as_fable}
4 | \alias{as_fable}
5 | \alias{as_fable.tbl_ts}
6 | \alias{as_fable.grouped_ts}
7 | \alias{as_fable.tbl_df}
8 | \alias{as_fable.fbl_ts}
9 | \alias{as_fable.grouped_df}
10 | \alias{as_fable.forecast}
11 | \title{Coerce to a fable object}
12 | \usage{
13 | as_fable(x, ...)
14 |
15 | \method{as_fable}{tbl_ts}(x, response, distribution, ...)
16 |
17 | \method{as_fable}{grouped_ts}(x, response, distribution, ...)
18 |
19 | \method{as_fable}{tbl_df}(x, response, distribution, ...)
20 |
21 | \method{as_fable}{fbl_ts}(x, response, distribution, ...)
22 |
23 | \method{as_fable}{grouped_df}(x, response, distribution, ...)
24 |
25 | \method{as_fable}{forecast}(x, ..., point_forecast = list(.mean = mean))
26 | }
27 | \arguments{
28 | \item{x}{Object to be coerced to a fable (\code{fbl_ts})}
29 |
30 | \item{...}{Additional arguments passed to methods}
31 |
32 | \item{response}{The character vector of response variable(s).}
33 |
34 | \item{distribution}{The name of the distribution column (can be provided
35 | using a bare expression).}
36 |
37 | \item{point_forecast}{The point forecast measure(s) which should be returned
38 | in the resulting fable. Specified as a named list of functions which accept
39 | a distribution and return a vector. To compute forecast medians, you can use
40 | \code{list(.median = median)}.}
41 | }
42 | \description{
43 | Coerce to a fable object
44 | }
45 |
--------------------------------------------------------------------------------
/man/as_mable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/mable.R
3 | \name{as_mable}
4 | \alias{as_mable}
5 | \alias{as_mable.data.frame}
6 | \title{Coerce a dataset to a mable}
7 | \usage{
8 | as_mable(x, ...)
9 |
10 | \method{as_mable}{data.frame}(x, key = NULL, model = NULL, ...)
11 | }
12 | \arguments{
13 | \item{x}{A dataset containing a list model column.}
14 |
15 | \item{...}{Additional arguments passed to other methods.}
16 |
17 | \item{key}{Structural variable(s) that identify each model.}
18 |
19 | \item{model}{Identifiers for the columns containing model(s).}
20 | }
21 | \description{
22 | Coerce a dataset to a mable
23 | }
24 |
--------------------------------------------------------------------------------
/man/augment.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/broom.R
3 | \name{augment.mdl_df}
4 | \alias{augment.mdl_df}
5 | \alias{augment.mdl_ts}
6 | \title{Augment a mable}
7 | \usage{
8 | \method{augment}{mdl_df}(x, ...)
9 |
10 | \method{augment}{mdl_ts}(x, type = NULL, ...)
11 | }
12 | \arguments{
13 | \item{x}{A mable.}
14 |
15 | \item{...}{Arguments for model methods.}
16 |
17 | \item{type}{Deprecated.}
18 | }
19 | \description{
20 | Uses a fitted model to augment the response variable with fitted values and
21 | residuals. Response residuals (back-transformed) are stored in the \code{.resid}
22 | column, while innovation residuals (transformed) are stored in the \code{.innov}
23 | column.
24 | }
25 | \examples{
26 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
27 | library(fable)
28 | library(tsibbledata)
29 |
30 | # Forecasting with an ETS(M,Ad,A) model to Australian beer production
31 | aus_production \%>\%
32 | model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A"))) \%>\%
33 | augment()
34 | \dontshow{\}) # examplesIf}
35 | }
36 |
--------------------------------------------------------------------------------
/man/autoplot.dcmp_ts.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/plot.R
3 | \name{autoplot.dcmp_ts}
4 | \alias{autoplot.dcmp_ts}
5 | \title{Decomposition plots}
6 | \usage{
7 | \method{autoplot}{dcmp_ts}(object, .vars = NULL, scale_bars = TRUE, level = c(80, 95), ...)
8 | }
9 | \arguments{
10 | \item{object}{A dable.}
11 |
12 | \item{.vars}{The column of the dable used to plot. By default, this will be the response variable of the decomposition.}
13 |
14 | \item{scale_bars}{If \code{TRUE}, each facet will include a scale bar which represents the same units across each facet.}
15 |
16 | \item{level}{If the decomposition contains distributions, which levels should be used to display intervals?}
17 |
18 | \item{...}{Further arguments passed to \code{\link[ggplot2:geom_path]{ggplot2::geom_line()}}, which can be used to specify fixed aesthetics such as \code{colour = "red"} or \code{linewidth = 3}.}
19 | }
20 | \description{
21 | Produces a faceted plot of the components used to build the response
22 | variable of the dable. Useful for visualising how the components contribute
23 | in a decomposition or model.
24 | }
25 | \examples{
26 | \dontshow{if (requireNamespace("feasts", quietly = TRUE) && requireNamespace("tsibbledata", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
27 | library(feasts)
28 | library(tsibbledata)
29 | aus_production \%>\%
30 | model(STL(Beer)) \%>\%
31 | components() \%>\%
32 | autoplot()
33 | \dontshow{\}) # examplesIf}
34 | }
35 |
--------------------------------------------------------------------------------
/man/autoplot.fbl_ts.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/plot.R
3 | \name{autoplot.fbl_ts}
4 | \alias{autoplot.fbl_ts}
5 | \alias{autolayer.fbl_ts}
6 | \title{Plot a set of forecasts}
7 | \usage{
8 | \method{autoplot}{fbl_ts}(object, data = NULL, level = c(80, 95), show_gap = TRUE, ...)
9 |
10 | \method{autolayer}{fbl_ts}(
11 | object,
12 | data = NULL,
13 | level = c(80, 95),
14 | point_forecast = list(mean = mean),
15 | show_gap = TRUE,
16 | ...
17 | )
18 | }
19 | \arguments{
20 | \item{object}{A fable.}
21 |
22 | \item{data}{A tsibble with the same key structure as the fable.}
23 |
24 | \item{level}{The confidence level(s) for the plotted intervals.}
25 |
26 | \item{show_gap}{Setting this to \code{FALSE} will connect the most recent value in \code{data} with the forecasts.}
27 |
28 | \item{...}{Further arguments passed used to specify fixed aesthetics for the forecasts such as \code{colour = "red"} or \code{linewidth = 3}.}
29 |
30 | \item{point_forecast}{The point forecast measure to be displayed in the plot.}
31 | }
32 | \description{
33 | Produces a forecast plot from a fable. As the original data is not included
34 | in the fable object, it will need to be specified via the \code{data} argument.
35 | The \code{data} argument can be used to specify a shorter period of data, which is
36 | useful to focus on the more recent observations.
37 | }
38 | \examples{
39 | \dontshow{if (requireNamespace("fable", quietly = TRUE) && requireNamespace("tsibbledata", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
40 | library(fable)
41 | library(tsibbledata)
42 |
43 | fc <- aus_production \%>\%
44 | model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A"))) \%>\%
45 | forecast(h = "3 years")
46 |
47 | fc \%>\%
48 | autoplot(aus_production)
49 | \dontshow{\}) # examplesIf}
50 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
51 | aus_production \%>\%
52 | autoplot(Beer) +
53 | autolayer(fc)
54 | \dontshow{\}) # examplesIf}
55 | }
56 |
--------------------------------------------------------------------------------
/man/autoplot.tbl_ts.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/plot.R
3 | \name{autoplot.tbl_ts}
4 | \alias{autoplot.tbl_ts}
5 | \alias{autolayer.tbl_ts}
6 | \title{Plot time series from a tsibble}
7 | \usage{
8 | \method{autoplot}{tbl_ts}(object, .vars = NULL, ...)
9 |
10 | \method{autolayer}{tbl_ts}(object, .vars = NULL, ...)
11 | }
12 | \arguments{
13 | \item{object}{A tsibble.}
14 |
15 | \item{.vars}{A bare expression containing data you wish to plot. Multiple variables can be plotted using \code{\link[ggplot2:vars]{ggplot2::vars()}}.}
16 |
17 | \item{...}{Further arguments passed to \code{\link[ggplot2:geom_path]{ggplot2::geom_line()}}, which can be used to specify fixed aesthetics such as \code{colour = "red"} or \code{linewidth = 3}.}
18 | }
19 | \description{
20 | Produces a time series plot of one or more variables from a tsibble. If the
21 | tsibble contains multiple keys, separate time series will be identified by
22 | colour.
23 | }
24 | \examples{
25 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
26 | library(fable)
27 | library(tsibbledata)
28 | library(tsibble)
29 |
30 | tsibbledata::gafa_stock \%>\%
31 | autoplot(vars(Close, log(Close)))
32 | \dontshow{\}) # examplesIf}
33 | }
34 |
--------------------------------------------------------------------------------
/man/bias_adjust.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/transform.R
3 | \name{bias_adjust}
4 | \alias{bias_adjust}
5 | \title{Bias adjust back-transformation functions}
6 | \usage{
7 | bias_adjust(bt, sd)
8 | }
9 | \arguments{
10 | \item{bt}{The back-transformation function}
11 |
12 | \item{sd}{The forecast standard deviation}
13 | }
14 | \description{
15 | To produce forecast means (instead of forecast medians) it is necessary to adjust the back-transformation function relative to the forecast variance.
16 | }
17 | \details{
18 | More details about bias adjustment can be found in the transformations vignette:
19 | \code{vignette("transformations", package = "fable")}
20 | }
21 | \examples{
22 |
23 | adj_fn <- bias_adjust(function(x) exp(x), 1:10)
24 | y <- rnorm(10)
25 | exp(y)
26 | adj_fn(y)
27 |
28 | }
29 | \keyword{internal}
30 |
--------------------------------------------------------------------------------
/man/bottom_up.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reconciliation.R
3 | \name{bottom_up}
4 | \alias{bottom_up}
5 | \title{Bottom up forecast reconciliation}
6 | \usage{
7 | bottom_up(models)
8 | }
9 | \arguments{
10 | \item{models}{A column of models in a mable.}
11 | }
12 | \description{
13 | \lifecycle{experimental}
14 | }
15 | \details{
16 | Reconciles a hierarchy using the bottom up reconciliation method. The
17 | response variable of the hierarchy must be aggregated using sums. The
18 | forecasted time points must match for all series in the hierarchy.
19 | }
20 | \seealso{
21 | \code{\link[=reconcile]{reconcile()}}, \code{\link[=aggregate_key]{aggregate_key()}}
22 | }
23 |
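A minimal sketch of how bottom_up() is typically used, assuming the fable package (for ETS()) and the tourism data from tsibble; the hierarchy is built with aggregate_key() and reconciled via reconcile():

library(fable)
library(tsibble)

tourism %>%
  aggregate_key(State, Trips = sum(Trips)) %>%
  model(ets = ETS(Trips)) %>%
  reconcile(bu = bottom_up(ets)) %>%
  forecast(h = "2 years")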
--------------------------------------------------------------------------------
/man/box_cox.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/box_cox.R
3 | \name{box_cox}
4 | \alias{box_cox}
5 | \alias{inv_box_cox}
6 | \title{Box Cox Transformation}
7 | \usage{
8 | box_cox(x, lambda)
9 |
10 | inv_box_cox(x, lambda)
11 | }
12 | \arguments{
13 | \item{x}{a numeric vector.}
14 |
15 | \item{lambda}{a numeric value for the transformation parameter.}
16 | }
17 | \value{
18 | a transformed numeric vector of the same length as x.
19 | }
20 | \description{
21 | \code{box_cox()} returns a transformation of the input variable using a Box-Cox
22 | transformation. \code{inv_box_cox()} reverses the transformation.
23 | }
24 | \details{
25 | The Box-Cox transformation is given by \deqn{f_\lambda(x) =\frac{x^\lambda -
26 | 1}{\lambda}}{f(x;lambda)=(x^lambda - 1)/lambda} if \eqn{\lambda\ne0}{lambda
27 | is not equal to 0}. For \eqn{\lambda=0}{lambda=0},
28 | \deqn{f_0(x)=\log(x)}{f(x;0)=log(x)}.
29 | }
30 | \examples{
31 | library(tsibble)
32 | library(dplyr)
33 | airmiles \%>\%
34 | as_tsibble() \%>\%
35 | mutate(box_cox = box_cox(value, lambda = 0.3))
36 |
37 | }
38 | \references{
39 | Box, G. E. P. and Cox, D. R. (1964) An analysis of
40 | transformations. \emph{JRSS B} \bold{26} 211--246.
41 | }
42 | \author{
43 | Rob J Hyndman & Mitchell O'Hara-Wild
44 | }
45 |
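As a small supplementary check (not part of the package examples), inv_box_cox() undoes box_cox() for the same lambda:

library(fabletools)

x <- c(1, 5, 10, 50)
lambda <- 0.3
# The round trip should recover x (up to numerical tolerance)
all.equal(inv_box_cox(box_cox(x, lambda), lambda), x)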
--------------------------------------------------------------------------------
/man/combination_ensemble.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model_combination.R
3 | \name{combination_ensemble}
4 | \alias{combination_ensemble}
5 | \title{Ensemble combination}
6 | \usage{
7 | combination_ensemble(..., weights = c("equal", "inv_var"))
8 | }
9 | \arguments{
10 | \item{...}{Estimated models used in the ensemble.}
11 |
12 | \item{weights}{The method used to weight each model in the ensemble.}
13 | }
14 | \description{
15 | Ensemble combination
16 | }
17 | \seealso{
18 | \code{\link[=combination_weighted]{combination_weighted()}}
19 | }
20 |
--------------------------------------------------------------------------------
/man/combination_model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model_combination.R
3 | \name{combination_model}
4 | \alias{combination_model}
5 | \title{Combination modelling}
6 | \usage{
7 | combination_model(..., cmbn_fn = combination_ensemble, cmbn_args = list())
8 | }
9 | \arguments{
10 | \item{...}{Model definitions used in the combination.}
11 |
12 | \item{cmbn_fn}{A function used to produce the combination.}
13 |
14 | \item{cmbn_args}{Additional arguments passed to \code{cmbn_fn}.}
15 | }
16 | \description{
17 | Combines multiple model definitions (passed via \code{...}) to produce a model
18 | combination definition using some combination function (\code{cmbn_fn}). Currently
19 | distributional forecasts are only supported for models producing normally
20 | distributed forecasts.
21 | }
22 | \details{
23 | A combination model can also be produced using mathematical operations.
24 | }
25 | \examples{
26 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
27 | library(fable)
28 | library(tsibble)
29 | library(tsibbledata)
30 |
31 | # cmbn1 and cmbn2 are equivalent and equally weighted.
32 | aus_production \%>\%
33 | model(
34 | cmbn1 = combination_model(SNAIVE(Beer), TSLM(Beer ~ trend() + season())),
35 | cmbn2 = (SNAIVE(Beer) + TSLM(Beer ~ trend() + season()))/2
36 | )
37 |
38 | # An inverse variance weighted ensemble.
39 | aus_production \%>\%
40 | model(
41 | cmbn1 = combination_model(
42 | SNAIVE(Beer), TSLM(Beer ~ trend() + season()),
43 | cmbn_args = list(weights = "inv_var")
44 | )
45 | )
46 | \dontshow{\}) # examplesIf}
47 | }
48 |
--------------------------------------------------------------------------------
/man/combination_weighted.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model_combination.R
3 | \name{combination_weighted}
4 | \alias{combination_weighted}
5 | \title{Weighted combination}
6 | \usage{
7 | combination_weighted(..., weights = NULL)
8 | }
9 | \arguments{
10 | \item{...}{Estimated models used in the ensemble.}
11 |
12 | \item{weights}{The numeric weights applied to each model in \code{...}}
13 | }
14 | \description{
15 | Weighted combination
16 | }
17 | \seealso{
18 | \code{\link[=combination_ensemble]{combination_ensemble()}}
19 | }
20 |
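A minimal sketch (assuming the fable and tsibbledata packages) of passing combination_weighted() as the combination function of combination_model(); the weights here are purely illustrative:

library(fable)
library(tsibbledata)

aus_production %>%
  model(
    wt_cmbn = combination_model(
      SNAIVE(Beer), TSLM(Beer ~ trend() + season()),
      cmbn_fn = combination_weighted,
      cmbn_args = list(weights = c(0.7, 0.3))
    )
  )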
--------------------------------------------------------------------------------
/man/common_xregs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/xregs.R
3 | \docType{data}
4 | \name{common_xregs}
5 | \alias{common_xregs}
6 | \title{Common exogenous regressors}
7 | \usage{
8 | common_xregs
9 | }
10 | \description{
11 | These special functions provide interfaces to more complicated functions within
12 | the model formulae interface.
13 | }
14 | \section{Specials}{
15 |
16 |
17 | \subsection{trend}{
18 | The \code{trend} special includes common linear trend regressors in the model. It also supports piecewise linear trend via the \code{knots} argument.
19 | \preformatted{
20 | trend(knots = NULL, origin = NULL)
21 | }
22 |
23 | \tabular{ll}{
24 | \code{knots} \tab A vector of times (same class as the data's time index) identifying the position of knots for a piecewise linear trend.\cr
25 | \code{origin} \tab An optional time value to act as the starting time for the trend.
26 | }
27 | }
28 |
29 | \subsection{season}{
30 | The \code{season} special includes seasonal dummy variables in the model.
31 | \preformatted{
32 | season(period = NULL)
33 | }
34 |
35 | \tabular{ll}{
36 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year").
37 | }
38 | }
39 |
40 | \subsection{fourier}{
41 | The \code{fourier} special includes seasonal Fourier terms in the model. The maximum order of the Fourier terms must be specified using \code{K}.
42 | \preformatted{
43 | fourier(period = NULL, K, origin = NULL)
44 | }
45 |
46 | \tabular{ll}{
47 | \code{period} \tab The periodic nature of the seasonality. This can be either a number indicating the number of observations in each seasonal period, or text to indicate the duration of the seasonal window (for example, annual seasonality would be "1 year"). \cr
48 | \code{K} \tab The maximum order of the Fourier terms.\cr
49 | \code{origin} \tab An optional time value to act as the starting time for the Fourier series.
50 | }
51 | }
52 | }
53 |
54 | \keyword{datasets}
55 |
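A minimal sketch (assuming fable and tsibbledata) of these specials being used in model formulae; trend(), season() and fourier() are available to models that support common_xregs, such as TSLM():

library(fable)
library(tsibbledata)

aus_production %>%
  model(
    deterministic = TSLM(Beer ~ trend() + season()),
    harmonic = TSLM(Beer ~ trend() + fourier(K = 2))
  )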
--------------------------------------------------------------------------------
/man/components.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/components.R
3 | \name{components.mdl_df}
4 | \alias{components.mdl_df}
5 | \alias{components.mdl_ts}
6 | \title{Extract components from a fitted model}
7 | \usage{
8 | \method{components}{mdl_df}(object, ...)
9 |
10 | \method{components}{mdl_ts}(object, ...)
11 | }
12 | \arguments{
13 | \item{object}{A mable.}
14 |
15 | \item{...}{Other arguments passed to methods.}
16 | }
17 | \description{
18 | Allows you to extract elements of interest from the model which can be
19 | useful in understanding how they contribute towards the overall fitted values.
20 | }
21 | \details{
22 | A dable will be returned, which will allow you to easily plot the components
23 | and see the way in which components are combined to give forecasts.
24 | }
25 | \examples{
26 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
27 | library(fable)
28 | library(tsibbledata)
29 |
30 | # Extracting components from an ETS(M,Ad,A) model of Australian beer production
31 | aus_production \%>\%
32 | model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A"))) \%>\%
33 | components() \%>\%
34 | autoplot()
35 | \dontshow{\}) # examplesIf}
36 | }
37 |
--------------------------------------------------------------------------------
/man/construct_fc.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/forecast.R
3 | \name{construct_fc}
4 | \alias{construct_fc}
5 | \title{Construct a new set of forecasts}
6 | \usage{
7 | construct_fc(point, sd, dist)
8 | }
9 | \arguments{
10 | \item{point}{The transformed point forecasts}
11 |
12 | \item{sd}{The standard deviation of the transformed forecasts}
13 |
14 | \item{dist}{The forecast distribution (typically produced using \code{new_fcdist})}
15 | }
16 | \description{
17 | \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#deprecated}{\figure{lifecycle-deprecated.svg}{options: alt='[Deprecated]'}}}{\strong{[Deprecated]}}
18 |
19 | This function is deprecated. \code{forecast()} methods for a model should return
20 | a vector of distributions using the distributional package.
21 |
22 | Backtransformations are automatically handled, and so no transformations should be specified here.
23 | }
24 | \keyword{internal}
25 |
--------------------------------------------------------------------------------
/man/dable-vctrs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/vctrs-dable.R
3 | \name{dable-vctrs}
4 | \alias{dable-vctrs}
5 | \alias{vec_ptype2.dcmp_ts}
6 | \alias{vec_cast.dcmp_ts}
7 | \title{Internal vctrs methods}
8 | \usage{
9 | \method{vec_ptype2}{dcmp_ts}(x, y, ...)
10 |
11 | \method{vec_cast}{dcmp_ts}(x, to, ...)
12 | }
13 | \description{
14 | These methods are the extensions that allow dable objects to
15 | work with vctrs.
16 | }
17 | \keyword{internal}
18 |
--------------------------------------------------------------------------------
/man/dable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/dable.R
3 | \name{dable}
4 | \alias{dable}
5 | \title{Create a dable object}
6 | \usage{
7 | dable(..., response, method = NULL, seasons = list(), aliases = list())
8 | }
9 | \arguments{
10 | \item{...}{Arguments passed to \code{\link[tsibble:tsibble]{tsibble::tsibble()}}.}
11 |
12 | \item{response}{The name of the response variable column.}
13 |
14 | \item{method}{The name of the decomposition method.}
15 |
16 | \item{seasons}{A named list describing the structure of seasonal components
17 | (such as \code{period}, and \code{base}).}
18 |
19 | \item{aliases}{A named list of calls describing common aliases computed from
20 | components.}
21 | }
22 | \description{
23 | A dable (decomposition table) data class (\code{dcmp_ts}) which is a tsibble-like
24 | data structure for representing decompositions. This data class is useful, as
25 | its print method describes how its columns can be combined to produce the
26 | original data, and it has a more appropriate \code{autoplot()} method for
27 | displaying decompositions. Beyond this, a dable (\code{dcmp_ts}) behaves very
28 | similarly to a tsibble (\code{tbl_ts}).
29 | }
30 |
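Dables are rarely constructed directly with dable(); they usually come from components() applied to a mable. A minimal sketch, assuming fable and tsibbledata:

library(fable)
library(tsibbledata)

dcmp <- aus_production %>%
  model(ets = ETS(Beer)) %>%
  components()

is_dable(dcmp)  # TRUE: components() returns a dable (dcmp_ts)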
--------------------------------------------------------------------------------
/man/decomposition_model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model_decomposition.R
3 | \name{decomposition_model}
4 | \alias{decomposition_model}
5 | \title{Decomposition modelling}
6 | \usage{
7 | decomposition_model(dcmp, ...)
8 | }
9 | \arguments{
10 | \item{dcmp}{A model definition which supports extracting decomposed \code{\link[=components]{components()}}.}
11 |
12 | \item{...}{Model definitions used to model the components}
13 | }
14 | \description{
15 | This function allows you to specify a decomposition combination model using
16 | any additive decomposition. It works by first decomposing the data using the
17 | decomposition method provided to \code{dcmp} with the given formula. Secondary
18 | models are used to fit each of the components from the resulting
19 | decomposition. These models are specified after the decomposition formula.
20 | All non-seasonal decomposition components must be specified, and any
21 | unspecified seasonal components will be forecasted using seasonal naive.
22 | These component models will be combined according to the decomposition
23 | method, giving a combination model for the response of the decomposition.
24 | }
25 | \examples{
26 | \dontshow{if (requireNamespace("fable", quietly = TRUE) && requireNamespace("feasts", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
27 | library(fable)
28 | library(feasts)
29 | library(tsibble)
30 | library(dplyr)
31 |
32 | vic_food <- tsibbledata::aus_retail \%>\%
33 | filter(State == "Victoria", Industry == "Food retailing")
34 |
35 | # Identify an appropriate decomposition
36 | vic_food \%>\%
37 | model(STL(log(Turnover) ~ season(window = Inf))) \%>\%
38 | components() \%>\%
39 | autoplot()
40 |
41 | # Use an ETS model for the seasonally adjusted data, and SNAIVE for season_year
42 | # Any model can be used, and seasonal components will default to use SNAIVE.
43 | my_dcmp_spec <- decomposition_model(
44 | STL(log(Turnover) ~ season(window = Inf)),
45 | ETS(season_adjust ~ season("N")), SNAIVE(season_year)
46 | )
47 |
48 | vic_food \%>\%
49 | model(my_dcmp_spec) \%>\%
50 | forecast(h="5 years") \%>\%
51 | autoplot(vic_food)
52 | \dontshow{\}) # examplesIf}
53 | }
54 | \seealso{
55 | \href{https://otexts.com/fpp3/forecasting-decomposition.html}{\emph{Forecasting: Principles and Practice} - Forecasting Decomposition}
56 | }
57 |
--------------------------------------------------------------------------------
/man/directional_accuracy_measures.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \docType{data}
4 | \name{MDA}
5 | \alias{MDA}
6 | \alias{MDV}
7 | \alias{MDPV}
8 | \alias{directional_accuracy_measures}
9 | \title{Directional accuracy measures}
10 | \format{
11 | An object of class \code{list} of length 3.
12 | }
13 | \usage{
14 | MDA(.resid, .actual, na.rm = TRUE, reward = 1, penalty = 0, ...)
15 |
16 | MDV(.resid, .actual, na.rm = TRUE, ...)
17 |
18 | MDPV(.resid, .actual, na.rm = TRUE, ...)
19 |
20 | directional_accuracy_measures
21 | }
22 | \arguments{
23 | \item{.resid}{A vector of residuals from either the training (model accuracy)
24 | or test (forecast accuracy) data.}
25 |
26 | \item{.actual}{A vector of responses matching the fitted values
27 | (for forecast accuracy, \code{new_data} must be provided).}
28 |
29 | \item{na.rm}{Remove the missing values before calculating the accuracy measure}
30 |
31 | \item{reward, penalty}{The weights given to correct and incorrect predicted
32 | directions.}
33 |
34 | \item{...}{Additional arguments for each measure.}
35 | }
36 | \description{
37 | A collection of accuracy measures based on the accuracy of the prediction's
38 | direction (say, increasing or decreasing).
39 | }
40 | \details{
41 | \code{MDA()}: Mean Directional Accuracy
42 | \code{MDV()}: Mean Directional Value
43 | \code{MDPV()}: Mean Directional Percentage Value
44 | }
45 | \references{
46 | Blaskowitz and H. Herwartz (2011) "On economic evaluation of directional forecasts". \emph{International Journal of Forecasting},
47 | \bold{27}(4), 1058-1065.
48 | }
49 | \keyword{datasets}
50 |
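A minimal sketch (assuming fable, tsibbledata, tsibble and dplyr) of computing these directional measures on forecast accuracy via accuracy(); the train/test split is illustrative:

library(fable)
library(tsibbledata)
library(tsibble)
library(dplyr)

fc <- aus_production %>%
  filter(Quarter < yearquarter("2008 Q1")) %>%
  model(snaive = SNAIVE(Beer)) %>%
  forecast(h = "2 years")

accuracy(fc, aus_production, measures = directional_accuracy_measures)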
--------------------------------------------------------------------------------
/man/distribution_accuracy_measures.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \docType{data}
4 | \name{percentile_score}
5 | \alias{percentile_score}
6 | \alias{quantile_score}
7 | \alias{CRPS}
8 | \alias{distribution_accuracy_measures}
9 | \title{Distribution accuracy measures}
10 | \format{
11 | An object of class \code{list} of length 2.
12 | }
13 | \usage{
14 | percentile_score(.dist, .actual, na.rm = TRUE, ...)
15 |
16 | quantile_score(
17 | .dist,
18 | .actual,
19 | probs = c(0.05, 0.25, 0.5, 0.75, 0.95),
20 | na.rm = TRUE,
21 | ...
22 | )
23 |
24 | CRPS(.dist, .actual, n_quantiles = 1000, na.rm = TRUE, ...)
25 |
26 | distribution_accuracy_measures
27 | }
28 | \arguments{
29 | \item{.dist}{The distribution of fitted values from the model, or forecasted values from the forecast.}
30 |
31 | \item{.actual}{A vector of responses matching the fitted values
32 | (for forecast accuracy, \code{new_data} must be provided).}
33 |
34 | \item{na.rm}{Remove the missing values before calculating the accuracy measure}
35 |
36 | \item{...}{Additional arguments for each measure.}
37 |
38 | \item{probs}{A vector of probabilities at which the metric is evaluated.}
39 |
40 | \item{n_quantiles}{The number of quantiles to use in approximating CRPS when an exact solution is not available.}
41 | }
42 | \description{
43 | These accuracy measures can be used to evaluate how accurately a forecast
44 | distribution predicts a given actual value.
45 | }
46 | \section{Quantile/percentile score (pinball loss)}{
47 |
48 |
49 | A quantile (or percentile) score evaluates how accurately a set of quantiles
50 | (or percentiles) from the distribution match the given actual value. This
51 | score uses a pinball loss function, and can be calculated via the average of
52 | the score function given below:
53 |
54 | The score function \eqn{s_p(q_p,y)} is given by \eqn{(1-p)(q_p-y)} if
55 | \eqn{y < q_p}, and \eqn{p(y-q_p)} if \eqn{y \ge q_p}. Where \eqn{p} is the
56 | quantile probability, \eqn{q_p = F^{-1}(p)} is the quantile with probability
57 | \eqn{p}, and \eqn{y} is the actual value.
58 |
59 | The resulting accuracy measure will average this score over all predicted
60 | points at all desired quantiles (defined via the \code{probs} argument).
61 |
62 | The percentile score uses the same method with \code{probs} set to all
63 | percentiles \code{probs = seq(0.01, 0.99, 0.01)}.
64 | }
65 |
66 | \section{Continuous ranked probability score (CRPS)}{
67 |
68 |
69 | The continuous ranked probability score (CRPS) is the continuous analogue of
70 | the pinball loss quantile score defined above. Its value is twice the
71 | integral of the quantile score over all possible quantiles:
72 |
73 | \deqn{
74 | CRPS(F,y) = 2 \int_0^1 s_p(q_p,y) dp
75 | }{
76 | CRPS(F,y) = 2 integral_0^1 s_p(q_p,y) dp
77 | }
78 |
79 | It can be computed directly from the distribution via:
80 |
81 | \deqn{
82 | CRPS(F,y) = \int_{-\infty}^\infty (F(x) - 1{y\leq x})^2 dx
83 | }{
84 | CRPS(F,y) = integral_{-\infty}^\infty (F(x) - 1{y\leq x})^2 dx
85 | }
86 |
87 | For some forecast distribution \eqn{F} and actual value \eqn{y}.
88 |
89 | Calculating the CRPS accuracy measure is computationally difficult for many
90 | distributions; however, it can be computed quickly and exactly for Normal and
91 | empirical (sample) distributions. For other distributions the CRPS is
92 | approximated using the quantile score of many quantiles (using the number of
93 | quantiles specified in the \code{n_quantiles} argument).
94 | }
95 |
96 | \keyword{datasets}
97 |
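A minimal sketch (assuming fable, tsibbledata, tsibble and dplyr) of scoring forecast distributions with these measures via accuracy():

library(fable)
library(tsibbledata)
library(tsibble)
library(dplyr)

fc <- aus_production %>%
  filter(Quarter < yearquarter("2008 Q1")) %>%
  model(ets = ETS(Beer)) %>%
  forecast(h = "2 years")

accuracy(fc, aus_production, measures = distribution_accuracy_measures)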
--------------------------------------------------------------------------------
/man/distribution_var.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accessors.R
3 | \name{distribution_var}
4 | \alias{distribution_var}
5 | \title{Return distribution variable}
6 | \usage{
7 | distribution_var(x)
8 | }
9 | \arguments{
10 | \item{x}{A dataset containing a distribution variable (such as a fable).}
11 | }
12 | \description{
13 | \code{distribution_var()} returns a character vector of the distribution variable
14 | in the data.
15 | }
16 |
--------------------------------------------------------------------------------
/man/estimate.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/estimate.R
3 | \name{estimate}
4 | \alias{estimate}
5 | \alias{estimate.tbl_ts}
6 | \title{Estimate a model}
7 | \usage{
8 | estimate(.data, ...)
9 |
10 | \method{estimate}{tbl_ts}(.data, .model, ...)
11 | }
12 | \arguments{
13 | \item{.data}{A data structure suitable for the models (such as a \code{tsibble}).}
14 |
15 | \item{...}{Further arguments passed to methods.}
16 |
17 | \item{.model}{Definition for the model to be used.}
18 | }
19 | \description{
20 | Estimate a model
21 | }
22 |
--------------------------------------------------------------------------------
/man/fable-vctrs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/vctrs-fable.R
3 | \name{fable-vctrs}
4 | \alias{fable-vctrs}
5 | \alias{vec_ptype2.fbl_ts}
6 | \alias{vec_cast.fbl_ts}
7 | \title{Internal vctrs methods}
8 | \usage{
9 | \method{vec_ptype2}{fbl_ts}(x, y, ...)
10 |
11 | \method{vec_cast}{fbl_ts}(x, to, ...)
12 | }
13 | \description{
14 | These methods are the extensions that allow fable objects to
15 | work with vctrs.
16 | }
17 | \keyword{internal}
18 |
--------------------------------------------------------------------------------
/man/fable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/fable.R
3 | \name{fable}
4 | \alias{fable}
5 | \title{Create a fable object}
6 | \usage{
7 | fable(..., response, distribution)
8 | }
9 | \arguments{
10 | \item{...}{Arguments passed to \code{\link[tsibble:tsibble]{tsibble::tsibble()}}.}
11 |
12 | \item{response}{The character vector of response variable(s).}
13 |
14 | \item{distribution}{The name of the distribution column (can be provided
15 | using a bare expression).}
16 | }
17 | \description{
18 | A fable (forecast table) data class (\code{fbl_ts}) which is a tsibble-like data
19 | structure for representing forecasts. In addition to the key and index from
20 | the tsibble (\code{tbl_ts}) class, a fable (\code{fbl_ts}) must also contain a single
21 | distribution column that uses values from the distributional package.
22 | }
23 |
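Fables are most often produced by forecast() rather than constructed directly with fable(). A minimal sketch, assuming fable and tsibbledata:

library(fable)
library(tsibbledata)

fbl <- aus_production %>%
  model(ets = ETS(Beer)) %>%
  forecast(h = "1 year")

is_fable(fbl)          # TRUE
distribution_var(fbl)  # name of the distribution column ("Beer")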
--------------------------------------------------------------------------------
/man/fabletools-package.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/fabletools-package.R
3 | \docType{package}
4 | \name{fabletools-package}
5 | \alias{fabletools}
6 | \alias{fabletools-package}
7 | \title{fabletools: Core Tools for Packages in the 'fable' Framework}
8 | \description{
9 | Provides tools, helpers and data structures for developing models and time series functions for 'fable' and extension packages. These tools support a consistent and tidy interface for time series modelling and analysis.
10 | }
11 | \seealso{
12 | Useful links:
13 | \itemize{
14 | \item \url{https://fabletools.tidyverts.org/}
15 | \item \url{https://github.com/tidyverts/fabletools}
16 | \item Report bugs at \url{https://github.com/tidyverts/fabletools/issues}
17 | }
18 |
19 | }
20 | \author{
21 | \strong{Maintainer}: Mitchell O'Hara-Wild \email{mail@mitchelloharawild.com} (\href{https://orcid.org/0000-0001-6729-7695}{ORCID})
22 |
23 | Authors:
24 | \itemize{
25 | \item Rob Hyndman
26 | \item Earo Wang (\href{https://orcid.org/0000-0001-6448-5260}{ORCID})
27 | }
28 |
29 | Other contributors:
30 | \itemize{
31 | \item Di Cook [contributor]
32 | \item George Athanasopoulos [contributor]
33 | \item David Holt [contributor]
34 | }
35 |
36 | }
37 | \keyword{package}
38 |
--------------------------------------------------------------------------------
/man/feature_set.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/features.R
3 | \name{feature_set}
4 | \alias{feature_set}
5 | \title{Create a feature set from tags}
6 | \usage{
7 | feature_set(pkgs = NULL, tags = NULL)
8 | }
9 | \arguments{
10 | \item{pkgs}{The package(s) from which to search for features. If \code{NULL},
11 | all registered features from currently loaded packages will be searched.}
12 |
13 | \item{tags}{Tags used to identify similar groups of features. If \code{NULL},
14 | all tags will be included.}
15 | }
16 | \description{
17 | Construct a feature set from features available in currently loaded packages.
18 | Lists of available features can be found in the following pages:
19 | \itemize{
20 | \item \link[=features_by_pkg]{Features by package}
21 | \item \link[=features_by_tag]{Features by tag}
22 | }
23 | }
24 | \section{Registering features}{
25 |
26 | Features can be registered for use with the \code{feature_set()} function using
27 | \code{\link[=register_feature]{register_feature()}}. This function allows you to register a feature along
28 | with the tags associated with it. If the features are being registered from
29 | within a package, this feature registration should happen at load time using
30 | \code{.onLoad()}.
31 | }
32 |
33 |
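A minimal sketch, assuming the feasts package is installed so that its features are registered on load:

library(feasts)
library(tsibble)

# All features registered by the feasts package
feature_set(pkgs = "feasts")

# Features sharing a tag, used with features()
tourism %>%
  features(Trips, features = feature_set(tags = "autocorrelation"))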
--------------------------------------------------------------------------------
/man/features.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/features.R
3 | \name{features}
4 | \alias{features}
5 | \alias{features_at}
6 | \alias{features_all}
7 | \alias{features_if}
8 | \title{Extract features from a dataset}
9 | \usage{
10 | features(.tbl, .var, features, ...)
11 |
12 | features_at(.tbl, .vars, features, ...)
13 |
14 | features_all(.tbl, features, ...)
15 |
16 | features_if(.tbl, .predicate, features, ...)
17 | }
18 | \arguments{
19 | \item{.tbl}{A dataset}
20 |
21 | \item{.var}{An expression that produces a vector from which the features are computed.}
22 |
23 | \item{features}{A list of functions (or lambda expressions) for the features to compute. \code{\link[=feature_set]{feature_set()}} is a useful helper for building sets of features.}
24 |
25 | \item{...}{Additional arguments to be passed to each feature. These arguments will only be passed to features which use it in their formal arguments (\code{\link[base:formals]{base::formals()}}), and not via their \code{...}. While passing \code{na.rm = TRUE} to \code{\link[stats:cor]{stats::var()}} will work, it will not for \code{\link[base:mean]{base::mean()}} as its formals are \code{x} and \code{...}. To more precisely pass inputs to each function, you should use lambdas in the list of features (\code{~ mean(., na.rm = TRUE)}).}
26 |
27 | \item{.vars}{A tidyselect compatible selection of the column(s) to compute features on.}
28 |
29 | \item{.predicate}{A predicate function (or lambda expression) to be applied to the columns or a logical vector. The variables for which .predicate is or returns TRUE are selected.}
30 | }
31 | \description{
32 | Create scalar valued summary features for a dataset from feature functions.
33 | }
34 | \details{
35 | Lists of available features can be found in the following pages:
36 | \itemize{
37 | \item \link[=features_by_pkg]{Features by package}
38 | \item \link[=features_by_tag]{Features by tag}
39 | }
40 | }
41 | \examples{
42 | # Provide a set of functions as a named list to features.
43 | library(tsibble)
44 | tourism \%>\%
45 | features(Trips, features = list(mean = mean, sd = sd))
46 |
47 | # Search and use useful features with `feature_set()`.
48 |
49 | \dontshow{if (requireNamespace("feasts", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
50 | library(feasts)
51 | \dontshow{\}) # examplesIf}
52 | tourism \%>\%
53 | features(Trips, features = feature_set(tags = "autocorrelation"))
54 |
55 | # Best practice is to use anonymous functions for additional arguments
56 | tourism \%>\%
57 | features(Trips, list(~ quantile(., probs=seq(0,1,by=0.2))))
58 |
59 |
60 | }
61 | \seealso{
62 | \code{\link[=feature_set]{feature_set()}}
63 | }
64 |
--------------------------------------------------------------------------------
/man/features_by_pkg.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/features.R
3 | \name{features_by_pkg}
4 | \alias{features_by_pkg}
5 | \title{Features by package}
6 | \description{
7 | This documentation lists all features available in currently loaded packages. This is
8 | a useful reference for making a \code{\link[=feature_set]{feature_set()}} from particular package(s).
9 | }
10 | \details{
11 | \Sexpr[stage=render,results=rd]{fabletools:::rd_features_pkg()}
12 | }
13 | \seealso{
14 | \link{features_by_tag}
15 | }
16 | \keyword{internal}
17 |
--------------------------------------------------------------------------------
/man/features_by_tag.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/features.R
3 | \name{features_by_tag}
4 | \alias{features_by_tag}
5 | \title{Features by tag}
6 | \description{
7 | This documentation lists all features available in currently loaded packages. This is
8 | a useful reference for making a \code{\link[=feature_set]{feature_set()}} from particular tag(s).
9 | }
10 | \details{
11 | \Sexpr[stage=render,results=rd]{fabletools:::rd_features_tag()}
12 | }
13 | \seealso{
14 | \link{features_by_pkg}
15 | }
16 | \keyword{internal}
17 |
--------------------------------------------------------------------------------
/man/figures/README-example-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tidyverts/fabletools/3f3ce99899f20195a8f355fb357e84477549475c/man/figures/README-example-1.png
--------------------------------------------------------------------------------
/man/figures/README-pressure-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tidyverts/fabletools/3f3ce99899f20195a8f355fb357e84477549475c/man/figures/README-pressure-1.png
--------------------------------------------------------------------------------
/man/figures/lifecycle-archived.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-defunct.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-deprecated.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-experimental.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-maturing.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-questioning.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-retired.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-soft-deprecated.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-stable.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/figures/lifecycle-superseded.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/man/fitted.mdl_df.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/fitted.R
3 | \name{fitted.mdl_df}
4 | \alias{fitted.mdl_df}
5 | \alias{hfitted}
6 | \alias{fitted.mdl_ts}
7 | \title{Extract fitted values from models}
8 | \usage{
9 | \method{fitted}{mdl_df}(object, ...)
10 |
11 | \method{fitted}{mdl_ts}(object, h = 1, ...)
12 | }
13 | \arguments{
14 | \item{object}{A mable or time series model.}
15 |
16 | \item{...}{Other arguments passed to the model method for \code{fitted()}}
17 |
18 | \item{h}{The number of steps ahead that these fitted values are computed from.}
19 | }
20 | \description{
21 | Extracts the fitted values from each of the models in a mable. A tsibble will
22 | be returned containing these fitted values. Fitted values will be
23 | automatically back-transformed if a transformation was specified.
24 | }
25 |
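A minimal sketch (assuming fable and tsibbledata) of extracting fitted values from a mable; the h argument is passed through to compute multi-step fitted values where the model supports it:

library(fable)
library(tsibbledata)

mbl <- aus_production %>%
  model(snaive = SNAIVE(Beer))

fitted(mbl)         # one-step-ahead fitted values as a tsibble
fitted(mbl, h = 2)  # multi-step fitted values, where the model supports them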
--------------------------------------------------------------------------------
/man/freq_tools.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/frequency.R
3 | \name{common_periods}
4 | \alias{common_periods}
5 | \alias{common_periods.default}
6 | \alias{common_periods.tbl_ts}
7 | \alias{common_periods.interval}
8 | \alias{get_frequencies}
9 | \alias{get_frequencies.numeric}
10 | \alias{get_frequencies.NULL}
11 | \alias{get_frequencies.character}
12 | \alias{get_frequencies.Period}
13 | \title{Extract frequencies for common seasonal periods}
14 | \usage{
15 | common_periods(x)
16 |
17 | \method{common_periods}{default}(x)
18 |
19 | \method{common_periods}{tbl_ts}(x)
20 |
21 | \method{common_periods}{interval}(x)
22 |
23 | get_frequencies(period, ...)
24 |
25 | \method{get_frequencies}{numeric}(period, ...)
26 |
27 | \method{get_frequencies}{`NULL`}(period, data, ..., .auto = c("smallest", "largest", "all"))
28 |
29 | \method{get_frequencies}{character}(period, data, ...)
30 |
31 | \method{get_frequencies}{Period}(period, data, ...)
32 | }
33 | \arguments{
34 | \item{x}{An object containing temporal data (such as a \code{tsibble}, \code{interval}, \code{datetime} and others.)}
35 |
36 | \item{period}{Specification of the time-series period}
37 |
38 | \item{...}{Other arguments to be passed on to methods}
39 |
40 | \item{data}{A tsibble}
41 |
42 | \item{.auto}{The method used to automatically select the appropriate seasonal
43 | periods}
44 | }
45 | \value{
46 | A named vector of frequencies appropriate for the provided data.
47 | }
48 | \description{
49 | Extract frequencies for common seasonal periods
50 | }
51 | \examples{
52 | common_periods(tsibble::pedestrian)
53 |
54 | }
55 | \references{
56 | \url{https://robjhyndman.com/hyndsight/seasonal-periods/}
57 | }
58 |
--------------------------------------------------------------------------------
/man/generate.mdl_df.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/generate.R
3 | \name{generate.mdl_df}
4 | \alias{generate.mdl_df}
5 | \alias{generate.mdl_ts}
6 | \title{Generate responses from a mable}
7 | \usage{
8 | \method{generate}{mdl_df}(x, new_data = NULL, h = NULL, times = 1, seed = NULL, ...)
9 |
10 | \method{generate}{mdl_ts}(
11 | x,
12 | new_data = NULL,
13 | h = NULL,
14 | times = 1,
15 | seed = NULL,
16 | bootstrap = FALSE,
17 | bootstrap_block_size = 1,
18 | ...
19 | )
20 | }
21 | \arguments{
22 | \item{x}{A mable.}
23 |
24 | \item{new_data}{The data to be generated (time index and exogenous regressors)}
25 |
26 | \item{h}{The simulation horizon (can be used instead of \code{new_data} for regular
27 | time series with no exogenous regressors).}
28 |
29 | \item{times}{The number of replications.}
30 |
31 | \item{seed}{The seed for the random generation from distributions.}
32 |
33 | \item{...}{Additional arguments for individual simulation methods.}
34 |
35 | \item{bootstrap}{If TRUE, then forecast distributions are computed using simulation with resampled errors.}
36 |
37 | \item{bootstrap_block_size}{The bootstrap block size specifies the number of contiguous residuals to be taken in each bootstrap sample.}
38 | }
39 | \description{
40 | Use a model's fitted distribution to simulate additional data with similar
41 | behaviour to the response. This is a tidy implementation of
42 | \code{\link[stats:simulate]{stats::simulate()}}.
43 | }
44 | \details{
45 | Innovations are sampled from the model's assumed error distribution.
46 | If \code{bootstrap} is \code{TRUE}, innovations will be sampled from the model's
47 | residuals. If \code{new_data} contains the \code{.innov} column, those values will be
48 | treated as innovations for the simulated paths.
49 | }
50 | \examples{
51 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
52 | library(fable)
53 | library(dplyr)
54 | UKLungDeaths <- as_tsibble(cbind(mdeaths, fdeaths), pivot_longer = FALSE)
55 | UKLungDeaths \%>\%
56 | model(lm = TSLM(mdeaths ~ fourier("year", K = 4) + fdeaths)) \%>\%
57 | generate(UKLungDeaths, times = 5)
58 | \dontshow{\}) # examplesIf}
59 | }
60 |
--------------------------------------------------------------------------------
/man/glance.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/broom.R
3 | \name{glance.mdl_df}
4 | \alias{glance.mdl_df}
5 | \alias{glance.mdl_ts}
6 | \title{Glance a mable}
7 | \usage{
8 | \method{glance}{mdl_df}(x, ...)
9 |
10 | \method{glance}{mdl_ts}(x, ...)
11 | }
12 | \arguments{
13 | \item{x}{A mable.}
14 |
15 | \item{...}{Arguments for model methods.}
16 | }
17 | \description{
18 | Uses the models within a mable to produce a one row summary of their fits.
19 | This typically contains information about the residual variance,
20 | information criterion, and other relevant summary statistics. Each model
21 | will be represented with a row of output.
22 | }
23 | \examples{
24 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
25 | library(fable)
26 | library(tsibbledata)
27 |
28 | olympic_running \%>\%
29 | model(lm = TSLM(log(Time) ~ trend())) \%>\%
30 | glance()
31 | \dontshow{\}) # examplesIf}
32 | }
33 |
--------------------------------------------------------------------------------
/man/hypothesize.mdl_df.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/hypothesise.R
3 | \name{hypothesize.mdl_df}
4 | \alias{hypothesize.mdl_df}
5 | \alias{hypothesize.mdl_ts}
6 | \title{Run a hypothesis test from a mable}
7 | \usage{
8 | \method{hypothesize}{mdl_df}(x, ...)
9 |
10 | \method{hypothesize}{mdl_ts}(x, tests = list(), ...)
11 | }
12 | \arguments{
13 | \item{x}{A mable.}
14 |
15 | \item{...}{Arguments for model methods.}
16 |
17 | \item{tests}{a list of test functions to perform on the model}
18 | }
19 | \description{
20 | This function will return the results of a hypothesis test for each model in
21 | the mable.
22 | }
23 | \examples{
24 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
25 | library(fable)
26 | library(tsibbledata)
27 |
28 | olympic_running \%>\%
29 | model(lm = TSLM(log(Time) ~ trend())) \%>\%
30 | hypothesize()
31 | \dontshow{\}) # examplesIf}
32 | }
33 |
--------------------------------------------------------------------------------
/man/interpolate.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/interpolate.R
3 | \name{interpolate.mdl_df}
4 | \alias{interpolate.mdl_df}
5 | \alias{interpolate.mdl_ts}
6 | \title{Interpolate missing values}
7 | \usage{
8 | \method{interpolate}{mdl_df}(object, new_data, ...)
9 |
10 | \method{interpolate}{mdl_ts}(object, new_data, ...)
11 | }
12 | \arguments{
13 | \item{object}{A mable containing a single model column.}
14 |
15 | \item{new_data}{A dataset with the same structure as the data used to fit the model.}
16 |
17 | \item{...}{Other arguments passed to interpolate methods.}
18 | }
19 | \description{
20 | Uses a fitted model to interpolate missing values from a dataset.
21 | }
22 | \examples{
23 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
24 | library(fable)
25 | library(tsibbledata)
26 |
27 | # The fastest running times for the olympics are missing for years during
28 | # world wars as the olympics were not held.
29 | olympic_running
30 |
31 | olympic_running \%>\%
32 | model(TSLM(Time ~ trend())) \%>\%
33 | interpolate(olympic_running)
34 | \dontshow{\}) # examplesIf}
35 | }
36 |
--------------------------------------------------------------------------------
/man/interval_accuracy_measures.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \docType{data}
4 | \name{winkler_score}
5 | \alias{winkler_score}
6 | \alias{pinball_loss}
7 | \alias{scaled_pinball_loss}
8 | \alias{interval_accuracy_measures}
9 | \title{Interval estimate accuracy measures}
10 | \format{
11 | An object of class \code{list} of length 3.
12 | }
13 | \usage{
14 | winkler_score(.dist, .actual, level = 95, na.rm = TRUE, ...)
15 |
16 | pinball_loss(.dist, .actual, level = 95, na.rm = TRUE, ...)
17 |
18 | scaled_pinball_loss(
19 | .dist,
20 | .actual,
21 | .train,
22 | level = 95,
23 | na.rm = TRUE,
24 | demean = FALSE,
25 | .period,
26 | d = .period == 1,
27 | D = .period > 1,
28 | ...
29 | )
30 |
31 | interval_accuracy_measures
32 | }
33 | \arguments{
34 | \item{.dist}{The distribution of fitted values from the model, or forecasted values from the forecast.}
35 |
36 | \item{.actual}{A vector of responses matching the fitted values
37 | (for forecast accuracy, \code{new_data} must be provided).}
38 |
39 | \item{level}{The level of the forecast interval.}
40 |
41 | \item{na.rm}{Remove the missing values before calculating the accuracy measure}
42 |
43 | \item{...}{Additional arguments for each measure.}
44 |
45 | \item{.train}{A vector of responses used to train the model
46 | (for forecast accuracy, the \code{orig_data} must be provided).}
47 |
48 | \item{demean}{Should the response be demeaned (MASE)}
49 |
50 | \item{.period}{The seasonal period of the data (defaulting to the 'smallest'
51 | seasonal period).}
52 |
53 | \item{d}{Should the response model include a first difference?}
54 |
55 | \item{D}{Should the response model include a seasonal difference?}
56 | }
57 | \description{
58 | Interval estimate accuracy measures
59 | }
60 | \keyword{datasets}
61 |
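A minimal sketch (assuming fable, tsibbledata, tsibble and dplyr) of evaluating forecast intervals with the Winkler score via accuracy():

library(fable)
library(tsibbledata)
library(tsibble)
library(dplyr)

fc <- aus_production %>%
  filter(Quarter < yearquarter("2008 Q1")) %>%
  model(ets = ETS(Beer)) %>%
  forecast(h = "2 years")

accuracy(fc, aus_production, measures = list(winkler = winkler_score))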
--------------------------------------------------------------------------------
/man/is_aggregated.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/aggregate.R
3 | \name{is_aggregated}
4 | \alias{is_aggregated}
5 | \title{Is the element an aggregation of smaller data}
6 | \usage{
7 | is_aggregated(x)
8 | }
9 | \arguments{
10 | \item{x}{An object.}
11 | }
12 | \description{
13 | Is the element an aggregation of smaller data
14 | }
15 | \seealso{
16 | \code{\link{aggregate_key}}
17 | }
18 |
--------------------------------------------------------------------------------
/man/is_dable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/dable.R
3 | \name{is_dable}
4 | \alias{is_dable}
5 | \title{Is the object a dable}
6 | \usage{
7 | is_dable(x)
8 | }
9 | \arguments{
10 | \item{x}{An object.}
11 | }
12 | \description{
13 | Is the object a dable
14 | }
15 |
--------------------------------------------------------------------------------
/man/is_fable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/fable.R
3 | \name{is_fable}
4 | \alias{is_fable}
5 | \title{Is the object a fable}
6 | \usage{
7 | is_fable(x)
8 | }
9 | \arguments{
10 | \item{x}{An object.}
11 | }
12 | \description{
13 | Is the object a fable
14 | }
15 |
--------------------------------------------------------------------------------
/man/is_mable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/mable.R
3 | \name{is_mable}
4 | \alias{is_mable}
5 | \title{Is the object a mable}
6 | \usage{
7 | is_mable(x)
8 | }
9 | \arguments{
10 | \item{x}{An object.}
11 | }
12 | \description{
13 | Is the object a mable
14 | }
15 |
--------------------------------------------------------------------------------
/man/is_model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/mdl_ts.R
3 | \name{is_model}
4 | \alias{is_model}
5 | \title{Is the object a model}
6 | \usage{
7 | is_model(x)
8 | }
9 | \arguments{
10 | \item{x}{An object.}
11 | }
12 | \description{
13 | Is the object a model
14 | }
15 |
--------------------------------------------------------------------------------
/man/mable-vctrs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/vctrs-mable.R
3 | \name{mable-vctrs}
4 | \alias{mable-vctrs}
5 | \alias{vec_ptype2.mdl_df}
6 | \alias{vec_cast.mdl_df}
7 | \title{Internal vctrs methods}
8 | \usage{
9 | \method{vec_ptype2}{mdl_df}(x, y, ...)
10 |
11 | \method{vec_cast}{mdl_df}(x, to, ...)
12 | }
13 | \description{
14 | These methods are the extensions that allow mable objects to
15 | work with vctrs.
16 | }
17 | \keyword{internal}
18 |
--------------------------------------------------------------------------------
/man/mable.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/mable.R
3 | \name{mable}
4 | \alias{mable}
5 | \title{Create a new mable}
6 | \usage{
7 | mable(..., key = NULL, model = NULL)
8 | }
9 | \arguments{
10 | \item{...}{A set of name-value pairs.}
11 |
12 | \item{key}{Structural variable(s) that identify each model.}
13 |
14 | \item{model}{Identifiers for the columns containing model(s).}
15 | }
16 | \description{
17 | A mable (model table) data class (\code{mdl_df}) is a tibble-like data structure
18 | for applying multiple models to a dataset. Each row of the mable refers to a
19 | different time series from the data (identified by the key columns). A mable
20 | must contain at least one column of time series models (\code{mdl_ts}), where the
21 | list column itself (\code{lst_mdl}) describes how these models are related.
22 | }
23 |
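Mables are usually created by model() rather than with mable() directly. A minimal sketch, assuming fable and tsibbledata:

library(fable)
library(tsibbledata)

mbl <- aus_production %>%
  model(ets = ETS(Beer), snaive = SNAIVE(Beer))

is_mable(mbl)    # TRUE
mable_vars(mbl)  # "ets" "snaive"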
--------------------------------------------------------------------------------
/man/mable_vars.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accessors.R
3 | \name{mable_vars}
4 | \alias{mable_vars}
5 | \title{Return model column variables}
6 | \usage{
7 | mable_vars(x)
8 | }
9 | \arguments{
10 | \item{x}{A dataset containing models (such as a mable).}
11 | }
12 | \description{
13 | \code{mable_vars()} returns a character vector of the model variables in the
14 | object.
15 | }
16 |
--------------------------------------------------------------------------------
/man/middle_out.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reconciliation.R
3 | \name{middle_out}
4 | \alias{middle_out}
5 | \title{Middle out forecast reconciliation}
6 | \usage{
7 | middle_out(models, split = 1)
8 | }
9 | \arguments{
10 | \item{models}{A column of models in a mable.}
11 |
12 | \item{split}{The middle level of the hierarchy from which the bottom-up and
13 | top-down approaches are used above and below respectively.}
14 | }
15 | \description{
16 | \lifecycle{experimental}
17 | }
18 | \details{
19 | Reconciles a hierarchy using the middle out reconciliation method. The
20 | response variable of the hierarchy must be aggregated using sums. The
21 | forecasted time points must match for all series in the hierarchy.
22 | }
23 | \seealso{
24 | \code{\link[=reconcile]{reconcile()}}, \code{\link[=aggregate_key]{aggregate_key()}}
25 | \href{https://otexts.com/fpp3/single-level.html#middle-out-approach}{\emph{Forecasting: Principles and Practice} - Middle-out approach}
26 | }
27 |
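A minimal sketch (assuming fable and tsibble) of middle-out reconciliation on a two-level hierarchy; split = 1 takes the first level of disaggregation (State) as the middle level:

library(fable)
library(tsibble)

tourism %>%
  aggregate_key(State / Region, Trips = sum(Trips)) %>%
  model(ets = ETS(Trips)) %>%
  reconcile(mo = middle_out(ets, split = 1)) %>%
  forecast(h = "1 year")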
--------------------------------------------------------------------------------
/man/min_trace.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reconciliation.R
3 | \name{min_trace}
4 | \alias{min_trace}
5 | \title{Minimum trace forecast reconciliation}
6 | \usage{
7 | min_trace(
8 | models,
9 | method = c("wls_var", "ols", "wls_struct", "mint_cov", "mint_shrink"),
10 | sparse = NULL
11 | )
12 | }
13 | \arguments{
14 | \item{models}{A column of models in a mable.}
15 |
16 | \item{method}{The reconciliation method to use.}
17 |
18 | \item{sparse}{If TRUE, the reconciliation will be computed using sparse
19 | matrix algebra. By default, sparse matrices will be used if the SparseM
20 | package is installed.}
21 | }
22 | \description{
23 | Reconciles a hierarchy using the minimum trace combination method. The
24 | response variable of the hierarchy must be aggregated using sums. The
25 | forecasted time points must match for all series in the hierarchy (caution:
26 | this is not yet tested for beyond the series length).
27 | }
28 | \references{
29 | Wickramasuriya, S. L., Athanasopoulos, G., & Hyndman, R. J. (2019). Optimal forecast reconciliation for hierarchical and grouped time series through trace minimization. Journal of the American Statistical Association, 1-45. https://doi.org/10.1080/01621459.2018.1448825
30 | }
31 | \seealso{
32 | \code{\link[=reconcile]{reconcile()}}, \code{\link[=aggregate_key]{aggregate_key()}}
33 | }
34 |
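A minimal sketch (assuming fable and tsibble) of minimum trace reconciliation with shrinkage estimation of the residual covariance:

library(fable)
library(tsibble)

tourism %>%
  aggregate_key(Purpose * State, Trips = sum(Trips)) %>%
  model(ets = ETS(Trips)) %>%
  reconcile(mint = min_trace(ets, method = "mint_shrink")) %>%
  forecast(h = "1 year")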
--------------------------------------------------------------------------------
/man/model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model.R
3 | \name{model}
4 | \alias{model}
5 | \alias{model.tbl_ts}
6 | \title{Estimate models}
7 | \usage{
8 | model(.data, ...)
9 |
10 | \method{model}{tbl_ts}(.data, ..., .safely = TRUE)
11 | }
12 | \arguments{
13 | \item{.data}{A data structure suitable for the models (such as a \code{tsibble})}
14 |
15 | \item{...}{Definitions for the models to be used. All models must share the
16 | same response variable.}
17 |
18 | \item{.safely}{If a model encounters an error, rather than aborting the process a \link[=null_model]{NULL model} will be returned instead. This allows for an error to occur when computing many models, without losing the results of the successful models.}
19 | }
20 | \description{
21 | Trains specified model definition(s) to a dataset. This function will
22 | estimate a set of model definitions (passed via \code{...}) to each series
23 | within \code{.data} (as identified by the key structure). The result will be a
24 | mable (a model table), which neatly stores the estimated models in a tabular
25 | structure. Rows of the data identify different series within the data, and
26 | each model column contains all models from that model definition. Each cell
27 | in the mable identifies a single model.
28 | }
29 | \section{Parallel}{
30 |
31 |
32 | It is possible to estimate models in parallel using the
33 | \href{https://cran.r-project.org/package=future}{future} package. By specifying a
34 | \code{\link[future:plan]{future::plan()}} before estimating the models, they will be computed
35 | according to that plan.
36 | }
37 |
38 | \section{Progress}{
39 |
40 |
41 | Progress on model estimation can be obtained by wrapping the code with
42 | \code{progressr::with_progress()}. Further customisation on how progress is
43 | reported can be controlled using the \code{progressr} package.
44 | }
45 |
46 | \examples{
47 | \dontshow{if (requireNamespace("fable", quietly = TRUE) && requireNamespace("tsibbledata", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
48 | library(fable)
49 | library(tsibbledata)
50 |
51 | # Training an ETS(M,Ad,A) model to Australian beer production
52 | aus_production \%>\%
53 | model(ets = ETS(log(Beer) ~ error("M") + trend("Ad") + season("A")))
54 |
55 | # Training a seasonal naive and ETS(A,A,A) model to the monthly
56 | # "Food retailing" turnover for selected Australian states.
57 | library(dplyr)
58 | progressr::with_progress(
59 | aus_retail \%>\%
60 | filter(
61 | Industry == "Food retailing",
62 | State \%in\% c("Victoria", "New South Wales", "Queensland")
63 | ) \%>\%
64 | model(
65 | snaive = SNAIVE(Turnover),
66 | ets = ETS(log(Turnover) ~ error("A") + trend("A") + season("A")),
67 | )
68 | )
69 | \dontshow{\}) # examplesIf}
70 | }
71 |
--------------------------------------------------------------------------------
/man/model_lhs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model.R
3 | \name{model_lhs}
4 | \alias{model_lhs}
5 | \title{Extract the left hand side of a model}
6 | \usage{
7 | model_lhs(model)
8 | }
9 | \arguments{
10 | \item{model}{A formula}
11 | }
12 | \description{
13 | Extract the left hand side of a model
14 | }
15 |
--------------------------------------------------------------------------------
/man/model_rhs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model.R
3 | \name{model_rhs}
4 | \alias{model_rhs}
5 | \title{Extract the right hand side of a model}
6 | \usage{
7 | model_rhs(model)
8 | }
9 | \arguments{
10 | \item{model}{A formula}
11 | }
12 | \description{
13 | Extract the right hand side of a model
14 | }
15 |
--------------------------------------------------------------------------------
/man/model_sum.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/mdl_ts.R
3 | \name{model_sum}
4 | \alias{model_sum}
5 | \title{Provide a succinct summary of a model}
6 | \usage{
7 | model_sum(x)
8 | }
9 | \arguments{
10 | \item{x}{The model to summarise}
11 | }
12 | \description{
13 | Similar to pillar's \code{type_sum()} and \code{obj_sum()}, \code{model_sum()} is used to provide brief model summaries.
14 | }
15 |
--------------------------------------------------------------------------------
/man/new-model-class.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/definitions.R
3 | \name{new_model_class}
4 | \alias{new_model_class}
5 | \alias{new_model_definition}
6 | \title{Create a new class of models}
7 | \usage{
8 | new_model_class(
9 | model = "Unknown model",
10 | train = function(.data, formula, specials, ...)
11 | abort("This model has not defined a training method."),
12 | specials = new_specials(),
13 | check = function(.data) {
14 | },
15 | prepare = function(...) {
16 | },
17 | ...,
18 | .env = caller_env(),
19 | .inherit = model_definition
20 | )
21 |
22 | new_model_definition(.class, formula, ..., .env = caller_env(n = 2))
23 | }
24 | \arguments{
25 | \item{model}{The name of the model}
26 |
27 | \item{train}{A function that trains the model to a dataset. \code{.data} is a tsibble
28 | containing the data's index and response variables only. \code{formula} is the
29 | user's provided formula. \code{specials} is the evaluated specials used in the formula.}
30 |
31 | \item{specials}{Special functions produced using \code{\link[=new_specials]{new_specials()}}}
32 |
33 | \item{check}{A function that is used to check the data for suitability with
34 | the model. This can be used to check for missing values (both implicit and
35 | explicit), regularity of observations, ordered time index, and univariate
36 | responses.}
37 |
38 | \item{prepare}{This allows you to modify the model class according to user
39 | inputs. \code{...} contains the arguments passed to \code{new_model_definition}, allowing
40 | you to perform different checks or training procedures according to different
41 | user inputs.}
42 |
43 | \item{...}{Further arguments to \code{\link[R6:R6Class]{R6::R6Class()}}. This can be useful to set up
44 | additional elements used in the other functions. For example, to use
45 | \code{common_xregs}, an \code{origin} element in the model is used to store
46 | the origin for \code{trend()} and \code{fourier()} specials. To use these specials, you
47 | must add an \code{origin} element to the object (say with \code{origin = NULL}).}
48 |
49 | \item{.env}{The environment from which functions should inherit.}
50 |
51 | \item{.inherit}{A model class to inherit from.}
52 |
53 | \item{.class}{A model class (typically created with \code{\link[=new_model_class]{new_model_class()}}).}
54 |
55 | \item{formula}{The user's model formula.}
56 | }
57 | \description{
58 | Suitable for extension packages to create new models for fable.
59 | }
60 | \details{
61 | This function produces a new R6 model definition. An understanding of R6 is
62 | not required, however could be useful to provide more sophisticated model
63 | interfaces. All functions have access to \code{self}, allowing the functions for
64 | training the model and evaluating specials to access the model class itself.
65 | This can be useful to obtain elements set in the \%TODO
66 | }
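\examples{
# A minimal sketch (mirroring this package's test setup; names here are
# illustrative): a model class whose training function simply returns the
# evaluated specials.
sketch_train <- function(.data, formula, specials, ...) specials
sketch_class <- new_model_class(
  model = "sketch model",
  train = sketch_train,
  specials = new_specials()
)
SKETCH <- function(formula, ...){
  new_model_definition(sketch_class, !!rlang::enquo(formula), ...)
}
}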
67 |
--------------------------------------------------------------------------------
/man/new_specials.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/specials.R
3 | \name{new_specials}
4 | \alias{new_specials}
5 | \title{Create evaluation environment for specials}
6 | \usage{
7 | new_specials(..., .required_specials = NULL, .xreg_specials = NULL)
8 | }
9 | \arguments{
10 | \item{...}{A named set of functions used to parse formula inputs}
11 |
12 | \item{.required_specials}{The names of specials which must be provided (and if not, are included with no inputs).}
13 |
14 | \item{.xreg_specials}{The names of specials which will be only used as inputs to other specials (most commonly \code{xreg}).}
15 | }
16 | \description{
17 | Allows extension packages to make use of the formula parsing of specials.
18 | }
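\examples{
# A minimal sketch (mirroring this package's test setup): two illustrative
# specials, with xreg listed as required so that it is included even when
# absent from the formula.
my_specials <- new_specials(
  log5 = function(x) logb(x, base = 5),
  xreg = function(...) deparse(match.call()),
  .required_specials = "xreg"
)
}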
19 |
--------------------------------------------------------------------------------
/man/new_transformation.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/transform.R
3 | \name{new_transformation}
4 | \alias{new_transformation}
5 | \alias{invert_transformation}
6 | \title{Create a new modelling transformation}
7 | \usage{
8 | new_transformation(transformation, inverse)
9 |
10 | invert_transformation(x, ...)
11 | }
12 | \arguments{
13 | \item{transformation}{A function which transforms the data}
14 |
15 | \item{inverse}{A function which is the inverse of a transformation}
16 |
17 | \item{x}{A transformation (such as one created with \code{new_transformation}).}
18 |
19 | \item{...}{Further arguments passed to other methods.}
20 | }
21 | \description{
22 | Produces a new transformation for fable modelling functions which will be used to transform, back-transform, and adjust forecasts.
23 | }
24 | \details{
25 | For more details about transformations, read the vignette:
26 | \code{vignette("transformations", package = "fable")}
27 | }
28 | \examples{
29 |
30 | scaled_logit <- function(x, lower=0, upper=1){
31 | log((x-lower)/(upper-x))
32 | }
33 | inv_scaled_logit <- function(x, lower=0, upper=1){
34 | (upper-lower)*exp(x)/(1+exp(x)) + lower
35 | }
36 | my_scaled_logit <- new_transformation(scaled_logit, inv_scaled_logit)
37 |
38 | t_vals <- my_scaled_logit(1:10, 0, 100)
39 | t_vals
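
# Back-transforming with the inverted transformation recovers (in this
# sketch) the original values 1:10.
invert_transformation(my_scaled_logit)(t_vals, 0, 100)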
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/man/null_model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/model_null.R
3 | \name{null_model}
4 | \alias{null_model}
5 | \alias{is_null_model}
6 | \title{NULL model}
7 | \usage{
8 | null_model(formula, ...)
9 |
10 | is_null_model(x)
11 | }
12 | \arguments{
13 | \item{formula}{Model specification (response variable)}
14 |
15 | \item{...}{Unused}
16 |
17 | \item{x}{The object to be tested.}
18 | }
19 | \description{
20 | Create a NULL model definition. This model produces NA forecasts and does not
21 | require any estimation from the data. It is generally used as a placeholder for
22 | models which have encountered an error (see \code{.safely} in \code{\link[=model]{model()}}).
23 | }
24 | \keyword{internal}
25 |
--------------------------------------------------------------------------------
/man/outliers.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/outliers.R
3 | \name{outliers}
4 | \alias{outliers}
5 | \alias{outliers.mdl_df}
6 | \alias{outliers.mdl_ts}
7 | \title{Identify outliers}
8 | \usage{
9 | outliers(object, ...)
10 |
11 | \method{outliers}{mdl_df}(object, ...)
12 |
13 | \method{outliers}{mdl_ts}(object, ...)
14 | }
15 | \arguments{
16 | \item{object}{An object which can identify outliers.}
17 |
18 | \item{...}{Arguments for further methods.}
19 | }
20 | \description{
21 | Return a table of outlying observations using a fitted model.
22 | }
23 |
--------------------------------------------------------------------------------
/man/parse_model.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/parse.R
3 | \name{parse_model}
4 | \alias{parse_model}
5 | \title{Parse the model specification for specials}
6 | \usage{
7 | parse_model(model)
8 | }
9 | \arguments{
10 | \item{model}{A model definition}
11 | }
12 | \description{
13 | Using a list of defined special functions, the user's formula specification and data
14 | is parsed to extract important modelling components.
15 | }
16 | \keyword{internal}
17 |
--------------------------------------------------------------------------------
/man/parse_model_lhs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/parse.R
3 | \name{parse_model_lhs}
4 | \alias{parse_model_lhs}
5 | \title{Parse the LHS of the model formula for transformations}
6 | \usage{
7 | parse_model_lhs(model)
8 | }
9 | \arguments{
10 | \item{model}{A model definition}
11 | }
12 | \description{
13 | Parse the RHS of the model formula for transformations
14 | }
15 | \keyword{internal}
16 |
--------------------------------------------------------------------------------
/man/parse_model_rhs.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/parse.R
3 | \name{parse_model_rhs}
4 | \alias{parse_model_rhs}
5 | \title{Parse the RHS of the model formula for specials}
6 | \usage{
7 | parse_model_rhs(model)
8 | }
9 | \arguments{
10 | \item{model}{A model definition}
11 | }
12 | \description{
13 | Parse the RHS of the model formula for specials
14 | }
15 | \keyword{internal}
16 |
--------------------------------------------------------------------------------
/man/point_accuracy_measures.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \docType{data}
4 | \name{ME}
5 | \alias{ME}
6 | \alias{MSE}
7 | \alias{RMSE}
8 | \alias{MAE}
9 | \alias{MPE}
10 | \alias{MAPE}
11 | \alias{MASE}
12 | \alias{RMSSE}
13 | \alias{ACF1}
14 | \alias{point_accuracy_measures}
15 | \title{Point estimate accuracy measures}
16 | \format{
17 | An object of class \code{list} of length 8.
18 | }
19 | \usage{
20 | ME(.resid, na.rm = TRUE, ...)
21 |
22 | MSE(.resid, na.rm = TRUE, ...)
23 |
24 | RMSE(.resid, na.rm = TRUE, ...)
25 |
26 | MAE(.resid, na.rm = TRUE, ...)
27 |
28 | MPE(.resid, .actual, na.rm = TRUE, ...)
29 |
30 | MAPE(.resid, .actual, na.rm = TRUE, ...)
31 |
32 | MASE(
33 | .resid,
34 | .train,
35 | demean = FALSE,
36 | na.rm = TRUE,
37 | .period,
38 | d = .period == 1,
39 | D = .period > 1,
40 | ...
41 | )
42 |
43 | RMSSE(
44 | .resid,
45 | .train,
46 | demean = FALSE,
47 | na.rm = TRUE,
48 | .period,
49 | d = .period == 1,
50 | D = .period > 1,
51 | ...
52 | )
53 |
54 | ACF1(.resid, na.action = stats::na.pass, demean = TRUE, ...)
55 |
56 | point_accuracy_measures
57 | }
58 | \arguments{
59 | \item{.resid}{A vector of residuals from either the training (model accuracy)
60 | or test (forecast accuracy) data.}
61 |
62 | \item{na.rm}{Remove the missing values before calculating the accuracy measure}
63 |
64 | \item{...}{Additional arguments for each measure.}
65 |
66 | \item{.actual}{A vector of responses matching the fitted values
67 | (for forecast accuracy, \code{new_data} must be provided).}
68 |
69 | \item{.train}{A vector of responses used to train the model
70 | (for forecast accuracy, the \code{orig_data} must be provided).}
71 |
72 | \item{demean}{Should the response be demeaned (MASE)}
73 |
74 | \item{.period}{The seasonal period of the data (defaulting to the 'smallest'
75 | seasonal period).}
76 |
77 | \item{d}{Should the response model include a first difference?}
78 |
79 | \item{D}{Should the response model include a seasonal difference?}
80 |
81 | \item{na.action}{Function to handle missing values.}
82 | }
83 | \description{
84 | Point estimate accuracy measures
85 | }
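\examples{
# A minimal sketch: point accuracy measures applied directly to a vector of
# residuals (simulated here purely for illustration).
e <- rnorm(100)
ME(e)
RMSE(e)
MAE(e)
}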
86 | \keyword{datasets}
87 |
--------------------------------------------------------------------------------
/man/reconcile.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reconciliation.R
3 | \name{reconcile}
4 | \alias{reconcile}
5 | \alias{reconcile.mdl_df}
6 | \title{Forecast reconciliation}
7 | \usage{
8 | reconcile(.data, ...)
9 |
10 | \method{reconcile}{mdl_df}(.data, ...)
11 | }
12 | \arguments{
13 | \item{.data}{A mable.}
14 |
15 | \item{...}{Reconciliation methods applied to model columns within \code{.data}.}
16 | }
17 | \description{
18 | This function allows you to specify the method used to reconcile forecasts
19 | in accordance with its key structure.
20 | }
21 | \examples{
22 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
23 | library(fable)
24 | lung_deaths_agg <- as_tsibble(cbind(mdeaths, fdeaths)) \%>\%
25 | aggregate_key(key, value = sum(value))
26 |
27 | lung_deaths_agg \%>\%
28 | model(lm = TSLM(value ~ trend() + season())) \%>\%
29 | reconcile(lm = min_trace(lm)) \%>\%
30 | forecast()
31 | \dontshow{\}) # examplesIf}
32 | }
33 |
--------------------------------------------------------------------------------
/man/reexports.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reexports.R
3 | \docType{import}
4 | \name{reexports}
5 | \alias{reexports}
6 | \alias{\%>\%}
7 | \alias{as_tsibble}
8 | \alias{vars}
9 | \alias{autoplot}
10 | \alias{autolayer}
11 | \alias{accuracy}
12 | \alias{equation}
13 | \alias{interpolate}
14 | \alias{components}
15 | \alias{augment}
16 | \alias{glance}
17 | \alias{tidy}
18 | \alias{hypothesize}
19 | \alias{generate}
20 | \alias{refit}
21 | \alias{forecast}
22 | \alias{hilo}
23 | \title{Objects exported from other packages}
24 | \keyword{internal}
25 | \description{
26 | These objects are imported from other packages. Follow the links
27 | below to see their documentation.
28 |
29 | \describe{
30 | \item{distributional}{\code{\link[distributional]{hilo}}}
31 |
32 | \item{dplyr}{\code{\link[dplyr:reexports]{\%>\%}}, \code{\link[dplyr]{vars}}}
33 |
34 | \item{generics}{\code{\link[generics]{accuracy}}, \code{\link[generics]{augment}}, \code{\link[generics]{components}}, \code{\link[generics]{equation}}, \code{\link[generics]{forecast}}, \code{\link[generics]{generate}}, \code{\link[generics]{glance}}, \code{\link[generics]{hypothesize}}, \code{\link[generics]{interpolate}}, \code{\link[generics]{refit}}, \code{\link[generics]{tidy}}}
35 |
36 | \item{ggplot2}{\code{\link[ggplot2]{autolayer}}, \code{\link[ggplot2]{autoplot}}}
37 |
38 | \item{tsibble}{\code{\link[tsibble:as-tsibble]{as_tsibble}}}
39 | }}
40 |
41 |
--------------------------------------------------------------------------------
/man/refit.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/refit.R
3 | \name{refit.mdl_df}
4 | \alias{refit.mdl_df}
5 | \alias{refit.mdl_ts}
6 | \title{Refit a mable to a new dataset}
7 | \usage{
8 | \method{refit}{mdl_df}(object, new_data, ...)
9 |
10 | \method{refit}{mdl_ts}(object, new_data, ...)
11 | }
12 | \arguments{
13 | \item{object}{A mable.}
14 |
15 | \item{new_data}{A tsibble dataset used to refit the model.}
16 |
17 | \item{...}{Additional optional arguments for refit methods.}
18 | }
19 | \description{
20 | Applies a fitted model to a new dataset. For most methods this can be done
21 | with or without re-estimation of the parameters.
22 | }
23 | \examples{
24 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
25 | library(fable)
26 |
27 | fit <- as_tsibble(mdeaths) \%>\%
28 | model(ETS(value ~ error("M") + trend("A") + season("A")))
29 | fit \%>\% report()
30 |
31 | fit \%>\%
32 | refit(as_tsibble(fdeaths)) \%>\%
33 | report(reinitialise = TRUE)
34 | \dontshow{\}) # examplesIf}
35 | }
36 |
--------------------------------------------------------------------------------
/man/register_feature.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/features.R
3 | \name{register_feature}
4 | \alias{register_feature}
5 | \title{Register a feature function}
6 | \usage{
7 | register_feature(fn, tags)
8 | }
9 | \arguments{
10 | \item{fn}{The feature function}
11 |
12 | \item{tags}{Identifying tags}
13 | }
14 | \description{
15 | Allows users to find and use features from your package using \code{\link[=feature_set]{feature_set()}}.
16 | If the features are being registered from within a package, this feature
17 | registration should happen at load time using \code{.onLoad()}.
18 | }
19 | \examples{
20 |
21 | \dontrun{
22 | tukey_five <- function(x){
23 | setNames(fivenum(x), c("min", "hinge_lwr", "med", "hinge_upr", "max"))
24 | }
25 |
26 | register_feature(tukey_five, tags = c("boxplot", "simple"))
27 |
28 | }
29 |
30 | }
31 |
--------------------------------------------------------------------------------
/man/report.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/report.R
3 | \name{report}
4 | \alias{report}
5 | \title{Report information about an object}
6 | \usage{
7 | report(object, ...)
8 | }
9 | \arguments{
10 | \item{object}{The object to report}
11 |
12 | \item{...}{Additional options for the reporting function}
13 | }
14 | \description{
15 | Displays the object in a suitable format for reporting.
16 | }
17 |
--------------------------------------------------------------------------------
/man/residuals.mdl_df.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/residuals.R
3 | \name{residuals.mdl_df}
4 | \alias{residuals.mdl_df}
5 | \alias{residuals.mdl_ts}
6 | \title{Extract residuals values from models}
7 | \usage{
8 | \method{residuals}{mdl_df}(object, ...)
9 |
10 | \method{residuals}{mdl_ts}(object, type = "innovation", ...)
11 | }
12 | \arguments{
13 | \item{object}{A mable or time series model.}
14 |
15 | \item{...}{Other arguments passed to the model method for \code{residuals()}}
16 |
17 | \item{type}{The type of residuals to compute. If \code{type="response"}, residuals on the back-transformed data will be computed.}
18 | }
19 | \description{
20 | Extracts the residuals from each of the models in a mable. A tsibble will
21 | be returned containing these residuals.
22 | }
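\examples{
\dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
# A minimal sketch: innovation residuals from each model in a mable,
# returned as a tsibble.
library(fable)

as_tsibble(mdeaths) \%>\%
  model(ets = ETS(value ~ error("M") + trend("A") + season("A"))) \%>\%
  residuals(type = "innovation")
\dontshow{\}) # examplesIf}
}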
23 |
--------------------------------------------------------------------------------
/man/response.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/response.R
3 | \name{response}
4 | \alias{response}
5 | \title{Extract the response variable from a model}
6 | \usage{
7 | response(object, ...)
8 | }
9 | \arguments{
10 | \item{object}{The object containing response data}
11 |
12 | \item{...}{Additional parameters passed on to other methods}
13 | }
14 | \description{
15 | Returns a tsibble containing only the response variable used in the fitting
16 | of a model.
17 | }
18 |
--------------------------------------------------------------------------------
/man/response_vars.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accessors.R
3 | \name{response_vars}
4 | \alias{response_vars}
5 | \title{Return response variables}
6 | \usage{
7 | response_vars(x)
8 | }
9 | \arguments{
10 | \item{x}{A dataset containing a response variable (such as a mable, fable, or dable).}
11 | }
12 | \description{
13 | \code{response_vars()} returns a character vector of the response variables in the
14 | object.
15 | }
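\examples{
\dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
# A minimal sketch: the response variable recorded in a mable and in the
# fable produced from it.
library(fable)

mbl <- as_tsibble(mdeaths) \%>\%
  model(snaive = SNAIVE(value))
response_vars(mbl)
response_vars(forecast(mbl, h = 12))
\dontshow{\}) # examplesIf}
}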
16 |
--------------------------------------------------------------------------------
/man/scenarios.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/forecast.R
3 | \name{scenarios}
4 | \alias{scenarios}
5 | \title{A set of future scenarios for forecasting}
6 | \usage{
7 | scenarios(..., names_to = ".scenario")
8 | }
9 | \arguments{
10 | \item{...}{Input data for each scenario}
11 |
12 | \item{names_to}{The column name used to identify each scenario}
13 | }
14 | \description{
15 | A set of future scenarios for forecasting
16 | }
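\examples{
\dontrun{
# A minimal sketch (all names here are hypothetical): forecasting a fitted
# model `fit` under two scenarios for a future exogenous regressor `x`.
future_x <- scenarios(
  low = tsibble::new_data(train_data, 12) \%>\% dplyr::mutate(x = 0),
  high = tsibble::new_data(train_data, 12) \%>\% dplyr::mutate(x = 10)
)
forecast(fit, new_data = future_x)
}
}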
17 |
--------------------------------------------------------------------------------
/man/skill_score.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/accuracy.R
3 | \name{skill_score}
4 | \alias{skill_score}
5 | \title{Forecast skill score measure}
6 | \usage{
7 | skill_score(measure)
8 | }
9 | \arguments{
10 | \item{measure}{The accuracy measure to use in computing the skill score.}
11 | }
12 | \description{
13 | This function converts other error metrics such as \code{MSE} into a skill score.
14 | The reference or benchmark forecasting method is the naive method for
15 | non-seasonal data, and the seasonal naive method for seasonal data.
16 | When used within \code{\link{accuracy.fbl_ts}}, it is important that the data
17 | contains both the training and test data, as the training data is used to
18 | compute the benchmark forecasts.
19 | }
20 | \examples{
21 |
22 | skill_score(MSE)
23 |
24 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
25 | library(fable)
26 | library(tsibble)
27 |
28 | lung_deaths <- as_tsibble(cbind(mdeaths, fdeaths))
29 | lung_deaths \%>\%
30 | dplyr::filter(index < yearmonth("1979 Jan")) \%>\%
31 | model(
32 | ets = ETS(value ~ error("M") + trend("A") + season("A")),
33 | lm = TSLM(value ~ trend() + season())
34 | ) \%>\%
35 | forecast(h = "1 year") \%>\%
36 | accuracy(lung_deaths, measures = list(skill = skill_score(MSE)))
37 | \dontshow{\}) # examplesIf}
38 | }
39 |
--------------------------------------------------------------------------------
/man/special_xreg.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/specials.R
3 | \name{special_xreg}
4 | \alias{special_xreg}
5 | \title{Helper special for producing a model matrix of exogenous regressors}
6 | \usage{
7 | special_xreg(...)
8 | }
9 | \arguments{
10 | \item{...}{Arguments for \code{fable_xreg_matrix} (see Details)}
11 | }
12 | \description{
13 | Helper special for producing a model matrix of exogenous regressors
14 | }
15 | \details{
16 | Currently the \code{fable_xreg_matrix} helper supports a single argument named
17 | \code{default_intercept}. If this argument is TRUE (passed via \code{...} above), then
18 | an intercept will be included in the matrix by default (much like the
19 | behaviour of \code{lm()}). If FALSE, then the intercept will only be included if
20 | explicitly requested via \code{1} in the formula.
21 | }
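\examples{
\dontrun{
# A minimal sketch (an assumed usage pattern, not taken from this package):
# supplying the generated special as `xreg` when defining a model's specials.
my_specials <- new_specials(
  xreg = special_xreg(default_intercept = FALSE)
)
}
}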
22 |
--------------------------------------------------------------------------------
/man/stream.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/stream.R
3 | \name{stream}
4 | \alias{stream}
5 | \alias{stream.mdl_df}
6 | \title{Extend a fitted model with new data}
7 | \usage{
8 | stream(object, ...)
9 |
10 | \method{stream}{mdl_df}(object, new_data, ...)
11 | }
12 | \arguments{
13 | \item{object}{An object (such as a model) which can be extended with additional data.}
14 |
15 | \item{...}{Additional arguments passed on to stream methods.}
16 |
17 | \item{new_data}{A dataset of the same structure as was used to fit the model.}
18 | }
19 | \description{
20 | Extend the length of data used to fit a model and update the parameters to
21 | suit this new data.
22 | }
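\examples{
\dontrun{
# A minimal sketch (assumes the chosen model type supports streaming):
# fit on an initial window of data, then extend the fit with newer data.
library(fable)
deaths <- as_tsibble(USAccDeaths)
fit <- deaths \%>\%
  dplyr::filter(index < tsibble::yearmonth("1977 Jan")) \%>\%
  model(ets = ETS(value ~ error("M") + trend("A") + season("A")))
fit \%>\%
  stream(dplyr::filter(deaths, index >= tsibble::yearmonth("1977 Jan")))
}
}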
23 |
--------------------------------------------------------------------------------
/man/tidy.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/broom.R
3 | \name{tidy.mdl_df}
4 | \alias{tidy.mdl_df}
5 | \alias{coef.mdl_df}
6 | \alias{tidy.mdl_ts}
7 | \alias{coef.mdl_ts}
8 | \title{Extract model coefficients from a mable}
9 | \usage{
10 | \method{tidy}{mdl_df}(x, ...)
11 |
12 | \method{coef}{mdl_df}(object, ...)
13 |
14 | \method{tidy}{mdl_ts}(x, ...)
15 |
16 | \method{coef}{mdl_ts}(object, ...)
17 | }
18 | \arguments{
19 | \item{x, object}{A mable.}
20 |
21 | \item{...}{Arguments for model methods.}
22 | }
23 | \description{
24 | This function will obtain the coefficients (and associated statistics) for
25 | each model in the mable.
26 | }
27 | \examples{
28 | \dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
29 | library(fable)
30 | library(tsibbledata)
31 |
32 | olympic_running \%>\%
33 | model(lm = TSLM(log(Time) ~ trend())) \%>\%
34 | tidy()
35 | \dontshow{\}) # examplesIf}
36 | }
37 |
--------------------------------------------------------------------------------
/man/top_down.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/reconciliation.R
3 | \name{top_down}
4 | \alias{top_down}
5 | \title{Top down forecast reconciliation}
6 | \usage{
7 | top_down(
8 | models,
9 | method = c("forecast_proportions", "average_proportions", "proportion_averages")
10 | )
11 | }
12 | \arguments{
13 | \item{models}{A column of models in a mable.}
14 |
15 | \item{method}{The reconciliation method to use.}
16 | }
17 | \description{
18 | \lifecycle{experimental}
19 | }
20 | \details{
21 | Reconciles a hierarchy using the top down reconciliation method. The
22 | response variable of the hierarchy must be aggregated using sums. The
23 | forecasted time points must match for all series in the hierarchy.
24 | }
25 | \seealso{
26 | \code{\link[=reconcile]{reconcile()}}, \code{\link[=aggregate_key]{aggregate_key()}}
27 | }
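\examples{
\dontshow{if (requireNamespace("fable", quietly = TRUE)) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
# A minimal sketch mirroring the reconcile() example, swapping in top down
# reconciliation with the default forecast proportions method.
library(fable)
lung_deaths_agg <- as_tsibble(cbind(mdeaths, fdeaths)) \%>\%
  aggregate_key(key, value = sum(value))

lung_deaths_agg \%>\%
  model(lm = TSLM(value ~ trend() + season())) \%>\%
  reconcile(lm = top_down(lm)) \%>\%
  forecast()
\dontshow{\}) # examplesIf}
}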
28 |
--------------------------------------------------------------------------------
/man/traverse.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/traverse.R
3 | \name{traverse}
4 | \alias{traverse}
5 | \title{Recursively traverse an object}
6 | \usage{
7 | traverse(
8 | x,
9 | .f = list,
10 | .g = identity,
11 | .h = identity,
12 | base = function(.x) is_syntactic_literal(.x) || is_symbol(.x)
13 | )
14 | }
15 | \arguments{
16 | \item{x}{The object to traverse}
17 |
18 | \item{.f}{A function for combining the recursed components}
19 |
20 | \item{.g}{A function applied to the object before recursion}
21 |
22 | \item{.h}{A function applied to the base case}
23 |
24 | \item{base}{The base case for the recursion}
25 | }
26 | \description{
27 | Recursively traverse an object
28 | }
29 | \keyword{internal}
30 |
--------------------------------------------------------------------------------
/man/unpack_hilo.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/hilo.R
3 | \name{unpack_hilo}
4 | \alias{unpack_hilo}
5 | \title{Unpack a hilo column}
6 | \usage{
7 | unpack_hilo(data, cols, names_sep = "_", names_repair = "check_unique")
8 | }
9 | \arguments{
10 | \item{data}{A data frame.}
11 |
12 | \item{cols}{Name of hilo columns to unpack.}
13 |
14 | \item{names_sep}{If \code{NULL}, the default, the names will be left
15 | as is. In \code{pack()}, inner names will come from the former outer names;
16 | in \code{unpack()}, the new outer names will come from the inner names.
17 |
18 | If a string, the inner and outer names will be used together. In
19 | \code{unpack()}, the names of the new outer columns will be formed by pasting
20 | together the outer and the inner column names, separated by \code{names_sep}. In
21 | \code{pack()}, the new inner names will have the outer names + \code{names_sep}
22 | automatically stripped. This makes \code{names_sep} roughly symmetric between
23 | packing and unpacking.}
24 |
25 | \item{names_repair}{Used to check that output data frame has valid
26 | names. Must be one of the following options:
27 | \itemize{
28 | \item \verb{"minimal}": no name repair or checks, beyond basic existence,
29 | \item \verb{"unique}": make sure names are unique and not empty,
30 | \item \verb{"check_unique}": (the default), no name repair, but check they are unique,
31 | \item \verb{"universal}": make the names unique and syntactic
32 | \item a function: apply custom name repair.
33 | \item \link[tidyr]{tidyr_legacy}: use the name repair from tidyr 0.8.
34 | \item a formula: a purrr-style anonymous function (see \code{\link[rlang:as_function]{rlang::as_function()}})
35 | }
36 |
37 | See \code{\link[vctrs:vec_as_names]{vctrs::vec_as_names()}} for more details on these terms and the
38 | strategies used to enforce them.}
39 | }
40 | \description{
41 | \ifelse{html}{\href{https://lifecycle.r-lib.org/articles/stages.html#superseded}{\figure{lifecycle-superseded.svg}{options: alt='[Superseded]'}}}{\strong{[Superseded]}}
42 |
43 | This function is superseded. It is recommended that you use the functionality
44 | from the \href{https://pkg.mitchelloharawild.com/distributional/}{distributional}
45 | package to extract elements from a \verb{<hilo>} object. For example, you can access
46 | the lower bound with \verb{<hilo>$lower}.
47 |
48 | Allows a hilo column to be unpacked into its component columns: "lower",
49 | "upper", and "level".
50 | }
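\examples{
# A minimal sketch of the recommended alternative: access interval bounds
# directly with distributional's accessors on a <hilo> vector.
interval <- distributional::hilo(distributional::dist_normal(0, 1), 95)
interval$lower
interval$upper
}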
51 | \seealso{
52 | \code{\link[tidyr:pack]{tidyr::unpack()}}
53 | }
54 | \keyword{internal}
55 |
--------------------------------------------------------------------------------
/man/validate_formula.Rd:
--------------------------------------------------------------------------------
1 | % Generated by roxygen2: do not edit by hand
2 | % Please edit documentation in R/parse.R
3 | \name{validate_formula}
4 | \alias{validate_formula}
5 | \title{Validate the user provided model}
6 | \usage{
7 | validate_formula(model, data = NULL)
8 | }
9 | \arguments{
10 | \item{model}{A quosure for the user's model specification}
11 |
12 | \item{data}{A dataset used for automatic response selection}
13 | }
14 | \description{
15 | Appropriately format the user's model for evaluation. Typically run as one of the first steps
16 | in a model function.
17 | }
18 | \keyword{internal}
19 |
--------------------------------------------------------------------------------
/tests/testthat.R:
--------------------------------------------------------------------------------
1 | library(testthat)
2 | library(dplyr)
3 |
4 | test_check("fabletools")
5 |
--------------------------------------------------------------------------------
/tests/testthat/setup-data.R:
--------------------------------------------------------------------------------
1 | context("setup-data.R")
2 |
3 | us_deaths <- as_tsibble(USAccDeaths)
4 | us_deaths_tr <- dplyr::filter(us_deaths, index < tsibble::yearmonth("1978 Jan"))
5 | lung_deaths_long <- as_tsibble(cbind(mdeaths, fdeaths))
6 | lung_deaths_long_tr <- dplyr::filter(lung_deaths_long, index < tsibble::yearmonth("1979 Jan"))
7 | lung_deaths_wide <- as_tsibble(cbind(mdeaths, fdeaths), pivot_longer = FALSE)
8 | lung_deaths_wide_tr <- dplyr::filter(lung_deaths_wide, index < tsibble::yearmonth("1979 Jan"))
9 |
10 | if (requireNamespace("fable", quietly = TRUE)) {
11 | mbl <- us_deaths_tr %>%
12 | model(ets = fable::ETS(value))
13 | fbl <- mbl %>% forecast(h = 12)
14 |
15 | mbl_multi <- lung_deaths_long_tr %>%
16 | model(ets = fable::ETS(value))
17 | fbl_multi <- mbl_multi %>% forecast(h = 12)
18 |
19 | mbl_complex <- lung_deaths_long_tr %>%
20 | model(ets = fable::ETS(value), lm = fable::TSLM(value ~ trend() + season()))
21 | fbl_complex <- mbl_complex %>% forecast(h = 12)
22 |
23 | mbl_mv <- lung_deaths_wide_tr %>%
24 | model(var = fable::VAR(vars(mdeaths, fdeaths) ~ fourier(K = 4)))
25 | fbl_mv <- mbl_mv %>% forecast(h = 12)
26 | }
27 |
28 | if (requireNamespace("feasts", quietly = TRUE)) {
29 | dcmp <- us_deaths %>%
30 | model(feasts::STL(value)) %>%
31 | components()
32 |
33 | dcmp_multi <- lung_deaths_long %>%
34 | model(feasts::STL(value)) %>%
35 | components()
36 | }
37 |
--------------------------------------------------------------------------------
/tests/testthat/setup-models.R:
--------------------------------------------------------------------------------
1 | context("setup-models.R")
2 |
3 | test_specials <- new_specials(
4 | rnorm = function(m,s){
5 | stats::rnorm(NROW(self$data), m, s)
6 | },
7 | log5 = function(x){
8 | logb(x, base = 5)
9 | },
10 | oops = function(){
11 | stop("Not allowed")
12 | },
13 | xreg = function(...){
14 | deparse(match.call())
15 | }
16 | )
17 |
18 | test_train <- function(specials, ...) specials
19 |
20 | no_specials <- function(formula, ...){
21 | no_specials_model <- new_model_class(model = "test model", train = test_train, specials = NULL)
22 | new_model_definition(no_specials_model, !!enquo(formula), ...)
23 | }
24 |
25 | specials <- function(formula, ...){
26 | specials_model <- new_model_class(model = "test model", train = test_train, specials = test_specials)
27 | new_model_definition(specials_model, !!enquo(formula), ...)
28 | }
--------------------------------------------------------------------------------
/tests/testthat/test-accuracy.R:
--------------------------------------------------------------------------------
1 | context("test-accuracy")
2 |
3 | test_that("accuracy() hints", {
4 | skip_if_not_installed("fable")
5 | expect_error(
6 | accuracy(mbl, us_deaths),
7 | "To compute forecast accuracy, you'll need to compute the forecasts first.",
8 | fixed = TRUE
9 | )
10 | })
11 |
12 | test_that("In-sample accuracy", {
13 | skip_if_not_installed("fable")
14 |
15 | expect_warning(
16 | accuracy(mbl, measures = interval_accuracy_measures),
17 | 'argument ".dist" is missing'
18 | ) %>%
19 | {is.na(.[["winkler"]])} %>%
20 | expect_true()
21 |
22 | acc <- accuracy(mbl)
23 | expect_equal(acc$.type, "Training")
24 | expect_equal(dim(acc), c(1,10))
25 | expect_true(!any(map_lgl(acc, compose(any, is.na))))
26 | expect_equal(
27 | as.list(acc),
28 | as_tibble(augment(mbl)) %>%
29 | group_by(.model) %>%
30 | summarise(.type = "Training", ME = mean(.resid), RMSE = sqrt(mean(.resid^2)),
31 | MAE = mean(abs(.resid)), MPE = mean(.resid/value*100),
32 | MAPE = mean(abs(.resid/value)*100),
33 | MASE = MASE(.resid, value, .period = 12),
34 | RMSSE = RMSSE(.resid, value, .period = 12),
35 | ACF1 = ACF1(.resid)) %>%
36 | as.list()
37 | )
38 |
39 | acc_multi <- accuracy(mbl_multi)
40 | expect_equal(acc_multi$key, c("fdeaths", "mdeaths"))
41 | expect_equal(dim(acc_multi), c(2,11))
42 | expect_true(!any(map_lgl(acc_multi, compose(any, is.na))))
43 |
44 | acc_complex <- accuracy(mbl_complex)
45 | expect_equal(acc_complex$key, rep(c("fdeaths", "mdeaths"), each = 2))
46 | expect_equal(acc_complex$.model, rep(c("ets", "lm"), 2))
47 | expect_equal(dim(acc_complex), c(4,11))
48 | expect_equal(acc_complex[c(1,3), -2], acc_multi[,-2])
49 | expect_true(!any(map_lgl(acc_complex, compose(any, is.na))))
50 |
51 | acc_mv <- accuracy(mbl_mv)
52 | expect_equal(
53 | acc_mv$.response,
54 | factor(c("mdeaths", "fdeaths"), levels = unique(c("mdeaths", "fdeaths")))
55 | )
56 | expect_true(!any(map_lgl(acc_mv, compose(any, is.na))))
57 | })
58 |
59 |
60 | test_that("Out-of-sample accuracy", {
61 | skip_if_not_installed("fable")
62 |
63 | expect_warning(
64 | accuracy(fbl, utils::head(us_deaths)),
65 | "12 observations are missing between 1978 Jan and 1978 Dec"
66 | )
67 |
68 | acc <- accuracy(fbl, us_deaths)
69 | expect_equal(acc$.type, "Test")
70 | expect_equal(dim(acc), c(1,10))
71 | expect_true(!any(map_lgl(acc, compose(any, is.na))))
72 | expect_equal(
73 | as.list(acc),
74 | as_tibble(fbl) %>%
75 | mutate(
76 | actual = semi_join(us_deaths, fbl, by = "index")$value,
77 | .resid = actual - mean(value)
78 | ) %>%
79 | group_by(.model) %>%
80 | summarise(.type = "Test", ME = mean(.resid), RMSE = sqrt(mean(.resid^2)),
81 | MAE = mean(abs(.resid)), MPE = mean(.resid/actual*100),
82 | MAPE = mean(abs(.resid/actual)*100),
83 | MASE = MASE(.resid, us_deaths_tr$value, .period = 12),
84 | RMSSE = RMSSE(.resid, us_deaths_tr$value, .period = 12),
85 | ACF1 = ACF1(.resid)) %>%
86 | as.list()
87 | )
88 |
89 | acc <- accuracy(fbl, us_deaths, measures = list(interval_accuracy_measures, distribution_accuracy_measures))
90 | expect_equal(acc$.type, "Test")
91 | expect_equal(colnames(acc), c(".model", ".type", "winkler", "pinball", "scaled_pinball", "percentile", "CRPS"))
92 | expect_true(!any(map_lgl(acc, compose(any, is.na))))
93 |
94 | acc_multi <- accuracy(fbl_multi, lung_deaths_long)
95 | expect_equal(acc_multi$key, c("fdeaths", "mdeaths"))
96 | expect_equal(dim(acc_multi), c(2,11))
97 | expect_true(!any(map_lgl(acc_multi, compose(any, is.na))))
98 |
99 | acc_complex <- accuracy(fbl_complex, lung_deaths_long)
100 |
101 | expect_equal(acc_complex$key, rep(c("fdeaths", "mdeaths"), 2))
102 | expect_equal(acc_complex$.model, rep(c("ets", "lm"), each = 2))
103 | expect_equal(dim(acc_complex), c(4,11))
104 | expect_equal(acc_complex[1:2, -(1:2)], acc_multi[,-(1:2)])
105 | expect_true(!any(map_lgl(acc_complex, compose(any, is.na))))
106 |
107 | acc_mv <- accuracy(fbl_mv, lung_deaths_wide)
108 | expect_equal(
109 | acc_mv$.response,
110 | c("fdeaths", "mdeaths")
111 | )
112 | expect_true(!any(map_lgl(acc_mv, compose(any, is.na))))
113 | })
114 |
115 |
--------------------------------------------------------------------------------
/tests/testthat/test-broom.R:
--------------------------------------------------------------------------------
1 | context("test-broom")
2 |
3 | test_that("augment", {
4 | skip_if_not_installed("fable")
5 |
6 | aug <- augment(mbl)
7 | expect_equal(aug$index, us_deaths_tr$index)
8 | expect_equal(aug$.fitted, fitted(mbl)$.fitted)
9 | expect_equal(aug$.resid, residuals(mbl, type ="response")$.resid)
10 | expect_equal(aug$.innov, residuals(mbl)$.resid)
11 |
12 | aug <- augment(mbl_multi)
13 | expect_equal(aug$index, lung_deaths_long_tr$index)
14 | expect_equal(aug$.fitted, fitted(mbl_multi)$.fitted)
15 | expect_equal(aug$.resid, residuals(mbl_multi, type = "response")$.resid)
16 | expect_equal(aug$.innov, residuals(mbl_multi)$.resid)
17 |
18 | aug <- augment(mbl_complex)
19 | expect_equal(aug$index, rep(lung_deaths_long_tr$index, 2))
20 | expect_equal(aug$.fitted, fitted(mbl_complex)$.fitted)
21 | expect_equal(aug$.resid, residuals(mbl_complex, type = "response")$.resid)
22 | expect_equal(aug$.innov, residuals(mbl_complex)$.resid)
23 |
24 | aug <- augment(mbl_mv)
25 | expect_equal(aug$index, rep(lung_deaths_wide_tr$index, 2))
26 | expect_equal(aug$.fitted, c(fitted(mbl_mv)$mdeaths, fitted(mbl_mv)$fdeaths))
27 | expect_equal(aug$.resid, c(residuals(mbl_mv)$mdeaths, residuals(mbl_mv)$fdeaths))
28 | })
29 |
30 | test_that("glance", {
31 | skip_if_not_installed("fable")
32 |
33 | gl <- glance(mbl)
34 | expect_equal(NROW(gl), 1)
35 | gl_multi <- glance(mbl_multi)
36 | expect_equal(NROW(gl_multi), 2)
37 | expect_equal(gl_multi$key, c("fdeaths", "mdeaths"))
38 | gl_complex <- glance(mbl_complex)
39 | expect_equal(NROW(gl_complex), 4)
40 | expect_equal(gl_complex$key, rep(c("fdeaths", "mdeaths"), each = 2))
41 | expect_equal(gl_multi[-2], gl_complex[c(1,3), names(gl_multi)][-2])
42 |
43 | gl_mv <- glance(mbl_mv)
44 | expect_equal(NROW(gl_mv), 1)
45 | })
46 |
47 | test_that("tidy", {
48 | skip_if_not_installed("fable")
49 |
50 | td <- tidy(mbl)
51 | expect_equal(unique(td$.model), "ets")
52 | td_multi <- tidy(mbl_multi)
53 | expect_equal(unique(td_multi$.model), "ets")
54 | expect_equal(unique(td_multi$key), c("fdeaths", "mdeaths"))
55 | td_complex <- tidy(mbl_complex)
56 | expect_equal(unique(td_complex$.model), c("ets", "lm"))
57 | expect_equal(unique(td_complex$key), c("fdeaths", "mdeaths"))
58 |
59 | td_mv <- tidy(mbl_mv)
60 | expect_equal(unique(td_mv$.model), "var")
61 | expect_equal(unique(td_mv$.response), c("mdeaths", "fdeaths"))
62 | })
63 |
--------------------------------------------------------------------------------
/tests/testthat/test-combination.R:
--------------------------------------------------------------------------------
1 | context("test-combination")
2 |
3 | test_that("Combination modelling", {
4 | skip_if_not_installed("fable")
5 |
6 | mbl_cmbn <- mbl %>%
7 | transmute(combination = (ets + ets)/2)
8 |
9 | expect_equal(
10 | select(augment(mbl_cmbn), -.model, -.innov),
11 | select(augment(mbl), -.model, -.innov)
12 | )
13 |
14 | expect_equivalent(
15 | forecast(mbl_cmbn, h = 12)[,-1],
16 | fbl[,-1]
17 | )
18 |
19 | mbl_cmbn <- us_deaths_tr %>%
20 | model(
21 | a = fable::SNAIVE(value),
22 | b = combination_model(fable::SNAIVE(value), fable::SNAIVE(value))
23 | )
24 |
25 | fbl_cmbn <- forecast(mbl_cmbn)
26 |
27 | expect_equivalent(
28 | fbl_cmbn[1:24, -1],
29 | fbl_cmbn[25:48, -1]
30 | )
31 |
32 | mbl_cmbn <- us_deaths_tr %>%
33 | model(
34 | snaive = fable::SNAIVE(value),
35 | rw = fable::RW(value ~ drift()),
36 | cmbn = (fable::SNAIVE(value) + fable::RW(value ~ drift()))/2
37 | ) %>%
38 | mutate(combination_ensemble(snaive, rw, weights = "inv_var"))
39 | fbl_snaive <- forecast(select(mbl_cmbn, 1))
40 | fbl_rw <- forecast(select(mbl_cmbn, 2))
41 | fbl_cmbn <- forecast(select(mbl_cmbn, 3))
42 | fbl_wt_cmbn <- forecast(select(mbl_cmbn, 4))
43 | expect_equal(
44 | mean(fbl_cmbn$value),
45 | mean((fbl_snaive$value + fbl_rw$value)/2)
46 | )
47 | expect_failure(
48 | expect_equal(
49 | mean(fbl_wt_cmbn$value),
50 | mean(fbl_cmbn$value)
51 | )
52 | )
53 | })
54 |
--------------------------------------------------------------------------------
/tests/testthat/test-decomposition-model.R:
--------------------------------------------------------------------------------
1 | context("test-decomposition-model")
2 |
3 | test_that("Decomposition modelling", {
4 | skip_if_not_installed("fable")
5 | skip_if_not_installed("feasts")
6 |
7 | mdl_dcmp <- us_deaths %>%
8 | model(decomposition_model(feasts::STL(value), fable::NAIVE(season_adjust)))
9 |
10 | expect_output(
11 | report(mdl_dcmp),
12 | "Series: season_adjust \\nModel: NAIVE"
13 | )
14 | expect_output(
15 | report(mdl_dcmp),
16 | "Series: season_year \\nModel: SNAIVE"
17 | )
18 |
19 | fbl_dcmp <- forecast(mdl_dcmp)
20 |
21 | expect_equal(
22 | mean(fbl_dcmp$value),
23 | rep(dcmp$season_year[61:72], 2) + dcmp$season_adjust[72]
24 | )
25 | })
26 |
--------------------------------------------------------------------------------
/tests/testthat/test-fable.R:
--------------------------------------------------------------------------------
1 | context("test-fable")
2 |
3 | test_that("fable dplyr verbs", {
4 | skip_if_not_installed("fable")
5 |
6 | fbl_complex %>% filter(key == "mdeaths") %>%
7 | expect_s3_class("fbl_ts") %>%
8 | NROW %>%
9 | expect_equal(24)
10 |
11 | # tsibble now automatically selects keys
12 | # expect_error(
13 | # fbl_complex %>% select(index, .model, value, .distribution),
14 | # "not a valid tsibble"
15 | # )
16 |
17 | fbl_complex %>%
18 | filter(key == "mdeaths") %>%
19 | select(index, .model, value, .mean) %>%
20 | n_keys() %>%
21 | expect_equal(2)
22 |
23 | expect_equal(
24 | colnames(hilo(fbl_complex, level = c(50, 80, 95))),
25 | c("key", ".model", "index", "value", ".mean", "50%", "80%", "95%")
26 | )
27 |
28 | expect_equivalent(
29 | as.list(fbl_multi),
30 | as.list(bind_rows(fbl_multi[1:12,], fbl_multi[13:24,]))
31 | )
32 | })
33 |
--------------------------------------------------------------------------------
/tests/testthat/test-features.R:
--------------------------------------------------------------------------------
1 | context("test-features")
2 |
3 | www_usage <- as_tsibble(WWWusage)
4 | lung_deaths_long <- as_tsibble(cbind(mdeaths, fdeaths))
5 | lung_deaths_wide <- as_tsibble(cbind(mdeaths, fdeaths), pivot_longer = FALSE)
6 |
7 | first <- function(x) c(first = x[1])
8 | last <- function(x) c(last = x[length(x)])
9 |
10 | test_that("features()", {
11 | expect_error(
12 | features(lung_deaths_wide, vars(mdeaths, fdeaths), first),
13 | "only supports a single variable"
14 | )
15 |
16 | expect_message(
17 | features(lung_deaths_wide, features = list(first, last)),
18 | "Feature variable not specified, automatically selected \\`.var = mdeaths\\`"
19 | ) %>%
20 | colnames() %>%
21 | expect_equal(c("first", "last"))
22 |
23 | features(lung_deaths_wide, mdeaths, features = list(a = mean, b = min, max)) %>%
24 | colnames() %>%
25 | expect_equal(c("a", "b", "...3"))
26 | })
27 |
28 | test_that("Scoped variants of features()", {
29 | ft_at <- features_at(lung_deaths_wide, vars(mdeaths:fdeaths), list(first, last))
30 | expect_equal(
31 | substr(colnames(ft_at), 1, 7),
32 | c(rep("mdeaths", 2), rep("fdeaths", 2))
33 | )
34 | ft_if <- features_if(lung_deaths_wide, is.numeric, list(first, last))
35 | expect_identical(
36 | ft_at, ft_if
37 | )
38 | ft_all <- features_all(lung_deaths_wide, list(first, last))
39 | expect_identical(
40 | ft_if, ft_all
41 | )
42 | })
--------------------------------------------------------------------------------
/tests/testthat/test-generate.R:
--------------------------------------------------------------------------------
1 | context("test-generate")
2 |
3 | test_that("generate", {
4 | skip_if_not_installed("fable")
5 |
6 | gen <- mbl %>% generate()
7 | expect_equal(NROW(gen), 24)
8 | expect_equal(gen$index, yearmonth("1978 Jan") + 0:23)
9 |
10 | gen_multi <- mbl_multi %>% generate()
11 | expect_equal(NROW(gen_multi), 48)
12 | expect_equal(gen_multi$index, yearmonth("1979 Jan") + rep(0:23, 2))
13 | expect_equal(unique(gen_multi$key), c("fdeaths", "mdeaths"))
14 |
15 | gen_complex <- mbl_complex %>% generate(times = 3)
16 | expect_equal(NROW(gen_complex), 24*2*2*3)
17 | expect_equal(gen_complex$index, yearmonth("1979 Jan") + rep(0:23, 2*2*3))
18 | expect_equal(unique(gen_complex$key), c("fdeaths", "mdeaths"))
19 | expect_equal(unique(gen_complex$.model), c("ets", "lm"))
20 | })
21 |
22 | test_that("generate seed setting", {
23 | skip_if_not_installed("fable")
24 |
25 | seed <- rnorm(1)
26 | expect_equal(
27 | mbl %>% generate(seed = seed),
28 | mbl %>% generate(seed = seed)
29 | )
30 |
31 | expect_failure(
32 | expect_equal(
33 | mbl %>% generate(),
34 | mbl %>% generate()
35 | )
36 | )
37 | })
--------------------------------------------------------------------------------
/tests/testthat/test-hilo.R:
--------------------------------------------------------------------------------
1 | context("test-hilo.R")
2 |
3 | test_that("Extracting intervals from a distribution", {
4 | skip_if_not_installed("fable")
5 |
6 | fc_sum <- fbl %>%
7 | mutate(`80%` = hilo(value, 80))
8 |
9 | expect_s3_class(fc_sum$`80%`, "hilo")
10 | })
11 |
--------------------------------------------------------------------------------
/tests/testthat/test-interpolate.R:
--------------------------------------------------------------------------------
1 | context("test-interpolate.R")
2 |
3 | test_that("Test interpolation", {
4 | skip_if_not_installed("fable")
5 | NA_pos <- c(0, sample(seq_len(NROW(us_deaths)), 10), NROW(us_deaths))
6 | us_deaths$value[NA_pos] <- NA
7 | mbl_miss <- us_deaths %>%
8 | model(fable::TSLM(value ~ trend() + season()))
9 | interpolated <- interpolate(mbl_miss, us_deaths)
10 | expect_true(all(!is.na(interpolated$value)))
11 | })
12 |
--------------------------------------------------------------------------------
/tests/testthat/test-mable.R:
--------------------------------------------------------------------------------
1 | context("test-mable.R")
2 |
3 | test_that("Mable classes", {
4 | skip_if_not_installed("fable")
5 | expect_s3_class(mbl, "mdl_df")
6 | expect_s3_class(mbl[[attr(mbl,"model")[[1]]]], "lst_mdl")
7 | })
8 |
9 | test_that("Mable print output", {
10 | skip_if_not_installed("fable")
11 | expect_output(print(mbl), "A mable:")
12 | })
13 |
14 | test_that("Mable fitted values", {
15 | skip_if_not_installed("fable")
16 | fits <- fitted(mbl)
17 | expect_true(is_tsibble(fits))
18 | expect_true(all(colnames(fits) %in% c(".model", "index", ".fitted")))
19 | expect_equal(fits[["index"]], us_deaths_tr[["index"]])
20 | expect_equal(
21 | fits[[".fitted"]],
22 | fitted(mbl[[attr(mbl,"model")[[1]]]][[1]])[[".fitted"]]
23 | )
24 |
25 | fits <- fitted(mbl_multi)
26 | expect_true(is_tsibble(fits))
27 | expect_equal(key_vars(fits), c("key", ".model"))
28 | expect_true(all(colnames(fits) %in% c("key", ".model", "index", ".fitted")))
29 | expect_equal(unique(fits[["key"]]), mbl_multi[["key"]])
30 | expect_equal(fits[["index"]], lung_deaths_long_tr[["index"]])
31 | expect_equal(fits[[".fitted"]],
32 | as.numeric(c(
33 | fitted(mbl_multi[[attr(mbl,"model")[[1]]]][[1]])[[".fitted"]],
34 | fitted(mbl_multi[[attr(mbl,"model")[[1]]]][[2]])[[".fitted"]]
35 | ))
36 | )
37 | })
38 |
39 | test_that("Mable residuals", {
40 | skip_if_not_installed("fable")
41 | resids <- residuals(mbl)
42 | expect_true(is_tsibble(resids))
43 | expect_true(all(colnames(resids) %in% c(".model", "index", ".resid")))
44 | expect_equal(resids[["index"]], us_deaths_tr[["index"]])
45 | expect_equal(resids[[".resid"]], as.numeric(residuals(mbl[[attr(mbl,"model")[[1]]]][[1]])[[".resid"]]))
46 |
47 | resids <- residuals(mbl_multi)
48 | expect_true(is_tsibble(resids))
49 | expect_equal(key_vars(resids), c("key", ".model"))
50 | expect_true(all(colnames(resids) %in% c("key", ".model", "index", ".resid")))
51 | expect_equal(unique(resids[["key"]]), mbl_multi[["key"]])
52 | expect_equal(resids[["index"]], lung_deaths_long_tr[["index"]])
53 | expect_equal(resids[[".resid"]],
54 | as.numeric(c(
55 | residuals(mbl_multi[[attr(mbl,"model")[[1]]]][[1]])[[".resid"]],
56 | residuals(mbl_multi[[attr(mbl,"model")[[1]]]][[2]])[[".resid"]]
57 | ))
58 | )
59 | })
60 |
61 | test_that("mable dplyr verbs", {
62 | skip_if_not_installed("fable")
63 | library(dplyr)
64 | expect_output(mbl_complex %>% select(key, ets) %>% print, "mable: 2 x 2") %>%
65 | colnames %>%
66 | expect_identical(c("key", "ets"))
67 |
68 | expect_output(mbl_complex %>% select(key, ets) %>% print, "mable: 2 x 2") %>%
69 | colnames %>%
70 | expect_identical(c("key", "ets"))
71 |
72 | # Test for negative tidyselect with keyed data (#120)
73 | mbl_complex %>%
74 | select(-lm) %>%
75 | colnames() %>%
76 | expect_identical(c("key", "ets"))
77 |
78 | # expect_error(select(mbl_complex, -key),
79 | # "not a valid mable")
80 |
81 | expect_output(mbl_complex %>% filter(key == "mdeaths") %>% print, "mable") %>%
82 | .[["key"]] %>%
83 | expect_identical("mdeaths")
84 | })
--------------------------------------------------------------------------------
/tests/testthat/test-multivariate.R:
--------------------------------------------------------------------------------
1 | context("test-multivariate.R")
2 |
3 | test_that("multiple univariate", {
4 | skip_if_not_installed("fable")
5 |
6 | expect_equal(sort(mbl_multi$key), c("fdeaths", "mdeaths"))
7 | expect_s3_class(mbl_multi[[attr(mbl_multi,"model")[[1]]]], "lst_mdl")
8 |
9 | expect_equal(sort(unique(fbl_multi$key)), c("fdeaths", "mdeaths"))
10 | expect_s3_class(fbl_multi[[attr(fbl_multi,"dist")]], "distribution")
11 | })
12 |
--------------------------------------------------------------------------------
/tests/testthat/test-reconciliation.R:
--------------------------------------------------------------------------------
1 | context("test-reconciliation")
2 |
3 | test_that("reconciliation", {
4 | lung_deaths_agg <- lung_deaths_long %>%
5 | aggregate_key(key, value = sum(value))
6 | expect_equal(n_keys(lung_deaths_agg), 3)
7 | expect_equal(
8 | lung_deaths_agg$value[1:72],
9 | lung_deaths_long$value[1:72] + lung_deaths_long$value[72 + (1:72)]
10 | )
11 | expect_output(
12 | print(lung_deaths_agg$key),
13 | ""
14 | )
15 | expect_output(
16 | print(lung_deaths_agg),
17 | ""
18 | )
19 |
20 | skip_if_not_installed("fable")
21 |
22 | fit_agg <- lung_deaths_agg %>%
23 | model(snaive = fable::SNAIVE(value))
24 |
25 | fc_agg <- fit_agg %>% forecast()
26 | fc_agg_reconciled <- fit_agg %>% reconcile(snaive = min_trace(snaive)) %>% forecast()
27 |
28 | expect_equal(
29 | mean(fc_agg$value),
30 | mean(fc_agg_reconciled$value)
31 | )
32 | expect_failure(
33 | expect_equal(
34 | fc_agg$value,
35 | fc_agg_reconciled$value
36 | )
37 | )
38 |
39 | fit_agg <- lung_deaths_agg %>%
40 | model(ses = fable::ETS(value ~ error("A") + trend("A") + season("A")))
41 | fc_agg <- fit_agg %>% forecast()
42 | fc_agg_reconciled <- fit_agg %>% reconcile(ses = min_trace(ses)) %>% forecast()
43 | expect_equal(
44 | mean(fc_agg_reconciled$value[48 + (1:24)]),
45 | mean(fc_agg_reconciled$value[(1:24)] + fc_agg_reconciled$value[24 + (1:24)])
46 | )
47 | expect_failure(
48 | expect_equal(
49 | fc_agg$value,
50 | fc_agg_reconciled$value
51 | )
52 | )
53 |
54 | fc_agg_reconciled <- fit_agg %>% reconcile(ses = min_trace(ses, method = "wls_var")) %>% forecast()
55 | expect_equal(
56 | mean(fc_agg_reconciled$value[48 + (1:24)]),
57 | mean(fc_agg_reconciled$value[(1:24)] + fc_agg_reconciled$value[24 + (1:24)])
58 | )
59 | expect_failure(
60 | expect_equal(
61 | fc_agg$value,
62 | fc_agg_reconciled$value
63 | )
64 | )
65 |
66 | fc_agg_reconciled <- fit_agg %>% reconcile(ses = min_trace(ses, method = "ols")) %>% forecast()
67 | expect_equal(
68 | mean(fc_agg_reconciled$value[48 + (1:24)]),
69 | mean(fc_agg_reconciled$value[(1:24)] + fc_agg_reconciled$value[24 + (1:24)])
70 | )
71 | expect_failure(
72 | expect_equal(
73 | fc_agg$value,
74 | fc_agg_reconciled$value
75 | )
76 | )
77 |
78 | fc_agg_reconciled <- fit_agg %>% reconcile(ses = min_trace(ses, method = "mint_cov")) %>% forecast()
79 | expect_equal(
80 | mean(fc_agg_reconciled$value[48 + (1:24)]),
81 | mean(fc_agg_reconciled$value[(1:24)] + fc_agg_reconciled$value[24 + (1:24)])
82 | )
83 | expect_failure(
84 | expect_equal(
85 | fc_agg$value,
86 | fc_agg_reconciled$value
87 | )
88 | )
89 | })
90 |
--------------------------------------------------------------------------------
/tests/testthat/test-spelling.R:
--------------------------------------------------------------------------------
1 | context("test-spelling.R")
2 |
3 | test_that("package spell check", {
4 | skip_on_cran()
5 |
6 | # Determine package source path by finding README.md
7 | package_dir <- dirname(list.files("../../", pattern = "README.md", recursive = TRUE, full.names = TRUE))
8 | skip_if(length(package_dir) != 1)
9 | badspell <- spelling::spell_check_package(
10 | package_dir
11 | )
12 |
13 | expect_equal(NROW(badspell), 0, info = capture.output(print(badspell)))
14 | })
15 |
--------------------------------------------------------------------------------
/tests/testthat/test-transformations.R:
--------------------------------------------------------------------------------
1 | context("test-transformations")
2 |
3 | simple_data <- tsibble(idx = 1:10, y = abs(rnorm(10)), x = 1:10, index = idx)
4 | test_transformation <- function(..., dt = simple_data){
5 | mdl <- estimate(dt, no_specials(...))
6 | trans <- mdl$transformation[[1]]
7 | resp <- mdl$response[[1]]
8 | expect_equal(
9 | dt[[expr_name(resp)]],
10 | fabletools:::invert_transformation(trans)(trans(dt[[expr_name(resp)]]))
11 | )
12 | }
13 |
14 | test_that("single transformations", {
15 | test_transformation(y)
16 | test_transformation(y + 10)
17 | test_transformation(10 + y)
18 | test_transformation(+y)
19 | test_transformation(y - 10)
20 | test_transformation(10 - y)
21 | test_transformation(-y)
22 | test_transformation(3*y)
23 | test_transformation(y*3)
24 | test_transformation(3/y)
25 | test_transformation(y/3)
26 | test_transformation(log(y))
27 | test_transformation(logb(y, 10))
28 | test_transformation(log10(y))
29 | test_transformation(log2(y))
30 | test_transformation(log1p(y))
31 | test_transformation(expm1(y))
32 | test_transformation(exp(y))
33 | test_transformation(box_cox(y, 0.4))
34 | test_transformation(inv_box_cox(y, 0.4))
35 | test_transformation(sqrt(y))
36 | test_transformation(y^2)
37 | test_transformation(2^y)
38 | test_transformation((y))
39 | })
40 |
41 |
42 | test_that("transformation chains", {
43 | test_transformation(y + 10 - 10)
44 | test_transformation(10 + y * 10)
45 | test_transformation(+y - y)
46 | test_transformation(y^2 + 3)
47 | test_transformation(log(sqrt(y)))
48 | test_transformation(log(y + 1))
49 | test_transformation(box_cox(y^2,0.3))
50 | test_transformation(box_cox(y,0.3) + 1)
51 |
52 | # Something too complex
53 | expect_error(
54 | test_transformation(box_cox(y,0.3)^2),
55 | "Could not identify a valid back-transformation"
56 | )
57 |
58 | # Something ridiculous
59 | test_transformation(log(sqrt(sqrt(sqrt(sqrt(sqrt(y)))+3))))
60 | })
--------------------------------------------------------------------------------
/tests/testthat/test-validate_model.R:
--------------------------------------------------------------------------------
1 | context("test-validate_model.R")
2 |
3 | test_that("validate_model", {
4 | # Test expression capturing
5 | expect_identical(validate_formula(no_specials(user_model)), as.name("user_model"))
6 |
7 | # Test formula evaluating
8 | user_model <- y~x
9 | expect_identical(validate_formula(no_specials(user_model)), user_model)
10 |
11 | # Test bare formula
12 | expect_identical(validate_formula(no_specials(y~x)), y~x)
13 |
14 | tsbl1 <- tsibble::tsibble(
15 | date = seq(as.Date("2017-01-01"), as.Date("2017-01-10"), by = 1),
16 | value = rnorm(10), index = date
17 | )
18 |
19 | # Test automatic response selection
20 | expect_message(res <- validate_formula(no_specials(), data = tsbl1),
21 | "Model not specified, defaulting to automatic modelling of the `value` variable.")
22 | expect_identical(res, as.name("value"))
23 |
24 | # Test LHS automatic response selection
25 | expect_message(res <- validate_formula(no_specials(~x), data = tsbl1), "Model not specified, defaulting to automatic modelling of the `value` variable.")
26 | expect_identical(res, value ~ x)
27 |
28 | # Test failed response selection
29 | tsbl1[["value2"]] <- rnorm(10)
30 | expect_error(validate_formula(no_specials(), data = tsbl1), "Could not automatically determine the response variable")
31 |
32 | # Test failed LHS response variable selection
33 | expect_error(validate_formula(no_specials(~ x), data = tsbl1), "Could not automatically determine the response variable")
34 |
35 | # Test not supported automatic variable selection
36 | expect_error(validate_formula(no_specials(), data = NULL), "This model function does not support automatic selection")
37 | })
--------------------------------------------------------------------------------
/vignettes/.gitignore:
--------------------------------------------------------------------------------
1 | *.html
2 | *.R
3 |
--------------------------------------------------------------------------------