├── DESCRIPTION ├── LICENSE ├── MD5 ├── NAMESPACE ├── R ├── add.ar.R ├── add.dynamic.regression.R ├── add.generalized.local.linear.trend.R ├── add.local.level.R ├── add.local.linear.trend.R ├── add.monthly.annual.cycle.R ├── add.random.walk.holiday.R ├── add.regression.holiday.R ├── add.seasonal.R ├── add.semilocal.linear.trend.R ├── add.static.intercept.R ├── add.student.local.linear.trend.R ├── add.trig.R ├── bsts.R ├── compare.bsts.models.R ├── date.functions.R ├── diagnostics.R ├── dirm.R ├── format.learning.data.R ├── format.timestamps.R ├── holiday.R ├── mbsts.R ├── mbsts.plots.R ├── mixed.frequency.R ├── plot_seasonal_effect.R ├── plots.R ├── predict.bsts.R ├── predict.mbsts.R ├── summary.bsts.R └── utils.R ├── data ├── gdp.RData ├── goog.RData ├── iclaims.RData ├── new.home.sales.RData ├── rsxfs.RData ├── shark.RData └── turkish.RData ├── inst └── tests │ ├── tests │ ├── testthat.R │ └── testthat │ │ ├── test-ar.R │ │ ├── test-autoar.R │ │ ├── test-date-range.R │ │ ├── test-dirm.R │ │ ├── test-dynamic-regression.R │ │ ├── test-goog.R │ │ ├── test-holidays.R │ │ ├── test-multivariate.R │ │ ├── test-plot-components.R │ │ ├── test-poisson.R │ │ ├── test-prediction-errors.R │ │ ├── test-prediction.R │ │ ├── test-regressionholiday.R │ │ ├── test-seasonal.R │ │ ├── test-sigma-upper-limit.R │ │ ├── test-student.R │ │ └── test-trig.R │ ├── testthat.R │ └── testthat │ ├── test-ar.R │ ├── test-autoar.R │ ├── test-date-range.R │ ├── test-dirm.R │ ├── test-dynamic-regression.R │ ├── test-goog.R │ ├── test-holidays.R │ ├── test-multivariate.R │ ├── test-plot-components.R │ ├── test-poisson.R │ ├── test-prediction-errors.R │ ├── test-prediction.R │ ├── test-regressionholiday.R │ ├── test-seasonal.R │ ├── test-sigma-upper-limit.R │ ├── test-student.R │ └── test-trig.R ├── man ├── HarveyCumulator.Rd ├── MATCH.NumericTimestamps.Rd ├── StateSpecification.Rd ├── SuggestBurn.Rd ├── add.ar.Rd ├── add.dynamic.regression.Rd ├── add.local.level.Rd ├── add.local.linear.trend.Rd ├── 
add.monthly.annual.cycle.Rd ├── add.random.walk.holiday.Rd ├── add.seasonal.Rd ├── add.semilocal.linear.trend.Rd ├── add.shared.local.level.Rd ├── add.static.intercept.Rd ├── add.student.local.linear.trend.Rd ├── add.trig.Rd ├── aggregate.time.series.Rd ├── aggregate.weeks.to.months.Rd ├── auto.ar.Rd ├── bsts-package.Rd ├── bsts.Rd ├── bsts.options.Rd ├── compare.bsts.models.Rd ├── date.range.Rd ├── descriptive-plots.Rd ├── diagnostic-plots.Rd ├── dirm-model-options.Rd ├── dirm.Rd ├── estimate.time.scale.Rd ├── extend.time.Rd ├── format.timestamps.Rd ├── gdp.Rd ├── geometric.sequence.Rd ├── get.fraction.Rd ├── goog.Rd ├── holiday.Rd ├── iclaims.Rd ├── last.day.in.month.Rd ├── match.week.to.month.Rd ├── max.window.width.Rd ├── mbsts.Rd ├── mixed.frequency.Rd ├── month.distance.Rd ├── named.holidays.Rd ├── new.home.sales.Rd ├── one.step.prediction.errors.Rd ├── plot.bsts.Rd ├── plot.bsts.mixed.Rd ├── plot.bsts.prediction.Rd ├── plot.bsts.predictors.Rd ├── plot.holiday.Rd ├── plot.mbsts.Rd ├── plot.mbsts.prediction.Rd ├── predict.bsts.Rd ├── predict.mbsts.Rd ├── quarter.Rd ├── regression.holiday.Rd ├── regularize.timestamps.Rd ├── residuals.bsts.Rd ├── rsxfs.Rd ├── shark.Rd ├── shorten.Rd ├── simulate.fake.mixed.frequency.data.Rd ├── spike.slab.ar.prior.Rd ├── state.sizes.Rd ├── summary.bsts.Rd ├── to.posixt.Rd ├── turkish.Rd ├── week.ends.Rd ├── weekday.names.Rd └── wide.to.long.Rd └── src ├── Makevars ├── aggregate_time_series.cc ├── bsts.cc ├── bsts_init.cc ├── create_dynamic_intercept_state_model.cpp ├── create_dynamic_intercept_state_model.h ├── create_shared_state_model.cpp ├── create_shared_state_model.h ├── create_state_model.cpp ├── create_state_model.h ├── dirm.cc ├── dynamic_intercept_model_manager.cc ├── dynamic_intercept_model_manager.h ├── get_date_ranges.cc ├── mbsts.cc ├── mixed_frequency.cc ├── model_manager.cc ├── model_manager.h ├── multivariate_gaussian_model_manager.cc ├── multivariate_gaussian_model_manager.h ├── 
state_space_gaussian_model_manager.cc ├── state_space_gaussian_model_manager.h ├── state_space_logit_model_manager.cc ├── state_space_logit_model_manager.h ├── state_space_poisson_model_manager.cc ├── state_space_poisson_model_manager.h ├── state_space_regression_model_manager.cc ├── state_space_regression_model_manager.h ├── state_space_student_model_manager.cc ├── state_space_student_model_manager.h ├── timestamp_info.cc ├── timestamp_info.h ├── utils.cc └── utils.h /DESCRIPTION: -------------------------------------------------------------------------------- 1 | Package: bsts 2 | Version: 0.9.10 3 | Date: 2024-01-16 4 | Title: Bayesian Structural Time Series 5 | Author: Steven L. Scott 6 | Maintainer: Steven L. Scott 7 | Description: Time series regression using dynamic linear models fit using 8 | MCMC. See Scott and Varian (2014) , among many 9 | other sources. 10 | Depends: BoomSpikeSlab (>= 1.2.6), zoo (>= 1.8), xts, Boom (>= 0.9.13), 11 | R(>= 3.4.0) 12 | Suggests: testthat 13 | LinkingTo: Boom (>= 0.9.13) 14 | License: LGPL-2.1 | MIT + file LICENSE 15 | Encoding: UTF-8 16 | NeedsCompilation: yes 17 | Packaged: 2024-01-16 23:31:15 UTC; steve 18 | Repository: CRAN 19 | Date/Publication: 2024-01-17 13:02:07 UTC 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | YEAR: 2022 2 | COPYRIGHT HOLDER: Steven L. 
Scott 3 | -------------------------------------------------------------------------------- /NAMESPACE: -------------------------------------------------------------------------------- 1 | import(Boom, BoomSpikeSlab, xts, zoo) 2 | 3 | importFrom("grDevices", "dev.off", "gray", "pdf", "rainbow") 4 | 5 | importFrom("graphics", "abline", "axis", "axis.Date", "axis.POSIXct", "box", 6 | "boxplot", "hist", "legend", "lines", "par", "plot", "points", 7 | "polygon", "text", "title", "close.screen", "screen", "split.screen", 8 | "strheight", "strwidth") 9 | 10 | importFrom("stats", ".checkMFClasses", ".getXlevels", "acf", "delete.response", 11 | "is.ts", "lsfit", "median", "model.frame", "model.frame.default", 12 | "model.matrix", "model.response", "na.exclude", "na.omit", "na.pass", 13 | "plogis", "plot.ts", "ppoints", "quantile", "qnorm", "rnorm", "sd", "terms", 14 | "var", "window") 15 | 16 | importFrom("utils", "head", "help", "tail") 17 | 18 | export(AcfDist, 19 | AddAr, 20 | AddAutoAr, 21 | AddDynamicRegression, 22 | AddGeneralizedLocalLinearTrend, 23 | AddHierarchicalRegressionHoliday, 24 | AddLocalLevel, 25 | AddLocalLinearTrend, 26 | AddMonthlyAnnualCycle, 27 | AddRandomWalkHoliday, 28 | AddRegressionHoliday, 29 | AddSeasonal, 30 | AddSemilocalLinearTrend, 31 | AddSharedLocalLevel, 32 | AddStaticIntercept, 33 | AddStudentLocalLinearTrend, 34 | AddTrig, 35 | AggregateTimeSeries, 36 | AggregateWeeksToMonths, 37 | bsts, 38 | bsts.mixed, 39 | bsts.prediction.errors, 40 | BstsOptions, 41 | CompareBstsModels, 42 | DateRange, 43 | DateRangeHoliday, 44 | DateToPOSIX, 45 | DayPlot, 46 | dirm, 47 | DirmModelOptions, 48 | DynamicRegressionArOptions, 49 | DynamicRegressionHierarchicalRandomWalkOptions, 50 | DynamicRegressionRandomWalkOptions, 51 | EstimateTimeScale, 52 | ExtendTime, 53 | FixedDateHoliday, 54 | GeometricSequence, 55 | GetFractionOfDaysInInitialMonth, 56 | GetFractionOfDaysInInitialQuarter, 57 | HarveyCumulator, 58 | HasDuplicateTimestamps, 59 | IsRegular, 60 | 
LastDayInMonth, 61 | LastWeekdayInMonthHoliday, 62 | LongToWide, 63 | MATCH.NumericTimestamps, 64 | MatchWeekToMonth, 65 | MaxWindowWidth, 66 | MaxWindowWidth.default, 67 | MaxWindowWidth.DateRangeHoliday, 68 | mbsts, 69 | MonthDistance, 70 | MonthPlot, 71 | NamedHoliday, 72 | named.holidays, 73 | NoDuplicates, 74 | NoGaps, 75 | NthWeekdayInMonthHoliday, 76 | plot.bsts, 77 | PlotBstsCoefficients, 78 | PlotBstsComponents, 79 | PlotBstsForecastDistribution, 80 | plot.bsts.mixed, 81 | PlotBstsMixedComponents, 82 | PlotBstsMixedState, 83 | plot.bsts.prediction, 84 | PlotBstsPredictionErrors, 85 | PlotBstsPredictors, 86 | PlotBstsResiduals, 87 | PlotBstsSize, 88 | PlotBstsState, 89 | PlotDynamicRegression, 90 | PlotHoliday, 91 | plot.mbsts, 92 | plot.mbsts.prediction, 93 | predict.mbsts, 94 | PlotMbstsSeriesMeans, 95 | PlotSeasonalEffect, 96 | predict.bsts, 97 | Quarter, 98 | qqdist, 99 | RegularizeTimestamps, 100 | RegularizeTimestamps.Date, 101 | RegularizeTimestamps.POSIXt, 102 | RegularizeTimestamps.default, 103 | RegularizeTimestamps.numeric, 104 | residuals.bsts, 105 | Shorten, 106 | SimulateFakeMixedFrequencyData, 107 | SpikeSlabArPrior, 108 | StateSizes, 109 | SuggestBurn, 110 | summary.bsts, 111 | weekday.names, 112 | WeekEndsMonth, 113 | WeekEndsQuarter, 114 | WideToLong, 115 | YearMonToPOSIX, 116 | YearPlot) 117 | 118 | S3method(plot, bsts) 119 | S3method(summary, bsts) 120 | S3method(predict, bsts) 121 | S3method(residuals, bsts) 122 | 123 | S3method(plot, bsts.prediction) 124 | 125 | S3method(plot, bsts.mixed) 126 | 127 | S3method(plot, mbsts) 128 | S3method(predict, mbsts) 129 | S3method(plot, mbsts.prediction) 130 | 131 | # Plot methods for specific state components 132 | S3method(plot, StateModel) 133 | S3method(plot, HierarchicalRegressionHolidayStateModel) 134 | S3method(plot, RandomWalkHolidayStateModel) 135 | S3method(plot, RegressionHolidayStateModel) 136 | S3method(plot, Seasonal) 137 | 138 | S3method(RegularizeTimestamps, default) 139 | 
S3method(RegularizeTimestamps, numeric) 140 | S3method(RegularizeTimestamps, Date) 141 | S3method(RegularizeTimestamps, POSIXt) 142 | 143 | S3method(MATCH, NumericTimestamps) 144 | 145 | S3method(MaxWindowWidth, default) 146 | S3method(MaxWindowWidth, DateRangeHoliday) 147 | 148 | useDynLib(bsts, .registration = TRUE) 149 | -------------------------------------------------------------------------------- /R/add.generalized.local.linear.trend.R: -------------------------------------------------------------------------------- 1 | # Copyright 2011 Google Inc. All Rights Reserved. 2 | # Author: stevescott@google.com (Steve Scott) 3 | 4 | AddGeneralizedLocalLinearTrend <- function (state.specification = list(), 5 | y = NULL, 6 | level.sigma.prior = NULL, 7 | slope.mean.prior = NULL, 8 | slope.ar1.prior = NULL, 9 | slope.sigma.prior = NULL, 10 | initial.level.prior = NULL, 11 | initial.slope.prior = NULL, 12 | sdy = NULL, 13 | initial.y = NULL) { 14 | warning("AddGeneralizedLocalLinearTrend is deprecated (because it was a ", 15 | "terrible name!). Please use AddSemilocalLinearTrend instead.") 16 | return(AddSemilocalLinearTrend( 17 | state.specification, 18 | y, 19 | level.sigma.prior, 20 | slope.mean.prior, 21 | slope.ar1.prior, 22 | slope.sigma.prior, 23 | initial.level.prior, 24 | initial.slope.prior, 25 | sdy, 26 | initial.y)) 27 | } 28 | -------------------------------------------------------------------------------- /R/add.monthly.annual.cycle.R: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google LLC. All Rights Reserved. 2 | # 3 | # This library is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU Lesser General Public 5 | # License as published by the Free Software Foundation; either 6 | # version 2.1 of the License, or (at your option) any later version. 
7 | # 8 | # This library is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # Lesser General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU Lesser General Public 14 | # License along with this library; if not, write to the Free Software 15 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | AddMonthlyAnnualCycle <- function(state.specification, 18 | y, 19 | date.of.first.observation = NULL, 20 | sigma.prior = NULL, 21 | initial.state.prior = NULL, 22 | sdy) { 23 | ## Adds a monthly 12-cycle that updates at the first of each month. Intended 24 | ## for daily data. 25 | ## 26 | ## Args: 27 | ## state.specification: A list of state components. If omitted, 28 | ## an empty list is assumed. 29 | ## y: A numeric vector. The time series to be modeled. 30 | ## date.of.first.observation: An object of type 'Date' or 'POSIXt' giving 31 | ## the date of the first observation in 'y'. If 'y' is of type 'zoo' and 32 | ## the index of 'y' is Date or POSIXt then a NULL value here signals that 33 | ## the date of the first observation should be inferred. 34 | ## sigma.prior: An object created by SdPrior. This is the prior 35 | ## distribution describing the standard deviation of the seasonal 36 | ## increments. 37 | ## initial.state.prior: An object created by NormalPrior. The prior 38 | ## distribution on the values of the initial state (i.e. the state of the 39 | ## first observation). 40 | ## sdy: The standard deviation of y. This will be ignored if y is provided, 41 | ## or if both sigma.prior and initial.state.prior are supplied directly. 42 | ## 43 | ## Returns: 44 | ## state.specification, after appending an element for the monthly 45 | ## component. 
46 | if (missing(state.specification)) state.specification <- list() 47 | stopifnot(is.list(state.specification)) 48 | if (!missing(y)) { 49 | stopifnot(is.numeric(y)) 50 | sdy <- sd(as.numeric(y), na.rm = TRUE) 51 | } 52 | 53 | if (is.null(sigma.prior)) { 54 | ## The default prior says that sigma is small, and can be no larger than the 55 | ## sample standard deviation of the time series being modeled. 56 | sigma.prior <- SdPrior(.01 * sdy, upper.limit = sdy) 57 | } 58 | stopifnot(inherits(sigma.prior, "SdPrior")) 59 | 60 | if (is.null(initial.state.prior)) { 61 | initial.state.prior <- NormalPrior(0, sdy) 62 | } 63 | stopifnot(inherits(initial.state.prior, "NormalPrior")) 64 | 65 | if (is.null(date.of.first.observation)) { 66 | if (is.zoo(y)) { 67 | dates <- index(y) 68 | date.of.first.observation <- as.Date(dates[1]) 69 | } 70 | } 71 | stopifnot(inherits(date.of.first.observation, c("Date", "POSIXt")), 72 | length(date.of.first.observation) == 1) 73 | date.of.first.observation <- as.POSIXlt(date.of.first.observation) 74 | 75 | monthly <- list( 76 | name = "Monthly", 77 | sigma.prior = sigma.prior, 78 | initial.state.prior = initial.state.prior, 79 | first.observation.month = as.integer( 80 | date.of.first.observation$mon + 1), 81 | first.observation.day = as.integer( 82 | date.of.first.observation$mday), 83 | first.observation.year = as.integer( 84 | 1900 + date.of.first.observation$year), 85 | size = 11) 86 | class(monthly) <- c("Monthly", "StateModel") 87 | state.specification[[length(state.specification) + 1]] <- monthly 88 | return(state.specification) 89 | } 90 | -------------------------------------------------------------------------------- /R/add.static.intercept.R: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google LLC. All Rights Reserved. 
2 | # 3 | # This library is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU Lesser General Public 5 | # License as published by the Free Software Foundation; either 6 | # version 2.1 of the License, or (at your option) any later version. 7 | # 8 | # This library is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # Lesser General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU Lesser General Public 14 | # License along with this library; if not, write to the Free Software 15 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | AddStaticIntercept <- function( 18 | state.specification, 19 | y, 20 | initial.state.prior = NormalPrior(y[1], sd(y, na.rm = TRUE))) { 21 | ## Adds a static intercept term to a state space model. If the model includes 22 | ## a traditional trend component (e.g. local level, local linear trend, etc) 23 | ## then a separate intercept is not needed (and will probably cause trouble, 24 | ## as it will be confounded with the initial state of the trend model). 25 | ## However, if there is no trend, or the trend is an AR process centered 26 | ## around zero, then adding a static intercept will shift the center to a 27 | ## data-determined value. 28 | ## 29 | ## Args: 30 | ## state.specification: A list of state components. If omitted, an empty 31 | ## list is assumed. 32 | ## y: A numeric vector. The time series to be modeled. 33 | ## initial.state.prior: An object of class NormalPrior. The prior 34 | ## distribution on the values of the initial state (i.e. the state of the 35 | ## first observation). 36 | ## Returns: 37 | ## state.specification, after appending the information necessary 38 | ## to define a static intercept term. 
39 | if (missing(state.specification)) state.specification <- list() 40 | stopifnot(is.list(state.specification)) 41 | stopifnot(inherits(initial.state.prior, "NormalPrior")) 42 | component <- list(name = "Intercept", 43 | initial.state.prior = initial.state.prior, 44 | size = 1) 45 | class(component) <- c("StaticIntercept", "StateModel") 46 | state.specification[[length(state.specification) + 1]] <- component 47 | return(state.specification) 48 | } 49 | -------------------------------------------------------------------------------- /R/add.trig.R: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google LLC. All Rights Reserved. 2 | # 3 | # This library is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU Lesser General Public 5 | # License as published by the Free Software Foundation; either 6 | # version 2.1 of the License, or (at your option) any later version. 7 | # 8 | # This library is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # Lesser General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU Lesser General Public 14 | # License along with this library; if not, write to the Free Software 15 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | AddTrig <- function(state.specification = NULL, 18 | y, 19 | period, 20 | frequencies, 21 | sigma.prior = NULL, 22 | initial.state.prior = NULL, 23 | sdy = sd(y, na.rm = TRUE), 24 | method = c("harmonic", "direct")) { 25 | ## A trigonometric state model. 26 | ## 27 | ## Args: 28 | ## state.specification: A list of state components. If omitted, an empty 29 | ## list is assumed. 30 | ## y: A numeric vector. The time series to be modeled. This can be omitted 31 | ## if sdy is provided. 
32 | ## period: A positive scalar giving the number of time steps required for 33 | ## the longest cycle to repeat. 34 | ## frequencies: A vector of positive real numbers giving the number of times 35 | ## each cyclic component repeats in a period. One sine and one cosine 36 | ## term will be added for each frequency. 37 | ## sigma.prior: The prior distribution for the standard deviations of the 38 | ## changes in the sinusoid coefficients at each new time point. This can 39 | ## be NULL (in which case a default prior will be used), or a single 40 | ## object of class SdPrior (which will be repeated for each sinusoid 41 | ## independently). 42 | ## initial.state.prior: The prior distribution for the values of 43 | ## the sinusoid coefficients at time 0. This can either be NULL 44 | ## (in which case a default prior will be used), or an object of 45 | ## class MvnPrior. If the prior is specified directly its 46 | ## dimension must be twice the number of frequencies. 47 | ## sdy: The standard deviation of the time series to be modeled. This 48 | ## argument is ignored if y is provided. 49 | if (is.null(state.specification)) state.specification <- list() 50 | stopifnot(is.list(state.specification)) 51 | 52 | if (!missing(y)) { 53 | stopifnot(is.numeric(y)) 54 | sdy <- sd(as.numeric(y), na.rm = TRUE) 55 | } else if (missing(sdy)) { 56 | stop("At least one of y or sdy must be supplied to AddTrig.") 57 | } 58 | 59 | stopifnot(is.numeric(period), 60 | length(period) == 1, 61 | period > 0) 62 | stopifnot(is.numeric(frequencies), 63 | length(frequencies) > 0, 64 | all(frequencies > 0)) 65 | method <- match.arg(method) 66 | 67 | ## Check the prior on the sinusoid coefficient increments. 68 | if (is.null(sigma.prior)) { 69 | sigma.prior <- SdPrior(0.01 * sdy, upper.limit = sdy) 70 | } 71 | stopifnot(inherits(sigma.prior, "SdPrior")) 72 | 73 | ## Check the prior on the initial state of the sinusoid coefficients. 
74 | dimension <- 2 * length(frequencies) 75 | if (is.null(initial.state.prior)) { 76 | initial.state.prior <- MvnPrior( 77 | mean = rep(0, dimension), 78 | variance = diag(rep(sdy, dimension)^2)) 79 | } 80 | stopifnot(inherits(initial.state.prior, "MvnDiagonalPrior") || 81 | inherits(initial.state.prior, "MvnPrior")) 82 | stopifnot(length(initial.state.prior$mean) == dimension) 83 | 84 | ## All data has been checked and gathered at this point. Return the 85 | ## object. 86 | trig <- list(name = paste0("trig.", period), 87 | frequencies = frequencies, 88 | period = period, 89 | sigma.prior = sigma.prior, 90 | initial.state.prior = initial.state.prior, 91 | size = dimension, 92 | method = method) 93 | class(trig) <- c("Trig", "StateModel") 94 | state.specification[[length(state.specification) + 1]] <- trig 95 | return(state.specification) 96 | } 97 | -------------------------------------------------------------------------------- /R/summary.bsts.R: -------------------------------------------------------------------------------- 1 | # Copyright 2018 Google LLC. All Rights Reserved. 2 | # 3 | # This library is free software; you can redistribute it and/or 4 | # modify it under the terms of the GNU Lesser General Public 5 | # License as published by the Free Software Foundation; either 6 | # version 2.1 of the License, or (at your option) any later version. 7 | # 8 | # This library is distributed in the hope that it will be useful, 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | # Lesser General Public License for more details. 12 | # 13 | # You should have received a copy of the GNU Lesser General Public 14 | # License along with this library; if not, write to the Free Software 15 | # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | summary.bsts <- function(object, burn = SuggestBurn(.1, object), ...) 
{ 18 | ## Prints a summary of the supplied bsts object. 19 | ## Args: 20 | ## object: An object of class 'bsts' 21 | ## burn: The number of MCMC iterations to discard as burn-in. 22 | ## ...: Additional arguments passed to summary.lm.spike, if 23 | ## 'object' has a regression component. 24 | ## Returns: 25 | ## A list of summaries describing the bsts object. 26 | ## residual.sd: The posterior mean of the residual standard 27 | ## deviation parameter. 28 | ## prediction.sd: The standard deviation of the one-step-ahead 29 | ## prediction errors. These differ from the residuals because 30 | ## they only condition on the data preceding the prediction. 31 | ## The residuals condition on all data in both directions. 32 | ## rsquare: The R-square from the model, computed by comparing 33 | ## 'residual.sd' to the sample variance of the original series. 34 | ## relative.gof: Harvey's goodness of fit statistic: 35 | ## 1 - SSE(prediction errors) / SST(first difference of original series). 36 | ## This is loosely analogous to the R^2 in a regression model. 37 | ## It differs in that the baseline model is a random walk with 38 | ## drift (instead of the sample mean). Models that fit worse, 39 | ## on average, than the baseline model can have a negative 40 | ## relative.gof score. 41 | ## size: If the original object had a regression component, then 42 | ## 'size' summarizes the distribution of the number of nonzero 43 | ## coefficients. 44 | ## coefficients: If the original object had a regression 45 | ## component, then 'coef' contains a summary of the regression 46 | ## coefficients computed using summary.lm.spike. 
47 | stopifnot(inherits(object, "bsts")) 48 | sigma.obs <- object$sigma.obs 49 | if (!is.null(sigma.obs)) { 50 | if (burn > 0) { 51 | sigma.obs <- sigma.obs[-(1:burn)] 52 | } 53 | residual.sd <- mean(sigma.obs) 54 | original.variance <- var(object$original.series, na.rm = TRUE) 55 | stopifnot(original.variance > 0) 56 | rsquare <- 1 - residual.sd^2 / original.variance 57 | } 58 | 59 | prediction.errors <- bsts.prediction.errors(object, burn = burn)$in.sample 60 | prediction.sse <- sum(colMeans(prediction.errors)^2) 61 | original.series <- as.numeric(object$original.series) 62 | dy <- diff(original.series) 63 | prediction.sst <- var(dy) * (length(dy) - 1) 64 | 65 | ans <- list(residual.sd = residual.sd, 66 | prediction.sd = sd(colMeans(prediction.errors)), 67 | rsquare = rsquare, 68 | relative.gof = 1 - prediction.sse / prediction.sst) 69 | 70 | ##---------------------------------------------------------------------- 71 | ## summarize the regression coefficients 72 | if (object$has.regression) { 73 | beta <- object$coefficients 74 | if (burn > 0) { 75 | beta <- beta[-(1:burn), , drop = FALSE] 76 | } 77 | include <- beta != 0 78 | model.size <- rowSums(include) 79 | ans$size <- summary(model.size) 80 | ans$coefficients <- SummarizeSpikeSlabCoefficients(object$coefficients, 81 | burn = burn, ...) 
82 | } 83 | return(ans) 84 | } 85 | -------------------------------------------------------------------------------- /data/gdp.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/gdp.RData -------------------------------------------------------------------------------- /data/goog.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/goog.RData -------------------------------------------------------------------------------- /data/iclaims.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/iclaims.RData -------------------------------------------------------------------------------- /data/new.home.sales.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/new.home.sales.RData -------------------------------------------------------------------------------- /data/rsxfs.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/rsxfs.RData -------------------------------------------------------------------------------- /data/shark.RData: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/shark.RData -------------------------------------------------------------------------------- /data/turkish.RData: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cran/bsts/f4573bfa3c64bd54b8a1b29f6e328e19677194f6/data/turkish.RData -------------------------------------------------------------------------------- /inst/tests/tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | library(Boom) 4 | 5 | test_check("bsts") 6 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-ar.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("AddAr produces nonzero coefficients", { 5 | sample.size <- 100 6 | residual.sd <- .001 7 | # Actual values of the AR coefficients 8 | true.phi <- c(-.7, .3, .15) 9 | ar <- arima.sim(model = list(ar = true.phi), n = sample.size, sd = 3) 10 | ## Layer some noise on top of the AR process. 11 | y <- ar + rnorm(sample.size, 0, residual.sd) 12 | ss <- AddAr(list(), lags = 3, sigma.prior = SdPrior(3.0, 1.0)) 13 | # Fit the model with knowledge with residual.sd essentially fixed at the true 14 | # value. 
15 | model <- bsts(y, 16 | state.specification = ss, 17 | niter = 10, 18 | prior = SdPrior(residual.sd, 100000)) 19 | 20 | expect_true(any(model$AR3.coefficients != 0)) 21 | }) 22 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-autoar.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("AutoAr does not crash on minimal data.", { 5 | y <- rnorm(4) 6 | ss <- AddAutoAr(list(), y=y, lags = 4) 7 | 8 | for (i in 1:20) { 9 | model <- bsts(y, ss, niter = 100, ping = -1) 10 | } 11 | expect_true(inherits(model, "bsts")) 12 | }) 13 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-date-range.R: -------------------------------------------------------------------------------- 1 | holiday <- NamedHoliday("MemorialDay", days.before = 2, days.after = 2) 2 | timestamps <- seq.Date(from = as.Date("2001-01-01"), by = "day", 3 | length.out = 365 * 10) 4 | 5 | influence <- DateRange(holiday, timestamps) 6 | 7 | test_that("DateRange returns a two-column data frame of dates", { 8 | expect_that(influence, is_a("data.frame")) 9 | expect_that(ncol(influence), equals(2)) 10 | expect_that(influence[, 1], is_a("Date")) 11 | expect_that(influence[, 2], is_a("Date")) 12 | expect_true(all(influence[, 1] <= influence[, 2])) 13 | }) 14 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-dirm.R: -------------------------------------------------------------------------------- 1 | set.seed(8675309) 2 | 3 | library(bsts) 4 | library(testthat) 5 | SimulateDirmData <- function(observation.sd = 1, trend.sd = .1, 6 | time.dimension = 100, nobs.per.period = 3, 7 | xdim = 4) { 8 | trend <- cumsum(rnorm(time.dimension, 0, trend.sd)) 9 | total.sample.size <- nobs.per.period * time.dimension 10 | predictors <- 
matrix(rnorm(total.sample.size * xdim), 11 | nrow = total.sample.size) 12 | coefficients <- rnorm(xdim) 13 | expanded.trend <- rep(trend, each = nobs.per.period) 14 | response <- expanded.trend + predictors %*% coefficients + rnorm( 15 | total.sample.size, 0, observation.sd) 16 | timestamps <- seq.Date(from = as.Date("2008-01-01"), len = time.dimension, by = "day") 17 | extended.timestamps <- rep(timestamps, each = nobs.per.period) 18 | return(list(response = response, 19 | predictors = predictors, 20 | timestamps = extended.timestamps, 21 | trend = trend, 22 | coefficients = coefficients)) 23 | } 24 | 25 | 26 | data <- SimulateDirmData() 27 | ss <- AddLocalLevel(list(), data$response, 28 | sigma.prior = SdPrior(sigma.guess = 0.1, sample.size = 1)) 29 | 30 | model <- dirm(data$response ~ data$predictors, ss, niter = 50, 31 | timestamps = data$timestamps, seed = 8675309, expected.model.size = 20) 32 | model2 <- dirm(response ~ predictors, ss, niter = 50, data = data, 33 | timestamps = data$timestamps, seed = 8675309, expected.model.size = 20) 34 | 35 | test_that("Models are identical", { 36 | expect_that(model, is_a("DynamicIntercept")) 37 | expect_that(model$coefficients, is_a("matrix")) 38 | expect_true(all(abs(model$coefficients - model2$coefficients) < 1e-8)) 39 | expect_true(all(abs(model$sigma.obs - model2$sigma.obs) < 1e-8)) 40 | }) 41 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-goog.R: -------------------------------------------------------------------------------- 1 | set.seed(8675309) 2 | library(bsts) 3 | data(goog) 4 | 5 | ## This works 6 | ## ss0 <- AddSemilocalLinearTrend(list(), as.numeric(goog)) 7 | ## model0 <- bsts(as.numeric(goog), ss0, niter = 50) 8 | 9 | 10 | # This example has lots of missing data but no timestamps 11 | ## pattern <- c(T, T, T, T, T, F, F) 12 | ## pattern <- rep(pattern, len = (7/5) * length(goog)) 13 | ## if (sum(pattern) > length(goog)) { 14 | ## pattern 
<- head(pattern, -(sum(pattern) - length(goog))) 15 | ## } 16 | ## extended.goog <- rep(NA, length(pattern)) 17 | ## extended.goog[pattern] <- goog 18 | ## ss1 <- AddSemilocalLinearTrend(list(), extended.goog) 19 | ## model1 <- bsts(extended.goog, ss1, niter = 50) 20 | 21 | 22 | ## This does not. 23 | goog <- zoo(as.numeric(goog), index(goog)) 24 | ss <- AddSemilocalLinearTrend(list(), goog) 25 | model <- bsts(goog, ss, niter = 50) 26 | 27 | 28 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-holidays.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | seed <- 8675309 4 | set.seed(seed) 5 | 6 | trend <- cumsum(rnorm(1095, 0, .1)) 7 | dates <- seq.Date(from = as.Date("2014-01-01"), length = length(trend), by = "day") 8 | y <- zoo(trend + rnorm(length(trend), 0, .2), dates) 9 | 10 | AddHolidayEffect <- function(y, dates, effect) { 11 | ## Adds a holiday effect to simulated data. 12 | ## Args: 13 | ## y: A zoo time series, with Dates for indices. 14 | ## dates: The dates of the holidays. 15 | ## effect: A vector of holiday effects of odd length. The central effect is 16 | ## the main holiday, with a symmetric influence window on either side. 17 | ## Returns: 18 | ## y, with the holiday effects added. 19 | time <- dates - (length(effect) - 1) / 2 20 | for (i in 1:length(effect)) { 21 | y[time] <- y[time] + effect[i] 22 | time <- time + 1 23 | } 24 | return(y) 25 | } 26 | 27 | ## Define some holidays. 28 | memorial.day <- NamedHoliday("MemorialDay") 29 | memorial.day.effect <- c(-.75, -2, -2) 30 | memorial.day.dates <- as.Date(c("2014-05-26", "2015-05-25", "2016-05-30")) 31 | y <- AddHolidayEffect(y, memorial.day.dates, memorial.day.effect) 32 | 33 | ## The holidays can be in any order. 
34 | holiday.list <- list(memorial.day) 35 | 36 | ## Let's train the model to just before MemorialDay 37 | cut.date = as.Date("2016-05-25") 38 | train.data <- y[time(y) < cut.date] 39 | test.data <- y[time(y) >= cut.date] 40 | ss <- AddLocalLevel(list(), train.data) 41 | ss <- AddRegressionHoliday(ss, train.data, holiday.list = holiday.list) 42 | model <- bsts(train.data, state.specification = ss, niter = 100, ping = 0, 43 | seed = seed) 44 | 45 | ## Now make a prediction covering MemorialDay 46 | cat("Starting the prediction.\n") 47 | my.horizon <- 15 48 | pred <- predict(object = model, horizon = my.horizon, seed = seed) 49 | plot(pred, plot.original = 365) 50 | points(index(test.data), test.data) 51 | 52 | test_that("Holiday covers true values", { 53 | expect_true(CheckMcmcMatrix(pred$distribution, test.data[1:15])) 54 | }) 55 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-multivariate.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | # GDP figures for 57 countries as reported by the OECD. 
5 | ## data(gdp) 6 | ## series.id <- gdp$Country 7 | ## timestamps <- gdp$Time 8 | 9 | test_that("Predictions and state are sane when only factors are present", { 10 | nobs <- 200 11 | ndim <- 3 12 | nfactors <- 2 13 | seed <- 8675309 14 | set.seed(seed) 15 | 16 | residual.sd <- sqrt(abs(rnorm(ndim))) / 10 17 | 18 | factors <- matrix(rnorm(nobs * nfactors, sd=1), ncol = nfactors) 19 | factors <- apply(factors, 2, cumsum) 20 | coefficients <- matrix(rnorm(ndim * nfactors), nrow=nfactors) 21 | coefficients <- coefficients/coefficients[, 1] 22 | 23 | state <- factors %*% coefficients 24 | errors <- matrix(rnorm(nobs * ndim), ncol=ndim) %*% diag(residual.sd) 25 | y <- state + errors 26 | 27 | ss <- AddSharedLocalLevel(list(), y, nfactors=nfactors) 28 | x <- matrix(rep(1, nobs), ncol=1) 29 | 30 | model <- mbsts(y, ss, niter=500, data.format="wide", seed=seed) 31 | pred <- predict(model, 24, seed = seed) 32 | ## Each time series should be within the prediction interval of the next 33 | ## point. 
34 | for (s in 1:ndim) { 35 | last.y = tail(y[, s], 1) 36 | interval <- pred$interval[s, , 1] 37 | expect_gt(last.y, interval[1]) 38 | expect_lt(last.y, interval[2]) 39 | } 40 | 41 | state.means <- apply(model$shared.state.contributions, c(1, 3, 4), sum) 42 | intervals <- apply(state.means[-(1:100), , ], c(2,3), quantile, c(.025, .975)) 43 | state.posterior.means <- apply(state.means[-(1:100), , ], c(2,3), mean) 44 | 45 | mean.residual.sd <- colMeans(model$residual.sd[-(1:100), ]) 46 | for (s in 1:ndim) { 47 | expect_gt(cor(y[, s], colMeans(state.means[, s, ])), .99) 48 | } 49 | }) 50 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-plot-components.R: -------------------------------------------------------------------------------- 1 | data(iclaims) 2 | ss <- AddLocalLinearTrend(list(), initial.claims$iclaimsNSA) 3 | ss <- AddSeasonal(ss, initial.claims$iclaimsNSA, nseasons = 52) 4 | model <- bsts(iclaimsNSA ~ ., state.specification = ss, 5 | data = initial.claims, niter = 100) 6 | 7 | test_that("PlotBstsComponents handles errors correctly", { 8 | expect_error(plot(model, "comp", burn = 10, components = 99), ".* is not TRUE") 9 | expect_error(plot(model, "comp", burn = 10, components = 1:2), NA) 10 | expect_error(plot(model, "comp", burn = 10, components = 2:1), NA) 11 | expect_error(plot(model, "comp", burn = 10, components = c(-1, 2)), ".* is not TRUE") 12 | expect_error(plot(model, "comp", burn = 10, components = numeric(0)), ".* is not TRUE") 13 | }) 14 | 15 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-poisson.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | data(shark) 5 | logshark <- log1p(shark$Attacks) 6 | seed <- 8675309 7 | 8 | test_that("Poisson bsts", { 9 | 10 | ss.level <- AddLocalLevel(list(), y = logshark) 11 | model.level <- 
bsts(shark$Attacks, ss.level, niter = 500, 12 | ping = 250, family = "poisson", seed = seed) 13 | expect_that(model.level, is_a("bsts")) 14 | expect_true(all(abs(model.level$state.contributions) < 10)) 15 | 16 | ss.level <- AddLocalLevel(list(), y = logshark) 17 | model.level <- bsts(cbind(shark$Attacks, shark$Population / 1000), 18 | state.specification = ss.level, niter = 500, family = "poisson", 19 | ping = 250, seed = seed) 20 | expect_true(all(abs(model.level$state.contributions) < 15)) 21 | 22 | ss <- AddLocalLinearTrend(list(), y = logshark) 23 | model <- bsts(shark$Attacks, ss, niter = 500, family = "poisson", ping = 250, 24 | seed = seed) 25 | expect_that(model, is_a("bsts")) 26 | 27 | expect_true(all(abs(model$state.contributions) < 10)) 28 | 29 | ss <- AddLocalLinearTrend(list(), logshark, 30 | initial.level.prior = NormalPrior(0, .1), 31 | initial.slope.prior = NormalPrior(.16, .1)) 32 | model <- bsts(shark$Attacks, ss, niter = 500, ping = 250, 33 | family = "poisson", seed = seed) 34 | expect_that(model, is_a("bsts")) 35 | expect_true(all(abs(model$state.contributions) < 10)) 36 | 37 | ss.semi <- AddSemilocalLinearTrend(list(), y = logshark) 38 | model.semi <- bsts(shark$Attacks, ss.semi, niter = 500, 39 | ping = 250, family = "poisson", seed = seed) 40 | expect_that(model.semi, is_a("bsts")) 41 | expect_true(all(abs(model.semi$state.contributions) < 10)) 42 | 43 | ss.student <- AddStudentLocalLinearTrend(list(), y = logshark) 44 | model.student <- bsts(shark$Attacks, ss.student, niter = 500, 45 | ping = 250, family = "poisson", seed = seed) 46 | expect_that(model.student, is_a("bsts")) 47 | expect_true(all(abs(model.student$state.contributions) < 10)) 48 | 49 | ## Add an unrelated predictor. 
50 | shark$x <- rnorm(nrow(shark)) 51 | shark.training <- shark[1:48,] 52 | shark.test <- shark[49:54, ] 53 | ss.reg <- AddLocalLinearTrend(list(), y = logshark) 54 | ss.reg <- AddDynamicRegression(ss.reg, log1p(Attacks) ~ x, data = shark) 55 | model <- bsts(cbind(shark.training$Attacks, shark.training$Population / 1000), 56 | ss.reg, niter = 500, family = "poisson", seed = seed) 57 | pred <- predict(model, newdata = shark.test, trials.or.exposure = max(model$exposure)) 58 | expect_that(pred, is_a("bsts.prediction")) 59 | expect_equal(ncol(pred$distribution), 6) 60 | }) 61 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-prediction-errors.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("Scaled prediction errors are reasonable.", { 5 | data(AirPassengers) 6 | y <- log(AirPassengers) 7 | ss <- AddLocalLinearTrend(list(), y) 8 | ss <- AddSeasonal(ss, y, nseasons = 12) 9 | model <- bsts(y, state.specification = ss, niter = 500) 10 | errors <- bsts.prediction.errors(model, burn = 100) 11 | se <- bsts.prediction.errors(model, burn = 100, standardize = TRUE) 12 | 13 | ## The scaled and unscaled errors should be the same size. 14 | expect_equal(dim(se[[1]]), dim(errors[[1]])) 15 | 16 | ## The errors should be highly but not perfectly correlated. 
17 | expect_gt(cor(se[[1]][30, ], errors[[1]][30, ]), .8) 18 | expect_lte(cor(se[[1]][30, ], errors[[1]][30, ]), 1.0) 19 | }) 20 | 21 | test_that("Prediction errors work for student family", { 22 | data(AirPassengers) 23 | y <- log(AirPassengers) 24 | ss <- AddLocalLinearTrend(list(), y) 25 | ss <- AddSeasonal(ss, y, nseasons = 12) 26 | model <- bsts(y, state.specification = ss, niter = 500, family="student") 27 | errors <- bsts.prediction.errors(model, cutpoints = c(80, 120)) 28 | }) 29 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-seasonal.R: -------------------------------------------------------------------------------- 1 | notcran <- Sys.getenv("NOT_CRAN") != "" 2 | if (notcran) { 3 | seed <- 8675309 4 | 5 | SeasonalTransitionMatrix <- function(nseasons) { 6 | ## Returns the transition matrix for a seasonal state model. 7 | ## 8 | ## Args: 9 | ## nseasons: The number of seasons per cycle. 10 | ## 11 | ## Returns: 12 | ## A matrix with nseasons - 1 rows and columns. 13 | id.matrix <- diag(rep(1, nseasons - 2)) 14 | return(rbind(rep(-1, nseasons - 1), 15 | cbind(id.matrix, rep(0, nrow(id.matrix))))) 16 | } 17 | 18 | SimulateSeasonalPattern <- function(sample.size, initial.pattern, 19 | season.duration, innovation.sd) { 20 | ## Args: 21 | ## sample.size: The number of time points to simulate. 22 | ## initial.pattern: The pattern from a single cycle, which need not sum to 23 | ## zero. 24 | ## season.duration: The number of time points that each season will last. 25 | ## innovation.sd: The standard deviation of the innovation error term in the 26 | ## seasonal state model. 27 | ## 28 | ## Returns: 29 | ## A vector of length 'sample.size' containing the contribution of this 30 | ## seasonal component to the mean of the series. 31 | 32 | ## Compute the initial state by removing the final element from the initial 33 | ## pattern. 
34 | state <- head(initial.pattern, -1) 35 | nseasons <- length(initial.pattern) 36 | transition.matrix <- SeasonalTransitionMatrix(nseasons) 37 | pattern <- numeric(sample.size) 38 | for (i in 1:sample.size) { 39 | pattern[i] <- state[1] 40 | state <- transition.matrix %*% state 41 | state[1] <- state[1] + rnorm(1, 0, innovation.sd) 42 | } 43 | if (season.duration > 1) { 44 | pattern <- rep(pattern, each = season.duration)[1:sample.size] 45 | } 46 | return(pattern) 47 | } 48 | 49 | set.seed(seed) 50 | 51 | daily.pattern <- rnorm(7) 52 | 53 | ## There are roughly 52 weeks per year, but we can pretend there are 54 | ## fewer for testing purposes. 55 | weeks.per.year <- 5 56 | n.years <- 10 57 | 58 | ## A smooth annual pattern is more easily aliased with the trend. 59 | weekly.annual.pattern <- rnorm(weeks.per.year, 60 | cos(2 * pi * (1:weeks.per.year) / weeks.per.year), .1) 61 | 62 | sample.size <- round(7 * weeks.per.year * n.years) 63 | 64 | trend <- cumsum(rnorm(sample.size, 0, .3)) 65 | seasonal.daily <- SimulateSeasonalPattern(sample.size, daily.pattern, 66 | season.duration = 1, innovation.sd = .15) 67 | seasonal.annual <- SimulateSeasonalPattern(sample.size, weekly.annual.pattern, 68 | season.duration = 7, innovation.sd = .5) 69 | 70 | series <- rnorm(sample.size, trend + seasonal.daily + seasonal.annual, 1.0) 71 | 72 | ss <- AddLocalLevel(list(), series, sigma.prior = SdPrior(.3, 10)) 73 | ss <- AddSeasonal(ss, series, nseasons = 7, sigma.prior = SdPrior(.15, 10)) 74 | ss <- AddSeasonal(ss, series, nseasons = weeks.per.year, season.duration = 7, 75 | sigma.prior = SdPrior(.5, 10)) 76 | 77 | model <- bsts(series, ss, niter = 500, seed = seed, prior = SdPrior(1.0, 10)) 78 | 79 | ## Check that the recovered state values match the truth. 80 | test_that("seasonal model covers true state", { 81 | expect_that(model, is_a("bsts")) 82 | ## The trend here is more jagged than the model expects, so the trend test 83 | ## fails. 
That's fine as long as the other two tests pass. 84 | ## 85 | ## expect_true(CheckMcmcMatrix(model$state.contributions[, 1, ], 86 | ## truth = trend, confidence = .5), 87 | ## info = "trend failed") 88 | expect_true(CheckMcmcMatrix(model$state.contributions[, 2, ], 89 | truth = seasonal.daily, confidence = .8), 90 | info = "seasonal.daily failed") 91 | expect_true(CheckMcmcMatrix(model$state.contributions[, 3, ], 92 | truth = seasonal.annual, confidence = .8), 93 | info = "seasonal.annual failed") 94 | }) 95 | } 96 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-sigma-upper-limit.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("sigma.upper.limit can be specified at the main bsts call.", { 5 | y <- rnorm(4) 6 | x <- rnorm(4) 7 | ss <- AddLocalLevel(list(), y) 8 | 9 | warning("enable the test for 'sigma.upper.limit'") 10 | 11 | ## for (i in 1:20) { 12 | ## model <- bsts(y, ss, niter = 100, ping = -1, sigma.upper.limit = 10) 13 | ## } 14 | ## expect_true(inherits(model, "bsts")) 15 | 16 | ## for(i in 1:20) { 17 | ## model <- bsts(y ~ x, ss, niter = 10, ping = -1, sigma.upper.limit = 10) 18 | ## } 19 | }) 20 | -------------------------------------------------------------------------------- /inst/tests/tests/testthat/test-trig.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | seed <- 8675309 4 | set.seed(seed) 5 | 6 | test_that("Multiple frequencies can be present.", { 7 | data(AirPassengers) 8 | y <- log(AirPassengers) 9 | ss <- AddLocalLinearTrend(list(), y) 10 | ss <- AddTrig(ss, y, period = 20, frequencies = 1) 11 | ss <- AddTrig(ss, y, period = 41, frequencies = 1) 12 | model <- bsts(y, state.specification = ss, niter = 500) 13 | expect_equal(500, length(model$trig.coefficient.sd.20)) 14 | expect_equal(500, 
length(model$trig.coefficient.sd.41)) 15 | expect_equal(dimnames(model$state.contributions)$component, 16 | c("trend", "trig.20", "trig.41")) 17 | }) 18 | -------------------------------------------------------------------------------- /inst/tests/testthat.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | library(Boom) 4 | 5 | test_check("bsts") 6 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-ar.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("AddAr produces nonzero coefficients", { 5 | sample.size <- 100 6 | residual.sd <- .001 7 | # Actual values of the AR coefficients 8 | true.phi <- c(-.7, .3, .15) 9 | ar <- arima.sim(model = list(ar = true.phi), n = sample.size, sd = 3) 10 | ## Layer some noise on top of the AR process. 11 | y <- ar + rnorm(sample.size, 0, residual.sd) 12 | ss <- AddAr(list(), lags = 3, sigma.prior = SdPrior(3.0, 1.0)) 13 | # Fit the model with knowledge with residual.sd essentially fixed at the true 14 | # value. 
15 | model <- bsts(y, 16 | state.specification = ss, 17 | niter = 10, 18 | prior = SdPrior(residual.sd, 100000)) 19 | 20 | expect_true(any(model$AR3.coefficients != 0)) 21 | }) 22 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-autoar.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("AutoAr does not crash on minimal data.", { 5 | y <- rnorm(4) 6 | ss <- AddAutoAr(list(), y=y, lags = 4) 7 | 8 | for (i in 1:20) { 9 | model <- bsts(y, ss, niter = 100, ping = -1) 10 | } 11 | expect_true(inherits(model, "bsts")) 12 | }) 13 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-date-range.R: -------------------------------------------------------------------------------- 1 | holiday <- NamedHoliday("MemorialDay", days.before = 2, days.after = 2) 2 | timestamps <- seq.Date(from = as.Date("2001-01-01"), by = "day", 3 | length.out = 365 * 10) 4 | 5 | influence <- DateRange(holiday, timestamps) 6 | 7 | test_that("DateRange returns a two-column data frame of dates", { 8 | expect_that(influence, is_a("data.frame")) 9 | expect_that(ncol(influence), equals(2)) 10 | expect_that(influence[, 1], is_a("Date")) 11 | expect_that(influence[, 2], is_a("Date")) 12 | expect_true(all(influence[, 1] <= influence[, 2])) 13 | }) 14 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-dirm.R: -------------------------------------------------------------------------------- 1 | set.seed(8675309) 2 | 3 | library(bsts) 4 | library(testthat) 5 | SimulateDirmData <- function(observation.sd = 1, trend.sd = .1, 6 | time.dimension = 100, nobs.per.period = 3, 7 | xdim = 4) { 8 | trend <- cumsum(rnorm(time.dimension, 0, trend.sd)) 9 | total.sample.size <- nobs.per.period * time.dimension 10 | predictors <- matrix(rnorm(total.sample.size * xdim), 
11 | nrow = total.sample.size) 12 | coefficients <- rnorm(xdim) 13 | expanded.trend <- rep(trend, each = nobs.per.period) 14 | response <- expanded.trend + predictors %*% coefficients + rnorm( 15 | total.sample.size, 0, observation.sd) 16 | timestamps <- seq.Date(from = as.Date("2008-01-01"), len = time.dimension, by = "day") 17 | extended.timestamps <- rep(timestamps, each = nobs.per.period) 18 | return(list(response = response, 19 | predictors = predictors, 20 | timestamps = extended.timestamps, 21 | trend = trend, 22 | coefficients = coefficients)) 23 | } 24 | 25 | 26 | data <- SimulateDirmData() 27 | ss <- AddLocalLevel(list(), data$response, 28 | sigma.prior = SdPrior(sigma.guess = 0.1, sample.size = 1)) 29 | 30 | model <- dirm(data$response ~ data$predictors, ss, niter = 50, 31 | timestamps = data$timestamps, seed = 8675309, expected.model.size = 20) 32 | model2 <- dirm(response ~ predictors, ss, niter = 50, data = data, 33 | timestamps = data$timestamps, seed = 8675309, expected.model.size = 20) 34 | 35 | test_that("Models are identical", { 36 | expect_that(model, is_a("DynamicIntercept")) 37 | expect_that(model$coefficients, is_a("matrix")) 38 | expect_true(all(abs(model$coefficients - model2$coefficients) < 1e-8)) 39 | expect_true(all(abs(model$sigma.obs - model2$sigma.obs) < 1e-8)) 40 | }) 41 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-goog.R: -------------------------------------------------------------------------------- 1 | set.seed(8675309) 2 | library(bsts) 3 | data(goog) 4 | 5 | ## This works 6 | ## ss0 <- AddSemilocalLinearTrend(list(), as.numeric(goog)) 7 | ## model0 <- bsts(as.numeric(goog), ss0, niter = 50) 8 | 9 | 10 | # This example has lots of missing data but no timestamps 11 | ## pattern <- c(T, T, T, T, T, F, F) 12 | ## pattern <- rep(pattern, len = (7/5) * length(goog)) 13 | ## if (sum(pattern) > length(goog)) { 14 | ## pattern <- head(pattern, -(sum(pattern) - 
length(goog))) 15 | ## } 16 | ## extended.goog <- rep(NA, length(pattern)) 17 | ## extended.goog[pattern] <- goog 18 | ## ss1 <- AddSemilocalLinearTrend(list(), extended.goog) 19 | ## model1 <- bsts(extended.goog, ss1, niter = 50) 20 | 21 | 22 | ## This does not. 23 | goog <- zoo(as.numeric(goog), index(goog)) 24 | ss <- AddSemilocalLinearTrend(list(), goog) 25 | model <- bsts(goog, ss, niter = 50) 26 | 27 | 28 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-holidays.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | seed <- 8675309 4 | set.seed(seed) 5 | 6 | trend <- cumsum(rnorm(1095, 0, .1)) 7 | dates <- seq.Date(from = as.Date("2014-01-01"), length = length(trend), by = "day") 8 | y <- zoo(trend + rnorm(length(trend), 0, .2), dates) 9 | 10 | AddHolidayEffect <- function(y, dates, effect) { 11 | ## Adds a holiday effect to simulated data. 12 | ## Args: 13 | ## y: A zoo time series, with Dates for indices. 14 | ## dates: The dates of the holidays. 15 | ## effect: A vector of holiday effects of odd length. The central effect is 16 | ## the main holiday, with a symmetric influence window on either side. 17 | ## Returns: 18 | ## y, with the holiday effects added. 19 | time <- dates - (length(effect) - 1) / 2 20 | for (i in 1:length(effect)) { 21 | y[time] <- y[time] + effect[i] 22 | time <- time + 1 23 | } 24 | return(y) 25 | } 26 | 27 | ## Define some holidays. 28 | memorial.day <- NamedHoliday("MemorialDay") 29 | memorial.day.effect <- c(-.75, -2, -2) 30 | memorial.day.dates <- as.Date(c("2014-05-26", "2015-05-25", "2016-05-30")) 31 | y <- AddHolidayEffect(y, memorial.day.dates, memorial.day.effect) 32 | 33 | ## The holidays can be in any order. 
34 | holiday.list <- list(memorial.day) 35 | 36 | ## Let's train the model to just before MemorialDay 37 | cut.date = as.Date("2016-05-25") 38 | train.data <- y[time(y) < cut.date] 39 | test.data <- y[time(y) >= cut.date] 40 | ss <- AddLocalLevel(list(), train.data) 41 | ss <- AddRegressionHoliday(ss, train.data, holiday.list = holiday.list) 42 | model <- bsts(train.data, state.specification = ss, niter = 100, ping = 0, 43 | seed = seed) 44 | 45 | ## Now make a prediction covering MemorialDay 46 | cat("Starting the prediction.\n") 47 | my.horizon <- 15 48 | pred <- predict(object = model, horizon = my.horizon, seed = seed) 49 | plot(pred, plot.original = 365) 50 | points(index(test.data), test.data) 51 | 52 | test_that("Holiday covers true values", { 53 | expect_true(CheckMcmcMatrix(pred$distribution, test.data[1:15])) 54 | }) 55 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-multivariate.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | # GDP figures for 57 countries as reported by the OECD. 
5 | ## data(gdp) 6 | ## series.id <- gdp$Country 7 | ## timestamps <- gdp$Time 8 | 9 | test_that("Predictions and state are sane when only factors are present", { 10 | nobs <- 200 11 | ndim <- 3 12 | nfactors <- 2 13 | seed <- 8675309 14 | set.seed(seed) 15 | 16 | residual.sd <- sqrt(abs(rnorm(ndim))) / 10 17 | 18 | factors <- matrix(rnorm(nobs * nfactors, sd=1), ncol = nfactors) 19 | factors <- apply(factors, 2, cumsum) 20 | coefficients <- matrix(rnorm(ndim * nfactors), nrow=nfactors) 21 | coefficients <- coefficients/coefficients[, 1] 22 | 23 | state <- factors %*% coefficients 24 | errors <- matrix(rnorm(nobs * ndim), ncol=ndim) %*% diag(residual.sd) 25 | y <- state + errors 26 | 27 | ss <- AddSharedLocalLevel(list(), y, nfactors=nfactors) 28 | x <- matrix(rep(1, nobs), ncol=1) 29 | 30 | model <- mbsts(y, ss, niter=500, data.format="wide", seed=seed) 31 | pred <- predict(model, 24, seed = seed) 32 | ## Each time series should be within the prediction interval of the next 33 | ## point. 
34 | for (s in 1:ndim) { 35 | last.y = tail(y[, s], 1) 36 | interval <- pred$interval[s, , 1] 37 | expect_gt(last.y, interval[1]) 38 | expect_lt(last.y, interval[2]) 39 | } 40 | 41 | state.means <- apply(model$shared.state.contributions, c(1, 3, 4), sum) 42 | intervals <- apply(state.means[-(1:100), , ], c(2,3), quantile, c(.025, .975)) 43 | state.posterior.means <- apply(state.means[-(1:100), , ], c(2,3), mean) 44 | 45 | mean.residual.sd <- colMeans(model$residual.sd[-(1:100), ]) 46 | for (s in 1:ndim) { 47 | expect_gt(cor(y[, s], colMeans(state.means[, s, ])), .99) 48 | } 49 | }) 50 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-plot-components.R: -------------------------------------------------------------------------------- 1 | data(iclaims) 2 | ss <- AddLocalLinearTrend(list(), initial.claims$iclaimsNSA) 3 | ss <- AddSeasonal(ss, initial.claims$iclaimsNSA, nseasons = 52) 4 | model <- bsts(iclaimsNSA ~ ., state.specification = ss, 5 | data = initial.claims, niter = 100) 6 | 7 | test_that("PlotBstsComponents handles errors correctly", { 8 | expect_error(plot(model, "comp", burn = 10, components = 99), ".* is not TRUE") 9 | expect_error(plot(model, "comp", burn = 10, components = 1:2), NA) 10 | expect_error(plot(model, "comp", burn = 10, components = 2:1), NA) 11 | expect_error(plot(model, "comp", burn = 10, components = c(-1, 2)), ".* is not TRUE") 12 | expect_error(plot(model, "comp", burn = 10, components = numeric(0)), ".* is not TRUE") 13 | }) 14 | 15 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-poisson.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | data(shark) 5 | logshark <- log1p(shark$Attacks) 6 | seed <- 8675309 7 | 8 | test_that("Poisson bsts", { 9 | 10 | ss.level <- AddLocalLevel(list(), y = logshark) 11 | model.level <- bsts(shark$Attacks, 
ss.level, niter = 500, 12 | ping = 250, family = "poisson", seed = seed) 13 | expect_that(model.level, is_a("bsts")) 14 | expect_true(all(abs(model.level$state.contributions) < 10)) 15 | 16 | ss.level <- AddLocalLevel(list(), y = logshark) 17 | model.level <- bsts(cbind(shark$Attacks, shark$Population / 1000), 18 | state.specification = ss.level, niter = 500, family = "poisson", 19 | ping = 250, seed = seed) 20 | expect_true(all(abs(model.level$state.contributions) < 15)) 21 | 22 | ss <- AddLocalLinearTrend(list(), y = logshark) 23 | model <- bsts(shark$Attacks, ss, niter = 500, family = "poisson", ping = 250, 24 | seed = seed) 25 | expect_that(model, is_a("bsts")) 26 | 27 | expect_true(all(abs(model$state.contributions) < 10)) 28 | 29 | ss <- AddLocalLinearTrend(list(), logshark, 30 | initial.level.prior = NormalPrior(0, .1), 31 | initial.slope.prior = NormalPrior(.16, .1)) 32 | model <- bsts(shark$Attacks, ss, niter = 500, ping = 250, 33 | family = "poisson", seed = seed) 34 | expect_that(model, is_a("bsts")) 35 | expect_true(all(abs(model$state.contributions) < 10)) 36 | 37 | ss.semi <- AddSemilocalLinearTrend(list(), y = logshark) 38 | model.semi <- bsts(shark$Attacks, ss.semi, niter = 500, 39 | ping = 250, family = "poisson", seed = seed) 40 | expect_that(model.semi, is_a("bsts")) 41 | expect_true(all(abs(model.semi$state.contributions) < 10)) 42 | 43 | ss.student <- AddStudentLocalLinearTrend(list(), y = logshark) 44 | model.student <- bsts(shark$Attacks, ss.student, niter = 500, 45 | ping = 250, family = "poisson", seed = seed) 46 | expect_that(model.student, is_a("bsts")) 47 | expect_true(all(abs(model.student$state.contributions) < 10)) 48 | 49 | ## Add an unrelated predictor. 
50 | shark$x <- rnorm(nrow(shark)) 51 | shark.training <- shark[1:48,] 52 | shark.test <- shark[49:54, ] 53 | ss.reg <- AddLocalLinearTrend(list(), y = logshark) 54 | ss.reg <- AddDynamicRegression(ss.reg, log1p(Attacks) ~ x, data = shark) 55 | model <- bsts(cbind(shark.training$Attacks, shark.training$Population / 1000), 56 | ss.reg, niter = 500, family = "poisson", seed = seed) 57 | pred <- predict(model, newdata = shark.test, trials.or.exposure = max(model$exposure)) 58 | expect_that(pred, is_a("bsts.prediction")) 59 | expect_equal(ncol(pred$distribution), 6) 60 | }) 61 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-prediction-errors.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("Scaled prediction errors are reasonable.", { 5 | data(AirPassengers) 6 | y <- log(AirPassengers) 7 | ss <- AddLocalLinearTrend(list(), y) 8 | ss <- AddSeasonal(ss, y, nseasons = 12) 9 | model <- bsts(y, state.specification = ss, niter = 500) 10 | errors <- bsts.prediction.errors(model, burn = 100) 11 | se <- bsts.prediction.errors(model, burn = 100, standardize = TRUE) 12 | 13 | ## The scaled and unscaled errors should be the same size. 14 | expect_equal(dim(se[[1]]), dim(errors[[1]])) 15 | 16 | ## The errors should be highly but not perfectly correlated. 
17 | expect_gt(cor(se[[1]][30, ], errors[[1]][30, ]), .8) 18 | expect_lte(cor(se[[1]][30, ], errors[[1]][30, ]), 1.0) 19 | }) 20 | 21 | test_that("Prediction errors work for student family", { 22 | data(AirPassengers) 23 | y <- log(AirPassengers) 24 | ss <- AddLocalLinearTrend(list(), y) 25 | ss <- AddSeasonal(ss, y, nseasons = 12) 26 | model <- bsts(y, state.specification = ss, niter = 500, family="student") 27 | errors <- bsts.prediction.errors(model, cutpoints = c(80, 120)) 28 | }) 29 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-seasonal.R: -------------------------------------------------------------------------------- 1 | notcran <- Sys.getenv("NOT_CRAN") != "" 2 | if (notcran) { 3 | seed <- 8675309 4 | 5 | SeasonalTransitionMatrix <- function(nseasons) { 6 | ## Returns the transition matrix for a seasonal state model. 7 | ## 8 | ## Args: 9 | ## nseasons: The number of seasons per cycle. 10 | ## 11 | ## Returns: 12 | ## A matrix with nseasons - 1 rows and columns. 13 | id.matrix <- diag(rep(1, nseasons - 2)) 14 | return(rbind(rep(-1, nseasons - 1), 15 | cbind(id.matrix, rep(0, nrow(id.matrix))))) 16 | } 17 | 18 | SimulateSeasonalPattern <- function(sample.size, initial.pattern, 19 | season.duration, innovation.sd) { 20 | ## Args: 21 | ## sample.size: The number of time points to simulate. 22 | ## initial.pattern: The pattern from a single cycle, which need not sum to 23 | ## zero. 24 | ## season.duration: The number of time points that each season will last. 25 | ## innovation.sd: The standard deviation of the innovation error term in the 26 | ## seasonal state model. 27 | ## 28 | ## Returns: 29 | ## A vector of length 'sample.size' containing the contribution of this 30 | ## seasonal component to the mean of the series. 31 | 32 | ## Compute the initial state by removing the final element from the initial 33 | ## pattern. 
34 | state <- head(initial.pattern, -1) 35 | nseasons <- length(initial.pattern) 36 | transition.matrix <- SeasonalTransitionMatrix(nseasons) 37 | pattern <- numeric(sample.size) 38 | for (i in 1:sample.size) { 39 | pattern[i] <- state[1] 40 | state <- transition.matrix %*% state 41 | state[1] <- state[1] + rnorm(1, 0, innovation.sd) 42 | } 43 | if (season.duration > 1) { 44 | pattern <- rep(pattern, each = season.duration)[1:sample.size] 45 | } 46 | return(pattern) 47 | } 48 | 49 | set.seed(seed) 50 | 51 | daily.pattern <- rnorm(7) 52 | 53 | ## There are roughly 52 weeks per year, but we can pretend there are 54 | ## fewer for testing purposes. 55 | weeks.per.year <- 5 56 | n.years <- 10 57 | 58 | ## A smooth annual pattern is more easily aliased with the trend. 59 | weekly.annual.pattern <- rnorm(weeks.per.year, 60 | cos(2 * pi * (1:weeks.per.year) / weeks.per.year), .1) 61 | 62 | sample.size <- round(7 * weeks.per.year * n.years) 63 | 64 | trend <- cumsum(rnorm(sample.size, 0, .3)) 65 | seasonal.daily <- SimulateSeasonalPattern(sample.size, daily.pattern, 66 | season.duration = 1, innovation.sd = .15) 67 | seasonal.annual <- SimulateSeasonalPattern(sample.size, weekly.annual.pattern, 68 | season.duration = 7, innovation.sd = .5) 69 | 70 | series <- rnorm(sample.size, trend + seasonal.daily + seasonal.annual, 1.0) 71 | 72 | ss <- AddLocalLevel(list(), series, sigma.prior = SdPrior(.3, 10)) 73 | ss <- AddSeasonal(ss, series, nseasons = 7, sigma.prior = SdPrior(.15, 10)) 74 | ss <- AddSeasonal(ss, series, nseasons = weeks.per.year, season.duration = 7, 75 | sigma.prior = SdPrior(.5, 10)) 76 | 77 | model <- bsts(series, ss, niter = 500, seed = seed, prior = SdPrior(1.0, 10)) 78 | 79 | ## Check that the recovered state values match the truth. 80 | test_that("seasonal model covers true state", { 81 | expect_that(model, is_a("bsts")) 82 | ## The trend here is more jagged than the model expects, so the trend test 83 | ## fails. 
That's fine as long as the other two tests pass. 84 | ## 85 | ## expect_true(CheckMcmcMatrix(model$state.contributions[, 1, ], 86 | ## truth = trend, confidence = .5), 87 | ## info = "trend failed") 88 | expect_true(CheckMcmcMatrix(model$state.contributions[, 2, ], 89 | truth = seasonal.daily, confidence = .8), 90 | info = "seasonal.daily failed") 91 | expect_true(CheckMcmcMatrix(model$state.contributions[, 3, ], 92 | truth = seasonal.annual, confidence = .8), 93 | info = "seasonal.annual failed") 94 | }) 95 | } 96 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-sigma-upper-limit.R: -------------------------------------------------------------------------------- 1 | library(bsts) 2 | library(testthat) 3 | 4 | test_that("sigma.upper.limit can be specified at the main bsts call.", { 5 | y <- rnorm(4) 6 | x <- rnorm(4) 7 | ss <- AddLocalLevel(list(), y) 8 | 9 | warning("enable the test for 'sigma.upper.limit'") 10 | 11 | ## for (i in 1:20) { 12 | ## model <- bsts(y, ss, niter = 100, ping = -1, sigma.upper.limit = 10) 13 | ## } 14 | ## expect_true(inherits(model, "bsts")) 15 | 16 | ## for(i in 1:20) { 17 | ## model <- bsts(y ~ x, ss, niter = 10, ping = -1, sigma.upper.limit = 10) 18 | ## } 19 | }) 20 | -------------------------------------------------------------------------------- /inst/tests/testthat/test-trig.R: -------------------------------------------------------------------------------- 1 | library(testthat) 2 | library(bsts) 3 | seed <- 8675309 4 | set.seed(seed) 5 | 6 | test_that("Multiple frequencies can be present.", { 7 | data(AirPassengers) 8 | y <- log(AirPassengers) 9 | ss <- AddLocalLinearTrend(list(), y) 10 | ss <- AddTrig(ss, y, period = 20, frequencies = 1) 11 | ss <- AddTrig(ss, y, period = 41, frequencies = 1) 12 | model <- bsts(y, state.specification = ss, niter = 500) 13 | expect_equal(500, length(model$trig.coefficient.sd.20)) 14 | expect_equal(500, 
length(model$trig.coefficient.sd.41)) 15 | expect_equal(dimnames(model$state.contributions)$component, 16 | c("trend", "trig.20", "trig.41")) 17 | }) 18 | -------------------------------------------------------------------------------- /man/HarveyCumulator.Rd: -------------------------------------------------------------------------------- 1 | \name{HarveyCumulator} 2 | \alias{HarveyCumulator} 3 | \title{HarveyCumulator} 4 | \Rdversion{1.1} 5 | 6 | \description{ Given a state space model on a fine scale, the Harvey 7 | cumulator aggregates the model to a coarser scale (e.g. from days to 8 | weeks, or weeks to months). 9 | 10 | } 11 | 12 | \usage{ 13 | HarveyCumulator(fine.series, 14 | contains.end, 15 | membership.fraction) 16 | } 17 | 18 | \arguments{ 19 | 20 | \item{fine.series}{The fine-scale time series to be aggregated.} 21 | 22 | \item{contains.end}{A logical vector, with length matching 23 | \code{fine.series} indicating whether each fine scale time interval 24 | contains the end of a coarse time interval. For example, months 25 | don't contain a fixed number of weeks, so when cumulating a weekly 26 | time series into a monthly series, you need to know which weeks 27 | contain the end of a month.} 28 | 29 | \item{membership.fraction}{The fraction of each fine-scale time 30 | observation belonging to the coarse scale time observation at the 31 | beginning of the time interval. For example, if week i started in 32 | March and ended in April, \code{membership.fraction[i]} is the 33 | fraction of fine.series[i] that should be attributed to March. This 34 | should be 1 for most observations.} 35 | 36 | } 37 | 38 | \value{ Returns a vector containing the course scale partial aggregates 39 | of \code{fine.series}. } 40 | 41 | \references{ 42 | Harvey (1990), "Forecasting, structural time series, and the Kalman 43 | filter", Cambridge University Press. 44 | 45 | Durbin and Koopman (2001), "Time series analysis by state space 46 | methods", Oxford University Press. 
47 | } 48 | 49 | \author{ 50 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 51 | } 52 | 53 | \seealso{ 54 | \code{\link{bsts.mixed}}, 55 | } 56 | 57 | \examples{ 58 | 59 | data(goog) 60 | days <- factor(weekdays(index(goog)), 61 | levels = c("Monday", "Tuesday", "Wednesday", 62 | "Thursday", "Friday"), 63 | ordered = TRUE) 64 | 65 | ## Because of holidays, etc the days do not always go in sequence. 66 | ## (Sorry, Rebecca Black! https://www.youtube.com/watch?v=kfVsfOSbJY0) 67 | ## diff.days[i] is the number of days between days[i-1] and days[i]. 68 | ## We know that days[i] is the end of a week if diff.days[i] < 0. 69 | diff.days <- tail(as.numeric(days), -1) - head(as.numeric(days), -1) 70 | contains.end <- c(FALSE, diff.days < 0) 71 | 72 | goog.weekly <- HarveyCumulator(goog, contains.end, 1) 73 | 74 | 75 | } 76 | \keyword{models} 77 | \keyword{regression} 78 | -------------------------------------------------------------------------------- /man/MATCH.NumericTimestamps.Rd: -------------------------------------------------------------------------------- 1 | \name{MATCH.NumericTimestamps} 2 | \title{Match Numeric Timestamps} 3 | 4 | \alias{MATCH.NumericTimestamps} 5 | 6 | \description{S3 generic method for MATCH function supplied in the zoo package.} 7 | 8 | \usage{ 9 | \method{MATCH}{NumericTimestamps}(x, table, nomatch = NA, \dots) 10 | } 11 | 12 | \arguments{ 13 | \item{x}{A numeric set of timestamps.} 14 | 15 | \item{table}{A set of regular numeric timestamps to match against. } 16 | 17 | \item{nomatch}{The value to be returned in the case when no match is found. 18 | Note that it is coerced to integer.} 19 | 20 | \item{\dots}{Additional arguments passed to \code{\link{match}}. } 21 | } 22 | 23 | \details{ 24 | Numeric timestamps match if they agree to 8 significant digits. 25 | } 26 | 27 | \value{ 28 | Returns the index of the entry in \code{table} matched by each 29 | argument in \code{x}. 
If an entry has no match then \code{nomatch} is 30 | returned at that position. 31 | } 32 | 33 | \seealso{ 34 | \code{\link[zoo]{MATCH}} 35 | } 36 | -------------------------------------------------------------------------------- /man/StateSpecification.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{StateSpecification} 5 | 6 | \alias{state.specification} 7 | \alias{StateSpecification} 8 | 9 | \Rdversion{1.1} 10 | 11 | \title{ 12 | Add a state component to a Bayesian structural time series model 13 | } 14 | 15 | \description{ 16 | Add a state component to the \code{state.specification} argument in a 17 | \code{\link{bsts}} model. 18 | } 19 | 20 | \references{ 21 | Harvey (1990), "Forecasting, structural time series, and the Kalman 22 | filter", Cambridge University Press. 23 | 24 | Durbin and Koopman (2001), "Time series analysis by state space 25 | methods", Oxford University Press. 26 | } 27 | 28 | \author{ 29 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 30 | } 31 | 32 | \seealso{ 33 | \code{\link{bsts}}. 34 | \code{\link[Boom]{SdPrior}} 35 | \code{\link[Boom]{NormalPrior}} 36 | \code{\link[Boom]{Ar1CoefficientPrior}} 37 | } 38 | 39 | \examples{ 40 | data(AirPassengers) 41 | y <- log(AirPassengers) 42 | ss <- AddLocalLinearTrend(list(), y) 43 | ss <- AddSeasonal(ss, y, nseasons = 12) 44 | model <- bsts(y, state.specification = ss, niter = 500) 45 | pred <- predict(model, horizon = 12, burn = 100) 46 | plot(pred) 47 | } 48 | \keyword{models} 49 | \keyword{regression} 50 | -------------------------------------------------------------------------------- /man/SuggestBurn.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 
2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{SuggestBurn} 5 | \title{Suggested burn-in size} 6 | 7 | \alias{SuggestBurn} 8 | 9 | \description{Suggest the size of an MCMC burn in sample as a proportion 10 | of the total run.} 11 | 12 | \usage{SuggestBurn(proportion, bsts.object)} 13 | 14 | \arguments{ 15 | 16 | \item{proportion}{The proportion of the MCMC run to discard as burn in.} 17 | \item{bsts.object}{An object of class \code{\link{bsts}}.} 18 | } 19 | 20 | \value{ 21 | An integer number of iterations to discard. 22 | } 23 | 24 | \seealso{ 25 | \code{\link{bsts}} 26 | } 27 | 28 | 29 | -------------------------------------------------------------------------------- /man/add.ar.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.ar} 5 | \alias{AddAr} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | AR(p) state component 10 | } 11 | 12 | \description{ 13 | Add an AR(p) state component to the state specification. 14 | } 15 | 16 | \usage{ 17 | AddAr(state.specification, 18 | y, 19 | lags = 1, 20 | sigma.prior, 21 | initial.state.prior = NULL, 22 | sdy) 23 | } 24 | 25 | \arguments{ 26 | \item{state.specification}{A list of state components. If omitted, 27 | an empty list is assumed.} 28 | 29 | \item{y}{ A numeric vector. The time series to be modeled.} 30 | 31 | \item{lags}{The number of lags ("p") in the AR(p) process.} 32 | 33 | \item{sigma.prior}{An object created by SdPrior. The prior for 34 | the standard deviation of the process increments.} 35 | 36 | \item{initial.state.prior}{ An object of class MvnPrior describing the 37 | values of the state at time 0. 
This argument can be \code{NULL}, 38 | in which case the stationary distribution of the AR(p) process 39 | will be used as the initial state distribution.} 40 | 41 | \item{sdy}{The sample standard deviation of the time series to be 42 | modeled. Used to scale the prior distribution. This can be omitted 43 | if \code{y} is supplied. } 44 | } 45 | 46 | \value{ Returns \code{state.specification} with an AR(p) state component 47 | added to the end.} 48 | 49 | \details{ 50 | The model is 51 | 52 | \deqn{\alpha_{t} = \phi_1\alpha_{i, t-1} + \cdots + \phi_p 53 | \alpha_{t-p} + \epsilon_{t-1} \qquad 54 | \epsilon_t \sim \mathcal{N}(0, \sigma^2)}{% 55 | alpha[t] = phi[1] * alpha[t-1] + ... + phi[p] * alpha[t-p] + 56 | epsilon[t-1], with epsilon[t-1] ~ N(0, sigma^2) 57 | } 58 | 59 | The state consists of the last \code{p} lags of \code{alpha}. The 60 | state transition matrix has \code{phi} in its first row, ones along 61 | its first subdiagonal, and zeros elsewhere. The state variance matrix 62 | has \code{sigma^2} in its upper left corner and is zero elsewhere. 63 | The observation matrix has 1 in its first element and is zero 64 | otherwise. 65 | 66 | } 67 | 68 | \references{ 69 | Harvey (1990), "Forecasting, structural time series, and the Kalman 70 | filter", Cambridge University Press. 71 | 72 | Durbin and Koopman (2001), "Time series analysis by state space 73 | methods", Oxford University Press. 74 | } 75 | 76 | \author{ 77 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 78 | } 79 | 80 | \examples{ 81 | n <- 100 82 | residual.sd <- .001 83 | 84 | # Actual values of the AR coefficients 85 | true.phi <- c(-.7, .3, .15) 86 | ar <- arima.sim(model = list(ar = true.phi), 87 | n = n, 88 | sd = 3) 89 | 90 | ## Layer some noise on top of the AR process. 91 | y <- ar + rnorm(n, 0, residual.sd) 92 | ss <- AddAr(list(), lags = 3, sigma.prior = SdPrior(3.0, 1.0)) 93 | 94 | # Fit the model with knowledge with residual.sd essentially fixed at the 95 | # true value. 
96 | model <- bsts(y, state.specification=ss, niter = 500, prior = SdPrior(residual.sd, 100000)) 97 | 98 | # Now compare the empirical ACF to the true ACF. 99 | acf(y, lag.max = 30) 100 | points(0:30, ARMAacf(ar = true.phi, lag.max = 30), pch = "+") 101 | points(0:30, ARMAacf(ar = colMeans(model$AR3.coefficients), lag.max = 30)) 102 | legend("topright", leg = c("empirical", "truth", "MCMC"), pch = c(NA, "+", "o")) 103 | } 104 | 105 | \seealso{ 106 | \code{\link{bsts}}. 107 | \code{\link[Boom]{SdPrior}} 108 | } 109 | 110 | \keyword{models} 111 | -------------------------------------------------------------------------------- /man/add.local.level.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.local.level} 5 | \alias{AddLocalLevel} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Local level trend state component 10 | } 11 | 12 | \description{ 13 | Add a local level model to a state specification. 14 | The local level model assumes the trend is a 15 | random walk: \deqn{\alpha_{t+1} = \alpha_t + \epsilon_t \qquad 16 | \epsilon_t \sim \mathcal{N}(0,\sigma).}{% 17 | alpha[t+1] = alpha[t] + rnorm(1, 0, sigma). } 18 | The prior is on the \eqn{\sigma}{sigma} 19 | parameter. 20 | } 21 | 22 | \usage{ 23 | AddLocalLevel( 24 | state.specification, 25 | y, 26 | sigma.prior, 27 | initial.state.prior, 28 | sdy, 29 | initial.y) 30 | 31 | } 32 | 33 | \arguments{ 34 | \item{state.specification}{A list of state components that you wish to add to. If 35 | omitted, an empty list will be assumed. 
} 36 | 37 | \item{y}{ The time series to be modeled, as a numeric vector.} 38 | 39 | \item{sigma.prior}{An object created by \code{\link[Boom]{SdPrior}} 40 | describing the prior distribution for the standard deviation of the 41 | random walk increments.} 42 | 43 | \item{initial.state.prior}{An object created using 44 | \code{\link[Boom]{NormalPrior}}, describing the prior distribution 45 | of the initial state vector (at time 1).} 46 | 47 | \item{sdy}{The standard deviation of the series to be modeled. This 48 | will be ignored if \code{y} is provided, or if all the required 49 | prior distributions are supplied directly. } 50 | 51 | \item{initial.y}{The initial value of the series being modeled. This will be 52 | ignored if \code{y} is provided, or if the priors for the initial 53 | state are all provided directly.} 54 | } 55 | 56 | \value{ Returns a list with the elements necessary to specify a local 57 | linear trend state model.} 58 | 59 | \references{ 60 | Harvey (1990), "Forecasting, structural time series, and the Kalman 61 | filter", Cambridge University Press. 62 | 63 | Durbin and Koopman (2001), "Time series analysis by state space 64 | methods", Oxford University Press. 65 | } 66 | 67 | \author{ 68 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 69 | } 70 | 71 | \seealso{ 72 | \code{\link{bsts}}. 73 | \code{\link[Boom]{SdPrior}} 74 | \code{\link[Boom]{NormalPrior}} 75 | } 76 | 77 | \keyword{models} 78 | -------------------------------------------------------------------------------- /man/add.local.linear.trend.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.local.linear.trend} 5 | \alias{AddLocalLinearTrend} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Local linear trend state component 10 | } 11 | 12 | \description{ 13 | Add a local linear trend model to a state specification. 
14 | The local linear trend model assumes that both the mean and the slope of the 15 | trend follow random walks. The equation for the mean is 16 | \deqn{\mu_{t+1} = \mu_t + \delta_t + \epsilon_t \qquad \epsilon_t 17 | \sim \mathcal{N}(0, \sigma_\mu).}{% 18 | mu[t+1] = mu[t] + delta[t] + rnorm(1, 0, sigma.level).} The 19 | equation for the slope is 20 | \deqn{\delta_{t+1} = \delta_t + \eta_t \qquad \eta_t \sim 21 | \mathcal{N}(0, \sigma_\delta).}{% 22 | delta[t+1] = delta[t] + rnorm(1, 0, sigma.slope).} 23 | The prior distribution is on the level standard deviation 24 | \eqn{\sigma_\mu}{sigma.level} and the slope standard deviation 25 | \eqn{\sigma_\delta}{sigma.slope}. 26 | } 27 | 28 | \usage{ 29 | AddLocalLinearTrend( 30 | state.specification = NULL, 31 | y, 32 | level.sigma.prior = NULL, 33 | slope.sigma.prior = NULL, 34 | initial.level.prior = NULL, 35 | initial.slope.prior = NULL, 36 | sdy, 37 | initial.y) 38 | } 39 | 40 | \arguments{ 41 | \item{state.specification}{A list of state components that you wish to add to. If 42 | omitted, an empty list will be assumed. } 43 | 44 | \item{y}{The time series to be modeled, as a numeric vector.} 45 | 46 | \item{level.sigma.prior}{An object created by 47 | \code{\link[Boom]{SdPrior}} describing the prior distribution for 48 | the standard deviation of the level component.} 49 | 50 | \item{slope.sigma.prior}{An object created by 51 | \code{\link[Boom]{SdPrior}} describing the prior distribution of 52 | the standard deviation of the slope component.} 53 | 54 | \item{initial.level.prior}{An object created by 55 | \code{\link[Boom]{NormalPrior}} describing the initial distribution 56 | of the level portion of the initial state vector.} 57 | 58 | \item{initial.slope.prior}{An object created by 59 | \code{\link[Boom]{NormalPrior}} describing the prior distribution 60 | for the slope portion of the initial state vector.} 61 | 62 | \item{sdy}{The standard deviation of the series to be modeled. 
This 63 | will be ignored if \code{y} is provided, or if all the required 64 | prior distributions are supplied directly. } 65 | 66 | \item{initial.y}{The initial value of the series being modeled. This will be 67 | ignored if \code{y} is provided, or if the priors for the initial 68 | state are all provided directly.} 69 | } 70 | 71 | \value{ Returns a list with the elements necessary to specify a local 72 | linear trend state model.} 73 | 74 | \references{ 75 | Harvey (1990), "Forecasting, structural time series, and the Kalman 76 | filter", Cambridge University Press. 77 | 78 | Durbin and Koopman (2001), "Time series analysis by state space 79 | methods", Oxford University Press. 80 | } 81 | 82 | \author{ 83 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 84 | } 85 | 86 | \seealso{ 87 | \code{\link{bsts}}. 88 | \code{\link[Boom]{SdPrior}} 89 | \code{\link[Boom]{NormalPrior}} 90 | } 91 | 92 | \examples{ 93 | data(AirPassengers) 94 | y <- log(AirPassengers) 95 | ss <- AddLocalLinearTrend(list(), y) 96 | ss <- AddSeasonal(ss, y, nseasons = 12) 97 | model <- bsts(y, state.specification = ss, niter = 500) 98 | pred <- predict(model, horizon = 12, burn = 100) 99 | plot(pred) 100 | } 101 | 102 | \keyword{models} 103 | -------------------------------------------------------------------------------- /man/add.seasonal.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.seasonal} 5 | \alias{AddSeasonal} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Seasonal State Component 10 | } 11 | 12 | \description{ 13 | Add a seasonal model to a state specification. 14 | 15 | The seasonal model can be thought of as a regression on 16 | \code{nseasons} dummy variables with coefficients constrained to sum 17 | to 1 (in expectation). 
If there are \code{S} seasons then the state 18 | vector \eqn{\gamma}{gamma} is of dimension \code{S-1}. The first 19 | element of the state vector obeys 20 | \deqn{\gamma_{t+1, 1} = -\sum_{i = 2}^S \gamma_{t, i} + \epsilon_t 21 | \qquad \epsilon_t \sim \mathcal{N}(0, \sigma)}{% 22 | gamma[t+1, 1] = -1 * sum(gamma[t, -1]) + rnorm(1, 0, sigma) 23 | } 24 | } 25 | 26 | \usage{ 27 | AddSeasonal( 28 | state.specification, 29 | y, 30 | nseasons, 31 | season.duration = 1, 32 | sigma.prior, 33 | initial.state.prior, 34 | sdy) 35 | } 36 | 37 | \arguments{ 38 | \item{state.specification}{A list of state components that you wish to add to. If 39 | omitted, an empty list will be assumed. } 40 | 41 | \item{y}{ The time series to be modeled, as a numeric vector.} 42 | 43 | \item{nseasons}{The number of seasons to be modeled.} 44 | 45 | \item{season.duration}{The number of time periods in each season.} 46 | 47 | \item{sigma.prior}{An object created by \code{\link[Boom]{SdPrior}} 48 | describing the prior distribution for the standard deviation of the 49 | random walk increments.} 50 | 51 | \item{initial.state.prior}{An object created using 52 | \code{\link[Boom]{NormalPrior}}, describing the prior distribution 53 | of the initial state vector (at time 1).} 54 | 55 | \item{sdy}{The standard deviation of the series to be modeled. This 56 | will be ignored if \code{y} is provided, or if all the required 57 | prior distributions are supplied directly. } 58 | 59 | } 60 | 61 | \value{Returns a list with the elements necessary to specify a seasonal 62 | state model.} 63 | 64 | \references{ 65 | Harvey (1990), "Forecasting, structural time series, and the Kalman 66 | filter", Cambridge University Press. 67 | 68 | Durbin and Koopman (2001), "Time series analysis by state space 69 | methods", Oxford University Press. 70 | } 71 | 72 | \author{ 73 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 74 | } 75 | 76 | \seealso{ 77 | \code{\link{bsts}}. 
78 | \code{\link[Boom]{SdPrior}} 79 | \code{\link[Boom]{NormalPrior}} 80 | } 81 | 82 | \examples{ 83 | data(AirPassengers) 84 | y <- log(AirPassengers) 85 | ss <- AddLocalLinearTrend(list(), y) 86 | ss <- AddSeasonal(ss, y, nseasons = 12) 87 | model <- bsts(y, state.specification = ss, niter = 500) 88 | pred <- predict(model, horizon = 12, burn = 100) 89 | plot(pred) 90 | } 91 | 92 | \keyword{models} 93 | -------------------------------------------------------------------------------- /man/add.shared.local.level.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2019 Steven L. Scott. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.shared.local.level} 5 | \alias{AddSharedLocalLevel} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Local level trend state component 10 | } 11 | 12 | \description{ 13 | Add a shared local level model to a state specification. 14 | The shared local level model assumes the trend is a 15 | multivariate random walk: 16 | 17 | \deqn{\alpha_{t+1} = \alpha_t + \eta_t \qquad 18 | \eta_{tj} \sim \mathcal{N}(0,\sigma_j).}{% 19 | alpha[t+1, ] = alpha[t, ] + rnorm(nfactors, 0, sigma). } 20 | 21 | The contribution to the mean of the observed series obeys \deqn{y_{t} 22 | = B \alpha_t + \epsilon_t.}{y[t, ] = B \%*\% alpha[t, ]} plus 23 | observation error. Identifiability constraints imply that the 24 | observation coefficients \code{B} form a rectangular lower triangular 25 | matrix with diagonal 1.0. 26 | 27 | If there are \eqn{m} time series and \eqn{p} factors, then \eqn{B} has 28 | \eqn{m} rows and \eqn{p} columns. Having \eqn{B} be lower triangular 29 | means that the first factor affects all series. The second affects 30 | all but the first, the third excludes the first two, etc. 
31 | } 32 | 33 | \usage{ 34 | AddSharedLocalLevel( 35 | state.specification, 36 | response, 37 | nfactors, 38 | coefficient.prior = NULL, 39 | initial.state.prior = NULL, 40 | timestamps = NULL, 41 | series.id = NULL, 42 | sdy, 43 | ...) 44 | } 45 | 46 | \arguments{ 47 | 48 | \item{state.specification}{A pre-existing list of state components 49 | that you wish to add to. If omitted, an empty list will be assumed. 50 | } 51 | 52 | \item{response}{ The time series to be modeled. This can either be a 53 | matrix with rows as time and columns as series, or it can be a numeric 54 | vector. If a vector is passed then \code{timestamps} and 55 | \code{series.id} are required. Otherwise they are unused.} 56 | 57 | \item{nfactors}{The number of latent factors to include in the model. 58 | This is the dimension of the state for this model component.} 59 | 60 | \item{coefficient.prior}{Prior distribution on the observation 61 | coefficients. } 62 | 63 | \item{initial.state.prior}{An object of class 64 | \code{\link[Boom]{MvnPrior}}, describing the prior distribution of 65 | the initial state vector (at time 1).} 66 | 67 | \item{timestamps}{If \code{response} is in long format (i.e. a vector 68 | instead of a matrix) this argument is a vector of the same 69 | length indicating the time index to which each element of 70 | \code{response} belongs. } 71 | 72 | \item{series.id}{If \code{response} is in long format (i.e. a vector 73 | instead of a matrix) this argument is a vector of the same length 74 | indicating the time series to which each element of \code{response} 75 | belongs. } 76 | 77 | \item{sdy}{A vector giving the standard deviation of each series to be 78 | modeled. 
This argument is only necessary if \code{response} cannot 79 | be supplied directly.} 80 | 81 | \item{\dots}{Extra arguments passed to 82 | \code{\link[BoomSpikeSlab]{ConditionalZellnerPrior}}, used to create 83 | a default prior for the observation coefficients when 84 | \code{coefficient.prior} is left as \code{NULL}.} 85 | } 86 | 87 | \value{ Returns a list with the elements necessary to specify a local 88 | linear trend state model.} 89 | 90 | \references{ 91 | Harvey (1990), "Forecasting, structural time series, and the Kalman 92 | filter", Cambridge University Press. 93 | 94 | Durbin and Koopman (2001), "Time series analysis by state space 95 | methods", Oxford University Press. 96 | } 97 | 98 | \author{ 99 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 100 | } 101 | 102 | \seealso{ 103 | \code{\link{bsts}}. 104 | \code{\link[Boom]{SdPrior}} 105 | \code{\link[Boom]{NormalPrior}} 106 | } 107 | 108 | \keyword{models} 109 | -------------------------------------------------------------------------------- /man/add.static.intercept.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2018 Google Inc. All Rights Reserved. 2 | % 3 | % This library is free software; you can redistribute it and/or 4 | % modify it under the terms of the GNU Lesser General Public 5 | % License as published by the Free Software Foundation; either 6 | % version 2.1 of the License, or (at your option) any later version. 7 | % 8 | % This library is distributed in the hope that it will be useful, 9 | % but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | % Lesser General Public License for more details. 
12 | % 13 | % You should have received a copy of the GNU Lesser General Public 14 | % License along with this library; if not, write to the Free Software 15 | % Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | \name{add.static.intercept} 18 | \alias{AddStaticIntercept} 19 | 20 | \Rdversion{1.1} 21 | \title{ 22 | Static Intercept State Component 23 | } 24 | 25 | \description{ 26 | Adds a static intercept term to a state space model. If the model includes 27 | a traditional trend component (e.g. local level, local linear trend, etc) 28 | then a separate intercept is not needed (and will probably cause trouble, 29 | as it will be confounded with the initial state of the trend model). 30 | However, if there is no trend, or the trend is an AR process centered 31 | around zero, then adding a static intercept will shift the center to a 32 | data-determined value. 33 | } 34 | 35 | \usage{ 36 | AddStaticIntercept( 37 | state.specification, 38 | y, 39 | initial.state.prior = NormalPrior(y[1], sd(y, na.rm = TRUE))) 40 | } 41 | 42 | \arguments{ 43 | \item{state.specification}{A list of state components that you wish to add to. If 44 | omitted, an empty list will be assumed. } 45 | 46 | \item{y}{The time series to be modeled, as a numeric vector.} 47 | 48 | \item{initial.state.prior}{An object created using 49 | \code{\link[Boom]{NormalPrior}}, describing the prior distribution 50 | of the intercept term.} 51 | } 52 | 53 | \value{ Returns a list with the information required to specify the 54 | state component. If initial.state.prior is specified then y is unused. 55 | } 56 | 57 | \references{ 58 | Harvey (1990), "Forecasting, structural time series, and the Kalman 59 | filter", Cambridge University Press. 60 | 61 | Durbin and Koopman (2001), "Time series analysis by state space 62 | methods", Oxford University Press. 63 | } 64 | 65 | \author{ 66 | Steven L. Scott 67 | } 68 | 69 | \seealso{ 70 | \code{\link{bsts}}. 
71 | \code{\link[Boom]{SdPrior}} 72 | \code{\link[Boom]{NormalPrior}} 73 | } 74 | 75 | \keyword{models} 76 | -------------------------------------------------------------------------------- /man/add.student.local.linear.trend.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{add.student.local.linear.trend} 5 | \alias{AddStudentLocalLinearTrend} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Robust local linear trend 10 | } 11 | 12 | \description{ 13 | Add a local level model to a state specification. 14 | The local linear trend model assumes that both the mean and the slope of the 15 | trend follow random walks. The equation for the mean is 16 | \deqn{\mu_{t+1} = \mu_t + \delta_t + \epsilon_t \qquad \epsilon_t 17 | \sim \mathcal{T}_{\nu_\mu}(0, \sigma_\mu).}{% 18 | mu[t+1] = mu[t] + delta[t] + sigma.level * rt(1, nu.level).} The 19 | equation for the slope is 20 | \deqn{\delta_{t+1} = \delta_t + \eta_t \qquad \eta_t \sim 21 | \mathcal{T}_{\nu_\delta}(0, \sigma_\delta).}{% 22 | delta[t+1] = delta[t] + sigma.slope * rt(1, nu.slope).} 23 | Independent prior distributions are assumed on the level standard 24 | deviation, \eqn{\sigma_\mu}{sigma.level} the slope standard deviation 25 | \eqn{\sigma_\delta}{sigma.slope}, the level tail thickness 26 | \eqn{\nu_\mu}{nu.level}, and the slope tail thickness 27 | \eqn{\nu_\delta}{nu.slope}. } 28 | 29 | \usage{ 30 | AddStudentLocalLinearTrend( 31 | state.specification = NULL, 32 | y, 33 | save.weights = FALSE, 34 | level.sigma.prior = NULL, 35 | level.nu.prior = NULL, 36 | slope.sigma.prior = NULL, 37 | slope.nu.prior = NULL, 38 | initial.level.prior = NULL, 39 | initial.slope.prior = NULL, 40 | sdy, 41 | initial.y) 42 | } 43 | 44 | \arguments{ 45 | \item{state.specification}{A list of state components that you wish to add to. If 46 | omitted, an empty list will be assumed. 
} 47 | 48 | \item{y}{The time series to be modeled, as a numeric vector.} 49 | 50 | \item{save.weights}{A logical value indicating whether to save the 51 | draws of the weights from the normal mixture representation.} 52 | 53 | \item{level.sigma.prior}{An object created by 54 | \code{\link[Boom]{SdPrior}} describing the prior distribution for 55 | the standard deviation of the level component.} 56 | 57 | \item{level.nu.prior}{An object inheriting from the class 58 | \code{\link[Boom]{DoubleModel}}, representing the prior 59 | distribution on the \code{nu} tail thickness parameter of the T 60 | distribution for errors in the evolution equation for the level 61 | component.} 62 | 63 | \item{slope.sigma.prior}{An object created by 64 | \code{\link[Boom]{SdPrior}} describing the prior distribution of 65 | the standard deviation of the slope component.} 66 | 67 | \item{slope.nu.prior}{An object inheriting from the class 68 | \code{\link[Boom]{DoubleModel}}, representing the prior 69 | distribution on the \code{nu} tail thickness parameter of the T 70 | distribution for errors in the evolution equation for the slope 71 | component.} 72 | 73 | \item{initial.level.prior}{An object created by 74 | \code{\link[Boom]{NormalPrior}} describing the initial distribution 75 | of the level portion of the initial state vector.} 76 | 77 | \item{initial.slope.prior}{An object created by 78 | \code{\link[Boom]{NormalPrior}} describing the prior distribution 79 | for the slope portion of the initial state vector.} 80 | 81 | \item{sdy}{The standard deviation of the series to be modeled. This 82 | will be ignored if \code{y} is provided, or if all the required 83 | prior distributions are supplied directly. } 84 | 85 | \item{initial.y}{The initial value of the series being modeled. 
This will be 86 | ignored if \code{y} is provided, or if the priors for the initial 87 | state are all provided directly.} 88 | } 89 | 90 | \value{ Returns a list with the elements necessary to specify a local 91 | linear trend state model.} 92 | 93 | \references{ 94 | Harvey (1990), "Forecasting, structural time series, and the Kalman 95 | filter", Cambridge University Press. 96 | 97 | Durbin and Koopman (2001), "Time series analysis by state space 98 | methods", Oxford University Press. 99 | } 100 | 101 | \author{ 102 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 103 | } 104 | 105 | \seealso{ 106 | \code{\link{bsts}}. 107 | \code{\link[Boom]{SdPrior}} 108 | \code{\link[Boom]{NormalPrior}} 109 | } 110 | 111 | \examples{ 112 | data(rsxfs) 113 | ss <- AddStudentLocalLinearTrend(list(), rsxfs) 114 | model <- bsts(rsxfs, state.specification = ss, niter = 500) 115 | pred <- predict(model, horizon = 12, burn = 100) 116 | plot(pred) 117 | } 118 | 119 | \keyword{models} 120 | -------------------------------------------------------------------------------- /man/aggregate.time.series.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{aggregate.time.series} 5 | 6 | \alias{AggregateTimeSeries} 7 | 8 | \title{Aggregate a fine time series to a coarse summary} 9 | \Rdversion{1.0} 10 | 11 | \description{ Aggregate measurements from a fine scaled time series into 12 | a coarse time series. This is similar to functions from the 13 | \code{xts} package, but it can handle aggregation from weeks to 14 | months. 
} 15 | 16 | \usage{ 17 | AggregateTimeSeries(fine.series, 18 | contains.end, 19 | membership.fraction, 20 | trim.left = any(membership.fraction < 1), 21 | trim.right = NULL, 22 | byrow = TRUE) 23 | } 24 | 25 | \arguments{ 26 | 27 | \item{fine.series}{A numeric vector or matrix giving the fine scale 28 | time series to be aggregated.} 29 | 30 | \item{contains.end}{A logical vector corresponding to 31 | \code{fine.series} indicating whether each fine time interval 32 | contains the end of a coarse time interval.} 33 | 34 | \item{membership.fraction}{A numeric vector corresponding to 35 | \code{fine.series}, giving the fraction of each time interval's 36 | observation attributable to the coarse interval containing the fine 37 | interval's first day. This will usually be a vector of 1's, unless 38 | \code{fine.series} is weekly.} 39 | 40 | \item{trim.left}{Logical indicating whether the first observation in 41 | the coarse aggregate should be removed.} 42 | 43 | \item{trim.right}{Logical indicating whether the final observation in 44 | the coarse aggregate should be removed.} 45 | 46 | \item{byrow}{Logical. If \code{fine.series} is a matrix, this 47 | argument indicates whether rows (\code{TRUE}) or columns 48 | (\code{FALSE}) correspond to time points.} 49 | 50 | } 51 | 52 | \value{ A matrix (if \code{fine.series} is a matrix) or vector 53 | (otherwise) containing the aggregated values of \code{fine.series}. } 54 | 55 | \author{ 56 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 57 | } 58 | 59 | 60 | \examples{ 61 | week.ending <- as.Date(c("2011-11-05", 62 | "2011-11-12", 63 | "2011-11-19", 64 | "2011-11-26", 65 | "2011-12-03", 66 | "2011-12-10", 67 | "2011-12-17", 68 | "2011-12-24", 69 | "2011-12-31")) 70 | membership.fraction <- GetFractionOfDaysInInitialMonth(week.ending) 71 | which.month <- MatchWeekToMonth(week.ending, as.Date("2011-11-01")) 72 | contains.end <- WeekEndsMonth(week.ending) 73 | 74 | weekly.values <- rnorm(length(week.ending)) 75 | monthly.values <- AggregateTimeSeries(weekly.values, contains.end, membership.fraction) 76 | 77 | } 78 | 79 | \keyword{chron} 80 | -------------------------------------------------------------------------------- /man/aggregate.weeks.to.months.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{aggregate.weeks.to.months} 5 | 6 | \alias{AggregateWeeksToMonths} 7 | 8 | \title{Aggregate a weekly time series to monthly} 9 | \Rdversion{1.0} 10 | 11 | \description{Aggregate measurements from a weekly time series into 12 | a monthly time series. } 13 | 14 | \usage{ 15 | AggregateWeeksToMonths(weekly.series, 16 | membership.fraction = NULL, 17 | trim.left = TRUE, 18 | trim.right = NULL) 19 | 20 | } 21 | 22 | \arguments{ 23 | 24 | \item{weekly.series}{A numeric vector or matrix of class 25 | \code{\link[zoo]{zoo}} giving the weekly time series to be aggregated. 26 | The index must be convertible to class \code{\link{Date}}.} 27 | 28 | \item{membership.fraction}{A optional numeric vector corresponding to 29 | \code{weekly.series}, giving the fraction of each week's observation 30 | attributable to the month containing the week's first day. 
If 31 | missing, then weeks will be split across months in proportion to the 32 | number of days in each month.} 33 | 34 | \item{trim.left}{Logical indicating whether the first observation in 35 | the monthly aggregate should be removed.} 36 | 37 | \item{trim.right}{Logical indicating whether the final observation in 38 | the monthly aggregate should be removed.} 39 | 40 | } 41 | 42 | \value{ A zoo-matrix (if \code{weekly.series} is a matrix) or vector 43 | (otherwise) containing the aggregated values of \code{weekly.series}. 44 | } 45 | 46 | \author{ 47 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 48 | } 49 | 50 | \seealso{ 51 | \code{\link{AggregateTimeSeries}} 52 | } 53 | 54 | \examples{ 55 | week.ending <- as.Date(c("2011-11-05", 56 | "2011-11-12", 57 | "2011-11-19", 58 | "2011-11-26", 59 | "2011-12-03", 60 | "2011-12-10", 61 | "2011-12-17", 62 | "2011-12-24", 63 | "2011-12-31")) 64 | 65 | weekly.values <- zoo(rnorm(length(week.ending)), week.ending) 66 | monthly.values <- AggregateWeeksToMonths(weekly.values) 67 | } 68 | 69 | \keyword{chron} 70 | -------------------------------------------------------------------------------- /man/auto.ar.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2012 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{auto.ar} 5 | \alias{AddAutoAr} 6 | 7 | \Rdversion{1.1} 8 | \title{ 9 | Sparse AR(p) 10 | } 11 | 12 | \description{ Add a sparse AR(p) process to the state distribution. A 13 | sparse AR(p) is an AR(p) process with a spike and slab prior on the 14 | autoregression coefficients. } 15 | 16 | \usage{ 17 | AddAutoAr(state.specification, 18 | y, 19 | lags = 1, 20 | prior = NULL, 21 | sdy = NULL, 22 | ...) 23 | } 24 | 25 | \arguments{ 26 | \item{state.specification}{A list of state components. If omitted, 27 | an empty list is assumed.} 28 | 29 | \item{y}{ A numeric vector. The time series to be modeled. 
This can 30 | be omitted if \code{sdy} is supplied.} 31 | 32 | \item{lags}{The maximum number of lags ("p") to be considered in the AR(p) process.} 33 | 34 | \item{prior}{An object inheriting from \code{\link{SpikeSlabArPrior}}, or 35 | \code{NULL}. If the latter, then a default 36 | \code{\link{SpikeSlabArPrior}} will be created.} 37 | 38 | \item{sdy}{The sample standard deviation of the time series to be 39 | modeled. Used to scale the prior distribution. This can be omitted 40 | if \code{y} is supplied. } 41 | 42 | \item{\dots}{Extra arguments passed to \code{\link{SpikeSlabArPrior}}.} 43 | 44 | } 45 | 46 | \value{ Returns \code{state.specification} with an AR(p) state component 47 | added to the end.} 48 | 49 | \details{ 50 | The model contributes alpha[t] to the expected value of y[t], where 51 | the transition equation is 52 | 53 | \deqn{\alpha_{t} = \phi_1\alpha_{i, t-1} + \cdots + \phi_p 54 | \alpha_{t-p} + \epsilon_{t-1} \qquad 55 | \epsilon_t \sim \mathcal{N}(0, \sigma^2)}{% 56 | alpha[t] = phi[1] * alpha[t-1] + ... + phi[p] * alpha[t-p] + 57 | epsilon[t-1], with epsilon[t-1] ~ N(0, sigma^2) 58 | } 59 | 60 | The state consists of the last \code{p} lags of \code{alpha}. The 61 | state transition matrix has \code{phi} in its first row, ones along 62 | its first subdiagonal, and zeros elsewhere. The state variance matrix 63 | has \code{sigma^2} in its upper left corner and is zero elsewhere. 64 | The observation matrix has 1 in its first element and is zero 65 | otherwise. 66 | 67 | This model differs from the one in \code{\link{AddAr}} only in that 68 | some of its coefficients may be set to zero. 69 | } 70 | 71 | \references{ 72 | Harvey (1990), "Forecasting, structural time series, and the Kalman 73 | filter", Cambridge University Press. 74 | 75 | Durbin and Koopman (2001), "Time series analysis by state space 76 | methods", Oxford University Press. 77 | } 78 | 79 | \author{ 80 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 81 | } 82 | 83 | \examples{ 84 | n <- 100 85 | residual.sd <- .001 86 | 87 | # Actual values of the AR coefficients 88 | true.phi <- c(-.7, .3, .15) 89 | ar <- arima.sim(model = list(ar = true.phi), 90 | n = n, 91 | sd = 3) 92 | 93 | ## Layer some noise on top of the AR process. 94 | y <- ar + rnorm(n, 0, residual.sd) 95 | ss <- AddAutoAr(list(), y, lags = 6) 96 | 97 | # Fit the model with knowledge with residual.sd essentially fixed at the 98 | # true value. 99 | model <- bsts(y, state.specification=ss, niter = 500, prior = SdPrior(residual.sd, 100000)) 100 | 101 | # Now compare the empirical ACF to the true ACF. 102 | acf(y, lag.max = 30) 103 | points(0:30, ARMAacf(ar = true.phi, lag.max = 30), pch = "+") 104 | points(0:30, ARMAacf(ar = colMeans(model$AR6.coefficients), lag.max = 30)) 105 | legend("topright", leg = c("empirical", "truth", "MCMC"), pch = c(NA, "+", "o")) 106 | } 107 | 108 | \seealso{ 109 | \code{\link{bsts}}. 110 | \code{\link[Boom]{SdPrior}} 111 | } 112 | 113 | \keyword{models} 114 | -------------------------------------------------------------------------------- /man/bsts-package.Rd: -------------------------------------------------------------------------------- 1 | \name{bsts-package} 2 | \alias{bsts-package} 3 | \docType{package} 4 | \title{ 5 | bsts 6 | } 7 | 8 | \description{ 9 | Time series regression using dynamic linear models fit using 10 | MCMC. See Scott and Varian (2014) , among many 11 | other sources. 12 | } 13 | 14 | \details{ 15 | \subsection{Installation note for Linux users}{ If you are installing bsts 16 | using \code{\link{install.packages}} on a Linux machine (and thus 17 | compiling yourself) you will almost certainly want to set the 18 | \code{Ncpus} argument to a large number. Windows and Mac users can 19 | ignore this advice. } 20 | 21 | } 22 | 23 | \author{ 24 | Author: Steven L. Scott 25 | Maintainer: Steven L. 
Scott 26 | } 27 | 28 | \references{ 29 | Please see the references in the help page for the \code{\link{bsts}} function. 30 | } 31 | 32 | \keyword{ package } 33 | 34 | \seealso{ 35 | See the examples in the \code{\link{bsts}} function. 36 | } 37 | -------------------------------------------------------------------------------- /man/bsts.options.Rd: -------------------------------------------------------------------------------- 1 | \name{bsts.options} 2 | 3 | \alias{BstsOptions} 4 | 5 | \title{Bsts Model Options} 6 | \Rdversion{1.0} 7 | 8 | \description{ 9 | Rarely used modeling options for bsts models. 10 | } 11 | 12 | \usage{ 13 | BstsOptions(save.state.contributions = TRUE, 14 | save.prediction.errors = TRUE, 15 | bma.method = c("SSVS", "ODA"), 16 | oda.options = list( 17 | fallback.probability = 0.0, 18 | eigenvalue.fudge.factor = 0.01), 19 | timeout.seconds = Inf, 20 | save.full.state = FALSE) 21 | 22 | } 23 | 24 | \arguments{ 25 | 26 | \item{save.state.contributions}{Logical. If \code{TRUE} then a 3-way 27 | array named \code{state.contributions} will be stored in the 28 | returned object. The indices correspond to MCMC iteration, state 29 | model number, and time. Setting \code{save.state.contributions} to 30 | \code{FALSE} yields a smaller object, but \code{plot} will not be 31 | able to plot the "state", "components", or "residuals" for the 32 | fitted model.} 33 | 34 | \item{save.prediction.errors}{Logical. If \code{TRUE} then a matrix 35 | named \code{one.step.prediction.errors} will be saved as part of the 36 | model object. The rows of the matrix represent MCMC iterations, and 37 | the columns represent time. The matrix entries are the 38 | one-step-ahead prediction errors from the Kalman filter. } 39 | 40 | \item{bma.method}{If the model contains a regression component, this 41 | argument specifies the method to use for Bayesian model averaging. 
42 | "SSVS" is stochastic search variable selection, which is the classic 43 | approach from George and McCulloch (1997). "ODA" is orthogonal data 44 | augmentation, from Ghosh and Clyde (2011). It adds a set of latent 45 | observations that make the \eqn{X^TX}{X'X} matrix diagonal, vastly 46 | simplifying complete data MCMC for model selection.} 47 | 48 | \item{oda.options}{If bma.method == "ODA" then these are some options 49 | for fine tuning the ODA algorithm. 50 | \itemize{ 51 | 52 | \item \code{fallback.probability}: Each MCMC iteration will use 53 | SSVS instead of ODA with this probability. In cases where 54 | the latent data have high leverage, ODA mixing can suffer. 55 | Mixing in a few SSVS steps can help keep an errant algorithm 56 | on track. 57 | 58 | \item \code{eigenvalue.fudge.factor}: The latent X's will be 59 | chosen so that the complete data \eqn{X^TX}{X'X} matrix (after 60 | scaling) is a constant diagonal matrix equal to the largest 61 | eigenvalue of the observed (scaled) \eqn{X^TX}{X'X} times (1 + 62 | eigenvalue.fudge.factor). This should be a small positive number. 63 | } } 64 | 65 | \item{timeout.seconds}{The number of seconds that the sampler will be 66 | allowed to run. If the timeout is exceeded the returned object will 67 | be truncated to the final draw that took place before the timeout 68 | occurred, as if that had been the requested number of iterations.} 69 | 70 | \item{save.full.state}{Logical. If \code{TRUE} then the full 71 | distribution of the state vector will be preserved. It will be 72 | stored in the model under the name \code{full.state}, which is a 73 | 3-way array with dimensions corresponding to MCMC iteration, state 74 | dimension, and time.} 75 | 76 | } 77 | 78 | \value{ 79 | The arguments are checked to make sure they have legal types and 80 | values, then a list is returned containing the arguments. 81 | } 82 | 83 | \author{ 84 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 85 | } 86 | 87 | \keyword{chron} 88 | -------------------------------------------------------------------------------- /man/compare.bsts.models.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2018 Google Inc. All Rights Reserved. 2 | % 3 | % This library is free software; you can redistribute it and/or 4 | % modify it under the terms of the GNU Lesser General Public 5 | % License as published by the Free Software Foundation; either 6 | % version 2.1 of the License, or (at your option) any later version. 7 | % 8 | % This library is distributed in the hope that it will be useful, 9 | % but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | % Lesser General Public License for more details. 12 | % 13 | % You should have received a copy of the GNU Lesser General Public 14 | % License along with this library; if not, write to the Free Software 15 | % Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | \name{compare.bsts.models} 18 | 19 | \alias{CompareBstsModels} 20 | 21 | \Rdversion{1.0} 22 | 23 | \title{ 24 | Compare bsts models 25 | } 26 | 27 | \description{ Produce a set of line plots showing the cumulative 28 | absolute one step ahead prediction errors for different models. This 29 | plot not only shows which model is doing the best job predicting the 30 | data, it highlights regions of the data where the predictions are 31 | particularly good or bad. 
32 | } 33 | 34 | \usage{ 35 | CompareBstsModels(model.list, 36 | burn = SuggestBurn(.1, model.list[[1]]), 37 | filename = "", 38 | colors = NULL, 39 | lwd = 2, 40 | xlab = "Time", 41 | main = "", 42 | grid = TRUE, 43 | cutpoint = NULL) 44 | } 45 | 46 | \arguments{ 47 | \item{model.list}{A list of \code{\link{bsts}} models.} 48 | 49 | \item{burn}{The number of initial MCMC iterations to remove from each 50 | model as burn-in.} 51 | 52 | \item{filename}{A string. If a non-empty string, then a pdf of the plot 53 | will be saved in the specified file.} 54 | 55 | \item{colors}{A vector of colors to use for the different lines in the 56 | plot. If \code{NULL} then the \code{\link{rainbow}} palette will 57 | be used. } 58 | 59 | \item{lwd}{The width of the lines to be drawn.} 60 | 61 | \item{xlab}{Label for the horizontal axis.} 62 | 63 | \item{main}{Main title for the plot.} 64 | 65 | \item{grid}{Logical. Should gridlines be drawn in the background?} 66 | 67 | \item{cutpoint}{Either \code{NULL}, or an integer giving the 68 | observation number used to define a holdout sample. Prediction 69 | errors occurring after the cutpoint will be true out of sample 70 | errors. If \code{NULL} then all prediction errors are "in sample". 71 | See the discussion in \code{\link{bsts.prediction.errors}}.} 72 | } 73 | 74 | \value{ 75 | Invisibly returns the matrix of cumulative one-step ahead prediction 76 | errors (the lines in the top panel of the plot). Each row in the 77 | matrix corresponds to a model in model.list. 78 | } 79 | 80 | \author{ 81 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 82 | } 83 | 84 | \examples{ 85 | data(AirPassengers) 86 | y <- log(AirPassengers) 87 | ss <- AddLocalLinearTrend(list(), y) 88 | trend.only <- bsts(y, ss, niter = 250) 89 | 90 | ss <- AddSeasonal(ss, y, nseasons = 12) 91 | trend.and.seasonal <- bsts(y, ss, niter = 250) 92 | 93 | CompareBstsModels(list(trend = trend.only, 94 | "trend and seasonal" = trend.and.seasonal)) 95 | 96 | CompareBstsModels(list(trend = trend.only, 97 | "trend and seasonal" = trend.and.seasonal), 98 | cutpoint = 100) 99 | 100 | } 101 | 102 | \keyword{chron} 103 | -------------------------------------------------------------------------------- /man/date.range.Rd: -------------------------------------------------------------------------------- 1 | \name{date.range} 2 | 3 | \alias{DateRange} 4 | 5 | \Rdversion{1.0} 6 | 7 | \title{ 8 | Date Range 9 | } 10 | 11 | \description{ 12 | Returns the first and last dates of the influence window for the given 13 | holiday, among the given timestamps. 14 | } 15 | 16 | \usage{ 17 | DateRange(holiday, timestamps) 18 | } 19 | 20 | \arguments{ 21 | 22 | \item{holiday}{An object of class \code{\link{Holiday}}.} 23 | 24 | \item{timestamps}{A vector of timestamps of class \code{\link{Date}} 25 | or class \code{\link{POSIXt}}. This function assumes daily 26 | data. Use with care in other settings.} 27 | 28 | } 29 | 30 | \value{ Returns a two-column data frame giving the first and last dates 31 | of the influence window for the holiday in the period covered by 32 | \code{timestamps}. 33 | } 34 | 35 | \author{ 36 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 37 | } 38 | 39 | \examples{ 40 | 41 | holiday <- NamedHoliday("MemorialDay", days.before = 2, days.after = 2) 42 | timestamps <- seq.Date(from = as.Date("2001-01-01"), by = "day", 43 | length.out = 365 * 10) 44 | influence <- DateRange(holiday, timestamps) 45 | 46 | } 47 | 48 | \keyword{chron} 49 | -------------------------------------------------------------------------------- /man/descriptive-plots.Rd: -------------------------------------------------------------------------------- 1 | \name{descriptive-plots} 2 | \title{Descriptive Plots} 3 | 4 | \alias{YearPlot} 5 | \alias{MonthPlot} 6 | \alias{DayPlot} 7 | 8 | \description{Plots for describing time series data.} 9 | 10 | \usage{ 11 | DayPlot(y, colors = NULL, ylab = NULL, ...) 12 | MonthPlot(y, seasonal.identifier = months, colors = NULL, ylab = NULL, ...) 13 | YearPlot(y, colors = NULL, ylab = NULL, ylim = NULL, legend = TRUE, ...) 14 | } 15 | 16 | \arguments{ 17 | 18 | \item{y}{A time series to plot. Must be of class \code{\link{ts}}, or 19 | \code{\link{zoo}}. If a zoo object then the timestamps must be of 20 | type \code{\link{Date}}, \code{\link{yearmon}}, or \code{\link{POSIXt}}. 21 | } 22 | 23 | \item{seasonal.identifier}{ 24 | A function that takes a vector of class \code{\link{POSIXt}} 25 | (date/time) and returns a character vector indicating the season to 26 | which each element belongs. Each unique element returned by this 27 | function returns a "season" to be plotted. See 28 | \code{\link{weekdays}}, \code{\link{months}}, 29 | and \code{\link{quarters}} for examples of how this should work. } 30 | 31 | \item{colors}{A vector of colors to use for the lines.} 32 | 33 | \item{legend}{Logical. If \code{TRUE} then a legend is added to the plot.} 34 | 35 | \item{ylab}{Label for the vertical axis.} 36 | 37 | \item{ylim}{Limits for the vertical axis. 
(a 2-vector)} 38 | 39 | \item{\dots}{Extra arguments passed to \code{\link{plot}} or \code{\link{lines}}.} 40 | 41 | } 42 | 43 | \details{ 44 | 45 | \code{DayPlot} and \code{MonthPlot} plot the time series one season at 46 | a time, on the same set of axes. The intent is to use DayPlot for 47 | daily data and MonthPlot for monthly or quarterly data. 48 | 49 | \code{YearPlot} plots each year of the time series as a separate line 50 | on the same set of axes. 51 | 52 | Both sets of plots help visualize seasonal patterns. 53 | 54 | } 55 | 56 | \value{ 57 | Returns \code{invisible{NULL}}. 58 | } 59 | 60 | \examples{ 61 | ## Plot a 'ts' time series. 62 | data(AirPassengers) 63 | par(mfrow = c(1,2)) 64 | MonthPlot(AirPassengers) 65 | YearPlot(AirPassengers) 66 | 67 | ## Plot a 'zoo' time series. 68 | data(turkish) 69 | par(mfrow = c(1,2)) 70 | YearPlot(turkish) 71 | DayPlot(turkish) 72 | 73 | } 74 | 75 | \seealso{ \code{\link{monthplot}} is a base R function for plotting time 76 | series of type \code{\link{ts}}. } 77 | -------------------------------------------------------------------------------- /man/diagnostic-plots.Rd: -------------------------------------------------------------------------------- 1 | \name{diagnostic-plots} 2 | \title{Diagnostic Plots} 3 | 4 | \alias{qqdist} 5 | \alias{AcfDist} 6 | 7 | \description{ 8 | Diagnostic plots for distributions of residuals. 9 | } 10 | 11 | \usage{ 12 | qqdist(draws, \dots) 13 | AcfDist(draws, lag.max = NULL, xlab = "Lag", ylab = "Autocorrelation", \dots) 14 | } 15 | 16 | \arguments{ 17 | 18 | \item{draws}{A matrix of Monte Carlo draws of residual errors. Each 19 | row is a Monte Carlo draw, and each column is an observation. In the 20 | case of AcfDist successive observations are assumed to be sequential 21 | in time.} 22 | 23 | \item{lag.max}{The number of lags to plot in the autocorrelation 24 | function. 
See \code{\link{acf}}.} 25 | 26 | \item{xlab}{Label for the horizontal axis.} 27 | \item{ylab}{Label for the vertical axis.} 28 | 29 | \item{\dots}{Extra arguments passed to either \code{\link{boxplot}} 30 | (for \code{AcfDist}) or \code{\link[Boom]{PlotDynamicDistribution}} (for 31 | \code{qqdist}).} 32 | 33 | } 34 | 35 | \details{ 36 | 37 | \code{qqdist} sorts the columns of \code{draws} by their mean, and 38 | plots the resulting set of curves against the quantiles of the 39 | standard normal distribution. A reference line is added, and the mean 40 | of each column of draws is represented by a blue dot. The dots and 41 | the line are the transpose of what you get with \code{\link{qqnorm}} 42 | and \code{\link{qqline}}. 43 | 44 | \code{AcfDist} plots the posterior distribution of the autocorrelation 45 | function using a set of side-by-side boxplots. 46 | 47 | } 48 | 49 | 50 | \examples{ 51 | 52 | data(AirPassengers) 53 | y <- log(AirPassengers) 54 | 55 | ss <- AddLocalLinearTrend(list(), y) 56 | ss <- AddSeasonal(ss, y, nseasons = 12) 57 | model <- bsts(y, ss, niter = 500) 58 | 59 | r <- residuals(model) 60 | par(mfrow = c(1,2)) 61 | qqdist(r) ## A bit of departure in the upper tail 62 | AcfDist(r) 63 | 64 | } 65 | -------------------------------------------------------------------------------- /man/dirm-model-options.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2018 Steven L. Scott. All Rights Reserved. 2 | % 3 | % This library is free software; you can redistribute it and/or 4 | % modify it under the terms of the GNU Lesser General Public 5 | % License as published by the Free Software Foundation; either 6 | % version 2.1 of the License, or (at your option) any later version. 7 | % 8 | % This library is distributed in the hope that it will be useful, 9 | % but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 11 | % Lesser General Public License for more details. 12 | % 13 | % You should have received a copy of the GNU Lesser General Public 14 | % License along with this library; if not, write to the Free Software 15 | % Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | \alias{DirmModelOptions} 18 | \name{dirm-model-options} 19 | \title{Specify Options for a Dynamic Intercept Regression Model} 20 | \Rdversion{1.1} 21 | \description{ 22 | Specify modeling options for a dynamic intercept regression model. 23 | } 24 | 25 | \usage{ 26 | DirmModelOptions(timeout.seconds = Inf, 27 | high.dimensional.threshold.factor = 1.0) 28 | } 29 | 30 | \arguments{ 31 | 32 | \item{timeout.seconds}{The overall time budget for model fitting. If 33 | the MCMC algorithm takes longer than this number, the current 34 | iteration will complete, and then the fitting algorithm will return 35 | with however many MCMC iterations were managed during the allotted 36 | time. } 37 | 38 | \item{high.dimensional.threshold.factor}{ When doing Kalman filter 39 | updates for the model, Sherman-Morrison-Woodbury style updates are 40 | applied for high dimensional data, while direct linear algebra is 41 | used for low dimensional data. The definition of "high dimensional" 42 | is relative to the dimension of the state. An observation is 43 | considered high dimensional if its dimension exceeds the state 44 | dimension times this factor. 45 | } 46 | 47 | } 48 | 49 | \value{ 50 | An object of class \code{DirmModelOptions}, which is simply a list 51 | containing values of the function arguments. 52 | 53 | The value of using this function instead of making a list "by hand" is 54 | that argument types are properly checked, and list names are sure to 55 | be correct. 
56 | } 57 | 58 | \keyword{models} 59 | \keyword{regression} 60 | -------------------------------------------------------------------------------- /man/estimate.time.scale.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{estimate.time.scale} 5 | \title{Intervals between dates} 6 | \alias{EstimateTimeScale} 7 | 8 | \description{Estimate the time scale used in time series data.} 9 | 10 | \usage{EstimateTimeScale(dates)} 11 | 12 | \arguments{ 13 | \item{dates}{A sorted vector of class \code{\link{Date}}.} 14 | } 15 | 16 | \value{A character string. Either "daily", "weekly", "yearly", 17 | "monthly", "quarterly", or "other". The value is determined based on 18 | counting the number of days between successive observations in \code{dates}.} 19 | 20 | \author{Steven L. Scott \email{steve.the.bayesian@gmail.com}} 21 | 22 | \examples{ 23 | weekly.data <- as.Date(c("2011-10-01", 24 | "2011-10-08", 25 | "2011-10-15", 26 | "2011-10-22", 27 | "2011-10-29", 28 | "2011-11-05")) 29 | 30 | EstimateTimeScale(weekly.data) # "weekly" 31 | 32 | almost.weekly.data <- as.Date(c("2011-10-01", 33 | "2011-10-08", 34 | "2011-10-15", 35 | "2011-10-22", 36 | "2011-10-29", 37 | "2011-11-06")) # last day is one later 38 | 39 | EstimateTimeScale(almost.weekly.data) # "other" 40 | } 41 | 42 | \keyword{chron} 43 | 44 | 45 | -------------------------------------------------------------------------------- /man/extend.time.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{extend.time} 5 | 6 | \alias{ExtendTime} 7 | 8 | \Rdversion{1.0} 9 | 10 | \title{ 11 | Extends a vector of dates to a given length 12 | } 13 | 14 | \description{ 15 | Pads a vector of dates to a specified length. 
16 | } 17 | 18 | \usage{ 19 | ExtendTime(dates, number.of.periods, dt = NULL) 20 | } 21 | 22 | \arguments{ 23 | 24 | \item{dates}{An ordered vector of class \code{\link{Date}}.} 25 | 26 | \item{number.of.periods}{The desired length of the output.} 27 | 28 | \item{dt}{A character string describing the frequency of the dates in 29 | \code{dates}. Possible values are "daily", "weekly", "monthly", 30 | "quarterly", "yearly", or "other". An attempt to deduce \code{dt} 31 | will be made if it is missing.} 32 | 33 | } 34 | 35 | \value{ 36 | If \code{number.of.periods} is longer than \code{length(dates)}, then 37 | \code{dates} will be padded to the desired length. Extra dates are 38 | added at time intervals matching the average interval in 39 | \code{dates}. Thus they may not align exactly with calendar 40 | boundaries, such as the ends of months or quarters. 41 | } 42 | 43 | \author{ 44 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 45 | } 46 | 47 | \seealso{ 48 | \code{\link{bsts.mixed}}. 49 | } 50 | 51 | \examples{ 52 | origin.month <- as.Date("2011-09-01") 53 | week.ending <- as.Date(c("2011-10-01", ## 1 54 | "2011-10-08", ## 2 55 | "2011-12-03", ## 3 56 | "2011-12-31")) ## 4 57 | MatchWeekToMonth(week.ending, origin.month) == 1:4 58 | } 59 | 60 | \keyword{chron} 61 | -------------------------------------------------------------------------------- /man/format.timestamps.Rd: -------------------------------------------------------------------------------- 1 | \name{format.timestamps} 2 | 3 | \alias{NoDuplicates} 4 | \alias{NoGaps} 5 | \alias{IsRegular} 6 | \alias{HasDuplicateTimestamps} 7 | 8 | \title{Checking for Regularity} 9 | \Rdversion{1.0} 10 | 11 | \description{ 12 | 13 | Tools for checking if a series of timestamps is 'regular' meaning that 14 | it has no duplicates, and no gaps. Checking for regularity can be 15 | tricky. For example, if you have monthly observations with 16 | \code{\link{Date}} or \code{\link{POSIXt}} timestamps then gaps 17 | between timestamps can be 28, 29, 30, or 31 days, but the series is 18 | still "regular". 
19 | 20 | } 21 | 22 | \usage{ 23 | NoDuplicates(timestamps) 24 | NoGaps(timestamps) 25 | IsRegular(timestamps) 26 | 27 | HasDuplicateTimestamps(bsts.object) 28 | } 29 | 30 | \arguments{ 31 | 32 | \item{timestamps}{A set of (possibly irregular or non-unique) 33 | timestamps. This could be a set of integers (like 1, 2, , 3...), a 34 | set of numeric like (1945, 1945.083, 1945.167, ...) indicating years 35 | and fractions of years, a \code{\link{Date}} object, or a 36 | \code{\link{POSIXt}} object.} 37 | 38 | \item{bsts.object}{A bsts model object.} 39 | } 40 | 41 | \value{ 42 | All four functions return scalar logical values. \code{NoDuplicates} 43 | returns \code{TRUE} if all elements of \code{timestamps} are unique. 44 | 45 | \code{NoGaps} examines the smallest nonzero gap between time points. 46 | As long as no gaps between time points are more than twice as wide as 47 | the smallest gap, it returns \code{TRUE}, indicating that there are no 48 | missing timestamps. Otherwise it returns \code{FALSE}. 49 | 50 | \code{IsRegular} returns \code{TRUE} if \code{NoDuplicates} and 51 | \code{NoGaps} both return \code{TRUE}. 52 | 53 | \code{HasDuplicateTimestamps} returns \code{FALSE} if the data used to 54 | fit bsts.model either has NULL timestamps, or if the timestamps 55 | contain no duplicate values. 56 | } 57 | 58 | \author{ 59 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 60 | } 61 | 62 | \examples{ 63 | first <- as.POSIXct("2015-04-19 08:00:04") 64 | monthly <- seq(from = first, length.out = 24, by = "month") 65 | IsRegular(monthly) ## TRUE 66 | 67 | skip.one <- monthly[-8] 68 | IsRegular(skip.one) ## FALSE 69 | 70 | has.duplicates <- monthly 71 | has.duplicates[1] <- has.duplicates[2] 72 | IsRegular(has.duplicates) ## FALSE 73 | } 74 | \keyword{chron} 75 | -------------------------------------------------------------------------------- /man/gdp.Rd: -------------------------------------------------------------------------------- 1 | \name{gdp} 2 | \docType{data} 3 | \alias{gdp} 4 | \title{Gross Domestic Product for 57 Countries} 5 | \description{Annual gross domestic product for 57 countries, as produced 6 | by the OECD. 7 | 8 | Fields: 9 | \itemize{ 10 | \item{LOCATION: Three letter country code.} 11 | \item{MEASURE: MLN_USD signifies a total GDP number in millions of 12 | US dollars. USD_CAP is per capita GDP in US dollars.} 13 | \item{TIME: The year of the measurement.} 14 | \item{Value: The measured value.} 15 | \item{Flag.Codes: P for provisional data, B for a break in the 16 | series, and E for an estimated value.} 17 | } 18 | } 19 | \usage{ 20 | data(gdp) 21 | } 22 | \format{data frame} 23 | \source{OECD website: See 24 | https://data.oecd.org/gdp/gross-domestic-product-gdp.htm} 25 | 26 | \keyword{datasets} 27 | -------------------------------------------------------------------------------- /man/geometric.sequence.Rd: -------------------------------------------------------------------------------- 1 | \name{geometric.sequence} 2 | 3 | \alias{GeometricSequence} 4 | 5 | \Rdversion{1.0} 6 | 7 | \title{ 8 | Create a Geometric Sequence 9 | } 10 | 11 | \description{ Create a geometric sequence.} 12 | 13 | \usage{ 14 | GeometricSequence(length, initial.value = 1, discount.factor = .5) 15 | } 16 | 17 | \arguments{ 18 | 19 | \item{length}{A positive integer giving the length of the desired 
sequence.} 20 | 21 | \item{initial.value}{The first term in the sequence. Cannot be zero.} 22 | 23 | \item{discount.factor}{The ratio between a sequence term and the 24 | preceding term. Cannot be zero.} 25 | 26 | } 27 | 28 | \value{ 29 | A numeric vector containing the desired sequence. 30 | } 31 | 32 | \author{ 33 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 34 | } 35 | 36 | \examples{ 37 | GeometricSequence(4, .8, .6) 38 | # [1] 0.8000 0.4800 0.2880 0.1728 39 | 40 | GeometricSequence(5, 2, 3) 41 | # [1] 2 6 18 54 162 42 | 43 | \dontrun{ 44 | GeometricSequence(0, -1, -2) 45 | # Error: length > 0 is not TRUE 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /man/get.fraction.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{get.fraction} 5 | 6 | \alias{GetFractionOfDaysInInitialMonth} 7 | \alias{GetFractionOfDaysInInitialQuarter} 8 | 9 | \Rdversion{1.0} 10 | 11 | \title{ 12 | Compute membership fractions 13 | } 14 | 15 | \description{ 16 | Returns the fraction of days in a week that occur in the coarse time interval (month or quarter) containing the start of the week. 17 | } 18 | 19 | \usage{ 20 | GetFractionOfDaysInInitialMonth(week.ending) 21 | GetFractionOfDaysInInitialQuarter(week.ending) 22 | } 23 | 24 | \arguments{ 25 | \item{week.ending}{A vector of class \code{\link{Date}}. Each entry contains the 26 | date of the last day in a week.} 27 | } 28 | 29 | \value{ 30 | Returns a numeric vector of the same length as \code{week.ending}. 31 | Each entry gives the fraction of days in the week that occur in the 32 | coarse time interval (month or quarter) containing the start of the 33 | week (i.e. the date 6 days before). 34 | } 35 | 36 | \author{ 37 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 38 | } 39 | 40 | \seealso{ 41 | \code{\link{bsts.mixed}}. 
42 | } 43 | 44 | \examples{ 45 | dates <- as.Date(c("2003-03-31", 46 | "2003-04-01", 47 | "2003-04-02", 48 | "2003-04-03", 49 | "2003-04-04", 50 | "2003-04-05", 51 | "2003-04-06", 52 | "2003-04-07")) 53 | fraction <- GetFractionOfDaysInInitialMonth(dates) 54 | fraction == c(1, 6/7, 5/7, 4/7, 3/7, 2/7, 1/7, 1) 55 | } 56 | 57 | \keyword{chron} 58 | -------------------------------------------------------------------------------- /man/goog.Rd: -------------------------------------------------------------------------------- 1 | \name{goog} 2 | \docType{data} 3 | \alias{goog} 4 | \alias{GOOG} 5 | \title{Google stock price} 6 | \description{Daily closing price of Google stock.} 7 | \usage{data(goog)} 8 | \format{xts time series} 9 | \source{The Internets} 10 | \keyword{datasets} 11 | -------------------------------------------------------------------------------- /man/iclaims.Rd: -------------------------------------------------------------------------------- 1 | \name{iclaims} 2 | \docType{data} 3 | \alias{initial.claims} 4 | \alias{iclaims} 5 | \title{Initial Claims Data} 6 | 7 | \description{A weekly time series of US initial claims for unemployment. 8 | The first column contains the initial claims numbers from FRED. The 9 | others contain a measure of the relative popularity of various search 10 | queries identified by Google Correlate.} 11 | 12 | \usage{ 13 | data(iclaims) 14 | } 15 | \format{zoo time series} 16 | \source{ 17 | FRED. http://research.stlouisfed.org/fred2/series/ICNSA,\cr 18 | Google correlate. http://www.google.com/trends/correlate} 19 | \examples{ 20 | data(iclaims) 21 | plot(initial.claims) 22 | } 23 | 24 | \seealso{ 25 | \code{\link{bsts}} 26 | } 27 | 28 | \keyword{datasets} 29 | -------------------------------------------------------------------------------- /man/last.day.in.month.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 
2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | 5 | \name{last.day.in.month} 6 | 7 | \alias{LastDayInMonth} 8 | 9 | \title{Find the last day in a month} 10 | \Rdversion{1.0} 11 | \description{ 12 | Finds the last day in the month containing a specified date. 13 | } 14 | 15 | \usage{ 16 | LastDayInMonth(dates) 17 | } 18 | 19 | \arguments{ 20 | 21 | \item{dates}{A vector of class \code{\link{Date}}.} 22 | 23 | } 24 | 25 | \value{ 26 | A vector of class \code{\link{Date}} where each entry is the last day 27 | in the month containing the corresponding entry in \code{dates}. 28 | } 29 | 30 | \author{ 31 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 32 | } 33 | 34 | 35 | \examples{ 36 | inputs <- as.Date(c("2007-01-01", 37 | "2007-01-31", 38 | "2008-02-01", 39 | "2008-02-29", 40 | "2008-03-14", 41 | "2008-12-01", 42 | "2008-12-31")) 43 | expected.outputs <- as.Date(c("2007-01-31", 44 | "2007-01-31", 45 | "2008-02-29", 46 | "2008-02-29", 47 | "2008-03-31", 48 | "2008-12-31", 49 | "2008-12-31")) 50 | LastDayInMonth(inputs) == expected.outputs 51 | } 52 | 53 | \keyword{chron} 54 | -------------------------------------------------------------------------------- /man/match.week.to.month.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{match.week.to.month} 5 | 6 | \alias{MatchWeekToMonth} 7 | 8 | \Rdversion{1.0} 9 | 10 | \title{ 11 | Find the month containing a week 12 | } 13 | 14 | \description{ 15 | Returns the index of a month, in a sequence of months, that contains a 16 | given week. 17 | } 18 | 19 | \usage{ 20 | MatchWeekToMonth(week.ending, origin.month) 21 | } 22 | 23 | \arguments{ 24 | 25 | \item{week.ending}{A vector of class \code{\link{Date}}. 
Each entry contains the 26 | date of the last day in a week.} 27 | 28 | \item{origin.month}{A \code{\link{Date}}, giving any day in the month 29 | to use as the origin of the sequence (month 1).} 30 | 31 | } 32 | 33 | \value{ The index of the month matching the month containing the first 34 | day in \code{week.ending}. The origin is month 1. It is the caller's 35 | responsibility to ensure that these indices correspond to legal values 36 | in a particular vector of months. } 37 | 38 | \author{ 39 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 40 | } 41 | 42 | \seealso{ 43 | \code{\link{bsts.mixed}}. 44 | } 45 | 46 | \examples{ 47 | origin.month <- as.Date("2011-09-01") 48 | week.ending <- as.Date(c("2011-10-01", ## 1 49 | "2011-10-08", ## 2 50 | "2011-12-03", ## 3 51 | "2011-12-31")) ## 4 52 | MatchWeekToMonth(week.ending, origin.month) == 1:4 53 | } 54 | 55 | \keyword{chron} 56 | -------------------------------------------------------------------------------- /man/max.window.width.Rd: -------------------------------------------------------------------------------- 1 | \name{max.window.width} 2 | \alias{MaxWindowWidth} 3 | \alias{MaxWindowWidth.default} 4 | \alias{MaxWindowWidth.DateRangeHoliday} 5 | \title{Maximum Window Width for a Holiday} 6 | \Rdversion{1.1} 7 | 8 | \description{The maximum width of a holiday's influence window} 9 | 10 | \usage{ 11 | \method{MaxWindowWidth}{default}(holiday, ...) 12 | \method{MaxWindowWidth}{DateRangeHoliday}(holiday, ...) 13 | } 14 | 15 | \arguments{ 16 | 17 | \item{holiday}{An object of class \code{\link{Holiday}}.} 18 | 19 | \item{\dots}{Other arguments (not used).} 20 | } 21 | 22 | 23 | \value{ 24 | Returns the number of days in a holiday's influence window. 25 | } 26 | 27 | 28 | \author{ 29 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 30 | } 31 | 32 | \seealso{ 33 | \code{\link{Holiday}}. 34 | \code{\link{AddRegressionHoliday}}. 35 | \code{\link{AddRandomWalkHoliday}}. 
36 | \code{\link{AddHierarchicalRegressionHoliday}}. 37 | } 38 | 39 | \examples{ 40 | 41 | easter <- NamedHoliday("Easter", days.before = 2, days.after = 1) 42 | if (MaxWindowWidth(easter) == 4) { 43 | print("That's the right answer!\n") 44 | } 45 | 46 | ## This holiday lasts two days longer in 2005 than in 2004. 47 | may18 <- DateRangeHoliday("May18", 48 | start = as.Date(c("2004-05-17", 49 | "2005-05-16")), 50 | end = as.Date(c("2004-05-19", 51 | "2005-05-20"))) 52 | 53 | if (MaxWindowWidth(may18) == 5) { 54 | print("Right again!\n") 55 | } 56 | 57 | } 58 | \keyword{models} 59 | \keyword{regression} 60 | -------------------------------------------------------------------------------- /man/month.distance.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{month.distance} 5 | 6 | \alias{MonthDistance} 7 | 8 | \Rdversion{1.0} 9 | 10 | \title{Elapsed time in months} 11 | 12 | \description{The (integer) number of months between dates.} 13 | 14 | \usage{ 15 | MonthDistance(dates, origin) 16 | } 17 | 18 | \arguments{ 19 | 20 | \item{dates}{A vector of class \code{\link{Date}} to be measured.} 21 | 22 | \item{origin}{A scalar of class \code{\link{Date}}.} 23 | 24 | } 25 | 26 | \value{ Returns a numeric vector giving the integer number of months 27 | that have elapsed between \code{origin} and each element in 28 | \code{dates}. The daily component of each date is ignored, so two 29 | dates that are in the same month will have the same measured 30 | distance. Distances are signed, so months that occur before 31 | \code{origin} will have negative values. } 32 | 33 | \author{ 34 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 35 | } 36 | 37 | \examples{ 38 | dates <- as.Date(c("2008-04-17", 39 | "2008-05-01", 40 | "2008-05-31", 41 | "2008-06-01")) 42 | origin <- as.Date("2008-05-15") 43 | MonthDistance(dates, origin) == c(-1, 0, 0, 1) 44 | } 45 | 46 | \keyword{chron} 47 | -------------------------------------------------------------------------------- /man/named.holidays.Rd: -------------------------------------------------------------------------------- 1 | \name{named.holidays} 2 | \docType{data} 3 | \alias{named.holidays} 4 | \title{Holidays Recognized by Name} 5 | \description{A character vector listing the names of pre-specified holidays.} 6 | \value{ 7 | "NewYearsDay" "SuperBowlSunday" 8 | "MartinLutherKingDay" "PresidentsDay" 9 | "ValentinesDay" "SaintPatricksDay" 10 | "USDaylightSavingsTimeBegins" "USDaylightSavingsTimeEnds" 11 | "EasterSunday" "USMothersDay" 12 | "IndependenceDay" "LaborDay" 13 | "ColumbusDay" "Halloween" 14 | "Thanksgiving" "MemorialDay" 15 | "VeteransDay" "Christmas" 16 | } 17 | \usage{ 18 | named.holidays 19 | } 20 | 21 | -------------------------------------------------------------------------------- /man/new.home.sales.Rd: -------------------------------------------------------------------------------- 1 | \name{new.home.sales} 2 | \docType{data} 3 | \alias{new.home.sales} 4 | \title{New home sales and Google trends} 5 | 6 | \description{ The first column, HSN1FNSA is a time series of new home 7 | sales in the US, obtained from the FRED online data base. The series 8 | has been manually deseasonalized. The remaining columns contain 9 | search terms from Google trends (obtained from 10 | http://trends.google.com/correlate). These show the relative 11 | popularity of each search term among all search terms typed into 12 | Google. 
All series in this data set have been standardized by 13 | subtracting off their mean and dividing by their standard deviation.} 14 | 15 | \usage{data(new.home.sales)} 16 | \format{zoo time series} 17 | \source{FRED and trends.google.com} 18 | \keyword{datasets} 19 | -------------------------------------------------------------------------------- /man/one.step.prediction.errors.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{one.step.prediction.errors} 5 | \alias{bsts.prediction.errors} 6 | \title{Prediction Errors} 7 | \Rdversion{1.1} 8 | \description{ 9 | Computes the one-step-ahead prediction errors for a \code{\link{bsts}} 10 | model. 11 | } 12 | 13 | \usage{ 14 | bsts.prediction.errors(bsts.object, 15 | cutpoints = NULL, 16 | burn = SuggestBurn(.1, bsts.object), 17 | standardize = FALSE) 18 | } 19 | 20 | \arguments{ 21 | 22 | \item{bsts.object}{ An object of class \code{\link{bsts}}.} 23 | 24 | \item{cutpoints}{An increasing sequence of integers between 1 and the 25 | number of time points in the training data for \code{bsts.object}, or 26 | \code{NULL}. If \code{NULL} then the in-sample one-step prediction 27 | errors from the bsts object will be extracted and returned. 28 | Otherwise the model will be re-fit with a separate MCMC run 29 | for each entry in 'cutpoints'. Data up to each cutpoint will be 30 | included in the fit, and one-step prediction errors for data after 31 | the cutpoint will be computed. } 32 | 33 | \item{burn}{An integer giving the number of MCMC iterations to discard 34 | as burn-in. If \code{burn <= 0} then no burn-in sample will be 35 | discarded.} 36 | 37 | \item{standardize}{ Logical. If \code{TRUE} then the prediction 38 | errors are divided by the square root of the one-step-ahead forecast 39 | variance. If \code{FALSE} the raw errors are returned. 
} 40 | 41 | } 42 | 43 | \value{ 44 | A matrix of draws of the one-step-ahead prediction errors. Rows of 45 | the matrix correspond to MCMC draws. Columns correspond to time. 46 | } 47 | 48 | \details{ 49 | 50 | Returns the posterior distribution of the one-step-ahead prediction errors 51 | from the bsts.object. The errors are computed using the Kalman filter, 52 | and are of two types. 53 | 54 | Purely in-sample errors are computed as a by-product of the Kalman 55 | filter as a result of fitting the model. These are stored in the 56 | bsts.object assuming the \code{save.prediction.errors} option is TRUE, 57 | which is the default (See \code{\link{BstsOptions}}). The in-sample 58 | errors are 'in-sample' in the sense that the parameter values used to 59 | run the Kalman filter are drawn from their posterior distribution given 60 | complete data. Conditional on the parameters in that MCMC iteration, 61 | each 'error' is the difference between the observed y[t] and its 62 | expectation given data to t-1. 63 | 64 | Purely out-of-sample errors can be computed by specifying the 'cutpoints' 65 | argument. If cutpoints are supplied then a separate MCMC is run using just 66 | data up to the cutpoint. The Kalman filter is then run on the remaining 67 | data, again finding the difference between y[t] and its expectation given 68 | data to t-1, but conditional on parameters estimated using data up to the 69 | cutpoint. 70 | } 71 | 72 | \references{ 73 | Harvey (1990), "Forecasting, structural time series, and the Kalman 74 | filter", Cambridge University Press. 75 | 76 | Durbin and Koopman (2001), "Time series analysis by state space 77 | methods", Oxford University Press. 78 | } 79 | 80 | \author{ 81 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 82 | } 83 | 84 | \seealso{ 85 | \code{\link{bsts}}, 86 | \code{\link{AddLocalLevel}}, 87 | \code{\link{AddLocalLinearTrend}}, 88 | \code{\link{AddSemilocalLinearTrend}}, 89 | \code{\link[BoomSpikeSlab]{SpikeSlabPrior}}, 90 | \code{\link[Boom]{SdPrior}}. 91 | } 92 | 93 | \examples{ 94 | data(AirPassengers) 95 | y <- log(AirPassengers) 96 | ss <- AddLocalLinearTrend(list(), y) 97 | ss <- AddSeasonal(ss, y, nseasons = 12) 98 | 99 | \dontrun{ 100 | model <- bsts(y, state.specification = ss, niter = 500) 101 | } 102 | \dontshow{ 103 | model <- bsts(y, state.specification = ss, niter = 200) 104 | } 105 | errors <- bsts.prediction.errors(model, burn = 100) 106 | PlotDynamicDistribution(errors$in.sample) 107 | 108 | ## Compute out of sample prediction errors beyond times 80 and 120. 109 | errors <- bsts.prediction.errors(model, cutpoints = c(80, 120)) 110 | standardized.errors <- bsts.prediction.errors( 111 | model, cutpoints = c(80, 120), standardize = TRUE) 112 | plot(model, "prediction.errors", cutpoints = c(80, 120)) 113 | str(errors) ## three matrices, with 400 ( = 500 - 100) rows 114 | ## and length(y) columns 115 | } 116 | \keyword{models} 117 | \keyword{regression} 118 | -------------------------------------------------------------------------------- /man/plot.bsts.prediction.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 
2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{plot.bsts.prediction} 5 | \title{Plot predictions from Bayesian structural time series} 6 | 7 | \alias{plot.bsts.prediction} 8 | 9 | \description{Plot the posterior predictive distribution from a 10 | \code{\link{bsts}} prediction object.} 11 | 12 | \usage{ 13 | \method{plot}{bsts.prediction}(x, 14 | y = NULL, 15 | burn = 0, 16 | plot.original = TRUE, 17 | median.color = "blue", 18 | median.type = 1, 19 | median.width = 3, 20 | interval.quantiles = c(.025, .975), 21 | interval.color = "green", 22 | interval.type = 2, 23 | interval.width = 2, 24 | style = c("dynamic", "boxplot"), 25 | ylim = NULL, 26 | ...) 27 | } 28 | 29 | \arguments{ 30 | 31 | \item{x}{An object of class \code{\link{bsts.prediction}} 32 | created by calling \code{predict} on a \code{\link{bsts}} object.} 33 | 34 | \item{y}{A dummy argument necessary to match the signature of the 35 | \code{\link{plot}} generic function. This argument is unused.} 36 | 37 | \item{plot.original}{Logical or numeric. If \code{TRUE} then the 38 | prediction is plotted after a time series plot of the original 39 | series. If \code{FALSE}, the prediction fills the entire plot. 40 | If numeric, then it specifies the number of trailing observations 41 | of the original time series to plot in addition to the 42 | predictions.} 43 | 44 | \item{burn}{The number of observations you wish to discard as burn-in 45 | from the posterior predictive distribution. 
This is in addition 46 | to the burn-in discarded using \code{\link{predict.bsts}}.} 47 | 48 | \item{median.color}{The color to use for the posterior median of the 49 | prediction.} 50 | 51 | \item{median.type}{The type of line (lty) to use for the posterior median 52 | of the prediction.} 53 | 54 | \item{median.width}{The width of line (lwd) to use for the posterior median 55 | of the prediction.} 56 | 57 | \item{interval.quantiles}{The lower and upper limits of the credible 58 | interval to be plotted.} 59 | 60 | \item{interval.color}{The color to use for the upper and lower limits 61 | of the 95\% credible interval for the prediction.} 62 | 63 | \item{interval.type}{The type of line (lty) to use for the upper and 64 | lower limits of the 95\% credible interval of the 65 | prediction.} 66 | 67 | \item{interval.width}{The width of line (lwd) to use for the upper and 68 | lower limits of the 95\% credible interval of the 69 | prediction.} 70 | 71 | \item{style}{Either "dynamic", for dynamic distribution plots, or 72 | "boxplot", for box plots. Partial matching is allowed, so "dyn" or 73 | "box" would work, for example.} 74 | 75 | \item{ylim}{Limits on the vertical axis.} 76 | 77 | \item{...}{Extra arguments to be passed to 78 | \code{\link[Boom]{PlotDynamicDistribution}} 79 | and \code{\link{lines}}.} 80 | } 81 | 82 | \details{ Plots the posterior predictive distribution described by 83 | \code{x} using a dynamic distribution plot generated by 84 | \code{\link[Boom]{PlotDynamicDistribution}}. Overlays the 85 | posterior median and 95\% prediction limits for the predictive 86 | distribution. } 87 | 88 | \value{ 89 | Returns NULL. 
90 | } 91 | 92 | \examples{ 93 | data(AirPassengers) 94 | y <- log(AirPassengers) 95 | ss <- AddLocalLinearTrend(list(), y) 96 | ss <- AddSeasonal(ss, y, nseasons = 12) 97 | model <- bsts(y, state.specification = ss, niter = 500) 98 | pred <- predict(model, horizon = 12, burn = 100) 99 | plot(pred) 100 | } 101 | 102 | \seealso{ 103 | \code{\link{bsts}} 104 | \code{\link[Boom]{PlotDynamicDistribution}} 105 | \code{\link[BoomSpikeSlab]{plot.lm.spike}} 106 | } 107 | -------------------------------------------------------------------------------- /man/plot.bsts.predictors.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{plot.bsts.predictors} 5 | \title{Plot the most likely predictors} 6 | 7 | \alias{PlotBstsPredictors} 8 | 9 | \description{Creates a time series plot showing the most likely 10 | predictors of a time series used to fit a \code{\link{bsts}} object.} 11 | 12 | \usage{ 13 | PlotBstsPredictors(bsts.object, 14 | burn = SuggestBurn(.1, bsts.object), 15 | inclusion.threshold = .1, 16 | ylim = NULL, 17 | flip.signs = TRUE, 18 | show.legend = TRUE, 19 | grayscale = TRUE, 20 | short.names = TRUE, 21 | ...) 22 | } 23 | 24 | \arguments{ 25 | \item{bsts.object}{An object of class \code{\link{bsts}}.} 26 | 27 | \item{burn}{The number of observations you wish to discard as burn-in.} 28 | 29 | \item{inclusion.threshold}{Plot predictors with marginal inclusion 30 | probabilities above this threshold.} 31 | 32 | \item{ylim}{Scale for the vertical axis. } 33 | 34 | \item{flip.signs}{If true then a predictor with a negative sign will 35 | be flipped before being plotted, to better align visually 36 | with the target series.} 37 | 38 | \item{show.legend}{ 39 | Should a legend be shown indicating which predictors are plotted? 40 | } 41 | 42 | \item{grayscale}{Logical. 
If \code{TRUE} then lines for different 43 | predictors grow progressively lighter as their inclusion probability 44 | decreases. If \code{FALSE} then lines are drawn in black.} 45 | 46 | \item{short.names}{Logical. If \code{TRUE} then a common prefix or 47 | suffix shared by all the variables will be discarded.} 48 | 49 | \item{...}{Extra arguments to be passed to \code{\link{plot}}.} 50 | } 51 | 52 | \examples{ 53 | data(AirPassengers) 54 | y <- log(AirPassengers) 55 | lag.y <- c(NA, head(y, -1)) 56 | ss <- AddLocalLinearTrend(list(), y) 57 | ss <- AddSeasonal(ss, y, nseasons = 12) 58 | ## Call bsts with na.action = na.omit to omit the leading NA in lag.y 59 | model <- bsts(y ~ lag.y, state.specification = ss, niter = 500, 60 | na.action = na.omit) 61 | plot(model, "predictors") 62 | } 63 | \seealso{ 64 | \code{\link{bsts}} 65 | \code{\link[Boom]{PlotDynamicDistribution}} 66 | \code{\link[BoomSpikeSlab]{plot.lm.spike}} 67 | } 68 | -------------------------------------------------------------------------------- /man/plot.holiday.Rd: -------------------------------------------------------------------------------- 1 | \name{plot.holiday} 2 | \title{Plot Holiday Effects} 3 | \alias{PlotHoliday} 4 | 5 | \description{ Plot the estimated effect of the given holiday.} 6 | 7 | \usage{ 8 | PlotHoliday(holiday, model, show.raw.data = TRUE, ylim = NULL, \dots) 9 | } 10 | 11 | \arguments{ 12 | \item{holiday}{An object of class \code{\link{Holiday}}.} 13 | 14 | \item{model}{A model fit by \code{\link{bsts}} containing either a 15 | \code{\link{RegressionHolidayStateModel}} or 16 | \code{\link{HierarchicalRegressionHolidayStateModel}} that includes 17 | \code{holiday}. } 18 | 19 | \item{show.raw.data}{Logical indicating if the raw data corresponding 20 | to \code{holiday} should be superimposed on the plot. 
The 'raw 21 | data' are the actual values of the target series, minus the value of 22 | the target series the day before the holiday began, which is a 23 | (somewhat poor) proxy for remaining state elements. The raw data 24 | can appear artificially noisy if there are other strong state 25 | effects such as a day-of-week effect for holidays that don't always 26 | occur on the same day of the week. } 27 | 28 | \item{ylim}{Limits on the vertical axis of the plots.} 29 | 30 | \item{\dots}{Extra arguments passed to \code{\link{boxplot}}.} 31 | } 32 | 33 | 34 | \value{ 35 | Returns \code{invisible{NULL}}. 36 | } 37 | 38 | \examples{ 39 | trend <- cumsum(rnorm(730, 0, .1)) 40 | dates <- seq.Date(from = as.Date("2014-01-01"), length = length(trend), 41 | by = "day") 42 | y <- zoo(trend + rnorm(length(trend), 0, .2), dates) 43 | 44 | AddHolidayEffect <- function(y, dates, effect) { 45 | ## Adds a holiday effect to simulated data. 46 | ## Args: 47 | ## y: A zoo time series, with Dates for indices. 48 | ## dates: The dates of the holidays. 49 | ## effect: A vector of holiday effects of odd length. The central effect is 50 | ## the main holiday, with a symmetric influence window on either side. 51 | ## Returns: 52 | ## y, with the holiday effects added. 53 | time <- dates - (length(effect) - 1) / 2 54 | for (i in 1:length(effect)) { 55 | y[time] <- y[time] + effect[i] 56 | time <- time + 1 57 | } 58 | return(y) 59 | } 60 | 61 | ## Define some holidays. 
62 | memorial.day <- NamedHoliday("MemorialDay") 63 | memorial.day.effect <- c(.3, 3, .5) 64 | memorial.day.dates <- as.Date(c("2014-05-26", "2015-05-25")) 65 | y <- AddHolidayEffect(y, memorial.day.dates, memorial.day.effect) 66 | 67 | presidents.day <- NamedHoliday("PresidentsDay") 68 | presidents.day.effect <- c(.5, 2, .25) 69 | presidents.day.dates <- as.Date(c("2014-02-17", "2015-02-16")) 70 | y <- AddHolidayEffect(y, presidents.day.dates, presidents.day.effect) 71 | 72 | labor.day <- NamedHoliday("LaborDay") 73 | labor.day.effect <- c(1, 2, 1) 74 | labor.day.dates <- as.Date(c("2014-09-01", "2015-09-07")) 75 | y <- AddHolidayEffect(y, labor.day.dates, labor.day.effect) 76 | 77 | ## The holidays can be in any order. 78 | holiday.list <- list(memorial.day, labor.day, presidents.day) 79 | number.of.holidays <- length(holiday.list) 80 | 81 | ## In a real example you'd want more than 100 MCMC iterations. 82 | niter <- 100 83 | ss <- AddLocalLevel(list(), y) 84 | ss <- AddRegressionHoliday(ss, y, holiday.list = holiday.list) 85 | model <- bsts(y, state.specification = ss, niter = niter) 86 | 87 | PlotHoliday(memorial.day, model) 88 | } 89 | 90 | \seealso{ 91 | \code{\link{bsts}} 92 | \code{\link{AddRandomWalkHoliday}} 93 | } 94 | -------------------------------------------------------------------------------- /man/plot.mbsts.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2019 Steven L. Scott All Rights Reserved. 2 | % 3 | % This library is free software; you can redistribute it and/or 4 | % modify it under the terms of the GNU Lesser General Public 5 | % License as published by the Free Software Foundation; either 6 | % version 2.1 of the License, or (at your option) any later version. 7 | % 8 | % This library is distributed in the hope that it will be useful, 9 | % but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU 11 | % Lesser General Public License for more details. 12 | % 13 | % You should have received a copy of the GNU Lesser General Public 14 | % License along with this library; if not, write to the Free Software 15 | % Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | \name{plot.mbsts} 18 | \title{Plotting Functions for Multivariate Bayesian Structural Time Series} 19 | 20 | \alias{plot.mbsts} 21 | \alias{PlotMbstsSeriesMeans} 22 | 23 | \description{Functions to plot the results of a model fit using 24 | \code{\link{mbsts}}.} 25 | 26 | \usage{ 27 | 28 | \method{plot}{mbsts}(x, y = c("means", "help"), ...) 29 | 30 | PlotMbstsSeriesMeans(mbsts.object, 31 | series.id = NULL, 32 | same.scale = TRUE, 33 | burn = SuggestBurn(.1, mbsts.object), 34 | time, 35 | show.actuals = TRUE, 36 | ylim = NULL, 37 | gap = 0, 38 | cex.actuals = 0.2, 39 | ...) 40 | } 41 | 42 | \arguments{ 43 | \item{x}{An object of class \code{\link{mbsts}}.} 44 | 45 | \item{y}{A character string indicating the aspect of the model that 46 | should be plotted.} 47 | 48 | \item{mbsts.object}{An object of class \code{\link{mbsts}}.} 49 | 50 | \item{series.id}{Indicates which series should be plotted. An 51 | integer, logical, or character vector.} 52 | 53 | \item{same.scale}{Logical. If \code{TRUE} then all the series or 54 | state components will be plotted with the same scale on the vertical 55 | axis. If \code{FALSE} then each plot will get its own scale for the 56 | vertical axis.} 57 | 58 | \item{burn}{The number of MCMC iterations to discard as burn-in.} 59 | 60 | \item{time}{An optional vector of values to plot against. If missing, 61 | the default is to diagnose the time scale of the original time 62 | series.} 63 | 64 | \item{show.actuals}{Logical. If \code{TRUE} then actual values from 65 | the fitted series will be shown on the plot.} 66 | 67 | \item{ylim}{Limits for the vertical axis. 
If \code{NULL} these will 68 | be inferred from the state components and the \code{same.scale} 69 | argument. Otherwise all plots will be created with the same 70 | \code{ylim} values.} 71 | 72 | \item{gap}{The number of lines to leave between plots. This need not 73 | be an integer.} 74 | 75 | \item{cex.actuals}{Scale factor to use for plotting the raw data.} 76 | 77 | \item{...}{Additional arguments passed to 78 | \code{\link[Boom]{PlotDynamicDistribution}}.} 79 | 80 | } 81 | 82 | \seealso{ 83 | \code{\link{plot.bsts}} 84 | } 85 | -------------------------------------------------------------------------------- /man/plot.mbsts.prediction.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2019 Steven L. Scott. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{plot.mbsts.prediction} 5 | \title{Plot Multivariate Bsts Predictions} 6 | 7 | \alias{plot.mbsts.prediction} 8 | 9 | \description{Plot the posterior predictive distribution from an 10 | \code{\link{mbsts}} prediction object.} 11 | 12 | \usage{ 13 | \method{plot}{mbsts.prediction}(x, 14 | y = NULL, 15 | burn = 0, 16 | plot.original = TRUE, 17 | median.color = "blue", 18 | median.type = 1, 19 | median.width = 3, 20 | interval.quantiles = c(.025, .975), 21 | interval.color = "green", 22 | interval.type = 2, 23 | interval.width = 2, 24 | style = c("dynamic", "boxplot"), 25 | ylim = NULL, 26 | series.id = NULL, 27 | same.scale = TRUE, 28 | gap = 0, 29 | ...) 30 | } 31 | 32 | \arguments{ 33 | 34 | \item{x}{An object of class \code{\link{bsts.prediction}} 35 | created by calling \code{predict} on a \code{\link{bsts}} object.} 36 | 37 | \item{y}{A dummy argument necessary to match the signature of the 38 | \code{\link{plot}} generic function. This argument is unused.} 39 | 40 | \item{plot.original}{Logical or numeric. If \code{TRUE} then the 41 | prediction is plotted after a time series plot of the original 42 | series. 
If \code{FALSE}, the prediction fills the entire plot. 43 | If numeric, then it specifies the number of trailing observations 44 | of the original time series to plot in addition to the 45 | predictions.} 46 | 47 | \item{burn}{The number of observations you wish to discard as burn-in 48 | from the posterior predictive distribution. This is in addition 49 | to the burn-in discarded using \code{\link{predict.bsts}}.} 50 | 51 | \item{median.color}{The color to use for the posterior median of the 52 | prediction.} 53 | 54 | \item{median.type}{The type of line (lty) to use for the posterior median 55 | of the prediction.} 56 | 57 | \item{median.width}{The width of line (lwd) to use for the posterior median 58 | of the prediction.} 59 | 60 | \item{interval.quantiles}{The lower and upper limits of the credible 61 | interval to be plotted.} 62 | 63 | \item{interval.color}{The color to use for the upper and lower limits 64 | of the 95\% credible interval for the prediction.} 65 | 66 | \item{interval.type}{The type of line (lty) to use for the upper and 67 | lower limits of the 95\% credible interval for the 68 | prediction.} 69 | 70 | \item{interval.width}{The width of line (lwd) to use for the upper and 71 | lower limits of the 95\% credible interval for the 72 | prediction.} 73 | 74 | \item{style}{Either "dynamic", for dynamic distribution plots, or 75 | "boxplot", for box plots. Partial matching is allowed, so "dyn" or 76 | "box" would work, for example.} 77 | 78 | \item{ylim}{Limits on the vertical axis.} 79 | 80 | \item{series.id}{A factor, string, or integer used to indicate which 81 | of the multivariate series to plot. If NULL then predictions for 82 | all series will be plotted. If there are many series this can make 83 | the plot unreadable.} 84 | 85 | \item{same.scale}{ Logical. If TRUE then all predictions are plotted 86 | with the same scale, and limits are drawn on the Y axis. 
If FALSE 87 | then each prediction is drawn to fill its plot region, and no tick 88 | marks are drawn on the y axis. If ylim is specified then it is used 89 | for all plots, and same.scale is ignored.} 90 | 91 | \item{gap}{The amount of space to leave between plots, measured in 92 | lines of text.} 93 | 94 | \item{...}{Extra arguments to be passed to 95 | \code{\link[Boom]{PlotDynamicDistribution}} 96 | and \code{\link{lines}}.} 97 | } 98 | 99 | \details{ Plots the posterior predictive distribution described by 100 | \code{x} using a dynamic distribution plot generated by 101 | \code{\link[Boom]{PlotDynamicDistribution}}. Overlays the 102 | posterior median and 95\% prediction limits for the predictive 103 | distribution. } 104 | 105 | \value{ 106 | Returns NULL. 107 | } 108 | -------------------------------------------------------------------------------- /man/predict.mbsts.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2019 Steven L. Scott. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{predict.mbsts} 5 | \alias{predict.mbsts} 6 | \alias{mbsts.prediction} 7 | \title{Prediction for Multivariate Bayesian Structural Time Series} 8 | \Rdversion{1.1} 9 | \description{Generate draws from the posterior predictive distribution 10 | of an \code{\link{mbsts}} object.} 11 | 12 | \usage{ 13 | \method{predict}{mbsts}(object, 14 | horizon = 1, 15 | newdata = NULL, 16 | timestamps = NULL, 17 | burn = SuggestBurn(.1, object), 18 | na.action = na.exclude, 19 | quantiles = c(.025, .975), 20 | seed = NULL, 21 | ...) 22 | } 23 | 24 | \arguments{ 25 | 26 | \item{object}{An object of class \code{\link{mbsts}}.} 27 | 28 | \item{horizon}{An integer specifying the number of periods into the 29 | future you wish to predict. 
If \code{object} contains a regression 30 | component then the forecast horizon is \code{nrow(newdata)} and this 31 | argument is not used.} 32 | 33 | \item{newdata}{A vector, matrix, or data frame containing the 34 | predictor variables to use in making the prediction. This is only 35 | required if \code{object} contains a regression component. If a 36 | data frame, it must include variables with the same names as the 37 | data used to fit \code{object}. The first observation in newdata is 38 | assumed to be one time unit after the end of the last data used in 39 | fitting \code{object}, and the subsequent observations are 40 | sequential time points. If the regression part of \code{object} 41 | contains only a single predictor then newdata can be a vector. If 42 | \code{newdata} is passed as a matrix it is the caller's 43 | responsibility to ensure that it contains the correct number of 44 | columns and that the columns correspond to those in 45 | object$coefficients. } 46 | 47 | \item{timestamps}{A vector of time stamps (of the same type as the 48 | timestamps used to fit \code{object}), with one per row of 49 | \code{newdata} (or element of \code{newdata}, if \code{newdata} is a 50 | vector). The time stamps give the time points at which each 51 | prediction is desired. They must be interpretable as integer (0 or 52 | larger) time steps following the last time stamp in \code{object}. 53 | If \code{NULL}, then the requested predictions are interpreted as 54 | being at 1, 2, 3, ... steps following the training data.} 55 | 56 | \item{burn}{An integer describing the number of MCMC iterations in 57 | \code{object} to be discarded as burn-in. 
If burn <= 0 then no 58 | burn-in period will be discarded.} 59 | 60 | \item{na.action}{A function determining what should be done with 61 | missing values in \code{newdata}.} 62 | 63 | \item{quantiles}{A numeric vector of length 2 giving the lower and 64 | upper quantiles to use for the forecast interval estimate.} 65 | 66 | \item{seed}{An integer to use as the C++ random seed. If 67 | \code{NULL} then the C++ seed will be set using the clock.} 68 | 69 | \item{\dots}{Not used. Present to match the signature of the 70 | default predict method.} 71 | } 72 | 73 | \details{ 74 | The prediction is based off of samples taken from the posterior 75 | distribution of a multivariate Bayesian structural time series model. 76 | 77 | As an added convenience, means and interval estimates are produced 78 | from the posterior predictive distribution. 79 | } 80 | 81 | \value{ 82 | Returns an object of class mbsts.prediction, which is a list. 83 | } 84 | 85 | \author{ 86 | Steven L. Scott 87 | } 88 | 89 | \seealso{ 90 | \code{\link{mbsts}}. 91 | \code{\link{predict.bsts}} 92 | \code{\link{plot.mbsts.prediction}} 93 | } 94 | -------------------------------------------------------------------------------- /man/quarter.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{quarter} 5 | 6 | \alias{Quarter} 7 | 8 | \title{Find the quarter in which a date occurs} 9 | \Rdversion{1.0} 10 | \description{ 11 | Returns the quarter and year in which a date occurs. 12 | } 13 | 14 | \usage{ 15 | Quarter(date) 16 | } 17 | 18 | \arguments{ 19 | 20 | \item{date}{A vector convertible to \code{\link{POSIXlt}}. A 21 | \code{\link{Date}} or \code{character} is fine.} 22 | 23 | } 24 | 25 | \value{ A numeric vector identifying the quarter that each element of 26 | \code{date} corresponds to, expressed as a number of years since 1900. 
27 | Thus Q1-2000 is 100.00, and Q3-2007 is 107.50. } 28 | 29 | \author{ 30 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 31 | } 32 | 33 | \examples{ 34 | Quarter(c("2008-02-29", "2008-04-29")) 35 | # [1] 108.00 108.25 36 | } 37 | \keyword{chron} 38 | -------------------------------------------------------------------------------- /man/regularize.timestamps.Rd: -------------------------------------------------------------------------------- 1 | \name{regularize.timestamps} 2 | 3 | \alias{RegularizeTimestamps} 4 | \alias{RegularizeTimestamps.default} 5 | \alias{RegularizeTimestamps.numeric} 6 | \alias{RegularizeTimestamps.Date} 7 | \alias{RegularizeTimestamps.POSIXt} 8 | 9 | \title{Produce a Regular Series of Time Stamps} 10 | \Rdversion{1.0} 11 | 12 | \description{ 13 | Given a set of timestamps that might contain duplicates and gaps, 14 | produce a set of timestamps that has no duplicates and no gaps. 15 | } 16 | 17 | \usage{ 18 | RegularizeTimestamps(timestamps) 19 | 20 | \method{RegularizeTimestamps}{default}(timestamps) 21 | 22 | \method{RegularizeTimestamps}{numeric}(timestamps) 23 | 24 | \method{RegularizeTimestamps}{Date}(timestamps) 25 | 26 | \method{RegularizeTimestamps}{POSIXt}(timestamps) 27 | } 28 | 29 | \arguments{ 30 | 31 | \item{timestamps}{A set of (possibly irregular or non-unique) 32 | timestamps. This could be a set of integers (like 1, 2, 3, ...), a 33 | set of numeric values like (1945, 1945.083, 1945.167, ...) indicating years 34 | and fractions of years, a \code{\link{Date}} object, or a 35 | \code{\link{POSIXt}} object. If the argument is \code{NULL} a 36 | \code{NULL} will be returned.} 37 | 38 | \value{ 39 | 40 | A set of regularly spaced timestamps of the same class as the argument 41 | (which might be \code{NULL}). 42 | 43 | } 44 | 45 | \author{ 46 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 47 | } 48 | 49 | \examples{ 50 | first <- as.POSIXct("2015-04-19 08:00:04") 51 | monthly <- seq(from = first, length.out = 24, by = "month") 52 | skip.one <- monthly[-8] 53 | has.duplicates <- monthly 54 | has.duplicates[2] <- has.duplicates[3] 55 | 56 | reg1 <- RegularizeTimestamps(skip.one) 57 | all.equal(reg1, monthly) ## TRUE 58 | 59 | reg2 <- RegularizeTimestamps(has.duplicates) 60 | all.equal(reg2, monthly) ## TRUE 61 | 62 | } 63 | \keyword{chron} 64 | -------------------------------------------------------------------------------- /man/residuals.bsts.Rd: -------------------------------------------------------------------------------- 1 | \name{residuals.bsts} 2 | \title{Residuals from a bsts Object} 3 | 4 | \alias{residuals.bsts} 5 | 6 | \description{Residuals (or posterior distribution of residuals) from a 7 | bsts object.} 8 | 9 | \usage{ 10 | \method{residuals}{bsts}(object, 11 | burn = SuggestBurn(.1, object), 12 | mean.only = FALSE, 13 | \dots) 14 | } 15 | 16 | \arguments{ 17 | \item{object}{An object of class \code{\link{bsts}} created by the function 18 | of the same name.} 19 | 20 | \item{burn}{The number of MCMC iterations to discard as burn-in.} 21 | 22 | \item{mean.only}{Logical. If \code{TRUE} then the mean residual for 23 | each time period is returned. If \code{FALSE} then the full 24 | posterior distribution is returned.} 25 | 26 | \item{\dots}{Not used. This argument is here to comply with the 27 | signature of the generic residuals function.} 28 | } 29 | 30 | \value{ 31 | 32 | If \code{mean.only} is \code{TRUE} then this function returns a vector 33 | of residuals with the same "time stamp" as the original series. If 34 | \code{mean.only} is \code{FALSE} then the posterior distribution of 35 | the residuals is returned instead, as a matrix of draws. Each row of 36 | the matrix is an MCMC draw, and each column is a time point. 
The 37 | colnames of the returned matrix will be the timestamps of the original 38 | series, as text. } 39 | 40 | 41 | \seealso{ 42 | \code{\link{bsts}}, \code{\link{plot.bsts}}. 43 | } 44 | -------------------------------------------------------------------------------- /man/rsxfs.Rd: -------------------------------------------------------------------------------- 1 | \name{rsxfs} 2 | \docType{data} 3 | \alias{rsxfs} 4 | \alias{RSXFS} 5 | \alias{retail.sales} 6 | \title{Retail sales, excluding food services} 7 | \description{A monthly time series of retail sales in the US, excluding 8 | food services. In millions of dollars. Seasonally adjusted.} 9 | \usage{ 10 | data(rsxfs) 11 | } 12 | \format{zoo time series} 13 | \source{FRED. See http://research.stlouisfed.org/fred2/series/RSXFS} 14 | \examples{ 15 | data(rsxfs) 16 | plot(rsxfs) 17 | } 18 | 19 | \keyword{datasets} 20 | -------------------------------------------------------------------------------- /man/shark.Rd: -------------------------------------------------------------------------------- 1 | \name{shark} 2 | \docType{data} 3 | \alias{shark} 4 | \title{Shark Attacks in Florida.} 5 | \description{An annual time series of shark attacks and fatalities in Florida.} 6 | \usage{ 7 | data(shark) 8 | } 9 | \format{zoo time series} 10 | \source{ 11 | From Jeffrey Simonoff "Analysis of Categorical Data". 12 | http://people.stern.nyu.edu/jsimonof/AnalCatData/Data/Comma_separated/floridashark.csv} 13 | \examples{ 14 | data(shark) 15 | head(shark) 16 | } 17 | 18 | \keyword{datasets} 19 | -------------------------------------------------------------------------------- /man/shorten.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 
2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{shorten} 5 | 6 | \alias{Shorten} 7 | 8 | \Rdversion{1.0} 9 | 10 | \title{ 11 | Shorten long names 12 | } 13 | 14 | \description{ 15 | Removes common prefixes and suffixes from character vectors. 16 | } 17 | 18 | \usage{ 19 | Shorten(words) 20 | } 21 | 22 | \arguments{ 23 | \item{words}{A character vector to be shortened.} 24 | } 25 | 26 | \value{ 27 | The argument \code{words} is returned, after common prefixes and 28 | suffixes have been removed. If all arguments are identical then no 29 | shortening is done. 30 | } 31 | 32 | \author{ 33 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 34 | } 35 | 36 | \seealso{ 37 | \code{\link{bsts.mixed}}. 38 | } 39 | 40 | \examples{ 41 | Shorten(c("/usr/common/foo.tex", "/usr/common/barbarian.tex")) 42 | # returns c("foo", "barbarian") 43 | 44 | Shorten(c("hello", "hellobye")) 45 | # returns c("", "bye") 46 | 47 | Shorten(c("hello", "hello")) 48 | # returns c("hello", "hello") 49 | 50 | Shorten(c("", "x", "xx")) 51 | # returns c("", "x", "xx") 52 | 53 | Shorten("abcde") 54 | # returns "abcde" 55 | } 56 | 57 | \keyword{character} 58 | -------------------------------------------------------------------------------- /man/simulate.fake.mixed.frequency.data.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{simulate.fake.mixed.frequency.data} 5 | 6 | \alias{SimulateFakeMixedFrequencyData} 7 | 8 | \title{Simulate fake mixed frequency data} 9 | 10 | 11 | \Rdversion{1.0} 12 | \description{ 13 | Simulate a fake data set that can be used to test mixed frequency code. 
14 | } 15 | 16 | \usage{ 17 | SimulateFakeMixedFrequencyData(nweeks, 18 | xdim, 19 | number.nonzero = xdim, 20 | start.date = as.Date("2009-01-03"), 21 | sigma.obs = 1.0, 22 | sigma.slope = .5, 23 | sigma.level = .5, 24 | beta.sd = 10) 25 | 26 | } 27 | 28 | \arguments{ 29 | 30 | \item{nweeks}{The number of weeks of data to simulate.} 31 | 32 | \item{xdim}{The dimension of the predictor variables to be simulated.} 33 | 34 | \item{number.nonzero}{The number of nonzero coefficients. Must be 35 | less than or equal to \code{xdim}.} 36 | 37 | \item{start.date}{The date of the first simulated week.} 38 | 39 | \item{sigma.obs}{The residual standard deviation for the fine time 40 | scale model.} 41 | 42 | \item{sigma.slope}{The standard deviation of the slope component of 43 | the local linear trend model for the fine time scale data.} 44 | 45 | \item{sigma.level}{The standard deviation of the level component of 46 | the local linear trend model for the fine time scale data.} 47 | 48 | \item{beta.sd}{The standard deviation of the regression coefficients 49 | to be simulated.} 50 | 51 | } 52 | 53 | \value{ 54 | 55 | Returns a list with the following components 56 | 57 | \item{coarse.target}{A \code{\link[zoo]{zoo}} time series containing the 58 | monthly values to be modeled.} 59 | 60 | \item{fine.target}{A \code{\link[zoo]{zoo}} time series containing the 61 | weekly observations that aggregate to \code{coarse.target}. 
} 62 | 63 | \item{predictors}{A \code{\link[zoo]{zoo}} matrix corresponding to 64 | \code{fine.target} containing the set of predictor variables to use 65 | in \code{\link{bsts.mixed}} prediction.} 66 | 67 | 68 | \item{true.beta}{The vector of "true" regression coefficients used to 69 | simulate \code{fine.target}.} 70 | 71 | \item{true.sigma.obs}{The residual standard deviation that was used to 72 | simulate \code{fine.target}.} 73 | 74 | \item{true.sigma.slope}{The value of \code{sigma.slope} used to 75 | simulate \code{fine.target}.} 76 | 77 | \item{true.sigma.level}{The value of \code{sigma.level} used to 78 | simulate \code{fine.target}.} 79 | 80 | \item{true.trend}{The combined contribution of the simulated latent 81 | state on \code{fine.target}, including regression effects.} 82 | 83 | \item{true.state}{A matrix containing the fine-scale state of the model 84 | being simulated. Columns represent time (weeks). Rows correspond 85 | to regression (a constant 1), the local linear trend level, the 86 | local linear trend slope, the values of \code{fine.target}, and the 87 | weekly partial aggregates of \code{coarse.target}.} 88 | 89 | } 90 | 91 | \details{ 92 | 93 | The simulation begins by simulating a local linear trend model for 94 | \code{nweeks} to get the trend component. 95 | 96 | Next a \code{nweeks} by \code{xdim} matrix of predictor variables is 97 | simulated as IID normal(0, 1) deviates, and a \code{xdim}-vector of 98 | regression coefficients is simulated as IID normal(0, \code{beta.sd}). 99 | The product of the predictor matrix and regression coefficients is 100 | added to the output of the local linear trend model to get 101 | \code{fine.target}. 102 | 103 | Finally, \code{fine.target} is aggregated to the month level to get 104 | \code{coarse.target}. 105 | 106 | } 107 | 108 | \references{ 109 | Harvey (1990), "Forecasting, structural time series, and the Kalman 110 | filter", Cambridge University Press. 
111 | 112 | Durbin and Koopman (2001), "Time series analysis by state space 113 | methods", Oxford University Press. 114 | } 115 | 116 | \author{ 117 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 118 | } 119 | 120 | \seealso{ 121 | \code{\link{bsts.mixed}}, 122 | \code{\link{AddLocalLinearTrend}}, 123 | } 124 | 125 | \examples{ 126 | fake.data <- SimulateFakeMixedFrequencyData(nweeks = 100, xdim = 10) 127 | plot(fake.data$coarse.target) 128 | } 129 | 130 | \keyword{models} 131 | \keyword{regression} 132 | -------------------------------------------------------------------------------- /man/spike.slab.ar.prior.Rd: -------------------------------------------------------------------------------- 1 | \name{spike.slab.ar.prior} 2 | 3 | \alias{SpikeSlabArPrior} 4 | 5 | \Rdversion{1.0} 6 | 7 | \title{ 8 | Spike and Slab Priors for AR Processes 9 | } 10 | 11 | \description{ Returns a spike and slab prior for the parameters of an 12 | AR(p) process. 13 | } 14 | 15 | \usage{ 16 | SpikeSlabArPrior( 17 | lags, 18 | prior.inclusion.probabilities = 19 | GeometricSequence( lags, initial.value = .8, discount.factor = .8), 20 | prior.mean = rep(0, lags), 21 | prior.sd = 22 | GeometricSequence(lags, initial.value = .5, discount.factor = .8), 23 | sdy, 24 | prior.df = 1, 25 | expected.r2 = .5, 26 | sigma.upper.limit = Inf, 27 | truncate = TRUE) 28 | } 29 | 30 | \arguments{ 31 | \item{lags}{A positive integer giving the maximum number of lags to 32 | consider.} 33 | 34 | \item{prior.inclusion.probabilities}{ A vector of length \code{lags} 35 | giving the prior probability that the corresponding AR coefficient 36 | is nonzero. } 37 | 38 | \item{prior.mean}{A vector of length \code{lags} giving the prior mean 39 | of the AR coefficients. This should almost surely stay set at zero. 
40 | } 41 | 42 | \item{prior.sd}{A vector of length \code{lags} giving the prior 43 | standard deviations of the AR coefficients, which are modeled as 44 | a-priori independent of one another.} 45 | 46 | \item{sdy}{The sample standard deviation of the series being modeled.} 47 | 48 | \item{expected.r2}{The expected fraction of variation in the response 49 | explained by this AR process.} 50 | 51 | \item{prior.df}{A positive number indicating the number of 52 | observations (time points) worth of weight to assign to the guess at 53 | \code{expected.r2}.} 54 | 55 | \item{sigma.upper.limit}{A positive number less than infinity 56 | truncates the support of the prior distribution to regions where the 57 | residual standard deviation is less than the specified limit. Any 58 | other value indicates support over the entire positive real line.} 59 | 60 | \item{truncate}{If \code{TRUE} then the support of the distribution is 61 | truncated to the region where the AR coefficients imply a stationary 62 | process. If \code{FALSE} the coefficients are unconstrained.} 63 | 64 | } 65 | 66 | \value{ 67 | A list of class \code{SpikeSlabArPrior} containing the information 68 | needed for the underlying C++ code to instantiate this prior. 69 | } 70 | 71 | \author{ 72 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 73 | } 74 | -------------------------------------------------------------------------------- /man/state.sizes.Rd: -------------------------------------------------------------------------------- 1 | \name{state.sizes} 2 | 3 | \alias{StateSizes} 4 | 5 | \Rdversion{1.0} 6 | 7 | \title{ 8 | Compute state dimensions 9 | } 10 | 11 | \description{ Returns a vector containing the size of each state 12 | component (i.e. the state dimension) in the state vector. 
} 13 | 14 | \usage{ 15 | StateSizes(state.specification) 16 | } 17 | 18 | \arguments{ 19 | \item{state.specification}{A list containing state specification 20 | components, such as would be passed to \code{\link{bsts}}.} 21 | } 22 | 23 | \value{ 24 | A numeric vector giving the dimension of each state component. 25 | } 26 | 27 | \author{ 28 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 29 | } 30 | 31 | \examples{ 32 | y <- rnorm(1000) 33 | state.specification <- AddLocalLinearTrend(list(), y) 34 | state.specification <- AddSeasonal(state.specification, y, 7) 35 | StateSizes(state.specification) 36 | } 37 | 38 | \keyword{chron} 39 | -------------------------------------------------------------------------------- /man/summary.bsts.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{summary.bsts} 5 | \title{Summarize a Bayesian structural time series object} 6 | 7 | \alias{summary.bsts} 8 | 9 | \description{Print a summary of a \code{\link{bsts}} object.} 10 | 11 | \usage{ 12 | \method{summary}{bsts}(object, burn = SuggestBurn(.1, object), \dots) 13 | } 14 | 15 | \arguments{ 16 | \item{object}{An object of class \code{\link{bsts}} created by the function 17 | of the same name.} 18 | 19 | \item{burn}{The number of MCMC iterations to discard as burn-in.} 20 | 21 | \item{\dots}{Additional arguments passed to 22 | \code{\link[BoomSpikeSlab]{summary.lm.spike}} if \code{object} has a regression component.} 23 | } 24 | 25 | \value{ 26 | Returns a list with the following elements. 
27 | \item{residual.sd}{The posterior mean of the residual standard 28 | deviation parameter.} 29 | 30 | \item{prediction.sd}{The standard deviation of the one-step-ahead 31 | prediction errors for the training data.} 32 | 33 | \item{rsquare}{Proportion by which the residual variance is less 34 | than the variance of the original observations.} 35 | 36 | \item{relative.gof}{Harvey's goodness of fit statistic. Let 37 | \eqn{\nu}{nu} denote the one step ahead prediction errors, 38 | \eqn{n}{n} denote the length of the series, and \eqn{y}{y} denote 39 | the original series. The goodness of fit statistic is \deqn{ 1 - 40 | \sum_{i = 1}^n \nu_i^2 / \sum_{i = 2}^{n} (\Delta y_i- \Delta \bar 41 | y)^2.}{ 1 - sum(nu^2) / ((n-2) * var(diff(y))).} 42 | 43 | This statistic is analogous to \eqn{R^2}{rsquare} in a regression 44 | model, but the reduction in sum of squared errors is relative to a 45 | random walk with a constant drift, \deqn{y_{t+1} = y_t + \beta + 46 | \epsilon_t,}{y[t+1] = y[t] + beta + epsilon[t],} which Harvey 47 | (1989, equation 5.5.14) argues is a more relevant baseline than a 48 | simple mean. Unlike a traditional R-square statistic, this can be 49 | negative.} 50 | 51 | \item{size}{Distribution of the number of nonzero coefficients 52 | appearing in the model} 53 | 54 | \item{coefficients}{If \code{object} contains a regression component then the 55 | output contains a matrix with rows corresponding to coefficients, and 56 | columns corresponding to: 57 | \itemize{ 58 | \item The posterior probability the variable is included. 59 | \item The posterior probability that the variable is positive. 60 | \item The conditional expectation of the coefficient, given inclusion. 61 | \item The conditional standard deviation of the coefficient, given inclusion. 
62 | } 63 | } 64 | } 65 | 66 | \references{Harvey's goodness of fit statistic is from Harvey (1989) 67 | \emph{Forecasting, structural time series models, and the Kalman filter.} 68 | Page 268.} 69 | 70 | \examples{ 71 | data(AirPassengers) 72 | y <- log(AirPassengers) 73 | ss <- AddLocalLinearTrend(list(), y) 74 | ss <- AddSeasonal(ss, y, nseasons = 12) 75 | model <- bsts(y, state.specification = ss, niter = 100) 76 | summary(model, burn = 20) 77 | } 78 | 79 | \seealso{ 80 | \code{\link{bsts}}, \code{\link{plot.bsts}}, \code{\link[BoomSpikeSlab]{summary.lm.spike}} 81 | } 82 | -------------------------------------------------------------------------------- /man/to.posixt.Rd: -------------------------------------------------------------------------------- 1 | \name{to.posixt} 2 | \alias{YearMonToPOSIX} 3 | \alias{DateToPOSIX} 4 | \title{Convert to POSIXt} 5 | 6 | \Rdversion{1.1} 7 | 8 | \description{ 9 | Convert an object of class Date to class POSIXct without getting bogged 10 | down in timezone calculation. 11 | 12 | } 13 | 14 | \details{ 15 | 16 | Calling \code{\link{as.POSIXct}} on another date/time object 17 | (e.g. Date) applies a timezone correction to the object. This can 18 | shift the time marker by a few hours, which can have the effect of 19 | shifting the day by one unit. If the day was the first or last in a 20 | month or year, then the month or year will be off by one as well. 21 | 22 | Coercing the object to the character representation of a Date prevents this 23 | adjustment from being applied, and leaves the POSIXt return value with the 24 | intended day, month, and year. 25 | } 26 | 27 | \usage{ 28 | DateToPOSIX(timestamps) 29 | YearMonToPOSIX(timestamps) 30 | } 31 | 32 | \arguments{ 33 | 34 | \item{timestamps}{An object of class \code{\link{yearmon}} or 35 | \code{\link{Date}} to be converted to POSIXct.} 36 | 37 | } 38 | 39 | 40 | \author{ 41 | Steven L. 
Scott \email{steve.the.bayesian@gmail.com} 42 | } 43 | 44 | 45 | \keyword{models} 46 | \keyword{regression} 47 | -------------------------------------------------------------------------------- /man/turkish.Rd: -------------------------------------------------------------------------------- 1 | \name{turkish} 2 | \docType{data} 3 | \alias{turkish} 4 | \title{Turkish Electricity Usage} 5 | 6 | \description{A daily time series of electricity usage in Turkey. } 7 | 8 | \usage{ 9 | data(turkish) 10 | } 11 | \format{zoo time series} 12 | \source{ 13 | https://robjhyndman.com/data/turkey_elec.csv 14 | } 15 | \examples{ 16 | data(turkish) 17 | plot(turkish) 18 | } 19 | 20 | \seealso{ 21 | \code{\link{bsts}} 22 | } 23 | 24 | \keyword{datasets} 25 | -------------------------------------------------------------------------------- /man/week.ends.Rd: -------------------------------------------------------------------------------- 1 | % Copyright 2011 Google Inc. All Rights Reserved. 2 | % Author: steve.the.bayesian@gmail.com (Steve Scott) 3 | 4 | \name{week.ends} 5 | 6 | \alias{WeekEndsMonth} 7 | \alias{WeekEndsQuarter} 8 | 9 | \Rdversion{1.0} 10 | 11 | \title{ 12 | Check to see if a week contains the end of a month or quarter 13 | } 14 | 15 | \description{ 16 | Returns a logical vector indicating whether the given week contains 17 | the end of a month or quarter. 18 | } 19 | 20 | \usage{ 21 | WeekEndsMonth(week.ending) 22 | WeekEndsQuarter(week.ending) 23 | } 24 | 25 | \arguments{ 26 | 27 | \item{week.ending}{A vector of class \code{\link{Date}}. Each entry contains the 28 | date of the last day in a week.} 29 | 30 | } 31 | 32 | \value{A logical vector indicating whether the given week contains the 33 | end of a month or a quarter.} 34 | 35 | \author{ 36 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 37 | } 38 | 39 | \seealso{ 40 | \code{\link{bsts.mixed}}. 
41 | } 42 | 43 | \examples{ 44 | week.ending <- as.Date(c("2011-10-01", 45 | "2011-10-08", 46 | "2011-12-03", 47 | "2011-12-31")) 48 | WeekEndsMonth(week.ending) == c(TRUE, FALSE, TRUE, TRUE) 49 | WeekEndsQuarter(week.ending) == c(TRUE, FALSE, FALSE, TRUE) 50 | } 51 | 52 | \keyword{chron} 53 | -------------------------------------------------------------------------------- /man/weekday.names.Rd: -------------------------------------------------------------------------------- 1 | \name{weekday.names} 2 | \docType{data} 3 | \alias{weekday.names} 4 | \title{Days of the Week} 5 | \description{A character vector listing the names of the days of the week.} 6 | \usage{ 7 | weekday.names 8 | } 9 | \seealso{ 10 | \code{\link{month.name}} 11 | } 12 | 13 | -------------------------------------------------------------------------------- /man/wide.to.long.Rd: -------------------------------------------------------------------------------- 1 | \name{wide.to.long} 2 | \alias{WideToLong} 3 | \alias{LongToWide} 4 | 5 | \Rdversion{1.1} 6 | 7 | \title{ 8 | Convert Between Wide and Long Format 9 | } 10 | 11 | \description{ 12 | 13 | Convert a multivariate time series between wide and long formats. In 14 | "wide" format there is one row per time point, with series organized 15 | by columns. In "long" format there is one row per observation, with 16 | variables indicating the series and time point to which an observation 17 | belongs. 18 | 19 | } 20 | 21 | \usage{ 22 | WideToLong(response, na.rm = TRUE) 23 | LongToWide(response, series.id, timestamps) 24 | } 25 | 26 | \arguments{ 27 | 28 | \item{response}{For \code{WideToLong} this is a matrix, with rows 29 | representing time and columns representing variables. This can be a 30 | \code{\link{zoo}} matrix with timestamps as an index. 31 | 32 | For \code{LongToWide}, \code{response} is a vector.} 33 | 34 | \item{na.rm}{If TRUE then missing values will be omitted from the 35 | returned data frame (their absence denoting missingness). 
36 | Otherwise, missing values will be included as NA's.} 37 | 38 | \item{series.id}{A factor (or variable coercible to factor) of the 39 | same length as \code{response}, indicating the series to which each 40 | observation belongs. } 41 | 42 | \item{timestamps}{A variable of the same length as \code{response}, 43 | indicating the time period to which each observation belongs.} 44 | } 45 | 46 | \value{ 47 | \code{LongToWide} returns a zoo matrix with the time series in wide format. 48 | \code{WideToLong} returns a 3-column data frame with columns "time", "series", and "values". 49 | } 50 | 51 | \author{ 52 | Steven L. Scott \email{steve.the.bayesian@gmail.com} 53 | } 54 | 55 | \examples{ 56 | data(gdp) 57 | gdp.wide <- LongToWide(gdp$GDP, gdp$Country, gdp$Time) 58 | gdp.long <- WideToLong(gdp.wide) 59 | } 60 | 61 | -------------------------------------------------------------------------------- /src/Makevars: -------------------------------------------------------------------------------- 1 | BOOM_DIR = `$(R_HOME)/bin/Rscript -e "cat(system.file(package='Boom'))"` 2 | BOOM_LIB = ${BOOM_DIR}/lib${R_ARCH}/libboom.a 3 | PKG_LIBS = ${BOOM_LIB} 4 | PKG_CPPFLAGS = -I${BOOM_DIR}/include -DADD_ -DR_NO_REMAP -DRLANGUAGE 5 | 6 | # PKG_CXXFLAGS= -fsanitize=address 7 | # PKG_CXXFLAGS= -fsanitize=undefined 8 | 9 | # CXX_STD = CXX11 10 | 11 | # When compiling remotely (e.g. on CRAN's winbuilder) flags to be 12 | # passed to make can be specified here. 13 | # -k: keep going 14 | # -j 16: use 16 threads 15 | # MAKEFLAGS=" -j 32 " 16 | -------------------------------------------------------------------------------- /src/aggregate_time_series.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2011 Google Inc. All Rights Reserved. 
2 | // Author: stevescott@google.com (Steve Scott) 3 | 4 | #include 5 | #include 6 | #include "Rinternals.h" 7 | #include "r_interface/boom_r_tools.hpp" 8 | #include "LinAlg/Matrix.hpp" 9 | #include "LinAlg/SubMatrix.hpp" 10 | 11 | extern "C" { 12 | // Sum the results of a fine-scale time series to a coarser scale. 13 | // Args: 14 | // r_fine_series: An R vector or matrix to aggregate. If a matrix 15 | // is passed, then the columns of the matrix represent time. 16 | // r_contains_end: An R logical vector indicating whether each 17 | // fine-scale time interval in r_fine_series contains the end of 18 | // a coarse time interval. 19 | // r_membership_fraction: An R numeric vector containing the 20 | // fraction of the output that should be attributed to the 21 | // coarse-scale interval containing the beginning of each 22 | // fine-scale time interval. This is always positive, and is 23 | // typically 1. 24 | // Returns: 25 | // An R matrix (if r_fine_series is a matrix) or vector 26 | // (otherwise) containing the time series aggregation of 27 | // r_fine_series. 28 | // 29 | // Note that unless r_fine_series happens to coincide with the 30 | // exact beginning or end of a coarse time interval, the left and 31 | // right end points of the resulting aggregation will not contain 32 | // full aggregates. 
33 | SEXP analysis_common_r_bsts_aggregate_time_series_( 34 | SEXP r_fine_series, 35 | SEXP r_contains_end, 36 | SEXP r_membership_fraction) { 37 | int *contains_end = LOGICAL(r_contains_end); 38 | double *membership_fraction = REAL(r_membership_fraction); 39 | 40 | int num_fine_time_points = LENGTH(r_contains_end); 41 | int num_fine_rows = 1; 42 | 43 | if (Rf_isMatrix(r_fine_series)) { 44 | num_fine_rows = Rf_nrows(r_fine_series); 45 | } 46 | 47 | int num_coarse_time_points = 0; 48 | for (int i = 0; i < num_fine_time_points; ++i) { 49 | bool end = contains_end[i]; 50 | num_coarse_time_points += end; 51 | } 52 | 53 | // There is a remainder unless the last entry contains_end and the 54 | // membership fraction is 1. The .9999 allows for a bit of 55 | // numerical fudge. 56 | bool no_remainder = contains_end[num_fine_time_points - 1] && 57 | membership_fraction[num_fine_time_points - 1] >= .9999; 58 | bool have_remainder = !no_remainder; 59 | num_coarse_time_points += have_remainder; 60 | 61 | BOOM::SubMatrix fine_series(REAL(r_fine_series), 62 | num_fine_rows, 63 | num_fine_time_points); 64 | BOOM::Matrix coarse_series(num_fine_rows, num_coarse_time_points); 65 | 66 | for (int iteration = 0; iteration < num_fine_rows; ++iteration) { 67 | double current = 0; 68 | int coarse_time = 0; 69 | for (int fine_time = 0; fine_time < num_fine_time_points; ++fine_time) { 70 | if (contains_end[fine_time]) { 71 | current += fine_series(iteration, fine_time) * 72 | membership_fraction[fine_time]; 73 | coarse_series(iteration, coarse_time) = current; 74 | ++coarse_time; 75 | current = (1 - membership_fraction[fine_time]) * 76 | fine_series(iteration, fine_time); 77 | } else { 78 | current += fine_series(iteration, fine_time); 79 | } 80 | } 81 | if (have_remainder) { 82 | coarse_series(iteration, coarse_time) = current; 83 | } 84 | } 85 | 86 | BOOM::RMemoryProtector protector; 87 | SEXP r_ans = protector.protect( 88 | Rf_isMatrix(r_fine_series) 89 | ? 
Rf_allocMatrix(REALSXP, num_fine_rows, num_coarse_time_points) 90 | : Rf_allocVector(REALSXP, num_coarse_time_points)); 91 | double *ans = REAL(r_ans); 92 | std::copy(coarse_series.begin(), coarse_series.end(), ans); 93 | return r_ans; 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/bsts_init.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. All Rights Reserved. 2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 
12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #include "Rinternals.h" // for SEXP 18 | 19 | // for R_registerRoutines and R_CallMethodDef 20 | #include "R_ext/Rdynload.h" 21 | 22 | #define CALLDEF(name, n) {#name, (DL_FUNC) &name, n} 23 | 24 | extern "C" { 25 | 26 | SEXP analysis_common_r_fit_bsts_model_( 27 | SEXP r_data_list, 28 | SEXP r_state_specification, 29 | SEXP r_prior, 30 | SEXP r_options, 31 | SEXP r_family, 32 | SEXP r_niter, 33 | SEXP r_ping, 34 | SEXP r_timeout_in_seconds, 35 | SEXP r_seed); 36 | 37 | SEXP analysis_common_r_fit_dirm_( 38 | SEXP r_data_list, 39 | SEXP r_state_specification, 40 | SEXP r_prior, 41 | SEXP r_niter, 42 | SEXP r_ping, 43 | SEXP r_timeout_in_seconds, 44 | SEXP r_seed); 45 | 46 | SEXP analysis_common_r_predict_bsts_model_( 47 | SEXP r_bsts_object, 48 | SEXP r_prediction_data, 49 | SEXP r_burn, 50 | SEXP r_observed_data, 51 | SEXP r_seed); 52 | 53 | SEXP analysis_common_r_bsts_one_step_prediction_errors_( 54 | SEXP r_bsts_object, 55 | SEXP r_cutpoints, 56 | SEXP r_standardize); 57 | 58 | SEXP analysis_common_r_bsts_aggregate_time_series_( 59 | SEXP r_fine_series, 60 | SEXP r_contains_end, 61 | SEXP r_membership_fraction); 62 | 63 | SEXP analysis_common_r_bsts_fit_mixed_frequency_model_( 64 | SEXP r_target_series, 65 | SEXP r_predictors, 66 | SEXP r_which_coarse_interval, 67 | SEXP r_membership_fraction, 68 | SEXP r_contains_end, 69 | SEXP r_state_specification, 70 | SEXP r_regression_prior, 71 | SEXP r_niter, 72 | SEXP r_ping, 73 | SEXP r_seed, 74 | SEXP r_truth); 75 | 76 | SEXP analysis_common_r_get_date_ranges_( 77 | SEXP r_holiday, 78 | SEXP r_timestamps); 79 | 80 | SEXP analysis_common_r_fit_multivariate_bsts_model_( 81 | SEXP r_data_list, 82 | SEXP r_state_specification, 83 | SEXP r_series_state_specification, 84 | SEXP 
r_prior, 85 | SEXP r_options, 86 | SEXP r_niter, 87 | SEXP r_ping, 88 | SEXP r_seed); 89 | 90 | SEXP analysis_common_r_predict_multivariate_bsts_model_( 91 | SEXP r_mbsts_object, 92 | SEXP r_prediction_data, 93 | SEXP r_burn, 94 | SEXP r_seed); 95 | 96 | static R_CallMethodDef bsts_arg_description[] = { 97 | CALLDEF(analysis_common_r_fit_bsts_model_, 9), 98 | CALLDEF(analysis_common_r_fit_dirm_, 7), 99 | CALLDEF(analysis_common_r_predict_bsts_model_, 5), 100 | CALLDEF(analysis_common_r_bsts_one_step_prediction_errors_, 3), 101 | CALLDEF(analysis_common_r_bsts_aggregate_time_series_, 3), 102 | CALLDEF(analysis_common_r_bsts_fit_mixed_frequency_model_, 11), 103 | CALLDEF(analysis_common_r_get_date_ranges_, 2), 104 | CALLDEF(analysis_common_r_fit_multivariate_bsts_model_, 8), 105 | CALLDEF(analysis_common_r_predict_multivariate_bsts_model_, 4), 106 | {NULL, NULL, 0} // NOLINT 107 | }; 108 | 109 | void R_init_bsts(DllInfo *info) { 110 | R_registerRoutines(info, NULL, bsts_arg_description, NULL, NULL); // NOLINT 111 | R_useDynamicSymbols(info, FALSE); 112 | } 113 | 114 | } // extern "C" 115 | -------------------------------------------------------------------------------- /src/create_dynamic_intercept_state_model.h: -------------------------------------------------------------------------------- 1 | #ifndef BSTS_CREATE_DYNAMIC_INTERCEPT_STATE_MODEL_H_ 2 | #define BSTS_CREATE_DYNAMIC_INTERCEPT_STATE_MODEL_H_ 3 | /* 4 | Copyright (C) 2005-2019 Steven L. Scott 5 | 6 | This library is free software; you can redistribute it and/or modify it under 7 | the terms of the GNU Lesser General Public License as published by the Free 8 | Software Foundation; either version 2.1 of the License, or (at your option) 9 | any later version. 10 | 11 | This library is distributed in the hope that it will be useful, but WITHOUT 12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 13 | FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more 14 | details. 15 | 16 | You should have received a copy of the GNU Lesser General Public License along 17 | with this library; if not, write to the Free Software Foundation, Inc., 51 18 | Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 19 | */ 20 | 21 | #include "create_state_model.h" 22 | #include "Models/StateSpace/StateModels/StateModel.hpp" 23 | 24 | namespace BOOM { 25 | 26 | // Forward declarations. 27 | 28 | // Host model 29 | class DynamicInterceptRegressionModel; 30 | 31 | // State models 32 | class DynamicInterceptLocalLevelStateModel; 33 | 34 | namespace bsts { 35 | 36 | class DynamicInterceptStateModelFactory : public StateModelFactoryBase { 37 | public: 38 | // Args: 39 | // io_manager: A pointer to the object manaaging the R list that will 40 | // record (or has already recorded) the MCMC output. If a nullptr is 41 | // passed then states will be created without IoManager support. 42 | explicit DynamicInterceptStateModelFactory(RListIoManager *io_manager) 43 | : StateModelFactoryBase(io_manager) {} 44 | 45 | void AddState(DynamicInterceptRegressionModel *model, 46 | SEXP r_state_specification, 47 | const std::string &prefix = ""); 48 | 49 | // Save the final state (i.e. at time T) of the model for use with 50 | // prediction. Do not call this function until after all components of 51 | // state have been added. 52 | // Args: 53 | // model: A pointer to the model that owns the state. 54 | // final_state: A pointer to a Vector to hold the state. This can be 55 | // nullptr if the state is only going to be recorded. If state is 56 | // going to be read, then final_state must be non-NULL. A non-NULL 57 | // vector will be re-sized if it is the wrong size. 58 | // list_element_name: The name of the final state vector in the R list 59 | // holding the MCMC output. 
60 | void SaveFinalState(DynamicInterceptRegressionModel *model, 61 | BOOM::Vector *final_state = nullptr, 62 | const std::string &list_element_name = "final.state"); 63 | 64 | private: 65 | Ptr CreateStateModel( 66 | DynamicInterceptRegressionModel *model, 67 | SEXP r_state_component, 68 | const std::string &prefix); 69 | 70 | DynamicInterceptLocalLevelStateModel *CreateDynamicLocalLevel( 71 | SEXP r_state_component, 72 | const std::string &prefix); 73 | 74 | }; 75 | 76 | } // namespace bsts 77 | } // namespace BOOM 78 | 79 | 80 | #endif // BSTS_CREATE_DYNAMIC_INTERCEPT_STATE_MODEL_H_ 81 | 82 | -------------------------------------------------------------------------------- /src/get_date_ranges.cc: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (C) 2018 Steven L. Scott 3 | 4 | This library is free software; you can redistribute it and/or modify it under 5 | the terms of the GNU Lesser General Public License as published by the Free 6 | Software Foundation; either version 2.1 of the License, or (at your option) 7 | any later version. 8 | 9 | This library is distributed in the hope that it will be useful, but WITHOUT 10 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS 11 | FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more 12 | details. 
13 | 14 | You should have received a copy of the GNU Lesser General Public License along 15 | with this library; if not, write to the Free Software Foundation, Inc., 51 16 | Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 17 | */ 18 | 19 | 20 | #include 21 | #include 22 | 23 | #include "create_state_model.h" 24 | 25 | #include "r_interface/boom_r_tools.hpp" 26 | #include "r_interface/handle_exception.hpp" 27 | #include "r_interface/list_io.hpp" 28 | #include "r_interface/print_R_timestamp.hpp" 29 | #include "r_interface/seed_rng_from_R.hpp" 30 | 31 | #include "Models/StateSpace/StateModels/Holiday.hpp" 32 | #include "cpputil/report_error.hpp" 33 | 34 | extern "C" { 35 | using namespace BOOM; 36 | using std::endl; 37 | 38 | // Given a holiday and a sorted (increasing) vector of of timestamps, return a 39 | // two column matrix of timestamps giving the date ranges over which the 40 | // holiday is active. 41 | // 42 | // Args: 43 | // r_holiday: An R object inheriting from "Holiday". 44 | // r_timestamps: A vector containing an increasing sequence of Date objects. 45 | // 46 | // Returns: 47 | // A two column matrix of integers giving the indices of the first and last 48 | // active timepoints of each holiday period contained in timestamps. The 49 | // indices are unit-offset, so they're ready for use by R without adding 1 50 | // to them. 51 | SEXP analysis_common_r_get_date_ranges_( 52 | SEXP r_holiday, 53 | SEXP r_timestamps) { 54 | try { 55 | Ptr holiday = 56 | BOOM::bsts::StateModelFactory::CreateHoliday(r_holiday); 57 | std::vector dates = BOOM::ToBoomDateVector(r_timestamps); 58 | std::vector> date_ranges; 59 | bool previous_day_was_holiday = false; 60 | int start = -1; 61 | int end = -1; 62 | for (int i = 0; i < dates.size(); ++i) { 63 | if (holiday->active(dates[i])) { 64 | if (!previous_day_was_holiday) { 65 | // Found the start of a new holiday. Add one to correct for R's 66 | // unit-offset counting scheme. 
67 | start = i + 1; 68 | } 69 | previous_day_was_holiday = true; 70 | } else { 71 | if (previous_day_was_holiday) { 72 | // Found the end of a holiday. Don't add 1 here because the end was 73 | // found to be the previous time point. 74 | end = i; 75 | date_ranges.push_back(std::make_pair(start, end)); 76 | start = -1; 77 | end = -1; 78 | previous_day_was_holiday = false; 79 | } 80 | } 81 | } 82 | if (start > 0 && end < 0) { 83 | date_ranges.push_back(std::make_pair(start, dates.size())); 84 | } 85 | 86 | Matrix date_range_matrix(date_ranges.size(), 2); 87 | for (int i = 0; i < nrow(date_range_matrix); ++i) { 88 | date_range_matrix(i, 0) = date_ranges[i].first; 89 | date_range_matrix(i, 1) = date_ranges[i].second; 90 | } 91 | return ToRMatrix(date_range_matrix); 92 | } catch (std::exception &e) { 93 | RInterface::handle_exception(e); 94 | } catch(...) { 95 | RInterface::handle_unknown_exception(); 96 | } 97 | return R_NilValue; 98 | } 99 | 100 | } // extern "C" 101 | -------------------------------------------------------------------------------- /src/state_space_gaussian_model_manager.h: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. All Rights Reserved. 2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 
12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #ifndef BSTS_SRC_STATE_SPACE_GAUSSIAN_MODEL_MANAGER_H_ 18 | #define BSTS_SRC_STATE_SPACE_GAUSSIAN_MODEL_MANAGER_H_ 19 | 20 | #include "model_manager.h" 21 | #include "LinAlg/Matrix.hpp" 22 | #include "LinAlg/Vector.hpp" 23 | #include "Models/StateSpace/StateSpaceModel.hpp" 24 | 25 | namespace BOOM { 26 | namespace bsts { 27 | 28 | // A base class that handles "CreateModel" for both the regression and 29 | // non-regression flavors of Gaussian models. 30 | class GaussianModelManagerBase : public ScalarModelManager { 31 | public: 32 | ScalarStateSpaceModelBase * CreateModel( 33 | SEXP r_data_list, 34 | SEXP r_state_specification, 35 | SEXP r_prior, 36 | SEXP r_options, 37 | RListIoManager *io_manager) override; 38 | }; 39 | 40 | // A holdout error sampler for a plain Gaussian state space model. 41 | class StateSpaceModelPredictionErrorSampler 42 | : public HoldoutErrorSamplerImpl { 43 | public: 44 | // Args: 45 | // model: The model containing data up to a specified cutpoint. 46 | // holdout_data: Observed values after the cutpoint. 47 | // niter: The desired number of draws (MCMC iterations) from the posterior 48 | // distribution. 49 | // errors: A matrix that will hold the output of the simulation. 
50 | StateSpaceModelPredictionErrorSampler(const Ptr &model, 51 | const Vector &holdout_data, 52 | int niter, 53 | bool standardize, 54 | Matrix *errors); 55 | void sample_holdout_prediction_errors() override; 56 | 57 | private: 58 | Ptr model_; 59 | Vector holdout_data_; 60 | int niter_; 61 | bool standardize_; 62 | Matrix *errors_; 63 | }; 64 | 65 | class StateSpaceModelManager 66 | : public GaussianModelManagerBase { 67 | public: 68 | // Creates the model_ object, assigns a PosteriorSamper, and 69 | // allocates space in the io_manager for the objects in the 70 | // observation model. 71 | // Args: 72 | // r_data_list: Contains a numeric vector named 'response' and a 73 | // logical vector 'response.is.observed.' 74 | // r_prior: An R object of class SdPrior. 75 | // r_options: Not used. 76 | // io_manager: The io_manager that will record the MCMC draws. 77 | StateSpaceModel * CreateBareModel( 78 | SEXP r_data_list, 79 | SEXP r_prior, 80 | SEXP r_options, 81 | RListIoManager *io_manager) override; 82 | 83 | HoldoutErrorSampler CreateHoldoutSampler( 84 | SEXP r_bsts_object, 85 | int cutpoint, 86 | bool standardize, 87 | Matrix *prediction_error_output) override; 88 | 89 | void AddDataFromBstsObject(SEXP r_bsts_object) override; 90 | void AddDataFromList(SEXP r_data_list) override; 91 | int UnpackForecastData(SEXP r_prediction_data) override; 92 | Vector SimulateForecast(const Vector &final_state) override; 93 | 94 | private: 95 | void AddData(const Vector &response, 96 | const std::vector &response_is_observed); 97 | 98 | Ptr model_; 99 | int forecast_horizon_; 100 | }; 101 | 102 | } // namespace bsts 103 | } // namespace BOOM 104 | 105 | #endif // BSTS_SRC_STATE_SPACE_GAUSSIAN_MODEL_MANAGER_H_ 106 | -------------------------------------------------------------------------------- /src/state_space_logit_model_manager.h: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. All Rights Reserved. 
2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #ifndef BSTS_SRC_STATE_SPACE_LOGIT_MODEL_MANAGER_H_ 18 | #define BSTS_SRC_STATE_SPACE_LOGIT_MODEL_MANAGER_H_ 19 | 20 | #include "model_manager.h" 21 | #include "LinAlg/Matrix.hpp" 22 | #include "LinAlg/Vector.hpp" 23 | #include "Models/StateSpace/StateSpaceLogitModel.hpp" 24 | 25 | namespace BOOM { 26 | namespace bsts { 27 | 28 | class StateSpaceLogitModelManager 29 | : public ScalarModelManager { 30 | public: 31 | StateSpaceLogitModelManager(); 32 | 33 | // Args: 34 | // r_data_list: Must either be NULL, or contain 'response', 35 | // 'trials', and 'respnose.is.observed'. If the model contains 36 | // a regression component then it must contain 'predictors' as 37 | // well. 38 | // r_prior: Can be R_NilValue if the model has no regression 39 | // component (or the model is not being created for MCMC). 40 | // Otherwise this should be SpikeSlabGlmPrior. 41 | // r_options: A list containing "clt.threshold" for use with the 42 | // MCMC sampler. Can be NULL. 43 | // io_manager: The io_manager that will link the MCMC draws to the 44 | // R list receiving them. 
45 | StateSpaceLogitModel * CreateBareModel( 46 | SEXP r_data_list, 47 | SEXP r_prior, 48 | SEXP r_options, 49 | RListIoManager *io_manager) override; 50 | 51 | HoldoutErrorSampler CreateHoldoutSampler(SEXP, int, bool, Matrix *) override { 52 | return HoldoutErrorSampler(new NullErrorSampler); 53 | } 54 | 55 | void AddDataFromBstsObject(SEXP r_bsts_object) override; 56 | void AddDataFromList(SEXP r_data_list) override; 57 | int UnpackForecastData(SEXP r_prediction_data) override; 58 | Vector SimulateForecast(const Vector &final_state) override; 59 | 60 | void SetPredictorDimension(int xdim); 61 | void AddData(const Vector &successes, 62 | const Vector &trials, 63 | const Matrix &predictors, 64 | const std::vector &response_is_observed); 65 | private: 66 | Ptr model_; 67 | int predictor_dimension_; 68 | int clt_threshold_; 69 | 70 | Vector forecast_trials_; 71 | Matrix forecast_predictors_; 72 | }; 73 | 74 | } // namespace bsts 75 | } // namespace BOOM 76 | 77 | #endif // BSTS_SRC_STATE_SPACE_LOGIT_MODEL_MANAGER_H_ 78 | -------------------------------------------------------------------------------- /src/state_space_poisson_model_manager.h: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. All Rights Reserved. 2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 
12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #ifndef BSTS_SRC_STATE_SPACE_POISSON_MODEL_MANAGER_H_ 18 | #define BSTS_SRC_STATE_SPACE_POISSON_MODEL_MANAGER_H_ 19 | 20 | #include "model_manager.h" 21 | #include "LinAlg/Matrix.hpp" 22 | #include "LinAlg/Vector.hpp" 23 | #include "Models/StateSpace/StateSpacePoissonModel.hpp" 24 | 25 | namespace BOOM { 26 | namespace bsts { 27 | 28 | class StateSpacePoissonModelManager 29 | : public ScalarModelManager { 30 | public: 31 | StateSpacePoissonModelManager(); 32 | 33 | StateSpacePoissonModel * CreateBareModel( 34 | SEXP r_data_list, 35 | SEXP r_prior, 36 | SEXP r_options, 37 | RListIoManager *io_manager) override; 38 | 39 | HoldoutErrorSampler CreateHoldoutSampler(SEXP, int, bool, Matrix *) override { 40 | return HoldoutErrorSampler(new NullErrorSampler); 41 | } 42 | 43 | void AddDataFromBstsObject(SEXP r_bsts_object) override; 44 | void AddDataFromList(SEXP r_data_list) override; 45 | int UnpackForecastData(SEXP r_prediction_data) override; 46 | Vector SimulateForecast(const Vector &final_state) override; 47 | 48 | void SetPredictorDimension(int xdim); 49 | void AddData(const Vector &counts, 50 | const Vector &exposure, 51 | const Matrix &predictors, 52 | const std::vector &is_observed); 53 | 54 | private: 55 | Ptr model_; 56 | int predictor_dimension_; 57 | 58 | Vector forecast_exposure_; 59 | Matrix forecast_predictors_; 60 | }; 61 | 62 | } // namespace bsts 63 | } // namespace BOOM 64 | 65 | #endif // BSTS_SRC_STATE_SPACE_POISSON_MODEL_MANAGER_H_ 66 | -------------------------------------------------------------------------------- /src/timestamp_info.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2020 Steven L. Scott. All Rights Reserved. 
2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #include "timestamp_info.h" 18 | #include "r_interface/boom_r_tools.hpp" 19 | 20 | namespace BOOM { 21 | namespace bsts { 22 | 23 | TimestampInfo::TimestampInfo(SEXP r_data_list) { 24 | Unpack(r_data_list); 25 | } 26 | 27 | void TimestampInfo::Unpack(SEXP r_data_list) { 28 | SEXP r_timestamp_info = getListElement(r_data_list, "timestamp.info"); 29 | trivial_ = Rf_asLogical(getListElement( 30 | r_timestamp_info, "timestamps.are.trivial")); 31 | number_of_time_points_ = Rf_asInteger(getListElement( 32 | r_timestamp_info, "number.of.time.points")); 33 | if (!trivial_) { 34 | timestamp_mapping_ = ToIntVector(getListElement( 35 | r_timestamp_info, "timestamp.mapping")); 36 | } 37 | } 38 | 39 | // Args: 40 | // r_prediction_data: A list containing an object named 'timestamps', 41 | // which is a list containing the following objects. 42 | // - timestamp.mapping: A vector of integers indicating the timestamp to 43 | // which each observation belongs. 44 | // 45 | // Effects: 46 | // The forecast_timestamps_ element in the TimestampInfo object gets 47 | // populated. 
48 | void TimestampInfo::UnpackForecastTimestamps(SEXP r_prediction_data) { 49 | SEXP r_forecast_timestamps = getListElement( 50 | r_prediction_data, "timestamps"); 51 | if (!Rf_isNull(r_forecast_timestamps)) { 52 | forecast_timestamps_ = ToIntVector(getListElement( 53 | r_forecast_timestamps, "timestamp.mapping")); 54 | for (int i = 1; i < forecast_timestamps_.size(); ++i) { 55 | if (forecast_timestamps_[i] < forecast_timestamps_[i - 1]) { 56 | report_error("Time stamps for multiplex predictions must be " 57 | "in increasing order."); 58 | } 59 | } 60 | } 61 | } 62 | } // namespace bsts 63 | } // namespace BOOM 64 | -------------------------------------------------------------------------------- /src/timestamp_info.h: -------------------------------------------------------------------------------- 1 | #ifndef BSTS_SRC_TIMESTAMP_INFO_H_ 2 | #define BSTS_SRC_TIMESTAMP_INFO_H_ 3 | // Copyright 2020 Steven L. Scott. All Rights Reserved. 4 | // 5 | // This library is free software; you can redistribute it and/or 6 | // modify it under the terms of the GNU Lesser General Public 7 | // License as published by the Free Software Foundation; either 8 | // version 2.1 of the License, or (at your option) any later version. 9 | // 10 | // This library is distributed in the hope that it will be useful, 11 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 13 | // Lesser General Public License for more details. 14 | // 15 | // You should have received a copy of the GNU Lesser General Public 16 | // License along with this library; if not, write to the Free Software 17 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 18 | 19 | #include "r_interface/boom_r_tools.hpp" 20 | 21 | namespace BOOM { 22 | namespace bsts { 23 | 24 | // A summary describing the timestamps accompanying the time series. 
25 | class TimestampInfo { 26 | public: 27 | 28 | // Create a default TimestampInfo object. 29 | TimestampInfo() : trivial_(true), 30 | number_of_time_points_(-1) 31 | {} 32 | 33 | // Create a C++ TimestampInfo object from an R TimestampInfo object. 34 | explicit TimestampInfo(SEXP r_data_list); 35 | 36 | // Args: 37 | // r_data_list: A list containing an object named 'timestamp.info' which 38 | // is an R object of class TimestampInfo. The list contains the 39 | // following named elements: 40 | // - timestamps.are.trivial: Scalar boolean. 41 | // - number.of.time.points: Scalar integer. 42 | // - timestamp.mapping: Either R_NilValue (if timestamps are trivial) 43 | // or a numeric vector containing the index of the timestamp to 44 | // which each observation belongs. These indices are in R's 45 | // unit-offset counting system. The member function 'mapping' 46 | // handles the conversion to the C++ 0-offset counting system. 47 | // 48 | // Effects: 49 | // The timestamp.info object is extracted, and its contents are used to 50 | // populate this object. 51 | void Unpack(SEXP r_data_list); 52 | 53 | void UnpackForecastTimestamps(SEXP r_prediction_data); 54 | 55 | void set_time_dimension(int dim) { 56 | number_of_time_points_ = dim; 57 | } 58 | 59 | bool trivial() const {return trivial_;} 60 | int number_of_time_points() const {return number_of_time_points_;} 61 | 62 | // The index of the time point to which observation i belongs. The index 63 | // is in C's 0-based counting system. 64 | // 65 | // Args: 66 | // observation_number: The index of an observation (row in the data) 67 | // in C's 0-offset counting system. 68 | // 69 | // Returns: 70 | // The index of the time point (again, in C's 0-offset counting system) 71 | // to which the specified observation belongs. 72 | int mapping(int observation_number) const { 73 | return trivial_ ? 
observation_number 74 | : timestamp_mapping_[observation_number] - 1; 75 | } 76 | 77 | const std::vector &forecast_timestamps() const { 78 | return forecast_timestamps_; 79 | } 80 | 81 | private: 82 | // Timestamps are trivial if the time points are uniformly spaced, no time 83 | // point is skipped, and there is a single observation per time point. 84 | bool trivial_; 85 | 86 | // The number of distinct time points. Some of these might contain only 87 | // missing data. 88 | int number_of_time_points_; 89 | 90 | // timestamp_mapping_[i] gives the index of the time point to which 91 | // observation i belongs. The indices are stored relative to 1 (as is the 92 | // custom in R). 93 | std::vector timestamp_mapping_; 94 | 95 | // Indicates the number of time points past the end of the training data 96 | // for each forecast data point. For example, if the next three time 97 | // points are to be forecast, this will be [1, 2, 3]. If data are not 98 | // multiplexed then forecast_timestamps_ will be empty. 99 | std::vector forecast_timestamps_; 100 | }; 101 | } // namespace bsts 102 | } // namespace BOOM 103 | 104 | #endif // BSTS_SRC_TIMESTAMP_INFO_H_ 105 | -------------------------------------------------------------------------------- /src/utils.cc: -------------------------------------------------------------------------------- 1 | // Copyright 2018 Google Inc. All Rights Reserved. 2 | // 3 | // This library is free software; you can redistribute it and/or 4 | // modify it under the terms of the GNU Lesser General Public 5 | // License as published by the Free Software Foundation; either 6 | // version 2.1 of the License, or (at your option) any later version. 7 | // 8 | // This library is distributed in the hope that it will be useful, 9 | // but WITHOUT ANY WARRANTY; without even the implied warranty of 10 | // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 11 | // Lesser General Public License for more details. 
12 | // 13 | // You should have received a copy of the GNU Lesser General Public 14 | // License along with this library; if not, write to the Free Software 15 | // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA 16 | 17 | #include "utils.h" 18 | 19 | namespace BOOM { 20 | namespace bsts { 21 | 22 | //====================================================================== 23 | // Initialize the model to be empty, except for variables that are known to 24 | // be present with probability 1. 25 | void DropUnforcedCoefficients(const Ptr &glm, 26 | const BOOM::Vector &prior_inclusion_probs) { 27 | glm->coef().drop_all(); 28 | for (int i = 0; i < prior_inclusion_probs.size(); ++i) { 29 | if (prior_inclusion_probs[i] >= 1.0) { 30 | glm->coef().add(i); 31 | } 32 | } 33 | } 34 | 35 | Matrix ExtractPredictors(SEXP r_object, 36 | const std::string &name, 37 | int default_length) { 38 | SEXP r_predictors = getListElement(r_object, name); 39 | if (Rf_isNull(r_predictors)) { 40 | return Matrix(default_length, 1, 1.0); 41 | } else { 42 | Matrix ans = ToBoomMatrix(r_predictors); 43 | if (ans.nrow() != default_length) { 44 | report_error("Matrix of predictors had an unexpected number of " 45 | "rows."); 46 | } 47 | return ans; 48 | } 49 | } 50 | 51 | std::vector IsObserved(SEXP r_vector) { 52 | if (!Rf_isNumeric(r_vector)) { 53 | report_error("Input vector is non-numeric."); 54 | } 55 | size_t n = Rf_length(r_vector); 56 | std::vector ans(n); 57 | double *data = REAL(Rf_coerceVector(r_vector, REALSXP)); 58 | for (int i = 0; i < n; ++i) { 59 | ans[i] = !BOOM::isNA(data[i]); 60 | } 61 | return ans; 62 | } 63 | 64 | SelectorMatrix IsObserved(const Matrix &matrix) { 65 | SelectorMatrix ans(matrix.nrow(), matrix.ncol(), true); 66 | for (int i = 0; i < matrix.nrow(); ++i) { 67 | for (int j = 0; j < matrix.ncol(); ++j) { 68 | if (BOOM::isNA(matrix(i, j))) { 69 | ans.drop(i, j); 70 | } 71 | } 72 | } 73 | return ans; 74 | } 75 | } // namespace bsts 76 | } // 
namespace BOOM 77 | --------------------------------------------------------------------------------