diff --git a/DESCRIPTION b/DESCRIPTION
index 951625b9..8b99fecf 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -53,6 +53,7 @@ Suggests:
     feasts,
     forecast,
     knitr,
+    MTS,
     nnet,
     rmarkdown,
     spelling,
@@ -67,4 +68,4 @@ Encoding: UTF-8
 Language: en-GB
 Roxygen: list(markdown = TRUE, roclets=c('rd', 'collate', 'namespace'))
-RoxygenNote: 7.3.1
+RoxygenNote: 7.3.2
diff --git a/NAMESPACE b/NAMESPACE
index f6dc3c79..9b74f649 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -2,6 +2,7 @@
 
 S3method(IRF,ARIMA)
 S3method(IRF,VAR)
+S3method(IRF,VARIMA)
 S3method(IRF,VECM)
 S3method(breusch_godfrey,TSLM)
 S3method(components,ETS)
@@ -12,6 +13,7 @@ S3method(fitted,NNETAR)
 S3method(fitted,RW)
 S3method(fitted,TSLM)
 S3method(fitted,VAR)
+S3method(fitted,VARIMA)
 S3method(fitted,VECM)
 S3method(fitted,croston)
 S3method(fitted,fable_theta)
@@ -23,6 +25,7 @@ S3method(forecast,NNETAR)
 S3method(forecast,RW)
 S3method(forecast,TSLM)
 S3method(forecast,VAR)
+S3method(forecast,VARIMA)
 S3method(forecast,VECM)
 S3method(forecast,croston)
 S3method(forecast,fable_theta)
@@ -34,6 +37,7 @@ S3method(generate,NNETAR)
 S3method(generate,RW)
 S3method(generate,TSLM)
 S3method(generate,VAR)
+S3method(generate,VARIMA)
 S3method(generate,VECM)
 S3method(generate,model_mean)
 S3method(glance,AR)
@@ -43,6 +47,7 @@ S3method(glance,NNETAR)
 S3method(glance,RW)
 S3method(glance,TSLM)
 S3method(glance,VAR)
+S3method(glance,VARIMA)
 S3method(glance,VECM)
 S3method(glance,fable_theta)
 S3method(glance,model_mean)
@@ -58,6 +63,7 @@ S3method(model_sum,NNETAR)
 S3method(model_sum,RW)
 S3method(model_sum,TSLM)
 S3method(model_sum,VAR)
+S3method(model_sum,VARIMA)
 S3method(model_sum,VECM)
 S3method(model_sum,fable_theta)
 S3method(model_sum,model_mean)
@@ -75,6 +81,7 @@ S3method(report,NNETAR)
 S3method(report,RW)
 S3method(report,TSLM)
 S3method(report,VAR)
+S3method(report,VARIMA)
 S3method(report,VECM)
 S3method(report,fable_theta)
 S3method(report,model_mean)
@@ -85,6 +92,7 @@ S3method(residuals,NNETAR)
 S3method(residuals,RW)
 S3method(residuals,TSLM)
 S3method(residuals,VAR)
+S3method(residuals,VARIMA)
 S3method(residuals,VECM)
 S3method(residuals,croston)
 S3method(residuals,fable_theta)
@@ -96,6 +104,7 @@ S3method(tidy,NNETAR)
 S3method(tidy,RW)
 S3method(tidy,TSLM)
 S3method(tidy,VAR)
+S3method(tidy,VARIMA)
 S3method(tidy,VECM)
 S3method(tidy,croston)
 S3method(tidy,fable_theta)
@@ -113,6 +122,7 @@ export(SNAIVE)
 export(THETA)
 export(TSLM)
 export(VAR)
+export(VARIMA)
 export(VECM)
 export(as_tsibble)
 export(breusch_godfrey)
diff --git a/R/VARIMA.R b/R/VARIMA.R
new file mode 100644
index 00000000..be3343f0
--- /dev/null
+++ b/R/VARIMA.R
@@ -0,0 +1,457 @@
+train_varima <- function(.data, specials, ic, ...) {
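+  # A VARIMA(p, d, q) model is estimated here as a VARMA(p, q) model fitted to
+  # the d-th differences of the response. The first and last d observations of
+  # the original series are stored on the fit so that fitted values, forecasts,
+  # and simulations can later be integrated back to the original scale.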
+  # Get response variables
+  y <- invoke(cbind, unclass(.data)[measured_vars(.data)])
+
+  if(any(colnames(specials$xreg[[1]]) != "(Intercept)")) {
+    stop("Exogenous regressors for VARIMA are not yet supported.")
+  }
+
+  p <- specials$pdq[[1]]$p
+  d <- specials$pdq[[1]]$d
+  q <- specials$pdq[[1]]$q
+
+  require_package("MTS")
+  utils::capture.output(
+    fit <- MTS::VARMA(
+      if(d > 0) diff(y, differences = d) else y,
+      p = p, q = q,
+      include.mean = "(Intercept)" %in% colnames(specials$xreg[[1]])
+    )
+  )
+  dimnames(fit$Sigma) <- list(colnames(y), colnames(y))
+  fit$y_start <- y[seq_len(d),,drop = FALSE]
+  fit$y_end <- y[NROW(y) - d + seq_len(d),,drop = FALSE]
+  structure(fit, class="VARIMA")
+}
+
+specials_varima <- new_specials(
+  pdq = function(p, d, q) {
+    if (self$stage %in% c("estimate", "refit")) {
+      p <- p[p <= floor(NROW(self$data) / 3)]
+      q <- q[q <= floor(NROW(self$data) / 3)]
+    }
+    as.list(environment())
+  },
+  PDQ = function(P, D, Q, period = NULL) {
+    stop("Seasonal VARIMA models are not yet supported.")
+  },
+  common_xregs,
+  xreg = special_xreg(default_intercept = FALSE),
+  .required_specials = c("pdq", "xreg"),
+  .xreg_specials = names(common_xregs)
+)
+
+#' Estimate a VARIMA model
+#'
+#' Estimates a VARIMA model of a given order.
+#'
+#' Exogenous regressors and [`common_xregs`] can be specified in the model
+#' formula.
+#'
+#' @aliases report.VARIMA
+#'
+#' @param formula Model specification (see "Specials" section).
+#' @param ic The information criterion used in selecting the model.
+#' @param ... Further arguments for the estimation routine.
+#'
+#' @section Specials:
+#'
+#' \subsection{pdq}{
+#' The `pdq` special is used to specify non-seasonal components of the model.
+#' \preformatted{
+#' pdq(p = 0:5, d = 0:2, q = 0:5)
+#' }
+#'
+#' \tabular{ll}{
+#'   `p` \tab The order of the non-seasonal auto-regressive (AR) terms. If multiple values are provided, the one which minimises `ic` will be chosen. \cr
+#'   `d` \tab The order of integration for non-seasonal differencing. If multiple values are provided, one of the values will be selected via repeated KPSS tests. \cr
+#'   `q` \tab The order of the non-seasonal moving average (MA) terms. If multiple values are provided, the one which minimises `ic` will be chosen. \cr
+#' }
+#' }
+#'
+#' \subsection{xreg}{
+#' Exogenous regressors can be included in a VARIMA model without explicitly using the `xreg()` special. Common exogenous regressor specials as specified in [`common_xregs`] can also be used. These regressors are handled using [stats::model.frame()], and so interactions and other functionality behaves similarly to [stats::lm()].
+#'
+#' The inclusion of a constant in the model follows similar rules to [`stats::lm()`], where including `1` will add a constant and `0` or `-1` will remove the constant. If left out, the inclusion of a constant will be determined by minimising `ic`.
+#'
+#' \preformatted{
+#' xreg(...)
+#' }
+#'
+#' \tabular{ll}{
+#'   `...` \tab Bare expressions for the exogenous regressors (such as `log(x)`)
+#' }
+#' }
+#'
+#' @return A model specification.
+#'
+#' @seealso
+#' [`MTS::VARMA()`], [`MTS::sVARMA()`].
+#'
+#' @examples
+#'
+#' eu_stocks <- as_tsibble(ts(EuStockMarkets, frequency = 1), pivot_longer = FALSE)
+#'
+#' eu_stocks |>
+#'   autoplot(vars(DAX, SMI, CAC, FTSE))
+#'
+#' fit <- eu_stocks %>%
+#'   model(VARIMA(vars(DAX, SMI, CAC, FTSE) ~ pdq(1, 1, 1)))
+#'
+#' fit
+#' @export
+VARIMA <- function(formula, ...) {
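+  # Build the model definition used by fabletools::model(). For example (an
+  # illustrative specification; any measured variables can be used):
+  #   model(VARIMA(vars(y1, y2) ~ pdq(1, 1, 1)))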
+  # ic <- match.arg(ic)
+  # ic <- switch(ic, aicc = "AICc", aic = "AIC", bic = "BIC")
+  varima_model <- new_model_class("VARIMA",
+    train = train_varima,
+    specials = specials_varima,
+    origin = NULL,
+    check = all_tsbl_checks
+  )
+  new_model_definition(varima_model, !!enquo(formula), ...)
+}
+
+#' @rdname VARIMA
+#' @inheritParams forecast.VAR
+#' @examples
+#'
+#' fit %>%
+#'   forecast(h = 50) %>%
+#'   autoplot(tail(eu_stocks, 100))
+#' @export
+forecast.VARIMA <- function(object, new_data = NULL, specials = NULL,
+                            bootstrap = FALSE, times = 5000, ...) {
+  mts_varma_pred(object, NROW(new_data))
+}
+
+# nocov start
+# Adapted from MTS::VARMApred to return forecast distributions from the distributional package
+mts_varma_pred <- function(model, h) {
+  x = as.matrix(model$data)
+  resi = as.matrix(model$residuals)
+  sig = model$Sigma
+  Phi = model$Phi
+  Theta = model$Theta
+  Ph0 = model$Ph0
+  p = model$ARorder
+  q = model$MAorder
+  if (p < 0)
+    p = 0
+  if (q < 0)
+    q = 0
+  if (h < 1)
+    h = 1
+  nT = dim(x)[1]
+  k = dim(x)[2]
+  T1 = dim(resi)[1]
+  if (nT > T1) {
+    r1 = matrix(0, (nT - T1), k)
+    resi = rbind(r1, resi)
+  }
+  if (length(Ph0) < 1)
+    Ph0 = rep(0, k)
+  orig = nT
+  px = x[1:orig, ]
+  presi = resi[1:orig, ]
+  psi = diag(rep(1, k))
+  wk = c(psi)
+  lag = max(1, h)
+  for (i in 1:lag) {
+    if (i <= p) {
+      idx = (i - 1) * k
+      tmp = Phi[, (idx + 1):(idx + k)]
+    }
+    else {
+      tmp = matrix(0, k, k)
+    }
+    if (i <= q) {
+      mdx = (i - 1) * k
+      tmp = tmp - Theta[, (mdx + 1):(mdx + k)]
+    }
+    jj = i - 1
+    jp = min(jj, p)
+    if (jp > 0) {
+      for (j in 1:jp) {
+        jdx = (j - 1) * k
+        idx = (i - j) * k
+        w1 = Phi[, (jdx + 1):(jdx + k)]
+        w2 = psi[, (idx + 1):(idx + k)]
+        tmp = tmp + w1 %*% w2
+      }
+    }
+    psi = cbind(psi, tmp)
+    wk = cbind(wk, c(tmp))
+  }
+  mu = sigma = vector("list", h)
+  for (j in 1:h) {
+    fcst = Ph0
+    Sig = sig
+    t = orig + j
+    if (p > 0) {
+      for (ii in 1:p) {
+        idx = (ii - 1) * k
+        ph = Phi[, (idx + 1):(idx + k)]
+        fcst = fcst + matrix(px[(t - ii), ], 1, k) %*% t(ph)
+      }
+    }
+    if (q > 0) {
+      for (jj in 1:q) {
+        idx = (jj - 1) * k
+        if ((t - jj) <= orig) {
+          th = Theta[, (idx + 1):(idx + k)]
+          fcst = fcst - matrix(resi[(t - jj), ], 1, k) %*% t(th)
+        }
+      }
+    }
+    px = rbind(px, fcst)
+    if (j > 1) {
+      Sig = sig
+      for (jj in 2:j) {
+        jdx = (jj - 1) * k
+        wk = psi[, (jdx + 1):(jdx + k)]
+        Sig = Sig + wk %*% sig %*% t(wk)
+      }
+    }
+
+    mu[[j]] <- fcst
+    sigma[[j]] <- Sig
+  }
+
+  # Adjust to be forecasts in levels
+  d <- NROW(model$y_end)
+  if (d > 0) {
+    mu <- do.call(rbind, mu)
+    mu <- diffinv(mu, differences = d, xi = model$y_end)[-1,,drop=FALSE]
+    mu <- unname(split(mu, row(mu)))
+  }
+
+  distributional::dist_multivariate_normal(mu, sigma)
+}
+# nocov end
+
+
+#' @rdname VARIMA
+#' @inheritParams fitted.VAR
+#'
+#' @examples
+#'
+#' fitted(fit)
+#' @export
+fitted.VARIMA <- function(object, ...) {
+  d <- NROW(object$y_end)
+  fit <- object$data - residuals(object)[seq_len(nrow(object$data)) + d,]
+
+  if (d > 0) {
+    fit[seq_len(object$ARorder),] <- object$data[seq_len(object$ARorder),]
+    fit <- diffinv(fit, differences = d, xi = object$y_start)
+    fit[seq_len(object$ARorder + d),] <- NA
+  }
+  fit
+}
+
+#' @rdname VARIMA
+#' @inheritParams residuals.VAR
+#'
+#' @examples
+#' residuals(fit)
+#' @export
+residuals.VARIMA <- function(object, ...) {
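+  # Residuals from MTS::VARMA() are on the differenced scale and omit the
+  # initial observations, so pad with NA rows to align them with the length
+  # of the original (undifferenced) series.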
+  rbind(
+    matrix(NA, nrow(object$data) - nrow(object$residuals) + nrow(object$y_end), ncol(object$residuals)),
+    object$residuals
+  )
+}
+
+#' @export
+model_sum.VARIMA <- function(x) {
+  sprintf(
+    "VARIMA(%i,%i,%i)%s",
+    x$ARorder, NROW(x$y_end), x$MAorder, ifelse(x$cnst, " w/ mean", ""))
+}
+
+#' @rdname VARIMA
+#' @inheritParams tidy.VAR
+#'
+#' @examples
+#' tidy(fit)
+#' @export
+tidy.VARIMA <- function(x, ...) {
+  colnames(x$coef) <- cn <- colnames(x$data)
+  k <- NCOL(x$coef)
+  rownames(x$coef) <- c(
+    if(x$cnst) "constant" else NULL,
+    paste0("lag(", rep(cn, x$ARorder), ",", rep(seq_len(x$ARorder), each = k), ")"),
+    paste0("lag(e_", rep(cn, x$MAorder), ",", rep(seq_len(x$MAorder), each = k), ")")
+  )
+
+  rdf <- (x$ARorder + x$MAorder) * k^2 + k*(k+1)/2
+
+  coef <- dplyr::as_tibble(x$coef, rownames = "term")
+  coef <- tidyr::gather(coef, ".response", "estimate", !!!syms(colnames(x$coef)))
+  mutate(
+    coef,
+    std.error = as.numeric(x$secoef),
+    statistic = !!sym("estimate") / !!sym("std.error"),
+    p.value = 2 * stats::pt(abs(!!sym("statistic")), rdf, lower.tail = FALSE)
+  )
+}
+
+
+#' @rdname VARIMA
+#' @inheritParams generics::glance
+#'
+#' @return A one row tibble summarising the model's fit.
+#'
+#' @examples
+#' glance(fit)
+#' @export
+glance.VARIMA <- function(x, ...) {
+  tibble(
+    sigma2 = list(x$Sigma),
+    AIC = x$aic, BIC = x$bic
+  )
+}
+
+#' @rdname VARIMA
+#'
+#' @examples
+#' report(fit)
+#'
+#' @export
+report.VARIMA <- function(object, ...) {
+  coef <- tidy(object)
+  coef <- map(
+    split(coef, factor(coef$.response, levels = unique(coef$.response))),
+    function(x) `colnames<-`(rbind(x$estimate, s.e. = x$std.error), x$term)
+  )
+
+  imap(coef, function(par, nm) {
+    cat(sprintf("\nCoefficients for %s:\n", nm))
+    print.default(round(par, digits = 4), print.gap = 2)
+  })
+
+  cat("\nResidual covariance matrix:\n")
+  print.default(round(object$Sigma, 4))
+
+
+  cat(
+    sprintf(
+      "\nAIC = %s\tBIC = %s",
+      format(round(object$aic, 2L)),
+      format(round(object$bic, 2L))
+    )
+  )
+}
+
+#' @rdname VARIMA
+#'
+#' @examples
+#' generate(fit, h = 10)
+#'
+#' @export
+generate.VARIMA <- function(x, new_data, specials, ...){
+  if (!".innov" %in% names(new_data)) {
+    new_data[[".innov"]] <- generate(
+      distributional::dist_multivariate_normal(
+        list(matrix(0, ncol = NCOL(x$Sigma))), list(x$Sigma)
+      ),
+      nrow(new_data)
+    )[[1L]]
+  }
+  kr <- key_data(new_data)$.rows
+  h <- lengths(kr)
+
+  new_data$.sim <- do.call(
+    rbind, lapply(kr, function(i) {
+      mts_varma_sim(x, new_data[[".innov"]][i,,drop=FALSE])
+    })
+  )
+  new_data
+}
+
+
+# nocov start
+# Adapted from MTS::VARMAsim to simulate from model
+mts_varma_sim <- function(model, innov) {
+  cnst = if(model$cnst) model$Ph0 else 0
+  phi = model$Phi
+  nar = model$ARorder
+  theta = model$Theta
+  nma = model$MAorder
+  k = NCOL(model$data)
+  lastobs <- model$data[NROW(model$data) - seq_len(nar) + 1,,drop=FALSE]
+  lastres <- model$residuals[NROW(model$residuals) - seq_len(nma) + 1,,drop=FALSE]
+  zt <- matrix(numeric(length(innov)), ncol = ncol(innov))
+  for (it in seq_len(NROW(innov))) {
+    tmp = innov[it,,drop=FALSE]
+    if (nma > 0) {
+      for (j in seq_len(nma)) {
+        jdx = (j - 1) * k
+        thej = theta[, (jdx + 1):(jdx + k)]
+        tmp = tmp - lastres[j,,drop=FALSE] %*% t(thej)
+      }
+    }
+    if (nar > 0) {
+      for (i in 1:nar) {
+        idx = (i - 1) * k
+        phj = phi[, (idx + 1):(idx + k)]
+        tmp = tmp + lastobs[i,,drop=FALSE] %*% t(phj)
+      }
+    }
+    lastobs[seq_len(nrow(lastobs)-1) + 1,] <- lastobs[seq_len(nrow(lastobs)-1),]
+    lastobs[1,] = zt[it, ] = cnst + tmp
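+    # Shift the stored residuals in the same way, so the innovation just used
+    # sits in row 1 for the moving average terms of the next iteration.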
+    lastres[seq_len(nrow(lastres)-1) + 1,] <- lastres[seq_len(nrow(lastres)-1),]
+    lastres[1,] <- innov[it,]
+  }
+
+  d <- NROW(model$y_end)
+  if (d > 0) {
+    zt <- diffinv(zt, differences = d, xi = model$y_end)[-1,,drop=FALSE]
+  }
+
+  zt
+}
+# nocov end
+
+#' @rdname VARIMA
+#'
+#' @param x A fitted model.
+#' @param impulse A character string specifying the name of the variable that is shocked (the impulse variable).
+#' @param orthogonal If TRUE, orthogonalised impulse responses will be computed.
+#'
+#' @examples
+#' IRF(fit, h = 10, impulse = "FTSE")
+#'
+#' @export
+IRF.VARIMA <- function(x, new_data, specials, impulse = NULL, orthogonal = FALSE, ...) {
+  # Zero out the data, residuals, and differencing state
+  x$data[seq_along(x$data)] <- 0
+  x$residuals[seq_along(x$residuals)] <- 0
+  x$y_end[seq_along(x$y_end)] <- 0
+
+  # Remove the constant
+  x$cnst <- FALSE
+
+  # Add shocks
+  if (".impulse" %in% names(new_data)) {
+    names(new_data)[names(new_data) == ".impulse"] <- ".innov"
+  } else {
+    new_data$.innov <- matrix(0, nrow = nrow(new_data), ncol = ncol(x$data),
+                              dimnames = dimnames(x$data))
+    new_data$.innov[1, impulse] <- 1
+  }
+
+  # Orthogonalised shocks
+  if(orthogonal) {
+    # Use Cholesky decomposition to orthogonalise the shocks / innovations
+    new_data$.innov <- new_data$.innov %*% chol(x$Sigma)
+  }
+
+  irf <- generate(x, new_data, specials)
+  irf[colnames(x$data)] <- split(irf$.sim, col(irf$.sim))
+  irf$.innov <- irf$.sim <- NULL
+  irf
+}
\ No newline at end of file
diff --git a/R/arima.R b/R/arima.R
index 03142e09..15234d7a 100644
--- a/R/arima.R
+++ b/R/arima.R
@@ -920,8 +920,6 @@ generate.ARIMA <- function(x, new_data, specials, bootstrap = FALSE, ...){
 #' @inheritParams forecast.ETS
 #' @param x A fitted model.
 #'
-#' @seealso [`fabletools::IRF.mdl_df`]
-#'
 #' @export
 IRF.ARIMA <- function(x, new_data, specials, ...) {
   # Zero out data
diff --git a/man/IRF.ARIMA.Rd b/man/IRF.ARIMA.Rd
index a1ce1667..7d7c4cfa 100644
--- a/man/IRF.ARIMA.Rd
+++ b/man/IRF.ARIMA.Rd
@@ -4,7 +4,7 @@
 \alias{IRF.ARIMA}
 \title{Calculate impulse responses from a fable model}
 \usage{
-\method{IRF}{ARIMA}(x, new_data, specials, impulse = NULL, orthogonal = FALSE, ...)
+\method{IRF}{ARIMA}(x, new_data, specials, ...)
 }
 \arguments{
 \item{x}{A fitted model.}
@@ -13,17 +13,10 @@
 \item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).}
 
-\item{impulse}{A character string specifying the name of the variable that is shocked (the impulse variable).}
-
-\item{orthogonal}{If TRUE, orthogonalised impulse responses will be computed.}
-
 \item{...}{Other arguments passed to methods}
 }
 \description{
-Simulates future paths from a dataset using a fitted model. Innovations are
-sampled by the model's assumed error distribution. If \code{bootstrap} is \code{TRUE},
-innovations will be sampled from the model's residuals. If \code{new_data}
-contains the \code{.innov} column, those values will be treated as innovations.
+Calculate impulse responses from a fable model
 }
 \seealso{
 \code{\link[fabletools:IRF.mdl_df]{fabletools::IRF.mdl_df}}
 }
diff --git a/man/IRF.VAR.Rd b/man/IRF.VAR.Rd
index 06162a08..c2d26a35 100644
--- a/man/IRF.VAR.Rd
+++ b/man/IRF.VAR.Rd
@@ -25,6 +25,3 @@ sampled by the model's assumed error distribution. If \code{bootstrap} is \code{
 innovations will be sampled from the model's residuals. If \code{new_data}
 contains the \code{.innov} column, those values will be treated as innovations.
 }
-\seealso{
-\code{\link[fabletools:IRF.mdl_df]{fabletools::IRF.mdl_df}}
-}
diff --git a/man/IRF.VECM.Rd b/man/IRF.VECM.Rd
index be389f08..b95e1edd 100644
--- a/man/IRF.VECM.Rd
+++ b/man/IRF.VECM.Rd
@@ -25,6 +25,3 @@ sampled by the model's assumed error distribution. If \code{bootstrap} is \code{
 innovations will be sampled from the model's residuals. If \code{new_data}
 contains the \code{.innov} column, those values will be treated as innovations.
 }
-\seealso{
-\code{\link[fabletools:IRF.mdl_df]{fabletools::IRF.mdl_df}}
-}
diff --git a/man/VAR.Rd b/man/VAR.Rd
index 3d1be157..97ac2dd4 100644
--- a/man/VAR.Rd
+++ b/man/VAR.Rd
@@ -28,7 +28,7 @@ formula.
 \section{Specials}{
 
 
-\subsection{pdq}{
+\subsection{AR}{
 The \code{AR} special is used to specify the lag order for the auto-regression.
 \preformatted{
 AR(p = 0:5)
diff --git a/man/VARIMA.Rd b/man/VARIMA.Rd
new file mode 100644
index 00000000..15ab45ab
--- /dev/null
+++ b/man/VARIMA.Rd
@@ -0,0 +1,134 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/VARIMA.R
+\name{VARIMA}
+\alias{VARIMA}
+\alias{report.VARIMA}
+\alias{forecast.VARIMA}
+\alias{fitted.VARIMA}
+\alias{residuals.VARIMA}
+\alias{tidy.VARIMA}
+\alias{glance.VARIMA}
+\alias{generate.VARIMA}
+\alias{IRF.VARIMA}
+\title{Estimate a VARIMA model}
+\usage{
+VARIMA(formula, ...)
+
+\method{forecast}{VARIMA}(
+  object,
+  new_data = NULL,
+  specials = NULL,
+  bootstrap = FALSE,
+  times = 5000,
+  ...
+)
+
+\method{fitted}{VARIMA}(object, ...)
+
+\method{residuals}{VARIMA}(object, ...)
+
+\method{tidy}{VARIMA}(x, ...)
+
+\method{glance}{VARIMA}(x, ...)
+
+\method{report}{VARIMA}(object, ...)
+
+\method{generate}{VARIMA}(x, new_data, specials, ...)
+
+\method{IRF}{VARIMA}(x, new_data, specials, impulse = NULL, orthogonal = FALSE, ...)
+}
+\arguments{
+\item{formula}{Model specification (see "Specials" section).}
+
+\item{...}{Further arguments for the estimation routine.}
+
+\item{object}{A model for which forecasts are required.}
+
+\item{new_data}{A tsibble containing the time points and exogenous regressors to produce forecasts for.}
+
+\item{specials}{(passed by \code{\link[fabletools:forecast]{fabletools::forecast.mdl_df()}}).}
+
+\item{bootstrap}{If \code{TRUE}, then forecast distributions are computed using simulation with resampled errors.}
+
+\item{times}{The number of sample paths to use in estimating the forecast distribution when \code{bootstrap = TRUE}.}
+
+\item{x}{A fitted model.}
+
+\item{impulse}{A character string specifying the name of the variable that is shocked (the impulse variable).}
+
+\item{orthogonal}{If TRUE, orthogonalised impulse responses will be computed.}
+
+\item{ic}{The information criterion used in selecting the model.}
+}
+\value{
+A model specification.

+A one row tibble summarising the model's fit.
+}
+\description{
+Estimates a VARIMA model of a given order.
+}
+\details{
+Exogenous regressors and \code{\link{common_xregs}} can be specified in the model
+formula.
+}
+\section{Specials}{
+
+
+\subsection{pdq}{
+The \code{pdq} special is used to specify non-seasonal components of the model.
+\preformatted{
+pdq(p = 0:5, d = 0:2, q = 0:5)
+}
+
+\tabular{ll}{
+\code{p} \tab The order of the non-seasonal auto-regressive (AR) terms. If multiple values are provided, the one which minimises \code{ic} will be chosen. \cr
+\code{d} \tab The order of integration for non-seasonal differencing. If multiple values are provided, one of the values will be selected via repeated KPSS tests. \cr
+\code{q} \tab The order of the non-seasonal moving average (MA) terms. If multiple values are provided, the one which minimises \code{ic} will be chosen. \cr
+}
+}
+\subsection{xreg}{
+Exogenous regressors can be included in a VARIMA model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}.
+
+The inclusion of a constant in the model follows similar rules to \code{\link[stats:lm]{stats::lm()}}, where including \code{1} will add a constant and \code{0} or \code{-1} will remove the constant. If left out, the inclusion of a constant will be determined by minimising \code{ic}.
+
+\preformatted{
+xreg(...)
+}
+
+\tabular{ll}{
+\code{...} \tab Bare expressions for the exogenous regressors (such as \code{log(x)})
+}
+}
+}
+
+\examples{
+
+eu_stocks <- as_tsibble(ts(EuStockMarkets, frequency = 1), pivot_longer = FALSE)
+
+eu_stocks |>
+  autoplot(vars(DAX, SMI, CAC, FTSE))
+
+fit <- eu_stocks \%>\%
+  model(VARIMA(vars(DAX, SMI, CAC, FTSE) ~ pdq(1, 1, 1)))
+
+fit
+
+fit \%>\%
+  forecast(h = 50) \%>\%
+  autoplot(tail(eu_stocks, 100))
+
+fitted(fit)
+residuals(fit)
+tidy(fit)
+glance(fit)
+report(fit)
+
+generate(fit, h = 10)
+
+IRF(fit, h = 10, impulse = "FTSE")
+
+}
+\seealso{
+\code{\link[MTS:VARMA]{MTS::VARMA()}}, \code{\link[MTS:sVARMA]{MTS::sVARMA()}}.
+}
diff --git a/man/VECM.Rd b/man/VECM.Rd
index 295f6b88..9341c664 100644
--- a/man/VECM.Rd
+++ b/man/VECM.Rd
@@ -28,7 +28,7 @@ formula.
 \section{Specials}{
 
 
-\subsection{pdq}{
+\subsection{AR}{
 The \code{AR} special is used to specify the lag order for the auto-regression.
 \preformatted{
 AR(p = 0:5)
@@ -40,7 +40,7 @@ AR(p = 0:5)
 }
 
 \subsection{xreg}{
-Exogenous regressors can be included in an VAR model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}.
+Exogenous regressors can be included in an VECM model without explicitly using the \code{xreg()} special. Common exogenous regressor specials as specified in \code{\link{common_xregs}} can also be used. These regressors are handled using \code{\link[stats:model.frame]{stats::model.frame()}}, and so interactions and other functionality behaves similarly to \code{\link[stats:lm]{stats::lm()}}.
 
 The inclusion of a constant in the model follows the similar rules to \code{\link[stats:lm]{stats::lm()}}, where including \code{1} will add a constant and \code{0} or \code{-1} will remove the constant. If left out, the inclusion of a constant will be determined by minimising \code{ic}.