From 249f3e9ae13ef50c4706ab2a0d69012999b02900 Mon Sep 17 00:00:00 2001 From: santikka Date: Fri, 3 May 2024 19:49:37 +0300 Subject: [PATCH] fix get_parameter_types, fix categories in as.data.table, fix extended tests, run-extended --- R/as_data_table.R | 7 +- R/coef.R | 3 +- R/getters.R | 2 +- R/loo.R | 1 - R/summary.R | 2 +- man/coef.dynamitefit.Rd | 2 +- man/dynamite.Rd | 2 +- tests/testthat/test-extended.R | 6 +- tests/testthat/test-lfactor.R | 11 +- tests/testthat/test-output.R | 2 +- tests/testthat/test-predict.R | 3 +- tests/testthat/test-recovery.R | 446 ++++++++++++++++++++------------- 12 files changed, 295 insertions(+), 192 deletions(-) diff --git a/R/as_data_table.R b/R/as_data_table.R index 1aa135d..3a809ae 100644 --- a/R/as_data_table.R +++ b/R/as_data_table.R @@ -230,15 +230,16 @@ as.data.table.dynamitefit <- function(x, keep.rownames = FALSE, ) } categories <- c( - NA_character_, ulapply( - x$stan$responses, + unlist(x$stan$responses), function(y) { channel <- get_channel(x, y) if (is_cumulative(channel$family)) { seq_len(channel$S - 1L) - } else { + } else if (is_categorical(channel$family)) { channel$categories[-1L] + } else { + NA_character_ } } ) diff --git a/R/coef.R b/R/coef.R index 791b71e..e40f4c3 100644 --- a/R/coef.R +++ b/R/coef.R @@ -20,7 +20,8 @@ #' deltas <- coef(gaussian_example_fit, type = "delta") #' coef.dynamitefit <- function(object, - types = NULL, parameters = NULL, + types = c("alpha", "beta", "delta"), + parameters = NULL, responses = NULL, times = NULL, groups = NULL, summary = TRUE, probs = c(0.05, 0.95), ...) { stopifnot_( diff --git a/R/getters.R b/R/getters.R index 1420ae3..302a625 100644 --- a/R/getters.R +++ b/R/getters.R @@ -318,7 +318,7 @@ get_parameter_types <- function(x, ...) { #' @rdname get_parameter_types #' @export get_parameter_types.dynamitefit <- function(x, ...) { - d <- as.data.table(x) + d <- as.data.table(x, types = all_types) unique(d$type) } diff --git a/R/loo.R b/R/loo.R index a120101..9a14b54 100644 --- a/R/loo.R +++ b/R/loo.R @@ -88,7 +88,6 @@ loo.dynamitefit <- function(x, separate_channels = FALSE, thin = 1L, ...) { ) loo::loo(ll, r_eff = reff) } - if (separate_channels) { ll <- split( x = data.table::melt( diff --git a/R/summary.R b/R/summary.R index a1406b5..9074392 100644 --- a/R/summary.R +++ b/R/summary.R @@ -1,6 +1,6 @@ #' Summary for a Dynamite Model Fit #' -#' The `summary` method provides statistics of the posterior samples of the +#' The `summary()` method provides statistics of the posterior samples of the #' model; this is an alias of [dynamite::as.data.frame.dynamitefit()] with #' `summary = TRUE`. #' diff --git a/man/coef.dynamitefit.Rd b/man/coef.dynamitefit.Rd index dc8ceff..360d0c8 100644 --- a/man/coef.dynamitefit.Rd +++ b/man/coef.dynamitefit.Rd @@ -6,7 +6,7 @@ \usage{ \method{coef}{dynamitefit}( object, - types = NULL, + types = c("alpha", "beta", "delta"), parameters = NULL, responses = NULL, times = NULL, diff --git a/man/dynamite.Rd b/man/dynamite.Rd index 6bcb054..56eeb52 100644 --- a/man/dynamite.Rd +++ b/man/dynamite.Rd @@ -157,7 +157,7 @@ Information on the estimated dynamite model can be obtained via effective sample sizes, largest Rhat and summary statistics of the time- and group-invariant model parameters. -The \code{summary} method provides statistics of the posterior samples of the +The \code{summary()} method provides statistics of the posterior samples of the model; this is an alias of \code{\link[=as.data.frame.dynamitefit]{as.data.frame.dynamitefit()}} with \code{summary = TRUE}. 
} diff --git a/tests/testthat/test-extended.R b/tests/testthat/test-extended.R index 1720b7f..2150891 100644 --- a/tests/testthat/test-extended.R +++ b/tests/testthat/test-extended.R @@ -40,11 +40,11 @@ test_that("multivariate gaussian fit and predict work", { iter = 2000, refresh = 0 ) - expect_error(sumr <- summary(fit, type = "corr"), NA) + expect_error(sumr <- summary(fit, types = "corr"), NA) expect_equal(sumr$mean, cov2cor(S)[2, 1], tolerance = 0.1) - expect_error(sumr <- summary(fit, type = "sigma"), NA) + expect_error(sumr <- summary(fit, types = "sigma"), NA) expect_equal(sumr$mean, c(0.5, sqrt(diag(S))), tolerance = 0.1) - expect_error(sumr <- summary(fit, type = "beta"), NA) + expect_error(sumr <- summary(fit, types = "beta"), NA) expect_equal(sumr$mean, c(0.5, 0.7, -0.2, 0.4), tolerance = 0.1) expect_error(predict(fit, n_draws = 5), NA) }) diff --git a/tests/testthat/test-lfactor.R b/tests/testthat/test-lfactor.R index 6c9379e..f821f00 100644 --- a/tests/testthat/test-lfactor.R +++ b/tests/testthat/test-lfactor.R @@ -45,7 +45,10 @@ test_that("nonidentifiable lfactor specification gives warning", { correlated = TRUE, noncentered_psi = TRUE ) + splines(30), - data = d, time = "time", group = "id", debug = list(no_compile = TRUE)), + data = d, + time = "time", + group = "id", + debug = list(no_compile = TRUE)), NA ) expect_warning( @@ -167,14 +170,14 @@ test_that("latent factor related parameters can be got", { skip_if_not(run_extended_tests) expect_equal( get_parameter_types(latent_factor_example_fit), - c("alpha", "sigma", "lambda", "sigma_lambda", "psi", "tau_psi", "omega_psi") + c("alpha", "lambda", "omega_psi", "psi", "sigma", "sigma_lambda", "tau_psi") ) }) test_that("lambdas can be plotted", { skip_if_not(run_extended_tests) expect_error( - plot_lambdas(latent_factor_example_fit), + plot(latent_factor_example_fit, types = "lambda"), NA ) }) @@ -182,7 +185,7 @@ test_that("lambdas can be plotted", { test_that("psis can be plotted", { skip_if_not(run_extended_tests) expect_error( - plot_psis(latent_factor_example_fit), + plot(latent_factor_example_fit, types = "psi"), NA ) }) diff --git a/tests/testthat/test-output.R b/tests/testthat/test-output.R index bdf488f..75f9624 100644 --- a/tests/testthat/test-output.R +++ b/tests/testthat/test-output.R @@ -82,7 +82,7 @@ test_that("default plot works", { test_that("trace type plot works", { expect_error( - plot(gaussian_example_fit, plot_type = "trace", type = "beta"), + plot(gaussian_example_fit, plot_type = "trace", types = "beta"), NA ) }) diff --git a/tests/testthat/test-predict.R b/tests/testthat/test-predict.R index 9f06ebf..1df9d69 100644 --- a/tests/testthat/test-predict.R +++ b/tests/testthat/test-predict.R @@ -151,7 +151,8 @@ test_that("fitted works", { idx <- categorical_example_fit$permutation[1L] iter <- idx %% n chain <- 1 + idx %/% n - xzy <- categorical_example_fit$data |> dplyr::filter(id == 5 & time == 20) + xzy <- categorical_example_fit$data |> + dplyr::filter(id == 5 & time == 20) manual <- as_draws(categorical_example_fit) |> dplyr::filter(.iteration == iter & .chain == chain) |> dplyr::summarise( diff --git a/tests/testthat/test-recovery.R b/tests/testthat/test-recovery.R index 1166cd7..bf231f5 100644 --- a/tests/testthat/test-recovery.R +++ b/tests/testthat/test-recovery.R @@ -21,15 +21,24 @@ test_that("parameters for the linear regression are recovered as with lm", { d <- data.frame(time = 1:n, y = y, x = x) fit_lm <- lm(y ~ x, data = d) - priors <- get_priors(obs(y ~ x, family = "gaussian"), - data = d, time 
= "time" + priors <- get_priors( + obs(y ~ x, family = "gaussian"), + data = d, + time = "time" ) priors$prior <- c("normal(0, 5)", "std_normal()", "exponential(1)") - fit_dynamite <- dynamite(obs(y ~ x, family = "gaussian"), - data = d, time = "time", priors = priors, - chains = 1, iter = 2000, refresh = 0 + fit_dynamite <- dynamite( + obs(y ~ x, family = "gaussian"), + data = d, + time = "time", + priors = priors, + chains = 1, + iter = 2000, + refresh = 0 ) - expect_equal(coef(fit_dynamite)$mean, coef(fit_lm), + expect_equal( + coef(fit_dynamite)$mean, + coef(fit_lm), tolerance = 0.01, ignore_attr = TRUE ) @@ -44,10 +53,17 @@ test_that("parameters for the poisson glm are recovered as with glm", { d <- data.frame(time = 1:n, y = y, x = x) fit_glm <- glm(y ~ x, data = d, family = poisson) - fit_dynamite <- dynamite(obs(y ~ x, family = "poisson"), - data = d, time = "time", chains = 1, iter = 2000, refresh = 0 + fit_dynamite <- dynamite( + obs(y ~ x, family = "poisson"), + data = d, + time = "time", + chains = 1, + iter = 2000, + refresh = 0 ) - expect_equal(coef(fit_dynamite)$mean, coef(fit_glm), + expect_equal( + coef(fit_dynamite)$mean, + coef(fit_glm), tolerance = 0.01, ignore_attr = TRUE ) @@ -63,10 +79,17 @@ test_that("parameters for the binomial glm are recovered as with glm", { d <- data.frame(time = 1:n, y = y, x = x, u = u) fit_glm <- glm(cbind(y, u - y) ~ x, data = d, family = binomial) - fit_dynamite <- dynamite(obs(y ~ x + trials(u), family = "binomial"), - data = d, time = "time", chains = 1, iter = 2000, refresh = 0 + fit_dynamite <- dynamite( + obs(y ~ x + trials(u), family = "binomial"), + data = d, + time = "time", + chains = 1, + iter = 2000, + refresh = 0 ) - expect_equal(coef(fit_dynamite)$mean, coef(fit_glm), + expect_equal( + coef(fit_dynamite)$mean, + coef(fit_glm), tolerance = 0.01, ignore_attr = TRUE ) @@ -81,10 +104,17 @@ test_that("parameters for the gamma glm are recovered as with glm", { d <- data.frame(time = 1:n, y = y, x = x) fit_glm <- glm(y ~ x, data = d, family = Gamma(link = "log")) - fit_dynamite <- dynamite(obs(y ~ x, family = "gamma"), - data = d, time = "time", chains = 1, iter = 2000, refresh = 0 + fit_dynamite <- dynamite( + obs(y ~ x, family = "gamma"), + data = d, + time = "time", + chains = 1, + iter = 2000, + refresh = 0 ) - expect_equal(coef(fit_dynamite)$mean[1:2], coef(fit_glm), + expect_equal( + coef(fit_dynamite)$mean, + coef(fit_glm), tolerance = 0.01, ignore_attr = TRUE ) @@ -116,14 +146,25 @@ test_that("parameters for poisson mixed model are recovered", { ) fit_dynamite <- dynamite( obs(y ~ x + random(~ 1 + x), family = "poisson"), - data = d, time = "year", group = "person", priors = p, - init = 0, chains = 2, cores = 2, iter = 2000, refresh = 0, seed = 1 + data = d, + time = "year", + group = "person", + priors = p, + init = 0, + chains = 2, + cores = 2, + iter = 2000, + refresh = 0, + seed = 1 ) # "ground truth" obtained from one long dynamite run - expect_equal(coef(fit_dynamite)$mean, c(2, -0.99), + expect_equal( + coef(fit_dynamite)$mean, + c(2, -0.99), tolerance = 0.1 ) - expect_equal(coef(fit_dynamite, type = "nu")$mean, + expect_equal( + coef(fit_dynamite, types = "nu")$mean, c( 0.17, 0.42, -0.09, -0.13, -0.07, -0.12, -0.2, -0.12, 0.28, -0.1, -0.03, 0, 0.1, -0.11, -0.03, 0.02, 0.04, -0.02, -0.14, 0.16 @@ -135,7 +176,8 @@ test_that("parameters for poisson mixed model are recovered", { test_that("parameters for an AR(1) model are recovered as with arima", { skip_if_not(run_extended_tests) set.seed(1) - fit <- 
dynamite(obs(LakeHuron ~ 1, "gaussian") + lags(), + fit <- dynamite( + obs(LakeHuron ~ 1, "gaussian") + lags(), data = data.frame(LakeHuron, time = seq_len(length(LakeHuron)), id = 1), time = "time", group = "id", @@ -144,134 +186,18 @@ test_that("parameters for an AR(1) model are recovered as with arima", { refresh = 0 ) fit_arima <- arima(LakeHuron, c(1, 0, 0)) - expect_equal(coef(fit)$mean[2], coef(fit_arima)[1L], + expect_equal( + coef(fit)$mean[2L], + coef(fit_arima)[1L], tolerance = 0.01, ignore_attr = TRUE ) expect_equal( coef(fit)$mean[1L], coef(fit_arima)[2L] * (1 - coef(fit_arima)[1L]), - tolerance = 1, ignore_attr = TRUE - ) -}) - -test_that("LOO works for AR(1) model", { - skip_if_not(run_extended_tests) - set.seed(1) - fit <- dynamite(obs(LakeHuron ~ 1, "gaussian") + lags(), - data = data.frame(LakeHuron, time = seq_len(length(LakeHuron)), id = 1), - time = "time", - group = "id", - chains = 1, - iter = 2000, - refresh = 0 - ) - l <- loo(fit) - expect_equal( - l$estimates, - structure( - c( - -107.877842970846, 2.86041434691809, 215.755685941693, - 7.36848739076899, 0.561813071004331, 14.736974781538 - ), - dim = 3:2, - dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) - ), - tolerance = 1 - ) - expect_error(plot(l), NA) -}) - -test_that("LOO works with separate channels", { - skip_if_not(run_extended_tests) - set.seed(1) - # Fit again so that recompile with update works with all platforms - multichannel_fit <- dynamite( - dformula = obs(g ~ lag(g) + lag(logp), family = "gaussian") + - obs(p ~ lag(g) + lag(logp) + lag(b), family = "poisson") + - obs(b ~ lag(b) * lag(logp) + lag(b) * lag(g), family = "bernoulli") + - aux(numeric(logp) ~ log(p + 1)), - data = multichannel_example, - time = "time", - group = "id", - verbose = FALSE, - chains = 1, - cores = 1, - iter = 2000, - warmup = 1000, - init = 0, - refresh = 0, - thin = 1, - save_warmup = FALSE - ) - expect_error( - l <- loo(update(multichannel_fit, thin = 1), separate_channels = TRUE), - NA - ) - expect_equal( - l$g_loglik$estimates, - structure( - c( - 127.7731689, 3.9598420, -255.5463377, - 21.1943047, 0.2433661, 42.3886094 - ), - dim = 3:2, - dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) - ), - tolerance = 1 - ) - expect_equal( - l$p_loglik$estimates, - structure( - c( - -2128.5452197, 4.5260226, 4257.0904393, - 26.5452884, 0.3107372, 53.0905768 - ), - dim = 3:2, - dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) - ), - tolerance = 1 - ) - expect_equal( - l$b_loglik$estimates, - structure( - c( - -583.3724555, 6.8573891, 1166.7449111, - 12.1459613, 0.3097697, 24.2919227 - ), - dim = 3:2, - dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) - ), - tolerance = 1 - ) -}) - -test_that("LFO works for AR(1) model", { - # This also implicitly tests update method - skip_if_not(run_extended_tests) - set.seed(1) - d <- data.frame(LakeHuron, time = seq_len(length(LakeHuron))) - priors <- get_priors( - obs(LakeHuron ~ 1, "gaussian") + lags(k = 1:4), - data = d, - time = "time" - ) - priors$prior[2:5] <- "normal(0, 0.5)" - priors$prior[6] <- "student_t(3, 0, 2.5)" - fit <- dynamite(obs(LakeHuron ~ 1, "gaussian") + lags(k = 1:4), - data = d, - time = "time", - priors = priors, - chains = 2, - cores = 2, - iter = 2000, - refresh = 0 + tolerance = 1, + ignore_attr = TRUE ) - l <- lfo(fit, L = 20) - expect_equal(l$ELPD, -92.7, tolerance = 1) - expect_equal(l$ELPD_SE, 7.8, tolerance = 1) - expect_error(plot(l), NA) - expect_error(print(l), NA) }) 
test_that("parameters of a time-varying gaussian model are recovered", { @@ -316,7 +242,8 @@ test_that("parameters of a time-varying gaussian model are recovered", { dformula <- obs(y ~ -1 + z + varying(~x), family = "gaussian") + splines(df = 50) # compile model only once - code <- get_code(dformula, + code <- get_code( + dformula, data = d$data, time = "time", group = "id" @@ -330,9 +257,12 @@ test_that("parameters of a time-varying gaussian model are recovered", { for (i in seq_len(n)) { data <- get_data(dformula, group = "id", time = "time", data = d$data) diffs[, i] <- rstan::get_posterior_mean( - rstan::sampling(model, + rstan::sampling( + model, data = data, - refresh = 0, chains = 1, iter = 2000, + refresh = 0, + chains = 1, + iter = 2000, pars = pars ), pars = pars @@ -344,16 +274,27 @@ test_that("parameters of a time-varying gaussian model are recovered", { # test with a single large dataset d <- create_data(T_ = 500, N = 500, D = 100) - data <- get_data(obs(y ~ -1 + z + varying(~x), family = "gaussian") + - splines(df = 100), time = "time", group = "id", data = d$data) - fit_long <- rstan::sampling(model, + data <- get_data( + obs(y ~ -1 + z + varying(~x), family = "gaussian") + + splines(df = 100), + time = "time", + group = "id", + data = d$data + ) + fit_long <- rstan::sampling( + model, data = data, - refresh = 0, chains = 1, iter = 2000, + refresh = 0, + chains = 1, + iter = 2000, pars = pars ) estimates <- c(rstan::get_posterior_mean(fit_long, pars = pars)) - expect_equal(c(estimates), d$true_values, - ignore_attr = TRUE, tolerance = 0.1 + expect_equal( + c(estimates), + d$true_values, + ignore_attr = TRUE, + tolerance = 0.1 ) }) @@ -364,16 +305,16 @@ test_that("prior parameters are recovered with zero observations", { p <- get_priors(obs(y ~ x, "gaussian"), d, time = "time", group = "id") p$prior[] <- c("normal(2, 0.1)", "normal(5, 0.5)", "exponential(10)") fit_prior <- dynamite(obs(y ~ x, "gaussian"), - data = d, - time = "time", - group = "id", - priors = p, - iter = 55000, - warmup = 5000, - chains = 1, - cores = 1, - refresh = 0, - save_warmup = FALSE + data = d, + time = "time", + group = "id", + priors = p, + iter = 55000, + warmup = 5000, + chains = 1, + cores = 1, + refresh = 0, + save_warmup = FALSE ) sumr <- summary(fit_prior) |> dplyr::select(parameter, mean, sd, q5, q95) |> @@ -430,20 +371,52 @@ test_that("predict recovers correct estimates", { # use_cache = FALSE)$summary[, 1:3]), # c("mean_m", "mean_s", "se_m", "se_s", "sd_m", "sd_s")) - rstan_obs_results_id1_time4 <- c(mean = 0.6098, se_mean = 0.0035, sd = 0.4878) - rstan_obs_results_avg4 <- c(mean_m = 0.7136, mean_s = 0.4508, - se_m = 7e-04, se_s = 4e-04, sd_m = 0.0939, sd_s = 0.0511) - rstan_prob_results_id1_time4 <- c(mean = 0.6062, se_mean = 9e-04, sd = 0.1409) - rstan_prob_results_avg4 <- c(mean_m = 0.7138, mean_s = 0.213, se_m = 2e-04, - se_s = 2e-04, sd_m = 0.0264, sd_s = 0.025) + rstan_obs_results_id1_time4 <- c( + mean = 0.6098, + se_mean = 0.0035, + sd = 0.4878 + ) + rstan_obs_results_avg4 <- c( + mean_m = 0.7136, + mean_s = 0.4508, + se_m = 7e-04, + se_s = 4e-04, + sd_m = 0.0939, + sd_s = 0.0511 + ) + rstan_prob_results_id1_time4 <- c( + mean = 0.6062, + se_mean = 9e-04, + sd = 0.1409 + ) + rstan_prob_results_avg4 <- c( + mean_m = 0.7138, + mean_s = 0.213, + se_m = 2e-04, + se_s = 2e-04, + sd_m = 0.0264, + sd_s = 0.025 + ) d <- data.frame(y = c(y), time = rep(1:T_, each = N), id = 1:N) - p <- get_priors(obs(y ~ lag(y) + random(~1), "bernoulli"), - data = d, time = "time", group = "id") + p <- 
get_priors( + obs(y ~ lag(y) + random(~1), "bernoulli"), + data = d, + time = "time", + group = "id" + ) p$prior[] <- "std_normal()" - fitd <- dynamite(obs(y ~ lag(y) + random(~1), "bernoulli"), - data = d, time = "time", group = "id", priors = p, - chains = 1, iter = 2e4, warmup = 1000, refresh = 0) + fitd <- dynamite( + obs(y ~ lag(y) + random(~1), "bernoulli"), + data = d, + time = "time", + group = "id", + priors = p, + chains = 1, + iter = 2e4, + warmup = 1000, + refresh = 0 + ) pred <- predict(fitd) y_new <- pred$y_new[pred$time == 4 & pred$id == 1] @@ -465,9 +438,10 @@ test_that("predict recovers correct estimates", { mean_m = mean(m), mean_s = mean(s), se_m = sd(m) / sqrt(dplyr::n()), se_s = sd(s) / sqrt(dplyr::n()), sd_m = sd(m), sd_s = sd(s) - ) |> unlist() + ) |> unlist() - expect_equal(res, + expect_equal( + res, rstan_obs_results_avg4, tolerance = 0.01 ) @@ -494,8 +468,132 @@ test_that("predict recovers correct estimates", { sd_m = sd(m), sd_s = sd(s) ) |> unlist() - expect_equal(res, + expect_equal( + res, rstan_prob_results_avg4, tolerance = 0.01 ) }) + + +test_that("LOO works for AR(1) model", { + skip_if_not(run_extended_tests) + set.seed(1) + fit <- dynamite(obs(LakeHuron ~ 1, "gaussian") + lags(), + data = data.frame(LakeHuron, time = seq_len(length(LakeHuron)), id = 1), + time = "time", + group = "id", + chains = 1, + iter = 2000, + refresh = 0 + ) + l <- loo(fit) + expect_equal( + l$estimates, + structure( + c( + -107.877842970846, 2.86041434691809, 215.755685941693, + 7.36848739076899, 0.561813071004331, 14.736974781538 + ), + dim = 3:2, + dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) + ), + tolerance = 1 + ) + expect_error(plot(l), NA) +}) + +test_that("LOO works with separate channels", { + skip_if_not(run_extended_tests) + set.seed(1) + # Fit again so that recompile with update works with all platforms + multichannel_fit <- dynamite( + dformula = obs(g ~ lag(g) + lag(logp), family = "gaussian") + + obs(p ~ lag(g) + lag(logp) + lag(b), family = "poisson") + + obs(b ~ lag(b) * lag(logp) + lag(b) * lag(g), family = "bernoulli") + + aux(numeric(logp) ~ log(p + 1)), + data = multichannel_example, + time = "time", + group = "id", + verbose = FALSE, + chains = 1, + cores = 1, + iter = 2000, + warmup = 1000, + init = 0, + refresh = 0, + thin = 1, + save_warmup = FALSE + ) + expect_error( + l <- loo(update(multichannel_fit, thin = 1), separate_channels = TRUE), + NA + ) + expect_equal( + l$g_loglik$estimates, + structure( + c( + 127.7731689, 3.9598420, -255.5463377, + 21.1943047, 0.2433661, 42.3886094 + ), + dim = 3:2, + dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) + ), + tolerance = 1 + ) + expect_equal( + l$p_loglik$estimates, + structure( + c( + -2128.5452197, 4.5260226, 4257.0904393, + 26.5452884, 0.3107372, 53.0905768 + ), + dim = 3:2, + dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) + ), + tolerance = 1 + ) + expect_equal( + l$b_loglik$estimates, + structure( + c( + -583.3724555, 6.8573891, 1166.7449111, + 12.1459613, 0.3097697, 24.2919227 + ), + dim = 3:2, + dimnames = list(c("elpd_loo", "p_loo", "looic"), c("Estimate", "SE")) + ), + tolerance = 1 + ) +}) + +test_that("LFO works for AR(1) model", { + # This also implicitly tests update method + skip_if_not(run_extended_tests) + set.seed(1) + d <- data.frame(LakeHuron, time = seq_len(length(LakeHuron))) + priors <- get_priors( + obs(LakeHuron ~ 1, "gaussian") + lags(k = 1:4), + data = d, + time = "time" + ) + priors$prior[2:5] <- "normal(0, 0.5)" 
+ priors$prior[6] <- "student_t(3, 0, 2.5)" + fit <- dynamite(obs(LakeHuron ~ 1, "gaussian") + lags(k = 1:4), + data = d, + time = "time", + priors = priors, + chains = 2, + cores = 2, + iter = 2000, + refresh = 0 + ) + l <- lfo(fit, L = 20) + expect_equal(l$ELPD, -92.7, tolerance = 1) + expect_equal(l$ELPD_SE, 7.8, tolerance = 1) + expect_error(plot(l), NA) + expect_error(print(l), NA) +}) + + +
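
For reference (not part of the patch itself): a brief R usage sketch of the interface this commit touches, using the example fits bundled with dynamite. The calls mirror those exercised in the updated tests; outputs are indicative only.

library(dynamite)

# coef() now defaults to types = c("alpha", "beta", "delta") instead of NULL;
# other parameter classes are requested explicitly through the plural `types`
# argument, which summary() and plot() use as well.
coef(gaussian_example_fit)                        # alpha, beta, delta only
coef(gaussian_example_fit, types = "delta")       # a single type on request
summary(gaussian_example_fit, types = "beta")
plot(gaussian_example_fit, plot_type = "trace", types = "beta")

# get_parameter_types() now calls as.data.table(x, types = all_types), so every
# parameter type present in the model is reported, e.g. the latent factor types
# (lambda, psi, omega_psi, ...) of latent_factor_example_fit.
get_parameter_types(latent_factor_example_fit)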
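
A second, more tentative sketch for the categories fix in as.data.table(): categorical channels contribute their non-reference category labels, cumulative channels contribute cutpoint indices seq_len(S - 1L), and all other families now contribute NA_character_. The output column name `category` is an assumption based on the package's data frame format; it does not appear in the patch itself.

# Hypothetical check of per-channel category handling after this patch.
d <- as.data.table(categorical_example_fit, types = "alpha")
unique(d$category)   # category labels per channel; NA for non-categorical ones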