Merged
Changes from 9 commits
14 changes: 12 additions & 2 deletions DESCRIPTION
@@ -11,17 +11,27 @@ Encoding: UTF-8
Roxygen: list(markdown = TRUE)
RoxygenNote: 7.3.2
Imports:
checkmate,
cli,
dplyr,
epipredict,
epiprocess,
forecasttools,
fs,
glue,
hardhat,
hubData,
hubEnsembles,
lubridate,
readr,
stringr,
tidyr,
tidyselect
Remotes:
forecasttools=github::cdcgov/forecasttools,
hubUtils=github::hubverse-org/hubUtils
hubUtils=github::hubverse-org/hubUtils,
hubData=github::hubverse-org/hubData,
epipredict=github::cmu-delphi/epipredict,
epiprocess=github::cmu-delphi/epiprocess
Suggests:
httptest2,
testthat (>= 3.0.0),
2 changes: 2 additions & 0 deletions NAMESPACE
@@ -1,4 +1,6 @@
# Generated by roxygen2: do not edit by hand

export(excluded_locations)
export(generate_hub_baseline)
export(generate_hub_ensemble)
export(update_hub_target_data)
216 changes: 216 additions & 0 deletions R/generate_hub_baselines.R
@@ -0,0 +1,216 @@
library(epipredict)

baseline_model_names <- list(
covid = "CovidHub-baseline",
rsv = "RSVHub-baseline"
)

check_data_latency <- function(
epi_df,
reference_date,
desired_max_time_value,
target_label
) {
excess_latency_tbl <- epi_df |>
tidyr::drop_na("observation") |>
dplyr::group_by(.data$geo_value) |>
dplyr::summarize(
max_time_value = max(.data$time_value),
.groups = "drop"
) |>
dplyr::mutate(
excess_latency = pmax(
as.integer(desired_max_time_value - .data$max_time_value) %/% 7L,
0L
),
has_excess_latency = .data$excess_latency > 0L
)

overlatent_err_thresh <- 0.20
prop_locs_overlatent <- mean(excess_latency_tbl$has_excess_latency)

if (prop_locs_overlatent > overlatent_err_thresh) {
cli::cli_abort(
paste0(
"{target_label} forecast: More than ",
"{100 * overlatent_err_thresh}% of locations have excess latency. ",
"The reference date is {reference_date}, so we desire observations ",
"at least through {desired_max_time_value}. However, ",
"{nrow(excess_latency_tbl |> dplyr::filter(.data$has_excess_latency))} ",
"location{?s} had excess latency."
)
)
} else if (prop_locs_overlatent > 0) {
cli::cli_warn(
paste0(
"{target_label} forecast: Some locations have excess latency. ",
"The reference date is {reference_date}, so we desire observations ",
"at least through {desired_max_time_value}. However, ",
"{nrow(excess_latency_tbl |> dplyr::filter(.data$has_excess_latency))} ",
"location{?s} had excess latency."
)
)
}
}
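
# For intuition, a small worked example of the latency arithmetic above
# (assumed dates, not taken from hub data): with a reference date of
# 2025-01-18 we desire observations through 2025-01-11, so a location whose
# last report is 2024-12-28 gets
#   pmax(as.integer(as.Date("2025-01-11") - as.Date("2024-12-28")) %/% 7L, 0L)
# which evaluates to 2L, i.e. two weeks of excess latency.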

make_baseline_forecast <- function(
base_hub_path,
target_name,
target_label,
reference_date,
desired_max_time_value
) {
epi_df <- hubData::connect_target_timeseries(base_hub_path) |>
dplyr::collect() |>
forecasttools::hub_target_data_as_of() |>
dplyr::filter(.data$target == !!target_name) |>
dplyr::mutate(
geo_value = forecasttools::us_location_recode(
.data$location,
"code",
"abbr"
)
) |>
dplyr::rename(
time_value = "date"
) |>
dplyr::select(-c("location", "target")) |>
epiprocess::as_epi_df()

check_data_latency(
epi_df,
reference_date,
desired_max_time_value,
target_label
)

rng_seed <- as.integer((59460707 + as.numeric(reference_date)) %% 2e9)
preds <- withr::with_rng_version(
"4.0.0",
withr::with_seed(rng_seed, {
fcst <- epipredict::cdc_baseline_forecaster(
epi_df |>
dplyr::filter(
.data$time_value <= desired_max_time_value
),
"observation",
epipredict::cdc_baseline_args_list(aheads = 1:4, nsims = 1e5)
)
# set forecast_date to the reference date (one week ahead of the data due
# to reporting latency), recompute ahead, and append a horizon -1 row
# built from the latest observation (see the worked example after this block)
fcst$predictions |>
dplyr::mutate(
forecast_date = reference_date,
ahead = as.integer(.data$target_date - reference_date) %/% 7L
) |>
# prepare -1 horizon predictions
dplyr::bind_rows(
epi_df |>
tidyr::drop_na("observation") |>
dplyr::slice_max(.data$time_value) |>
dplyr::transmute(
forecast_date = reference_date,
target_date = reference_date - 7L,
ahead = -1L,
geo_value,
.pred = .data$observation,
.pred_distn = hardhat::quantile_pred(
values = matrix(
rep(
.data$observation,
each = length(
epipredict::cdc_baseline_args_list()$quantile_levels
)
),
nrow = length(.data$observation),
ncol = length(
epipredict::cdc_baseline_args_list()$quantile_levels
),
byrow = TRUE
),
quantile_levels = epipredict::cdc_baseline_args_list()$quantile_levels # nolint
)
)
)
})
)
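
# For example (assumed dates): with reference_date 2025-01-18, a prediction
# for target_date 2025-01-25 gets ahead = 1L, while the appended horizon -1
# row takes target_date 2025-01-11 (reference_date - 7L).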

preds_formatted <- preds |>
epipredict::flusight_hub_formatter(
target = target_name,
output_type = "quantile"
) |>
tidyr::drop_na("output_type_id") |>
dplyr::arrange(.data$target, .data$horizon, .data$location) |>
dplyr::select(
"reference_date",
"horizon",
"target",
"target_end_date",
"location",
"output_type",
"output_type_id",
"value"
)
return(preds_formatted)
}


#' Generate hub baseline forecasts for a given disease and reference date
#'
#' Fits `epipredict::cdc_baseline_forecaster()` to the hub's target time series
#' for the hospital admissions and proportion-of-ED-visits targets and writes
#' the combined quantile forecasts as a single CSV to the model's
#' `model-output/` directory.
#'
#' @param base_hub_path Path to the base hub directory.
#' @param reference_date Reference date (must be a Saturday).
#' @param disease Disease name, either "covid" or "rsv".
#' @return The result of writing the combined forecast file; called primarily
#'   for its side effect of creating `<reference_date>-<model name>.csv` under
#'   `model-output/<model name>/`.
#' @export
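#'
#' @examples
#' # Usage sketch; the hub path below is hypothetical and the reference
#' # date must be a Saturday.
#' \dontrun{
#' generate_hub_baseline(
#'   base_hub_path = "path/to/covid-forecast-hub",
#'   reference_date = "2025-01-18",
#'   disease = "covid"
#' )
#' }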
generate_hub_baseline <- function(
base_hub_path,
reference_date,
disease
) {
if (!disease %in% c("covid", "rsv")) {
cli::cli_abort("{.arg disease} must be either 'covid' or 'rsv'.")
}
reference_date <- lubridate::as_date(reference_date)
desired_max_time_value <- reference_date - 7L
dow_supplied <- lubridate::wday(reference_date, week_start = 7, label = FALSE)
if (dow_supplied != 7) {
cli::cli_abort(
message = paste0(
"Expected `reference_date` to be a Saturday, day number 7 ",
"of the week, given the `week_start` value of Sunday. ",
"Got {reference_date}, which is day number ",
"{dow_supplied} of the week."
)
)
}

baseline_model_name <- baseline_model_names[[disease]]
output_dirpath <- fs::path(base_hub_path, "model-output", baseline_model_name)
# dir_create() is a no-op if the directory already exists
fs::dir_create(output_dirpath, recurse = TRUE)

preds_hosp <- make_baseline_forecast(
base_hub_path = base_hub_path,
target_name = glue::glue("wk inc {disease} hosp"),
target_label = "Hospital Admissions",
reference_date = reference_date,
desired_max_time_value = desired_max_time_value
)

preds_ed <- make_baseline_forecast(
base_hub_path = base_hub_path,
target_name = glue::glue("wk inc {disease} prop ed visits"),
target_label = "Proportion ED Visits",
reference_date = reference_date,
desired_max_time_value = desired_max_time_value
)

forecasttools::write_tabular_file(
dplyr::bind_rows(preds_hosp, preds_ed),
fs::path(
output_dirpath,
paste0(as.character(reference_date), "-", baseline_model_name, ".csv")
)
)
}