Commit 87f42f34 authored by Conor Anderson

Merge branch devel

Merge branch 'devel' into crop

# Conflicts:
#	server.R
#	ui.R
parents dde1388e e9b8051a
calc_anoms <- function(datin) {
calc_anoms <- function(datin, var) {
if (has_name(datin, "Ensembles")) {
warning("It seems you already averaged the ensembles. This function needs to be run first.")
......@@ -22,7 +22,7 @@ calc_anoms <- function(datin) {
col <- 5
}
do_calc <- function(i, dat_hist, dat_proj, grouping, col) {
do_calc <- function(i, dat_hist, dat_proj, grouping, col, var) {
scenario <- if (str_detect(dat_hist$Scenario[i], pattern = "RCP[024568.]{3}")) {
str_extract(dat_hist$Scenario[i], pattern = "RCP[024568.]{3}")
} else {
......@@ -38,7 +38,11 @@ calc_anoms <- function(datin) {
matched[, col] <- unlist(dat_hist[i, col])
for (co in (col+1):number_of_cols) {
for (ro in 1:nrow(matched)) {
matched[ro, co] <- matched[ro, co] - matched[ro, col]
if (var %in% c("tas", "tasmin", "tasmax")) {
matched[ro, co] <- matched[ro, co] - matched[ro, col]
} else {
matched[ro, co] <- (matched[ro, co] - matched[ro, col]) / matched[ro, col] * 100
}
}
}
if (has_name(dat_hist, "Note") && length(dat_hist$Note[i] > 0)) {
......@@ -46,5 +50,6 @@ calc_anoms <- function(datin) {
}
matched
}
do.call("bind_rows", lapply(1:nrow(dat_hist), do_calc, dat_hist, dat_proj, grouping, col))
do.call("bind_rows", lapply(1:nrow(dat_hist), do_calc, dat_hist, dat_proj, grouping, col, var))
}
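## Illustration of the anomaly convention implemented above, with hypothetical values:
## temperature anomalies are absolute differences, precipitation anomalies are percent change.
tas_baseline <- 14.2; tas_future <- 16.7                  # degrees
tas_anom <- tas_future - tas_baseline                     # 2.5 degrees warmer
pr_baseline <- 60; pr_future <- 72                        # mm over the period
pr_anom <- (pr_future - pr_baseline) / pr_baseline * 100  # 20 % wetter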
convert_units <- function(tab, var) {
if (debug_flag) message("convert_units")
if (var %in% c("tas", "tasmin", "tasmax")) {
return(mutate(tab, Value = Value - 273.15))
} else if (var %in% c("pr", "prc")) {
tab <- mutate(tab, Days = days_in_month(as.Date(paste(Year,
Month,
"01", sep = "-"))))
return(dplyr::select(mutate(tab, Value = Value * (Days*86400)), -Days))
} else {
stop("Unrecognized variable in convert_units.")
}
}
\ No newline at end of file
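## A minimal usage sketch of convert_units() on toy rows (values are hypothetical;
## debug_flag is normally defined in the app settings):
library(dplyr); library(lubridate)
debug_flag <- FALSE
convert_units(tibble(Year = 2006, Month = 1, Value = 288.15), "tas")  # 288.15 K -> 15 °C
convert_units(tibble(Year = 2006, Month = 1, Value = 3e-5), "pr")     # 3e-5 kg·m⁻²·s⁻¹ × 31 d × 86400 s ≈ 80.4 mm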
get_choices <- function(file_dir, var, lim = FALSE) {
print(sprintf("Getting choices for %s in %s", var, file_dir))
if (debug_flag) message(sprintf("Getting choices for %s in %s", var, file_dir))
filenames <- if (lim) {
grep("200601-210012.nc$", dir(file.path(file_dir, var, "verified")), value = TRUE)
} else {
......
get_coords <- function(coords_in) {
print(sprintf("Getting coordinates for input %s", coords_in))
if (debug_flag) message("Getting coordinates for input ", coords_in)
if (length(unlist(strsplit(coords_in, split = ","))) == 2) {
print("Input is of length two")
if (debug_flag) message("Input is of length two")
coerce_coords <- suppressWarnings(as.numeric(unlist(strsplit(coords_in, split = ","))))
}
if (exists("coerce_coords") && !any(is.na(coerce_coords))) {
print("Coordinates were passed directly")
if (debug_flag) message("Coordinates were passed directly")
coords <- data.frame(lon = coerce_coords[2], lat = coerce_coords[1])
rm(coerce_coords)
} else {
......@@ -14,6 +14,6 @@ get_coords <- function(coords_in) {
lat = coords$results$geometry$location$lat)
}
if(nrow(coords) > 1) warning("We got more than one result. We'll use the first.")
print(coords)
if (debug_flag) message(coords)
coords[1,]
}
\ No newline at end of file
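## A minimal usage sketch (coordinates are hypothetical). A "lat,lon" pair is parsed
## directly; any other input falls through to geocoding, which needs the Google key
## from the settings file.
debug_flag <- FALSE
get_coords("43.784,-79.186")   # -> data.frame(lon = -79.186, lat = 43.784)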
get_gcm_files <- function(choices, baseline = NULL, projection = NULL, model = NULL, scenario = NULL, ensemble = NULL) {
print("get_gcm_files()")
if (debug_flag) message("get_gcm_files()")
# If we want both the baseline and the projections, we'll run this twice, once for each period.
if (!is.null(baseline) && !is.null(projection)) {
baseline_files <- get_gcm_files(dir, baseline, NULL, model, scenario, ensemble)
projection_files <- get_gcm_files(dir, NULL, projection, model, scenario, ensemble)
baseline_files <- get_gcm_files(choices, baseline, NULL, model, scenario, ensemble)
projection_files <- get_gcm_files(choices, NULL, projection, model, scenario, ensemble)
return(bind_rows(baseline_files, projection_files))
}
......
get_gcm_ts <- function(meta) {
if (debug_flag) {
message("get_gcm_ts")
message(sprintf("Reading files from %s on %i cores with a cache date of %s",
file_dir, no_of_cores, cache_ver))
}
st_point <- storr::storr_rds(".cache/point-cache", default_namespace = "point")
# Process each NetCDF file
print("Collecting the NetCDF time series...")
message("Collecting the NetCDF time series...")
registerDoParallel(no_of_cores)
......@@ -21,16 +27,19 @@ get_gcm_ts <- function(meta) {
time_tmp <- NULL
if (st_point$exists(key)) {
print("Hit the cache.")
message("Hit the cache.")
time_tmp <- st_point$get(key)
if (!inherits(time_tmp, "tbl_df") || ncol(time_tmp) != 8) { ## Invalidate old cache entries.
## Invalidate old cache entries.
if (is.null(attr(time_tmp, "cache_ver")) || attr(time_tmp, "cache_ver") != cache_ver) {
message("Cached version is invalid. Deleting.")
st_point$del(key)
time_tmp <- NULL
}
}
if (is.null(time_tmp)) {
print("Missed the cache")
message("Missed the cache")
# Get the time
components <- get.split.filename.cmip5(fs[f])
......@@ -63,7 +72,6 @@ get_gcm_ts <- function(meta) {
nc_var <- nc_var[1, 1, ]
}
if (var %in% c("tas", "tasmax", "tasmin")) nc_var <- nc_var - 273.15
time_tmp <- tibble(Var = var,
Model = meta$Model[row],
Scenario = meta$Scenario[row],
......@@ -74,8 +82,9 @@ get_gcm_ts <- function(meta) {
Value = nc_var)
rm(nc_var, nc_time); gc()
attr(time_tmp, "cache_ver") <- cache_ver
st_point$set(key, time_tmp)
print("Cached data")
message("Cached data")
}
if (f == 1) {
......@@ -91,7 +100,7 @@ get_gcm_ts <- function(meta) {
}
})
print("Done collecting the NetCDF files...")
message("Done collecting the NetCDF files...")
# Return our tibble
dat %>% distinct() ##FIXME: Why do I get so many dupes?
......
......@@ -2,19 +2,28 @@ get_metadata <- function(choices, coords, baseline = NULL,
projection = NULL, model = NULL, scenario = NULL,
ensemble = NULL) {
if (debug_flag) message("get_metadata")
st_meta <- storr::storr_rds(".cache/meta-cache", default_namespace = "meta")
var <- unique(choices$Variable)
print("Getting metadata")
## FIXME: This is the nuclear option.
if (!st_meta$exists("cache_ver") || st_meta$get("cache_ver") != cache_ver) {
message("Metadata cache is invalid. Deleting.")
st_meta$clear()
st_meta$set("cache_ver", cache_ver)
}
var <- unique(choices$Variable)
if (is.null(model)) model <- unique(choices$Model)
if (is.null(scenario)) scenario <- unique(choices$Scenario)
if (!is.null(baseline)) scenario <- c(scenario, "historical")
if (is.null(ensemble)) ensemble <- unique(choices$Ensemble)
cases <- choices %>% dplyr::select(Model, Scenario, Ensemble) %>%
filter(Model %in% model & Scenario %in% scenario & Ensemble %in% ensemble) %>%
distinct()
if(!is.null(baseline)) {
if (!is.null(baseline)) {
coyote <- (!is.null(baseline) && paste0(baseline[length(baseline)], "12") > "200512")
gcm_baseline_key <- if (coyote) {
......@@ -51,7 +60,7 @@ get_metadata <- function(choices, coords, baseline = NULL,
}
if (nrow(short_cases) > 0) {
print("Baseline cases were short.")
message("Baseline cases were short.")
new_hist_files <- get_gcm_files(choices, baseline,
projection = NULL,
model = unique(short_cases$Model),
......@@ -98,7 +107,7 @@ get_metadata <- function(choices, coords, baseline = NULL,
}
if (nrow(short_cases) > 0) {
print("Projection models were short.")
message("Projection models were short.")
new_proj_files <- get_gcm_files(choices, baseline = NULL,
projection,
model = unique(short_cases$Model),
......
......@@ -28,7 +28,7 @@ poll_files <- function(filtered, period, coyote, scenario, ensemble) {
# Start with our start date
start_row <- max(which(model_stats$Start <= paste0(min(period), "01")))
if (length(start_row) == 0) {
print("Add some code to panic because no start")
stop("No start!")
} else {
model_files <- filtered %>% filter(Model == mod, Scenario == model_stats$Scenario[start_row], Ensemble == model_stats$Ensemble[start_row], Length == model_stats$Length[start_row]) %>% dplyr::select(Start, End, Filenames)
next_row <- suppressWarnings(try(min(which(as.yearmon(model_stats$Start, format = "%Y%m") == (as.yearmon(model_stats$End[start_row], format = "%Y%m") + 1/12) | (model_stats$Start <= model_stats$End[start_row] & model_stats$End > model_stats$End[start_row]))), silent = TRUE))
......@@ -42,7 +42,7 @@ poll_files <- function(filtered, period, coyote, scenario, ensemble) {
}
}
if (next_row == Inf) {
print(paste("Not enough data for", mod))
message(paste("Not enough data for", mod))
} else {
out$Start[out$Model == mod] <- model_files$Start[max(which(model_files$Start <= paste0(min(period), "01")))]
out$End[out$Model == mod] <- model_files$End[min(which(model_files$End >= paste0(max(period), "12")))]
......
......@@ -11,7 +11,7 @@ process_raw_anom_ts <- function(time_series, anom_period_in, baseline, projectio
# Choose our period of analysis
time_series$Year <- as.integer(time_series$Year)
print(sprintf("The input period is %s", anom_period_in))
if (debug_flag) message("The input period is ", anom_period_in)
if (anom_period_in == "Annual") {
grouping <- c("Var", "Model", "Scenario", "Ensemble", "Year")
......
## Debug options
# Some options that allow for debug
options(shiny.reactlog = FALSE)
debug_flag = FALSE
## Data location
# Conjuntool assumes that you have amassed all necessary NetCDF files from the ESGF
# These should be sorted into folders by variable, e.g. "tas", "tasmin", etc.
# For now these are hard-coded, so you will need to change the relevant line of code to include other variables
# For more info on the ESGF, see https://esgf-node.llnl.gov/projects/esgf-llnl/
file_dir = ""
......@@ -13,3 +18,8 @@ google_key = ""
## Number of cores to use
# Conjuntool parallelizes some heavy functions. Set the number of cores to use.
no_of_cores = detectCores() - 1
## Cache version
# A code with which to validate cache entries. Entries that do not match this
# version string will be deleted as they are called (useful for breaking changes).
cache_ver = "2018-10-11"
## -- Load required packages -- ##
# pkgs <- c("shiny", "broom", "doParallel", "dplyr", "ggplot2", "googleway", "htmlwidgets",
# "leaflet", "lubridate", "mapview", "ncdf4", "ncdf4.helpers", "PCICt", "plotly",
# "purrr", "raster", "RColorBrewer", "readr", "storr", "stringr",
# "tibble", "tidyr", "rgdal", "zoo")
# lapply(pkgs, library, character.only = TRUE)
library("shiny")
library("broom")
library("doParallel")
......@@ -57,9 +51,10 @@ shinyServer(function(input, output, session) {
})
get_plot_data <- reactive({
print("get_plot_data()")
if (debug_flag) message("get_plot_data()")
meta <- plot_model_in()
plot_data <- get_gcm_ts(meta)
if (input$convert_units_plot) plot_data <- convert_units(plot_data, input$var_filter_plot)
# Get the time that we are interested in
plot_data <- plot_data %>% filter(Year >= input$year_in[1] &
......@@ -96,7 +91,11 @@ shinyServer(function(input, output, session) {
}
validate(need(input$types_in, "Select at least one plot type, below."))
isolate({
label <- ifelse((input$var_filter_plot %in% c("tas", "tasmax", "tasmin")), "Temperature (°C)", "Precipitation Flux (kg·m⁻²·s⁻¹)")
if (input$convert_units_plot) {
label <- ifelse((input$var_filter_plot %in% c("tas", "tasmax", "tasmin")), "Temperature (°C)", "Precipitation (mm)")
} else {
label <- ifelse((input$var_filter_plot %in% c("tas", "tasmax", "tasmin")), "Temperature (K)", "Precipitation Flux (kg·m⁻²·s⁻¹)")
}
colo <- ifelse((input$var_filter_plot %in% c("tas", "tasmax", "tasmin")), "red", "blue")
})
plot <- plot_ly(data = plot_data, x = ~Time, y = ~Value, color = I(colo))
......@@ -134,6 +133,29 @@ shinyServer(function(input, output, session) {
### ANOMALIES
## -- Ensure projection is longer than baseline -- ##
# Code from https://stackoverflow.com/questions/52212688/setting-a-minimum-range-in-a-shiny-app-slider
observeEvent(input$baseline_in, {
max_length <- 2100 - 2006
baseline_length <- isolate((input$baseline_in[2] - input$baseline_in[1]))
if (baseline_length > max_length) {
updateSliderInput(session, "baseline_in", value = c((input$baseline_in[1]),input$baseline_in[1] + max_length))
}
})
observeEvent({input$baseline_in | input$projection_in}, {
baseline_length <- isolate((input$baseline_in[2] - input$baseline_in[1]))
if ((input$projection_in[2] - input$projection_in[1]) < baseline_length) {
if ((input$projection_in[1] + baseline_length) <= 2100) {
updateSliderInput(session, "projection_in", value = c((input$projection_in[1]),input$projection_in[1] + baseline_length))
} else {
updateSliderInput(session, "projection_in", value = c(input$projection_in[2] - baseline_length, input$projection_in[2]))
}
}
})
## ----------------------------------------------- ##
get_choices_anoms <- reactive({
get_choices(file_dir, input$var_filter, lim = FALSE)
})
......@@ -169,18 +191,21 @@ shinyServer(function(input, output, session) {
})
update_anom_table_header <- eventReactive(input$anom_go, {
if(is.null(input$var_filter)) {
str <- "Nothing here yet!"
str <- switch(input$var_filter,
tas = "Mean Temperature",
tasmin = "Minimum Temperature",
tasmax = "Maximum Temperature",
pr = "Precipitation",
prc = "Convective Precipitation",
"Nothing here yet!")
if (isTRUE(input$convert_units)) {
unit <- ifelse(input$var_filter %in% c("tas", "tasmax", "tasmin"),
"(&#176;C)", "(mm)")
} else {
str <- switch(input$var_filter,
tas = "Mean Temperature (&#176;C)",
tasmin = "Minimum Temperature (&#176;C)",
tasmax = "Maximum Temperature (&#176;C)",
pr = "Precipitation (kg&#8901;m<sup>-2</sup>&#8901;s<sup>-1</sup>)",
prc = "Convective Precipitation (kg&#8901;m<sup>-2</sup>&#8901;s<sup>-1</sup>)",
"Nothing here yet!")
unit <- ifelse(input$var_filter %in% c("tas", "tasmax", "tasmin"),
"(K)", "(kg&#8901;m<sup>-2</sup>&#8901;s<sup>-1</sup>)")
}
str
paste(str, unit)
})
output$anom_table_header = renderUI({
......@@ -237,8 +262,10 @@ shinyServer(function(input, output, session) {
## ----------------------------------- ##
## -- Get anom data and manipulate --- ##
generate_anom_data <- reactive({
time_series <- get_anom_ts()
if (isolate(isTRUE(input$convert_units))) time_series <- convert_units(time_series, input$var_filter)
process_raw_anom_ts(time_series, reac$anom_period_in,
baseline = isolate(input$baseline_in[1]:input$baseline_in[2]),
projection = isolate(input$projection_in[1]:input$projection_in[2]))
......@@ -247,7 +274,7 @@ shinyServer(function(input, output, session) {
filter_anom_data <- reactive({
anom_data <- generate_anom_data()
if (reac$anom_period_in != "Annual" ) {
print(paste("The selected periods are", paste(unlist(reac$anom_selected_periods), collapse = ", ")))
if (debug_flag) message("The selected periods are", paste(unlist(reac$anom_selected_periods), collapse = ", "))
}
if (reac$anom_period_in == "Monthly" && !is.null(reac$anom_selected_periods)) {
anom_data <- filter(anom_data, Month %in% reac$anom_selected_periods)
......@@ -265,7 +292,7 @@ shinyServer(function(input, output, session) {
anom_data <- avg_baseline(anom_data)
}
if (is.element("Calculate Anomalies", input$add_proc)) {
anom_data <- calc_anoms(anom_data)
anom_data <- calc_anoms(anom_data, isolate(input$var_filter))
}
if (is.element("Average Ensembles/Runs", input$add_proc)) {
anom_data <- ensemble_means(anom_data)
......@@ -273,12 +300,29 @@ shinyServer(function(input, output, session) {
}
anom_data
})
## ----------------------------------- ##
output$anoms_out <- renderDataTable({
validate(need(get_anom_ts(), "Need to compile the time series!"))
final_anom_data()
})
## -- Note about precip. anomalies --- ##
# Ideally I could move this to a conditional panel.
output$precip_anom_note <- renderUI({
validate(need(input$anom_go, FALSE))
if (isolate(input$var_filter %in% c("pr", "prc")) &&
!is.null(input$add_proc) &&
is.element("Calculate Anomalies", input$add_proc)) {
out <- "<i>Precipitation anomalies are expresed as percentage change (%)</i><br><br>"
} else {
out <- NULL
}
HTML(out)
})
## ----------------------------------- ##
## -- Anoms download options --------- ##
output$download_data <- downloadHandler(
filename = function() {
paste0(
......@@ -304,20 +348,19 @@ shinyServer(function(input, output, session) {
}
)
output$download_debug <- downloadHandler(
filename = "debug.rds", content = function(file) {
saveRDS(get_anom_ts(), file)
output$download_anom_ts <- downloadHandler(
filename = "time_series.csv", content = function(file) {
time_series <- get_anom_ts()
if (isolate(isTRUE(input$convert_units))) time_series <- convert_units(time_series, input$var_filter)
time_series <- filter(time_series,
(grep("historical", Scenario) & Year %in% isolate(input$baseline_in[1]:input$baseline_in[2])) |
(!grep("historical", Scenario) & Year %in% isolate(input$projection_in[1]:input$projection_in[2])))
write_csv(time_series, file)
}
)
output$download_input <- downloadHandler(
filename = "input.rds", content = function(file) {
saveRDS(input, file)
}
)
output$download_debug_meta <- downloadHandler(
filename = "debug_files.rds", content = function(file) {
filename = "file_metadata.rds", content = function(file) {
lim <- switch(input$period_limits,
none = "none",
hind = "hind",
......@@ -333,6 +376,8 @@ shinyServer(function(input, output, session) {
saveRDS(meta, file)
}
)
## ----------------------------------- ##
### OVERLAY MAP
......@@ -350,10 +395,10 @@ shinyServer(function(input, output, session) {
key <- paste(file, input$year_in_2[1], input$year_in_2[2], sep = "_")
if (st_avg$exists(key)) {
print("Hit the cache.")
if (debug_flag) message("Hit the cache.")
map_data <- st_avg$get(key)
} else {
print("Missed the cache.")
if (debug_flag) message("Missed the cache.")
nc_nc <- nc_open(file.path(file_dir, input$var_filter_map, "verified", file), readunlim = FALSE)
incProgress(1/6, detail = paste("Found the file."))
......@@ -411,7 +456,7 @@ shinyServer(function(input, output, session) {
map_data <- flip(map_data, 'y')
incProgress(1/6, detail = paste("Flipped and rotated."))
st_avg$set(key, map_data)
print("Cached data.")
if (debug_flag) message("Cached data.")
map_data
}
......@@ -515,7 +560,7 @@ shinyServer(function(input, output, session) {
map_shapefile()
})
#Show popup on click
# Show popup on click
observeEvent(input$resample_map_click, {
click <- input$resample_map_click
proxy <- leafletProxy("resample_map")
......
......@@ -47,7 +47,10 @@ dashboardPage(
box(
title = "Input",
width = 3,
selectInput("var_filter_plot", "Select Variables", c("tas", "tasmax", "tasmin", "pr")),
selectInput("var_filter_plot", "Select Variables", c("tas", "tasmax", "tasmin", "pr", "prc")),
checkboxInput("convert_units_plot",
HTML("Convert <b>K</b> to <b>&#176;C</b> OR <b>kg&#8901;m<sup>-2</sup>&#8901;s<sup>-1</sup></b> to <b>mm</b>."),
TRUE),
singleModelSelectInput("plot_model_in", "Select Model"),
sliderInput("year_in", label = "Projection Period", min = 2006,
max = 2100, value = c(2011, 2100), step = 1,
......@@ -56,7 +59,6 @@ dashboardPage(
textInput("plot_city_in", "Location", value = "UTSC, Scarborough, ON", width = NULL, placeholder = NULL),
leafletOutput("plot_map", height = 200)
),
box(
title = "Output",
width = 9,
......@@ -75,13 +77,16 @@ dashboardPage(
id = "tabset1", width = 3,
tabPanel("1: Def. params",
selectInput("var_filter", "Select Variables", c("tas", "tasmax", "tasmin", "pr", "prc")),
checkboxInput("convert_units",
HTML("Convert <b>K</b> to <b>&#176;C</b> OR <b>kg&#8901;m<sup>-2</sup>&#8901;s<sup>-1</sup></b> to <b>mm</b>."),
TRUE),
sliderInput("baseline_in", label = "Baseline", min = 1850,
max = 2015, value = c(1981, 2010), sep = "", ticks = FALSE),
HTML("<i>Note, model projections begin in 2005, so any baseline past that date will include \"climate change\".</i><br/><br/>"),
max = year(today()) - 1, value = c(1981, 2010), sep = "", ticks = FALSE),
HTML("<i>Model projections begin in 2005, so any baseline past that date will include climate change forcing.</i><br/><br/>"),
sliderInput("projection_in", label = "Projection Period", min = 2006,
max = 2100, value = c(2011, 2100), sep = "", ticks = FALSE),
HTML("<i>Note, the baseline can't be longer than the projections. This isn't controlled for presently in the code.</i> <br/><br/>"),
radioButtons("period_limits", "Limit Casting", choices = c("no limit" = "none", "hindcast only" = "hind", "forecast only" = "fore"), selected = NULL,
HTML("<i>The baseline can't be longer than the projections. Future periods will be defined based on the baseline length, i.e. if projection is the same length as the baseline, there will be one future period; if it is thrice as long, there will be three future periods.</i><br/><br/>"),
radioButtons("period_limits", "Limit Casting", choices = c("do not limit" = "none", "hindcast only" = "hind", "forecast only" = "fore"), selected = NULL,
inline = FALSE, width = NULL, choiceNames = NULL, choiceValues = NULL),
textInput("anom_city_in", "Location", value = "UTSC, Scarborough, ON", width = NULL, placeholder = NULL),
leafletOutput("anom_map", height = 200)
......@@ -103,7 +108,6 @@ dashboardPage(
tabPanel("5: Add. Opts.",
HTML("Use this tab to change the period of the analysis or to manipulate the final table.<br/><br/>"),
selectInput("anom_period_in", "Change Period of Analysis", c("Annual", "Seasonal", "Monthly")),
conditionalPanel("$('#anoms_out').hasClass('recalculating')", HTML('<i class="fas fa-circle-notch fa-spin"></i> <b>Recalculating</b><br/><br/>')),
uiOutput("anom_selected_periods"),
checkboxGroupInput("add_proc", "Additional Processing", list("Baseline Averages", "Calculate Anomalies", "Average Ensembles/Runs"), inline = FALSE),
HTML("<i>Note, some of the above options will only apply if you select multiple runs / scenarios from one model.</i> <br/><br/>")
......@@ -113,23 +117,18 @@ dashboardPage(
box(
width = 9,
status = "success",
conditionalPanel("$('#anoms_out').hasClass('recalculating')",
HTML('<p style="font-size:20px" align="right"><i class="fas fa-circle-notch fa-spin"></i> <b>Recalculating</b></p>')),
conditionalPanel("output.anoms_out", htmlOutput("anom_table_header")),
uiOutput("precip_anom_note"),
div(style = 'overflow-x: scroll', dataTableOutput("anoms_out")),
br(),
conditionalPanel("output.anoms_out", downloadButton("download_data", "Download this table")
conditionalPanel("output.anoms_out",
downloadButton("download_data", "Download this table"),
downloadButton("download_anom_ts", "Download full timeseries"),
downloadButton("download_debug_meta", "Download file metadata (for debugging)")
)
)
),
fluidRow(
box(
width = 3,
title = "Debug Downloads",
collapsible = TRUE,
collapsed = TRUE,
downloadButton("download_input", "Download input settings"),
downloadButton("download_debug_meta", "Download file metadata"),
downloadButton("download_debug", "Download full ts")
)
)
), # End tab 2
......