Commit 062210c8 authored by Conor Anderson's avatar Conor Anderson

Plumber changes to enable cropping

parent 9db63d19
......@@ -44,7 +44,7 @@ get_choices <- function(cmip, var, lim) {
} else {
names(choices) <- c("Variable", "Period", "Model", "Scenario", "Ensemble", "Grid", "Range", "Start", "End")
}
choices <- choices %>%
mutate(Model = gsub("([0-9])-([0-9])", paste0("\\1", ".", "\\2"), gsub("([0-9])-([0-9])", paste0("\\1", ".", "\\2"), Model)),
Scenario = sub("rcp([0-9])([0-9])", paste0("RCP", "\\1", ".", "\\2"), Scenario),
......@@ -55,11 +55,11 @@ get_choices <- function(cmip, var, lim) {
#* Basic-auth filter applied to every route.
#*
#* Decodes the `Authorization: Basic <base64>` header and checks the
#* resulting "user:password" string against the file-level `users` and
#* `passwords` vectors. The /download route is exempted so that a browser
#* can fetch a prepared file directly (it cannot attach the auth header
#* to a plain link); /download must therefore validate its own input.
#* @filter checkAuth
function(req, res){
  auth <- sub("Basic ", "", req$HTTP_AUTHORIZATION)
  if (req$PATH_INFO == "/download" ||
      (rawToChar(jsonlite::base64_dec(auth)) %in% paste(users, passwords, sep = ":"))) {
    # Authorized (or exempt): hand the request on to the matched route.
    plumber::forward()
  } else {
    res$status <- 401 # Unauthorized
    return(list(error="Authentication required"))
  }
}
......@@ -124,13 +124,13 @@ function(cmip = "CMIP5", var, lim) {
#* @post /timeseries
function(key) {
time_tmp <- NULL
if (st_point$exists(key)) {
message("Hit the cache for ", key)
time_tmp <- st_point$get(key)
## Invalidate old cache entries.
if (is.null(attr(time_tmp, "cache_ver")) || attr(time_tmp, "cache_ver") != cache_ver) {
message("Cached version is invalid. Deleting.")
......@@ -138,11 +138,11 @@ function(key) {
time_tmp <- NULL
}
}
if (is.null(time_tmp)) {
message("Missed the cache for ", key)
filename <- str_extract(key, ".*\\.nc4?")
# Get the time
components <- get.split.filename.cmip(filename)
cmip <- ifelse(has_name(components, "grid"), "CMIP6", "CMIP5")
......@@ -161,17 +161,17 @@ function(key) {
} else {
nc_time <- as.yearmon(format(nc_time, format = "%Y-%m-%d hh:mm:ss"))
}
cells <- unlist(strsplit(sub(paste0(filename, "_"), "", key), "_"))
lon_cell <- cells[2]
lat_cell <- cells[1]
# Now load only that grid data
nc_var <- nc.get.var.subset.by.axes(nc_nc, components[['var']], axis.indices = list(X = lon_cell, Y = lat_cell))
# Close the nc connection
nc_close(nc_nc); rm(nc_nc)
# Note, does this still apply?
if (dim(nc_var)[1] > 1 || dim(nc_var)[2] > 1) {
warning("We got two latitud or longitude cells. We'll average across them.")
......@@ -179,7 +179,7 @@ function(key) {
} else {
nc_var <- nc_var[1, 1, ]
}
time_tmp <- tibble(Var = components[['var']],
Model = gsub("([0-9])-([0-9])", paste0("\\1", ".", "\\2"), gsub("([0-9])-([0-9])", paste0("\\1", ".", "\\2"), components[['model']])),
Scenario = sub("rcp([0-9])([0-9])", paste0("RCP", "\\1", ".", "\\2"), components[['emissions']]),
......@@ -188,7 +188,7 @@ function(key) {
Year = format(as.yearmon(Time), format = "%Y"),
Month = format(as.yearmon(Time), format = "%m"),
Value = nc_var)
rm(nc_var, nc_time); gc()
attr(time_tmp, "cache_ver") <- cache_ver
st_point$set(key, time_tmp)
......@@ -204,57 +204,57 @@ function(key) {
#* @post /mapseries
function(key) {
if (st_avg$exists(key)) {
if (debug_flag) message("Hit the cache.")
map_data <- st_avg$get(key)
} else {
if (debug_flag) message("Missed the cache.")
filename <- str_extract(key, ".*\\.nc4?")
unlisted <- unlist(str_split(key, "_"))
components <- get.split.filename.cmip(filename)
cmip <- ifelse(has_name(components, "grid"), "CMIP6", "CMIP5")
nc_nc <- nc_open(file.path(file_dir, cmip, components['var'], "verified", filename), readunlim = FALSE)
nc_time <- try(nc.get.time.series(nc_nc, v = components['var'], time.dim.name = "time"))
if (inherits(nc_time, "try-error")) {
warning(paste(fs[f], "failed"))
break
}
nc_time <- as.yearmon(format(nc_time, format = "%Y-%m-%d hh:mm:ss"))
# Get the time that we are interested in
index_start <- min(which(format(nc_time, format = "%Y") == unlisted[length(unlisted)-1]))
index_end <- max(which(format(nc_time, format = "%Y") == unlisted[length(unlisted)]))
nc_lat <- ncvar_get(nc_nc, "lat")
nc_lon <- ncvar_get(nc_nc, "lon")
lon_bnds <- try(ncvar_get(nc_nc, "lon_bnds"), silent = TRUE)
if (inherits(lon_bnds, "try-error")) {
lon_bnds <- ncvar_get(nc_nc, "lon_bounds")
}
lat_bnds <- try(ncvar_get(nc_nc, "lat_bnds"), silent = TRUE)
if (inherits(lat_bnds, "try-error")) {
lat_bnds <- ncvar_get(nc_nc, "lat_bounds")
}
nc_var <- nc.get.var.subset.by.axes(nc_nc, components['var'], axis.indices = list(T = index_start:index_end))
# Close the nc connection
nc_close(nc_nc)
rm(nc_nc)
map_data_mat <- apply(nc_var, c(1,2), mean)
map_data <- raster(t(map_data_mat),
xmn = mean(lon_bnds[,1]), xmx = mean(lon_bnds[,ncol(lon_bnds)]),
ymn = mean(lat_bnds[,1]), ymx = mean(lat_bnds[,ncol(lat_bnds)]),
crs="+proj=longlat +datum=WGS84")
# This step fails if the model outputs the corner instead of the centre point.
map_data <- try(rotate(map_data))
if(inherits(map_data, "try-error")) {
......@@ -263,12 +263,12 @@ function(key) {
map_data <- raster(t(map_data_mat), xmn = (nc_lon[2] - nc_lon[1])/2, xmx = max(nc_lon) + (nc_lon[2] - nc_lon[1])/2, ymn = min(nc_lat), ymx = max(nc_lat), crs="+proj=longlat +datum=WGS84")
map_data <- rotate(map_data)
}
map_data <- flip(map_data, 'y')
st_avg$set(key, map_data)
if (debug_flag) message("Cached data.")
}
serialize(map_data, NULL)
}
......@@ -279,7 +279,7 @@ function(key) {
#* Fetch a value from the metadata cache.
#*
#* Looks `key` up in the `st_meta` store (a storr-style cache defined at
#* file level) and returns the serialized value, or the serialized string
#* "NULL" when the key is absent, so the caller can always unserialize
#* the response body.
#* @post /cacheget
function(key){
  serialize(
    (if (st_meta$exists(key)) st_meta$get(key) else "NULL"),
    NULL)
}
......@@ -292,3 +292,80 @@ function(req, key){
st_meta$set(key = key, value = as_tibble(fromJSON(req$postBody)))
}
#* Crop a netCDF file to a polygon or extent and return the output filename.
#*
#* The request body carries a base64-encoded, serialized sp object: either a
#* SpatialPolygonsDataFrame (crop + mask to the polygon) or a raster Extent
#* (crop only). The cropped raster is written as netCDF into
#* `file.path(file_dir, "dist")` and the path is returned to the client,
#* which is expected to retrieve it via /download.
#* @param key The model key to process
#* @post /crop
function(key, req, res){
  message("The model is ", key)
  shp <- unserialize(base64decode(req$postBody))
  filename <- str_extract(key, ".*\\.nc4?")
  components <- get.split.filename.cmip(filename)
  # CMIP6 filenames carry a "grid" component; CMIP5 filenames do not.
  cmip <- ifelse(has_name(components, "grid"), "CMIP6", "CMIP5")
  in_file <- file.path(file_dir, cmip, components[['var']], "verified", filename)
  # Capture the source variable's metadata so the cropped output keeps the
  # same variable name, units, long name, and dimension names.
  nc_nc <- nc_open(in_file, readunlim = FALSE)
  var_details <- nc_nc$var[[components[['var']]]]
  nc_close(nc_nc)
  chbr <- brick(in_file)
  # Models on a 0..360 longitude grid are rotated to -180..180 so that they
  # line up with the (longlat WGS84) crop geometry.
  if (attr(extent(chbr), "xmax") > 180) chbr <- rotate(chbr)
  if (is(shp, "SpatialPolygonsDataFrame")) {
    message("Working with a shapefile")
    # Reproject the polygon onto the raster's CRS if they differ.
    if (!identical(crs(shp), crs(chbr))) shp <- spTransform(shp, crs(chbr))
    chbr.crop <- crop(chbr, shp)
    rm(chbr); invisible(gc())  # bricks can be large; free eagerly
    chbr.out <- mask(chbr.crop, shp)
    rm(chbr.crop); invisible(gc())
  } else if (is(shp, "Extent")) {
    message("Working with an Extent")
    chbr.out <- crop(chbr, shp)
    rm(chbr); invisible(gc())
  } else {
    stop("Unsupported crop geometry: expected a SpatialPolygonsDataFrame or an Extent")
  }
  ddir <- file.path(file_dir, "dist")
  if (!dir.exists(ddir)) dir.create(ddir, recursive = TRUE)
  # Timestamp the output name so repeated crops of the same file don't collide.
  fileout <- file.path(ddir, paste("cropped",
                                   format(Sys.time(), format = "%Y%m%d_%H%M%S"),
                                   filename, sep = "_"))
  writeRaster(chbr.out, fileout, overwrite = TRUE, format = "CDF",
              varname = var_details$name,
              varunit = var_details$units,
              longname = var_details$longname,
              xname = var_details$dim[[1]]$name,
              yname = var_details$dim[[2]]$name,
              zname = var_details$dim[[3]]$name)
  message("File written to ", fileout)
  res$status <- 200
  res$body <- jsonlite::toJSON(auto_unbox = TRUE, list(
    status = 200,
    message = "Data was processed successfully. The filename is included in this body.",
    filename = fileout
  ))
  return(res)
}
# If the URL gets called the browser will automatically download the file.
#* Stream a previously cropped file back to the client.
#*
#* NOTE(review): this route is exempted from authentication by the checkAuth
#* filter, and `filepath` comes straight from the client. Without a
#* containment check that is an arbitrary-file-disclosure (directory
#* traversal) hole, so serve only files inside the "dist" directory that
#* /crop writes to; anything else gets the same 404 as a missing file.
#* @serializer contentType list(type="application/octet-stream")
#* @param filepath The file to return for download
#* @get /download
function(filepath, res) {
  filepath <- URLdecode(filepath)
  # Resolve both paths before comparing so "../" segments cannot escape ddir.
  ddir <- normalizePath(file.path(file_dir, "dist"), mustWork = FALSE)
  real <- normalizePath(filepath, mustWork = FALSE)
  inside_dist <- startsWith(real, paste0(ddir, .Platform$file.sep))
  if (!inside_dist || !file.exists(real)) {
    res$status <- 404
    res$body <- jsonlite::toJSON(auto_unbox = TRUE, list(
      status = 404,
      message = "File does not exist")
    )
    return(res)
  }
  readBin(real, "raw", n = file.info(real)$size)
}
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment