Merge pull request #57 from EvolEcolGroup/revert-56-master
Revert "Improve error handling of dataset names in region_series"
dramanica authored Sep 20, 2024
2 parents 61e8499 + 4a1743a commit d62932a
Showing 3 changed files with 1 addition and 68 deletions.
R/location_slice_from_region_series.R (4 changes: 0 additions & 4 deletions)
@@ -52,10 +52,6 @@ location_slice_from_region_series <-
            directions = 8) {
     # get the region series for this dataset
     climate_brick <- region_series
-    # Check that region_series has valid names
-    if (is.null(names(region_series)) || any(make.names(names(region_series)) != names(region_series))) {
-      stop("The subdatasets in 'region_series' must have valid names.")
-    }
     bio_variables <- names(region_series)
 
     time_bp <- check_time_vars(time_bp = time_bp, time_ce = time_ce)
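For reference, the check removed by this commit treated a subdataset name as valid only if base R's make.names() returns it unchanged. A minimal standalone sketch of that test (plain base R on a made-up nms vector, not package code):

nms <- c("bio01", "1303", "\\")
# a name is rejected if the vector is NULL or if make.names() would alter it,
# e.g. make.names("1303") returns "X1303", so "1303" fails while "bio01" passes
if (is.null(nms) || any(make.names(nms) != nms)) {
  stop("The subdatasets in 'region_series' must have valid names.")
}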
R/slice_region_series.R (5 changes: 1 addition & 4 deletions)
@@ -27,9 +27,6 @@ slice_region_series <- function(x, time_bp = NULL, time_ce = NULL) {
   if (!time_bp %in% time_bp(x[[1]])) {
     stop("time_bp is not a time slice within the region series x")
   }
-  if (any(make.names(sapply(x, varnames)) != sapply(x, varnames))) {
-    stop("'region_series' subdatasets must have valid varnames")
-  }
   # get index
   time_index <- which(time_bp(x[[1]]) == time_bp)
   # now slice it and convert it to a SpatRaster
@@ -40,6 +37,6 @@
       terra::add(climate_spatraster) <- subset(x[[i]], time_index)
     }
   }
-  names(climate_spatraster) <- varnames(climate_spatraster)
+  names(climate_spatraster) <- varnames(climate_spatraster) #<- names(x)
   return(climate_spatraster)
 }
tests/testthat/test_location_slice_from_region_series.R (60 changes: 0 additions & 60 deletions)
@@ -35,66 +35,6 @@ test_that("location_slice", {
   # wrapper around this function with added code to generate the region series
 })
 
-test_that("location_slice error handling", {
-
-  # Load data
-  locations <- data.frame(
-    name = c("A", "B", "C", "D"),
-    longitude = c(0, 90, -120, -9), latitude = c(20, 45, 60, 37),
-    time_bp = c(0, -10000, -20000, -10000)
-  )
-
-  this_series <- region_series(
-    bio_variables = c("bio01", "bio12"),
-    dataset = "Example"
-  )
-
-  # Name-related errors
-  name_error <- this_series
-  names(name_error) <- c("1303", "\\")
-
-  expect_error(location_slice_from_region_series(
-    x = locations[, c("longitude", "latitude")],
-    time_bp = locations$time_bp, region_series = name_error,
-    nn_interpol = FALSE
-  ), "The subdatasets in 'region_series' must have valid names.")
-
-  bio01 <- this_series[[1]]
-  varnames(bio01) <- ""
-  name_error <- sds(bio01, this_series[[2]])
-  names(name_error) <- c("bio01", "bio12")
-
-  expect_error(location_slice_from_region_series(
-    x = locations[, c("longitude", "latitude")],
-    time_bp = locations$time_bp, region_series = name_error,
-    nn_interpol = FALSE
-  ), "'region_series' subdatasets must have valid varnames")
-
-  # Time-related errors
-  time_error <- locations
-  time_error$time_ce <- seq(-500, -2000, length.out = 4)
-
-  expect_error(location_slice_from_region_series(
-    x = time_error,
-    region_series = this_series,
-    nn_interpol = FALSE
-  ), "in x, there should only be either a 'time_bp' column, or a 'time_ce' column")
-
-  expect_error(location_slice_from_region_series(
-    x = locations[, c("longitude", "latitude")],
-    region_series = this_series,
-    nn_interpol = FALSE
-  ), "missing times: they should either be given as a column of x, or as values for time_bp or time_ce")
-
-  expect_error(location_slice_from_region_series(
-    x = locations,
-    time_bp = seq(-500, -2000, length.out = 4),
-    region_series = this_series,
-    nn_interpol = FALSE
-  ))
-
-})
-
 ################################################################################
 # clean up for the next test
 unlink(data_path, recursive = TRUE)
