Merge pull request #89 from thebioengineer/release_1.0.2
Release 1.0.2
thebioengineer authored Dec 3, 2023
2 parents 51dfba6 + 1eeae12 commit 2e82bde
Showing 33 changed files with 275 additions and 146 deletions.
9 changes: 7 additions & 2 deletions .github/workflows/R-CMD-check.yaml
@@ -20,9 +20,14 @@ jobs:
matrix:
config:
- {os: windows-latest, r: 'release'}
- {os: windows-latest, r: '3.6'}
- {os: macOS-latest, r: 'release'}
- {os: macOS-latest, r: 'devel'}
- {os: ubuntu-16.04, r: 'release', rspm: "https://packagemanager.rstudio.com/cran/__linux__/xenial/latest"}
- {os: ubuntu-18.04, r: 'devel', http-user-agent: 'release'}
- {os: ubuntu-18.04, r: 'release'}
- {os: ubuntu-18.04, r: 'oldrel-1'}
- {os: ubuntu-18.04, r: 'oldrel-2'}
- {os: ubuntu-18.04, r: 'oldrel-3'}
- {os: ubuntu-18.04, r: 'oldrel-4'}

env:
R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
4 changes: 2 additions & 2 deletions CRAN-RELEASE
@@ -1,2 +1,2 @@
This package was submitted to CRAN on 2020-07-09.
Once it is accepted, delete this file and tag the release (commit 042d9a2d32).
This package was submitted to CRAN on 2022-01-31.
Once it is accepted, delete this file and tag the release (commit 5f278be).
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
Package: tidytuesdayR
Type: Package
Title: Access the Weekly 'TidyTuesday' Project Dataset
Version: 1.0.1.9000
Version: 1.0.2
Authors@R:
c(
person(
6 changes: 5 additions & 1 deletion NEWS.md
@@ -1,4 +1,8 @@
# tidytuesdayR (development version)
# tidytuesdayR (1.0.2 version)

* [bug fix] During testing it was identified that 502 errors from GitHub servers would cause the code to error out. Now it retries a few times before raising an error.
* [bug fix] No internet connection bug on RStudio resolved due to malformed url checks (https).
* [bug fix] Partial argument matching correction in `tt_download_file.character()`, `tt_parse_blob()`, and in tests. (thanks @mgirlich)

# tidytuesdayR 1.0.1

28 changes: 22 additions & 6 deletions R/github_api.R
@@ -299,7 +299,7 @@ github_pat <- function (quiet = TRUE) {
#'
#' @importFrom httr GET add_headers
#' @importFrom jsonlite base64_enc
github_GET <- function(url, auth = github_pat(), ...){
github_GET <- function(url, auth = github_pat(), ..., times_run = 1){

if(!is.null(auth)){
headers <- add_headers(
@@ -328,15 +328,28 @@ github_GET <- function(url, auth = github_pat(), ...){

if(inherits(get_res,"try-error")){
check_connectivity(rerun=TRUE)
if(!check_connectivity()){
if(!get_connectivity()){
return(no_internet_error())
}else{
## Unexpected issue
stop(attr(get_res,"condition"))
}
}else{
rate_limit_update(header_to_rate_info(get_res))
return(get_res)
if(get_res$status_code == 502){
## rerun when 502 status code - server error, not tidytuesdayR code error
if(times_run < 3){
if(rate_limit_check() > 0){
github_GET(url, auth = github_pat(), ..., times_run = times_run + 1)
}else{
rate_limit_error()
}
}else{
tt_gh_error.response(get_res)
}
}else{
rate_limit_update(header_to_rate_info(get_res))
return(get_res)
}
}
}else{
rate_limit_error()
@@ -397,8 +410,11 @@ rate_limit_update <- function(rate_info = NULL, auth = github_pat()){
} else {
rate_lim <- GET("https://api.github.com/rate_limit")
}
rate_info <- GET_json(rate_lim)$rate
rate_info$remaining = rate_info$remaining - 1 # we have one less than we think

if(rate_lim$status_code == 200){
rate_info <- GET_json(rate_lim)$rate
rate_info$remaining = rate_info$remaining - 1 # we have one less than we think
}
}
}

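The core of this change is the retry-on-502 branch added to `github_GET()` above. As a rough, standalone sketch of the same idea (the function name `retry_on_502` and the three-attempt limit here are illustrative assumptions, not the package's API):

```r
library(httr)

# Minimal sketch of a retry-on-502 pattern, assuming a plain httr GET.
# 502 is a transient GitHub server error, so retrying is usually enough.
retry_on_502 <- function(url, max_tries = 3) {
  res <- NULL
  for (attempt in seq_len(max_tries)) {
    res <- GET(url)
    if (status_code(res) != 502 || attempt == max_tries) {
      break
    }
    Sys.sleep(1)  # brief pause before retrying the transient server error
  }
  res
}
```

In the package itself the retry is bounded by `times_run < 3` and re-checks the rate limit before each attempt, so it never loops indefinitely.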
20 changes: 8 additions & 12 deletions R/tt_check_date.R
@@ -51,23 +51,18 @@ tt_check_date.year <- function(x, week) {

tt_folders <- tt_weeks(x)

if (week > length(tt_folders$week_desc)) {
if (!week %in% tt_folders$week_desc & week > 1) {
stop(
paste0(
"Only ",
length(tt_folders$week_desc),
" TidyTuesday Weeks exist in ",
x,
". Please enter a value for week between 1 and ",
length(tt_folders$week_desc)
"'",week,"' is not a valid TidyTuesday week entry for ",x,".\n",
"Please enter a valid value for week:\n\t",
contiguous_weeks(tt_folders$week_desc)
)
)
} else if (week < 1) {
stop(paste0(
"Week entry must be a valid positive integer between 1 and ",
length(tt_folders$week_desc),
"."
))
stop(
"Week entry must be a valid positive integer value."
)
}

tt_date <- tt_folders$folders[tt_folders$week_desc == week]
@@ -156,3 +151,4 @@ tt_closest_date <- function(inputdate, availabledates) {
availabledates[
which.min(abs(difftime(inputdate, availabledates, units = "days")))]
}

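With the reworked validation above, an out-of-range week now reports the valid weeks directly (built with `contiguous_weeks()`, added in R/utils.R below). A hedged illustration of what a caller might see; the call and the exact week range shown are assumptions, not captured output:

```r
# Hypothetical request for a week that does not exist in that year.
# The message is assembled by the new stop() above; wording assumed from the code.
tt_load_gh("2019", week = 100)
#> Error: '100' is not a valid TidyTuesday week entry for 2019.
#> Please enter a valid value for week:
#>     1-52
```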
4 changes: 2 additions & 2 deletions R/tt_download_file.R
@@ -67,7 +67,7 @@ tt_download_file.character <-

file_info <- attr(tt, ".files")

if (x %in% file_info$data_file) {
if (x %in% file_info$data_files) {

tt_date <- attr(tt, ".date")
tt_year <- year(tt_date)
@@ -80,7 +80,7 @@
auth = auth
)

tt_parse_blob(blob, file_info = file_info[file_info$data_file == x,], ...)
tt_parse_blob(blob, file_info = file_info[file_info$data_files == x,], ...)

} else {
stop(paste0(
2 changes: 1 addition & 1 deletion R/tt_load_gh.R
@@ -23,7 +23,7 @@
#' @examples
#'
#' # check to make sure there are requests still available
#' if(rate_limit_check(quiet = TRUE) > 10){
#' if(rate_limit_check(quiet = TRUE) > 10 & interactive()){
#' tt_gh <- tt_load_gh("2019-01-15")
#'
#' ## readme attempts to open the readme for the weekly dataset
6 changes: 3 additions & 3 deletions R/tt_parse.R
@@ -15,9 +15,9 @@ tt_parse_blob <- function(blob, ..., file_info) {
"xls" = tt_parse_binary(blob, readxl::read_xls, ...,
filename = file_info$data_files),
"xlsx" = tt_parse_binary(blob, readxl::read_xlsx, ...,
filename = file_info$data_file),
filename = file_info$data_files),
"rds" = tt_parse_binary(blob, readRDS,
filename = file_info$data_file),
filename = file_info$data_files),
tt_parse_text(
blob = blob,
func = readr::read_delim,
@@ -34,7 +34,7 @@
}

# rda option just in case
# "rda" = tt_parse_binary(blob, read_rda, filename = file_info$data_file),
# "rda" = tt_parse_binary(blob, read_rda, filename = file_info$data_files),


#' @title utility to assist with parsing the raw binary data
55 changes: 55 additions & 0 deletions R/utils.R
@@ -57,9 +57,11 @@ print.tt <- function(x,...){
#' TidyTuesday dataset in the Viewer.
#' @examples
#' \donttest{
#' if(rate_limit_check(quiet = TRUE) > 10 & interactive()){
#' tt_output <- tt_load_gh("2019-01-15")
#' readme(tt_output)
#' }
#' }
readme <- function(tt) {
if ("tt_data" %in% class(tt)) {
tt <- attr(tt, ".tt")
@@ -85,3 +87,56 @@ html_viewer <- function(url, is_interactive = interactive()){
browseURL(url = url)
}
}

#' @noRd
contiguous_weeks <- function(week_vctr){
if(length(week_vctr) == 1){
text_out <- as.character(week_vctr)
}else{
is_not_contig <- which(diff(week_vctr) != 1)
if(length(is_not_contig) == 0){
text_out <- paste0(week_vctr[1], "-",week_vctr[length(week_vctr)])
}else{
if(is_not_contig[[1]] == 1){
text_out <- as.character(week_vctr[1])
}else{
text_out <- paste0(week_vctr[1], "-",week_vctr[is_not_contig[[1]]])
}
contig_split <- 1
while(contig_split < length(is_not_contig)){

if( diff(c(is_not_contig[contig_split], is_not_contig[contig_split+1])) == 1){
text_out <- paste0(
text_out, ", ", week_vctr[is_not_contig[contig_split]+1]
)
}else{
text_out <- paste0(
text_out,", ", paste0(week_vctr[is_not_contig[contig_split]+1], "-",week_vctr[is_not_contig[contig_split+1]])
)
}
contig_split %+=% 1
}

if(length(week_vctr) == (is_not_contig[contig_split]+1)){
text_out <- paste0(
text_out, ", ", week_vctr[length(week_vctr)]
)
}else{
text_out <- paste0(
text_out,", ", paste0(week_vctr[is_not_contig[contig_split]+1], "-",week_vctr[length(week_vctr)])
)
}
}}
return(text_out)
}

`%+=%` <- function(x,y, env = parent.frame()){
x_name <- as.character(substitute(x))
x_new <- x + y
assign(
x = x_name,
value = x_new,
envir = env
)
}

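The two helpers added above are small enough to illustrate inline. The expected outputs below come from tracing the definitions, not from package documentation, so treat them as assumptions:

```r
# Collapse the available week numbers into readable ranges for the error message.
contiguous_weeks(c(1, 2, 3, 5, 6, 8))
#> "1-3, 5-6, 8"   (expected, from reading the code above)

# `%+=%` increments a variable in place in the caller's environment.
i <- 1
i %+=% 1
i
#> [1] 2
```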
10 changes: 6 additions & 4 deletions cran-comments.md
@@ -1,13 +1,15 @@
## Release summary

* Update to address failing solaris build on CRAN, with some additional features
* Update to address failing examples on CRAN that led to the package's removal
* small bug fixes identified by the community (partial argument matching, malformed url)

* There are no reverse dependencies to check at this time

## Test environments
* local R installation, R 4.0.2
* ubuntu 16.04 (on github actions), , R 4.0.2
* mac OS 10.15.4 (on github actions) R-devel, R 4.0.2,
* local R installation, R 4.1.0
* ubuntu 18.04 (on github actions), R-devel, R-release
* mac OS (on github actions) R-devel, R-release
* Windows-latest (on github actions) R-devel, R-release
* win-builder (devel)

## R CMD check results
8 changes: 4 additions & 4 deletions docs/404.html

8 changes: 4 additions & 4 deletions docs/CODE_OF_CONDUCT.html

8 changes: 4 additions & 4 deletions docs/LICENSE-text.html
