diff --git a/DESCRIPTION b/DESCRIPTION
index 9d0f94786..a95255f1b 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -8,6 +8,7 @@ Authors@R: c(
            comment = c(ORCID = "0000-0002-6299-179X")),
     person("Maëlle", "Salmon", role = "aut",
            comment = c(ORCID = "0000-0002-2815-0399")),
+    person("Olivier", "Roy", role = "aut"),
     person("Posit Software, PBC", role = c("cph", "fnd"))
   )
 Description: Generate an attractive and useful website from a source
diff --git a/NEWS.md b/NEWS.md
index 2531d3f18..1ba1aa126 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,5 +1,10 @@
 # pkgdown (development version)

+* `build_reference()` does a better job of parsing `\value{}` blocks (#2371).
+* When built on GitHub, source URLs now use the name of the current upstream branch (rather than `HEAD`), which is more likely to generate correct links (#2597).
+* New `vignette("translations")` that discusses non-English sites, including how to submit new translations (#2605).
+* `build_reference()` now generates the usage that users actually type for infix and replacement methods (#2303).
+* @olivroy is now a pkgdown author in recognition of his contributions.
 * `pkgdown_sitrep()`/`check_pkgdown()` now check that you have up-to-date favicons if you have a package logo.
 * pkgdown now uses httr2 instead of httr (#2600).
 * New `template.math-rendering` allows you to control how math is rendered across your site. The default uses `mathml` which is low-dependency, but has the lowest fidelity. You can also use `mathjax`, the previous default, and `katex`, a faster alternative. (#1966).
diff --git a/R/rd-data.R b/R/rd-data.R
index 56c7550af..4b9ecadf8 100644
--- a/R/rd-data.R
+++ b/R/rd-data.R
@@ -78,25 +78,46 @@ as_data.tag_value <- function(x, ...) {
 }

 describe_contents <- function(x, ..., id_prefix = NULL) {
-  # Drop pure whitespace nodes between items
-  is_ws <- purrr::map_lgl(x, is_whitespace)
-
-  # Group contiguous \items{}/whitespace into a <dl>
-  is_item <- purrr::map_lgl(x, inherits, "tag_item") | is_ws
-  changed <- is_item[-1] != is_item[-length(is_item)]
-  group <- cumsum(c(TRUE, changed))
-
-  parse_piece <- function(x) {
-    if (length(x) == 0) {
-      NULL
-    } else if (any(purrr::map_lgl(x, inherits, "tag_item"))) {
-      paste0("<dl>\n", parse_descriptions(x, ..., id_prefix = id_prefix), "</dl>")
+  if (length(x) == 0) {
+    return("")
+  }
+
+  # Group contiguous \items{}/whitespace into a <dl>; everything else
+  # is handled as is
+  block_id <- integer(length(x))
+  block_id[[1]] <- 1
+  cur_block_is_dl <- inherits(x[[1]], "tag_item")
+
+  for (i in seq2(2, length(x))) {
+    is_item <- inherits(x[[i]], "tag_item")
+    if (cur_block_is_dl) {
+      same_type <- is_item || is_whitespace(x[[i]])
+    } else {
+      same_type <- !is_item
+    }
+
+    if (same_type) {
+      block_id[[i]] <- block_id[[i - 1]]
+    } else {
+      block_id[[i]] <- block_id[[i - 1]] + 1
+      cur_block_is_dl <- !cur_block_is_dl
+    }
+  }
+
+  parse_block <- function(x) {
+    is_dl <- any(purrr::map_lgl(x, inherits, "tag_item"))
+    if (is_dl) {
+      paste0(
+        "<dl>\n",
+        parse_descriptions(x, ..., id_prefix = id_prefix),
+        "</dl>"
+      )
     } else {
       flatten_para(x, ...)
     }
   }

-  pieces <- split(x, group)
-  out <- purrr::map(pieces, parse_piece)
+  blocks <- split(x, block_id)
+  out <- purrr::map(blocks, parse_block)

   paste(unlist(out), collapse = "\n")
 }
diff --git a/R/repo.R b/R/repo.R
index 601c5a21a..cc9e2881c 100644
--- a/R/repo.R
+++ b/R/repo.R
@@ -63,7 +63,7 @@ package_repo <- function(pkg) {
   # Use metadata if available
   repo <- config_pluck_list(pkg, "repo")
   url <- config_pluck_list(pkg, "repo.url")
-  branch <- config_pluck_string(pkg, "repo.branch", default = "HEAD")
+
   if (!is.null(url)) {
     return(repo)
@@ -77,6 +77,7 @@ package_repo <- function(pkg) {
   gh_links <- grep("^https?://git(hub|lab)\\..+/", urls, value = TRUE)
   if (length(gh_links) > 0) {
+    branch <- config_pluck_string(pkg, "repo.branch")
     return(repo_meta_gh_like(gh_links[[1]], branch))
   }

@@ -96,7 +97,8 @@ repo_meta <- function(home = NULL, source = NULL, issue = NULL, user = NULL) {
 repo_meta_gh_like <- function(link, branch = NULL) {
   gh <- parse_github_like_url(link)

-  branch <- branch %||% "HEAD"
+  branch <- branch %||% gha_current_branch()
+
   repo_meta(
     paste0(gh$host, "/", gh$owner, "/", gh$repo, "/"),
     paste0(gh$host, "/", gh$owner, "/", gh$repo, "/blob/", branch, "/"),
@@ -105,6 +107,22 @@
   )
 }

+gha_current_branch <- function() {
+  # Only set in pull requests
+  ref <- Sys.getenv("GITHUB_HEAD_REF")
+  if (ref != "") {
+    return(ref)
+  }
+
+  # Set everywhere but might not be a branch
+  ref <- Sys.getenv("GITHUB_REF_NAME")
+  if (ref != "") {
+    return(ref)
+  }
+
+  "HEAD"
+}
+
 parse_github_like_url <- function(link) {
   supports_subgroups <- grepl("^https?://gitlab\\.", link)
   rx <- paste0(
diff --git a/R/usage.R b/R/usage.R
index 991b6dad3..ace7e12db 100644
--- a/R/usage.R
+++ b/R/usage.R
@@ -1,10 +1,27 @@
 # Reference page ---------------------------------------------------------------

+# For testing
+usage2text <- function(x) {
+  rd <- rd_text(paste0("\\usage{", x, "}"), FALSE)[[1]]
+  strip_html_tags(as_data(rd))
+}
+
 #' @export
 as_data.tag_usage <- function(x, ...) {
   text <- paste(flatten_text(x, ..., escape = FALSE), collapse = "\n")
   text <- str_trim(text)

+  # Look for single line calls to non-syntactic functions and then use
+  # deparse1 to convert to standard style. We want to avoid reparsing
+  # any other lines to avoid losing whitespace, comments etc. (These
+  # are not generated by roxygen but can be added by the user.)
+  lines <- strsplit(text, "\n", fixed = TRUE)[[1]]
+  parsed <- lapply(lines, function(x) tryCatch(parse(text = x)[[1]], error = function(e) NULL))
+  needs_tweak <- vapply(parsed, function(x) is_call(x) && !is_syntactic(x[[1]]), logical(1))
+  lines[needs_tweak] <- vapply(parsed[needs_tweak], deparse1, character(1))
+
+  text <- paste(lines, collapse = "\n")
+
   highlight_text(text)
 }
@@ -16,8 +33,11 @@ as_data.tag_usage <- function(x, ...) {
 as_html.tag_S3method <- function(x, ...) method_usage(x, "S3")
 as_html.tag_S4method <- function(x, ...) method_usage(x, "S4")

 method_usage <- function(x, type) {
-  fun <- as_html(x[[1]])
-  class <- as_html(x[[2]])
+  # Despite these being called from the as_html() generic, the target isn't
+  # actually HTML, but R code, which is turned into HTML by the syntax
+  # highlighting in as_data.tag_usage()
+  fun <- as_html(x[[1]], escape = FALSE)
+  class <- as_html(x[[2]], escape = FALSE)

   if (x[[2]] == "default") {
     method <- sprintf(tr_("# Default %s method"), type)
@@ -25,6 +45,9 @@ method_usage <- function(x, type) {
     method <- sprintf(tr_("# %s method for class '%s'"), type, class)
   }

+  if (!is_syntactic(fun)) {
+    fun <- paste0("`", fun, "`")
+  }
   paste0(method, "\n", fun)
 }
diff --git a/inst/BS5/assets/pkgdown.scss b/inst/BS5/assets/pkgdown.scss
index f3431521b..aa51029ef 100644
--- a/inst/BS5/assets/pkgdown.scss
+++ b/inst/BS5/assets/pkgdown.scss
@@ -438,9 +438,16 @@ pre, pre code {
   word-wrap: normal;
 }

-[data-bs-theme="dark"] pre {
-  background-color: RGBA(var(--bs-body-color-rgb), 0.1);
-  border-color: var(--bs-border-color);
+// Default dark mode styling does not look good for code
+[data-bs-theme="dark"] {
+  pre, code {
+    background-color: RGBA(var(--bs-body-color-rgb), 0.1);
+  }
+  // don't double apply transparency
+  pre code {
+    background: transparent;
+  }
+
 }

 code {
diff --git a/man/pkgdown-package.Rd b/man/pkgdown-package.Rd
index d6df52195..c88eab63a 100644
--- a/man/pkgdown-package.Rd
+++ b/man/pkgdown-package.Rd
@@ -26,6 +26,7 @@ Authors:
 \itemize{
   \item Jay Hesselberth (\href{https://orcid.org/0000-0002-6299-179X}{ORCID})
   \item Maëlle Salmon (\href{https://orcid.org/0000-0002-2815-0399}{ORCID})
+  \item Olivier Roy
 }

 Other contributors:
diff --git a/pkgdown/_pkgdown.yml b/pkgdown/_pkgdown.yml
index faa95c5a9..d48d56265 100644
--- a/pkgdown/_pkgdown.yml
+++ b/pkgdown/_pkgdown.yml
@@ -12,8 +12,6 @@ authors:
     href: https://hadley.nz
   Posit Software, PBC:
     href: https://posit.co
-    html: >-
-      Posit

 template:
   bootstrap: 5
@@ -35,6 +33,7 @@ articles:
     navbar: ~
     contents:
     - customise
+    - translations
     - accessibility
     - linking
     - metadata
diff --git a/tests/testthat/_snaps/usage.md b/tests/testthat/_snaps/usage.md
index 1d4b83050..211e0e23f 100644
--- a/tests/testthat/_snaps/usage.md
+++ b/tests/testthat/_snaps/usage.md
@@ -1,15 +1,18 @@
-# usage escapes special characters
+# usage generates user facing code for S3/S4 infix/replacement methods

     Code
-      # Parseable
-      cat(strip_html_tags(usage2html("# <>\nx")))
+      cat(usage2text("\\S3method{$}{indexed_frame}(x, name)"))
     Output
-      # <>
-      x
+      # S3 method for class 'indexed_frame'
+      x$name
     Code
-      # Unparseable
-      cat(strip_html_tags(usage2html("# <>\n<")))
+      cat(usage2text("\\method{[[}{indexed_frame}(x, i) <- value"))
     Output
-      # <>
-      <
+      # S3 method for class 'indexed_frame'
+      x[[i]] <- value
+    Code
+      cat(usage2text("\\S4method{>=}{MyType,numeric}(e1, e2)"))
+    Output
+      # S4 method for class 'MyType,numeric'
+      e1 >= e2

diff --git a/tests/testthat/test-rd-data.R b/tests/testthat/test-rd-data.R
index 3a0baa820..8d89a125f 100644
--- a/tests/testthat/test-rd-data.R
+++ b/tests/testthat/test-rd-data.R
@@ -48,7 +48,6 @@ test_that("leading whitespace doesn't break items", {
     value2html("\n\\item{a}{b}\n\n\\item{c}{d}\n\n\\item{e}{f}"),
     c(
       "<dl>",
-      "",
       "<dt>a</dt>", "<dd><p>b</p></dd>", "", "",
       "<dt>c</dt>", "<dd><p>d</p></dd>", "", "",
       "<dt>e</dt>", "<dd><p>f</p></dd>",
@@ -57,14 +56,10 @@
   )
 })

-test_that("whitespace between text is preserved", {
+test_that("whitespace between text is not preserved", {
   expect_equal(
     value2html("a\n\nb\n\nc"),
-    c(
-      "<p>a</p>", "", "",
-      "<p>b</p>", "", "",
-      "<p>c</p>"
-    )
+    c("<p>a</p>", "<p>b</p>", "<p>c</p>")
   )
 })

@@ -82,3 +77,15 @@
   )
 })
+
+test_that("other tags don't affect breaking (#2371)", {
+  expect_equal(
+    value2html("1\\code{xxx}\n2\n3"),
+    c("<p>1<code>xxx</code>", "2", "3</p>")
+  )
+  # additionally leading whitespace
+  expect_equal(
+    value2html("1\\code{xxx}\n 2\n 3"),
+    c("<p>1<code>xxx</code>", "2", "3</p>")
+  )
+})
diff --git a/tests/testthat/test-repo.R b/tests/testthat/test-repo.R
index 0d4f44f29..2b608810d 100644
--- a/tests/testthat/test-repo.R
+++ b/tests/testthat/test-repo.R
@@ -47,6 +47,7 @@ test_that("Jira issues are automatically linked", {
 # repo_source -------------------------------------------------------------

 test_that("repo_source() truncates automatically", {
+  withr::local_envvar(GITHUB_HEAD_REF = "HEAD")
   pkg <- list(repo = repo_meta_gh_like("https://github.com/r-lib/pkgdown"))

   expect_snapshot({
@@ -81,6 +82,7 @@ test_that("repo_source() uses the branch setting in meta", {
 # package_repo ------------------------------------------------------------

 test_that("can find github from BugReports or URL", {
+  withr::local_envvar(GITHUB_HEAD_REF = "HEAD")
   expected <- repo_meta_gh_like("https://github.com/r-lib/pkgdown")

   pkg <- local_pkgdown_site(desc = list(
@@ -103,11 +105,27 @@ test_that("can find github from BugReports or URL", {
 })

 test_that("can find gitlab url", {
+  withr::local_envvar(GITHUB_HEAD_REF = "HEAD")
   url <- "https://gitlab.com/msberends/AMR"
   pkg <- local_pkgdown_site(desc = list(URL = url))
   expect_equal(package_repo(pkg), repo_meta_gh_like(url))
 })

+test_that("uses GITHUB env vars if set", {
+  withr::local_envvar(GITHUB_HEAD_REF = NA, GITHUB_REF_NAME = "abc")
+  expect_equal(
+    repo_meta_gh_like("https://github.com/r-lib/pkgdown")$url$source,
+    "https://github.com/r-lib/pkgdown/blob/abc/"
+  )
+
+  withr::local_envvar(GITHUB_HEAD_REF = "xyz")
+  expect_equal(
+    repo_meta_gh_like("https://github.com/r-lib/pkgdown")$url$source,
+    "https://github.com/r-lib/pkgdown/blob/xyz/"
+  )
+})
+
 test_that("GitLab subgroups are properly parsed", {
   issue_url <- function(...) {
     pkg <- local_pkgdown_site(desc = list(...))
@@ -124,6 +142,8 @@ test_that("GitLab subgroups are properly parsed", {
 })

 test_that("can find github enterprise url", {
+  withr::local_envvar(GITHUB_HEAD_REF = "HEAD")
+
   url <- "https://github.acme.com/roadrunner/speed"
   pkg <- local_pkgdown_site(desc = list(BugReports = url))
   expect_equal(package_repo(pkg), repo_meta_gh_like(url))
diff --git a/tests/testthat/test-usage.R b/tests/testthat/test-usage.R
index dea67d31a..282c94b50 100644
--- a/tests/testthat/test-usage.R
+++ b/tests/testthat/test-usage.R
@@ -2,17 +2,26 @@
 # Reference --------------------------------------------------------------------

 test_that("usage escapes special characters", {
+  # parseable
+  expect_equal(usage2text("# <"), "# <")
+  # unparseable
+  expect_equal(usage2text("<"), "<")
+})
+
+test_that("usage re-renders non-syntactic calls", {
+  expect_equal(usage2text("`<`(x, y)"), "x < y")
+  expect_equal(usage2text("`[`(x, y)"), "x[y]")
+})

-  usage2html <- function(x) {
-    rd <- rd_text(paste0("\\usage{", x, "}"), FALSE)[[1]]
-    as_data(rd)
-  }
+test_that("usage doesn't re-render syntactic calls", {
+  expect_equal(usage2text("foo(x , y) # hi"), "foo(x , y) # hi")
+})

+test_that("usage generates user facing code for S3/S4 infix/replacement methods", {
   expect_snapshot({
-    "Parseable"
-    cat(strip_html_tags(usage2html("# <>\nx")))
-    "Unparseable"
-    cat(strip_html_tags(usage2html("# <>\n<")))
+    cat(usage2text("\\S3method{$}{indexed_frame}(x, name)"))
+    cat(usage2text("\\method{[[}{indexed_frame}(x, i) <- value"))
+    cat(usage2text("\\S4method{>=}{MyType,numeric}(e1, e2)"))
   })
 })

@@ -54,6 +63,13 @@ test_that("default methods get custom text", {
   expect_equal(out[1], "# Default S4 method")
 })

+test_that("non-syntactic functions get backquoted, not escaped", {
+  out <- rd2html("\\S3method{<}{foo}(x, y)")
+  expect_equal(out[[2]], "`<`(x, y)")
+
+  out <- rd2html("\\S4method{bar<-}{foo}(x, y)")
+  expect_equal(out[[2]], "`bar<-`(x, y)")
+})

 # Reference index --------------------------------------------------------------
diff --git a/vignettes/.gitignore b/vignettes/.gitignore
index ea00d0081..2fa644087 100644
--- a/vignettes/.gitignore
+++ b/vignettes/.gitignore
@@ -2,3 +2,5 @@
 *.R
 *.log
 *_files
+
+/.quarto/
diff --git a/vignettes/pkgdown.Rmd b/vignettes/pkgdown.Rmd
index 71eac8e28..f14db7d25 100644
--- a/vignettes/pkgdown.Rmd
+++ b/vignettes/pkgdown.Rmd
@@ -54,27 +54,6 @@ template:

 You can learn more about controlling the appearance of your site in `vignette("customise")`.

-### Language
-
-If your documentation (`.Rd` and `.Rmd`) is written in a language other than English, declare it by setting setting `lang` to the [two letter language code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) for your language:
-
-``` yaml
-lang: fr
-```
-
-This will be used to set the language of the web page and to translate the English words that pkgdown generates on your site.
-Current available translations are:
-
-- `ca`: Catalan
-- `de`: German
-- `dk`: Danish
-- `es`: Spanish
-- `fr`: French
-- `ko`: Korean
-- `pt`: Portuguese
-- `tr`: Turkish
-- `zh_CN`: Chinese (simplified)
-
 ## Accessibility

 pkgdown's defaults work to ensure that your site is accessible to as many people as possible. But there are some accessibilty issues that only a human can solve, so make sure to also read `vignette("accessibility")` to learn about them.
diff --git a/vignettes/translations.Rmd b/vignettes/translations.Rmd
new file mode 100644
index 000000000..eccb08e04
--- /dev/null
+++ b/vignettes/translations.Rmd
@@ -0,0 +1,96 @@
+---
+title: "Translations"
+output: rmarkdown::html_vignette
+description: >
+  If your documentation is written in a language other than English, you can
+  automatically translate the text generated by pkgdown by setting the `lang`
+  field. If your language isn't currently supported, we'll show you how you can
+  provide translations.
+vignette: >
+  %\VignetteIndexEntry{Translations}
+  %\VignetteEngine{knitr::rmarkdown}
+  %\VignetteEncoding{UTF-8}
+---
+
+```{r, include = FALSE}
+knitr::opts_chunk$set(
+  collapse = TRUE,
+  comment = "#>"
+)
+```
+
+If your documentation (`.Rd` and `.Rmd`) is written in a language other than English, declare it by setting `lang` to the [language code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) for your language:
+
+``` yaml
+lang: fr
+```
+
+This will be used to set the language of the web page and to translate the English words that pkgdown generates on your site.
+Current available translations are:
+
+- `ca`: Catalan
+- `de`: German
+- `dk`: Danish
+- `es`: Spanish
+- `fr`: French
+- `ko`: Korean
+- `pt`: Portuguese
+- `tr`: Turkish
+- `zh_CN`: Chinese (simplified)
+
+As you can see, most language codes are two letters, but if a language has multiple variants, it gets a longer form which can be used to disambiguate the options. For example, Chinese can use one of two forms: simplified (used in China and Singapore) or traditional (used in Taiwan and Hong Kong). Another example would be providing specific French Canadian translations by using the code `fr_CA`.
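+
+For example, a site whose documentation is written in Simplified Chinese would use the longer form (an illustrative value, like any of the codes above):
+
+``` yaml
+lang: zh_CN
+```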
+
+## Translations
+
+Translations are contributed by community members so if your language is not currently available, you could be the one to add it!
+
+To get started, first check the [pkgdown issues](https://github.com/r-lib/pkgdown/issues) to see if anyone has filed an existing issue.
+If so, the person who filed the issue might make for a great collaborator 😀.
+
+Next, install [potools](https://michaelchirico.github.io/potools) and [usethis](https://usethis.r-lib.org):
+
+```{r}
+#| eval: false
+install.packages(c("potools", "usethis"))
+```
+
+You'll then need to familiarise yourself with the basics of [translations with potools](https://michaelchirico.github.io/potools/articles/translators.html) and [creating pull requests](https://usethis.r-lib.org/articles/pr-functions.html) with usethis.
+
+If you don't already know it, you'll need to look up the ISO 639-1 abbreviation for [your language](https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes). In the examples below, I'll pretend I'm providing translations for Zulu, which has code `zu`.
+
+Start by initialising a pull request:
+
+```{r}
+#| eval: false
+usethis::pr_init("translation-zu")
+```
+
+Then create the translation file by running `potools::po_create("zu")`, open `po/R-zu.po`, and start filling in the translations.
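+
+Each entry in the `.po` file pairs an English `msgid` with a `msgstr` that holds your translation, and `msgstr` is empty until you fill it in. For example, an untranslated entry might look something like this (the exact messages you see will vary):
+
+```
+msgid "Developers"
+msgstr ""
+```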
+
+If you have access to ChatGPT or similar, you can try prepopulating the translations with a prompt something like this:
+
+> You are an R developer who is fluent in English and Zulu. You love to do careful, high quality translations in your spare time. Complete the following po file for the R pkgdown package (which creates websites for R packages) by supplying translations for Zulu. Use inclusive gender forms where possible.
+
+Then copy and paste the complete contents of the `.po` file.
+
+You can check your work by adding `lang: zu` to your `_pkgdown.yml` and then running:
+
+```{r}
+#| eval: false
+potools::po_compile()
+devtools::load_all()
+build_site("~/path/to/your/site")
+```
+
+Once you're happy with your work, make sure to compile the changes:
+
+```{r}
+#| eval: false
+potools::po_compile()
+```
+
+Then commit your changes to Git and submit your pull request for review:
+
+```{r}
+#| eval: false
+usethis::pr_submit()
+```