Skip to content

Commit

Permalink
removed importFrom [no ci] (#325) (#338)
Browse files Browse the repository at this point in the history
  • Loading branch information
schochastics authored Sep 4, 2023
1 parent 8bf9a23 commit 34f8a02
Show file tree
Hide file tree
Showing 13 changed files with 266 additions and 322 deletions.
51 changes: 0 additions & 51 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -86,54 +86,3 @@ export(import)
export(import_list)
export(install_formats)
export(spread_attrs)
importFrom(curl,curl_fetch_memory)
importFrom(curl,parse_headers)
importFrom(data.table,as.data.table)
importFrom(data.table,fread)
importFrom(data.table,fwrite)
importFrom(data.table,is.data.table)
importFrom(foreign,read.arff)
importFrom(foreign,read.dbf)
importFrom(foreign,read.dta)
importFrom(foreign,read.epiinfo)
importFrom(foreign,read.mtp)
importFrom(foreign,read.spss)
importFrom(foreign,read.systat)
importFrom(foreign,read.xport)
importFrom(foreign,write.arff)
importFrom(foreign,write.dbf)
importFrom(haven,labelled)
importFrom(haven,read_dta)
importFrom(haven,read_por)
importFrom(haven,read_sas)
importFrom(haven,read_sav)
importFrom(haven,read_xpt)
importFrom(haven,write_dta)
importFrom(haven,write_sas)
importFrom(haven,write_sav)
importFrom(haven,write_xpt)
importFrom(openxlsx,read.xlsx)
importFrom(openxlsx,write.xlsx)
importFrom(readxl,read_xls)
importFrom(readxl,read_xlsx)
importFrom(stats,na.omit)
importFrom(stats,setNames)
importFrom(tibble,as_tibble)
importFrom(tibble,is_tibble)
importFrom(tools,file_ext)
importFrom(tools,file_path_sans_ext)
importFrom(utils,capture.output)
importFrom(utils,install.packages)
importFrom(utils,installed.packages)
importFrom(utils,packageName)
importFrom(utils,read.DIF)
importFrom(utils,read.fortran)
importFrom(utils,read.fwf)
importFrom(utils,read.table)
importFrom(utils,tar)
importFrom(utils,type.convert)
importFrom(utils,untar)
importFrom(utils,unzip)
importFrom(utils,write.csv)
importFrom(utils,write.table)
importFrom(utils,zip)
8 changes: 4 additions & 4 deletions R/compression.R
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,12 @@ compress_out <- function(cfile, filename, type = c("zip", "tar", "gzip", "bzip2"
on.exit(setwd(wd), add = TRUE)
setwd(tmp)
if (type == "zip") {
o <- zip(cfile2, files = basename(filename))
o <- utils::zip(cfile2, files = basename(filename))
} else {
if (type == "tar") {
type <- "none"
}
o <- tar(cfile2, files = basename(filename), compression = type)
o <- utils::tar(cfile2, files = basename(filename), compression = type)
}
setwd(wd)
if (o != 0) {
Expand All @@ -66,7 +66,7 @@ parse_zip <- function(file, which, ...) {
utils::unzip(file, files = file_list$Name[which], exdir = d)
file.path(d, file_list$Name[which])
} else {
if (substring(which, 1,1) != "^") {
if (substring(which, 1, 1) != "^") {
which2 <- paste0("^", which)
}
utils::unzip(file, files = file_list$Name[grep(which2, file_list$Name)[1]], exdir = d)
Expand All @@ -89,7 +89,7 @@ parse_tar <- function(file, which, ...) {
utils::untar(file, files = file_list[which], exdir = d)
file.path(d, file_list[which])
} else {
if (substring(which, 1,1) != "^") {
if (substring(which, 1, 1) != "^") {
which2 <- paste0("^", which)
}
utils::untar(file, files = file_list[grep(which2, file_list)[1]], exdir = d)
Expand Down
1 change: 0 additions & 1 deletion R/export.R
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,6 @@
#'
#' ## export(mtcars, format = "stata")
#' @seealso [characterize()], [import()], [convert()], [export_list()]
#' @importFrom haven labelled
#' @export
export <- function(x, file, format, ...) {
.check_file(file, single_only = TRUE)
Expand Down
97 changes: 50 additions & 47 deletions R/export_methods.R
Original file line number Diff line number Diff line change
@@ -1,25 +1,31 @@
#' @importFrom data.table fwrite
#' @importFrom utils write.table
export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
col.names = TRUE, append = FALSE, ...) {
if (isTRUE(fwrite) & !inherits(file, "connection")) {
if (isTRUE(append)) {
data.table::fwrite(x, file = file, sep = sep, row.names = row.names,
col.names = FALSE, append = TRUE, ...)
data.table::fwrite(x,
file = file, sep = sep, row.names = row.names,
col.names = FALSE, append = TRUE, ...
)
} else {
data.table::fwrite(x, file = file, sep = sep, row.names = row.names,
col.names = col.names, append = FALSE, ...)
data.table::fwrite(x,
file = file, sep = sep, row.names = row.names,
col.names = col.names, append = FALSE, ...
)
}
} else {
if (isTRUE(fwrite) & inherits(file, "connection")) {
message("data.table::fwrite() does not support writing to connections. Using utils::write.table() instead.")
}
if (isTRUE(append)) {
write.table(x, file = file, sep = sep, row.names = row.names,
col.names = FALSE, append = TRUE, ...)
utils::write.table(x,
file = file, sep = sep, row.names = row.names,
col.names = FALSE, append = TRUE, ...
)
} else {
write.table(x, file = file, sep = sep, row.names = row.names,
col.names = col.names, append = FALSE, ...)
utils::write.table(x,
file = file, sep = sep, row.names = row.names,
col.names = col.names, append = FALSE, ...
)
}
}
}
Expand Down Expand Up @@ -54,17 +60,16 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
export_delim(x = x, file = file, sep = "|", ...)
}

#' @importFrom utils capture.output write.csv
#' @export
.export.rio_fwf <- function(file, x, verbose = getOption("verbose", FALSE), sep = "", row.names = FALSE, quote = FALSE, col.names = FALSE, digits = getOption("digits", 7), ...) {
dat <- lapply(x, function(col) {
if (is.character(col)) {
col <- as.numeric(as.factor(col))
} else if(is.factor(col)) {
} else if (is.factor(col)) {
col <- as.integer(col)
}
if (is.integer(col)) {
return(sprintf("%i",col))
return(sprintf("%i", col))
}
if (is.numeric(col)) {
decimals <- strsplit(as.character(col), ".", fixed = TRUE)
Expand All @@ -79,34 +84,40 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
if (!is.finite(m2)) {
m2 <- digits
}
return(formatC(sprintf(fmt = paste0("%0.",m2,"f"), col), width = (m1+m2+1)))
} else if(is.logical(col)) {
return(sprintf("%i",col))
return(formatC(sprintf(fmt = paste0("%0.", m2, "f"), col), width = (m1 + m2 + 1)))
} else if (is.logical(col)) {
return(sprintf("%i", col))
}
})
dat <- do.call(cbind, dat)
n <- nchar(dat[1,]) + c(rep(nchar(sep), ncol(dat)-1), 0)
n <- nchar(dat[1, ]) + c(rep(nchar(sep), ncol(dat) - 1), 0)
col_classes <- vapply(x, class, character(1))
col_classes[col_classes == "factor"] <- "integer"
dict <- cbind.data.frame(variable = names(n),
class = col_classes,
width = unname(n),
columns = paste0(c(1, cumsum(n)+1)[-length(n)], "-", cumsum(n)),
stringsAsFactors = FALSE)
dict <- cbind.data.frame(
variable = names(n),
class = col_classes,
width = unname(n),
columns = paste0(c(1, cumsum(n) + 1)[-length(n)], "-", cumsum(n)),
stringsAsFactors = FALSE
)
if (isTRUE(verbose)) {
message("Columns:")
message(paste0(capture.output(dict), collapse = "\n"))
message(paste0(utils::capture.output(dict), collapse = "\n"))
if (sep == "") {
message(paste0('\nRead in with:\n',
'import("', file, '",\n',
' widths = c(', paste0(n, collapse = ","), '),\n',
' col.names = c("', paste0(names(n), collapse = '","'), '"),\n',
' colClasses = c("', paste0(col_classes, collapse = '","') ,'"))\n'), domain = NA)
message(paste0(
"\nRead in with:\n",
'import("', file, '",\n',
" widths = c(", paste0(n, collapse = ","), "),\n",
' col.names = c("', paste0(names(n), collapse = '","'), '"),\n',
' colClasses = c("', paste0(col_classes, collapse = '","'), '"))\n'
), domain = NA)
}
}
.write_as_utf8(paste0("#", capture.output(write.csv(dict, row.names = FALSE, quote = FALSE))), file = file, sep = "\n")
utils::write.table(dat, file = file, append = TRUE, row.names = row.names, sep = sep, quote = quote,
col.names = col.names, ...)
.write_as_utf8(paste0("#", utils::capture.output(utils::write.csv(dict, row.names = FALSE, quote = FALSE))), file = file, sep = "\n")
utils::write.table(dat,
file = file, append = TRUE, row.names = row.names, sep = sep, quote = quote,
col.names = col.names, ...
)
}

#' @export
Expand Down Expand Up @@ -161,42 +172,36 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
rmatio::write.mat(object = x, filename = file, ...)
}

#' @export
.export.rio_sav <- function(file, x, ...) {
    ## Re-attach haven label attributes (stripped on import) before writing,
    ## so value/variable labels survive the round trip to SPSS.
    x <- restore_labelled(x)
    ## Fully qualified call; the roxygen @importFrom directive was redundant
    ## and inconsistent with the package-wide use of haven:: qualification.
    haven::write_sav(data = x, path = file, ...)
}

#' @export
.export.rio_zsav <- function(file, x, compress = TRUE, ...) {
    ## Re-attach haven label attributes (stripped on import) before writing.
    x <- restore_labelled(x)
    ## .zsav is a zlib-compressed .sav; compress = TRUE is the format default.
    ## Fully qualified call; the redundant @importFrom directive is dropped.
    haven::write_sav(data = x, path = file, compress = compress, ...)
}

#' @export
.export.rio_dta <- function(file, x, ...) {
    ## Re-attach haven label attributes (stripped on import) before writing
    ## the Stata .dta file.
    x <- restore_labelled(x)
    ## Fully qualified call; the redundant @importFrom directive is dropped.
    haven::write_dta(data = x, path = file, ...)
}

#' @export
.export.rio_sas7bdat <- function(file, x, ...) {
    ## Re-attach haven label attributes (stripped on import) before writing
    ## the SAS .sas7bdat file.
    x <- restore_labelled(x)
    ## Fully qualified call; the redundant @importFrom directive is dropped.
    haven::write_sas(data = x, path = file, ...)
}

#' @export
.export.rio_xpt <- function(file, x, ...) {
    ## Re-attach haven label attributes (stripped on import) before writing
    ## the SAS transport (.xpt) file.
    x <- restore_labelled(x)
    ## Fully qualified call; the redundant @importFrom directive is dropped.
    haven::write_xpt(data = x, path = file, ...)
}

#' @importFrom foreign write.dbf
#' @export
.export.rio_dbf <- function(file, x, ...) {
foreign::write.dbf(dataframe = x, file = file, ...)
Expand All @@ -208,13 +213,11 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
.write_as_utf8(jsonlite::toJSON(x, ...), file = file)
}

#' @export
.export.rio_arff <- function(file, x, ...) {
    ## Write a Weka ARFF file via foreign. Fully qualified call; the
    ## redundant @importFrom directive is dropped for consistency with
    ## the package-wide use of pkg::fun() qualification.
    foreign::write.arff(x = x, file = file, ...)
}

#' @importFrom openxlsx write.xlsx
#' @export
.export.rio_xlsx <- function(file, x, which, ...) {
dots <- list(...)
Expand Down Expand Up @@ -262,8 +265,8 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
}
for (i in seq_along(x)) {
x[[i]][] <- lapply(x[[i]], as.character)
x[[i]][] <- lapply(x[[i]], function(v) gsub('&','&amp;',v))
names(x[[i]]) <- gsub('&','&amp;',names(x[[i]]))
x[[i]][] <- lapply(x[[i]], function(v) gsub("&", "&amp;", v))
names(x[[i]]) <- gsub("&", "&amp;", names(x[[i]]))
tab <- xml2::xml_add_child(bod, "table")
# add header row
invisible(xml2::xml_add_child(tab, xml2::read_xml(paste0(twrap(paste0(twrap(names(x[[i]]), "th"), collapse = ""), "tr"), "\n"))))
Expand All @@ -279,15 +282,15 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
.export.rio_xml <- function(file, x, ...) {
.check_pkg_availability("xml2")
root <- ""
xml <- xml2::read_xml(paste0("<",as.character(substitute(x)),">\n</",as.character(substitute(x)),">\n"))
xml <- xml2::read_xml(paste0("<", as.character(substitute(x)), ">\n</", as.character(substitute(x)), ">\n"))
att <- attributes(x)[!names(attributes(x)) %in% c("names", "row.names", "class")]
for (a in seq_along(att)) {
xml2::xml_attr(xml, names(att)[a]) <- att[[a]]
}
# remove illegal characters
row.names(x) <- gsub('&', '&amp;', row.names(x))
colnames(x) <- gsub('[ &]', '.', colnames(x))
x[] <- lapply(x, function(v) gsub('&', '&amp;', v))
row.names(x) <- gsub("&", "&amp;", row.names(x))
colnames(x) <- gsub("[ &]", ".", colnames(x))
x[] <- lapply(x, function(v) gsub("&", "&amp;", v))
# add data
for (i in seq_len(nrow(x))) {
thisrow <- xml2::xml_add_child(xml, "Observation")
Expand Down Expand Up @@ -315,11 +318,11 @@ export_delim <- function(file, x, fwrite = TRUE, sep = "\t", row.names = FALSE,
#' @export
.export.rio_pzfx <- function(file, x, ..., row_names = FALSE) {
.check_pkg_availability("pzfx")
pzfx::write_pzfx(x=x, path=file, ..., row_names=row_names)
pzfx::write_pzfx(x = x, path = file, ..., row_names = row_names)
}

#' @export
.export.rio_parquet <- function(file, x, ...) {
.check_pkg_availability("arrow")
arrow::write_parquet(x=x, sink = file, ...)
arrow::write_parquet(x = x, sink = file, ...)
}
Loading

0 comments on commit 34f8a02

Please sign in to comment.