Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
# Conflicts:
#	.github/workflows/R_CMD_check_Hades.yaml
#	.github/workflows/R_CMD_check_main_weekly.yaml
  • Loading branch information
schuemie committed Nov 8, 2023
2 parents 5867bc1 + 650f49f commit 239d686
Show file tree
Hide file tree
Showing 116 changed files with 913 additions and 1,709 deletions.
17 changes: 11 additions & 6 deletions .github/workflows/R_CMD_check_Hades.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -28,22 +28,22 @@ jobs:
GITHUB_PAT: ${{ secrets.GH_TOKEN }}
R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
RSPM: ${{ matrix.config.rspm }}
CDM5_ORACLE_CDM_SCHEMA: ${{ secrets.CDM5_ORACLE_CDM_SCHEMA }}
CDM5_ORACLE_CDM54_SCHEMA: ${{ secrets.CDM5_ORACLE_CDM54_SCHEMA }}
CDM5_ORACLE_OHDSI_SCHEMA: ${{ secrets.CDM5_ORACLE_OHDSI_SCHEMA }}
CDM5_ORACLE_PASSWORD: ${{ secrets.CDM5_ORACLE_PASSWORD }}
CDM5_ORACLE_SERVER: ${{ secrets.CDM5_ORACLE_SERVER }}
CDM5_ORACLE_USER: ${{ secrets.CDM5_ORACLE_USER }}
CDM5_POSTGRESQL_CDM_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_CDM_SCHEMA }}
CDM5_POSTGRESQL_CDM54_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_CDM54_SCHEMA }}
CDM5_POSTGRESQL_OHDSI_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_OHDSI_SCHEMA }}
CDM5_POSTGRESQL_PASSWORD: ${{ secrets.CDM5_POSTGRESQL_PASSWORD }}
CDM5_POSTGRESQL_SERVER: ${{ secrets.CDM5_POSTGRESQL_SERVER }}
CDM5_POSTGRESQL_USER: ${{ secrets.CDM5_POSTGRESQL_USER }}
CDM5_SQL_SERVER_CDM_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_CDM_SCHEMA }}
CDM5_SQL_SERVER_CDM54_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_CDM54_SCHEMA }}
CDM5_SQL_SERVER_OHDSI_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_OHDSI_SCHEMA }}
CDM5_SQL_SERVER_PASSWORD: ${{ secrets.CDM5_SQL_SERVER_PASSWORD }}
CDM5_SQL_SERVER_SERVER: ${{ secrets.CDM5_SQL_SERVER_SERVER }}
CDM5_SQL_SERVER_USER: ${{ secrets.CDM5_SQL_SERVER_USER }}
CDM5_REDSHIFT_CDM_SCHEMA: ${{ secrets.CDM5_REDSHIFT_CDM_SCHEMA }}
CDM5_REDSHIFT_CDM54_SCHEMA: ${{ secrets.CDM5_REDSHIFT_CDM54_SCHEMA }}
CDM5_REDSHIFT_OHDSI_SCHEMA: ${{ secrets.CDM5_REDSHIFT_OHDSI_SCHEMA }}
CDM5_REDSHIFT_PASSWORD: ${{ secrets.CDM5_REDSHIFT_PASSWORD }}
CDM5_REDSHIFT_SERVER: ${{ secrets.CDM5_REDSHIFT_SERVER }}
Expand All @@ -52,10 +52,16 @@ jobs:
CDM_SNOWFLAKE_OHDSI_SCHEMA: ${{ secrets.CDM_SNOWFLAKE_OHDSI_SCHEMA }}
CDM_SNOWFLAKE_PASSWORD: ${{ secrets.CDM_SNOWFLAKE_PASSWORD }}
CDM_SNOWFLAKE_CONNECTION_STRING: ${{ secrets.CDM_SNOWFLAKE_CONNECTION_STRING }}
CDM_SNOWFLAKE_USER: ${{ secrets.CDM_SNOWFLAKE_USER }}
CDM_SNOWFLAKE_USER: ${{ secrets.CDM_SNOWFLAKE_USER }}
CDM5_SPARK_USER: ${{ secrets.CDM5_SPARK_USER }}
CDM5_SPARK_PASSWORD: ${{ secrets.CDM5_SPARK_PASSWORD }}
CDM5_SPARK_CONNECTION_STRING: ${{ secrets.CDM5_SPARK_CONNECTION_STRING }}
CDM5_SPARK_CDM_SCHEMA: ${{ secrets.CDM5_SPARK_CDM_SCHEMA }}
CDM5_SPARK_OHDSI_SCHEMA: ${{ secrets.CDM5_SPARK_OHDSI_SCHEMA }}
CDM_BIG_QUERY_CONNECTION_STRING: ${{ secrets.CDM_BIG_QUERY_CONNECTION_STRING }}
CDM_BIG_QUERY_KEY_FILE: ${{ secrets.CDM_BIG_QUERY_KEY_FILE }}
CDM_BIG_QUERY_CDM_SCHEMA: ${{ secrets.CDM_BIG_QUERY_CDM_SCHEMA }}
CDM_BIG_QUERY_OHDSI_SCHEMA: ${{ secrets.CDM_BIG_QUERY_OHDSI_SCHEMA }}

steps:
- uses: actions/checkout@v3
Expand Down Expand Up @@ -174,4 +180,3 @@ jobs:
if: ${{ env.new_version != '' }}
run: |
curl --data "build=true" -X POST https://registry.hub.docker.com/u/ohdsi/broadsea-methodslibrary/trigger/f0b51cec-4027-4781-9383-4b38b42dd4f5/
14 changes: 8 additions & 6 deletions .github/workflows/R_CMD_check_main_weekly.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -20,22 +20,22 @@ jobs:
GITHUB_PAT: ${{ secrets.GH_TOKEN }}
R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
RSPM: ${{ matrix.config.rspm }}
CDM5_ORACLE_CDM_SCHEMA: ${{ secrets.CDM5_ORACLE_CDM_SCHEMA }}
CDM5_ORACLE_CDM54_SCHEMA: ${{ secrets.CDM5_ORACLE_CDM54_SCHEMA }}
CDM5_ORACLE_OHDSI_SCHEMA: ${{ secrets.CDM5_ORACLE_OHDSI_SCHEMA }}
CDM5_ORACLE_PASSWORD: ${{ secrets.CDM5_ORACLE_PASSWORD }}
CDM5_ORACLE_SERVER: ${{ secrets.CDM5_ORACLE_SERVER }}
CDM5_ORACLE_USER: ${{ secrets.CDM5_ORACLE_USER }}
CDM5_POSTGRESQL_CDM_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_CDM_SCHEMA }}
CDM5_POSTGRESQL_CDM54_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_CDM54_SCHEMA }}
CDM5_POSTGRESQL_OHDSI_SCHEMA: ${{ secrets.CDM5_POSTGRESQL_OHDSI_SCHEMA }}
CDM5_POSTGRESQL_PASSWORD: ${{ secrets.CDM5_POSTGRESQL_PASSWORD }}
CDM5_POSTGRESQL_SERVER: ${{ secrets.CDM5_POSTGRESQL_SERVER }}
CDM5_POSTGRESQL_USER: ${{ secrets.CDM5_POSTGRESQL_USER }}
CDM5_SQL_SERVER_CDM_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_CDM_SCHEMA }}
CDM5_SQL_SERVER_CDM54_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_CDM54_SCHEMA }}
CDM5_SQL_SERVER_OHDSI_SCHEMA: ${{ secrets.CDM5_SQL_SERVER_OHDSI_SCHEMA }}
CDM5_SQL_SERVER_PASSWORD: ${{ secrets.CDM5_SQL_SERVER_PASSWORD }}
CDM5_SQL_SERVER_SERVER: ${{ secrets.CDM5_SQL_SERVER_SERVER }}
CDM5_SQL_SERVER_USER: ${{ secrets.CDM5_SQL_SERVER_USER }}
CDM5_REDSHIFT_CDM_SCHEMA: ${{ secrets.CDM5_REDSHIFT_CDM_SCHEMA }}
CDM5_REDSHIFT_CDM54_SCHEMA: ${{ secrets.CDM5_REDSHIFT_CDM54_SCHEMA }}
CDM5_REDSHIFT_OHDSI_SCHEMA: ${{ secrets.CDM5_REDSHIFT_OHDSI_SCHEMA }}
CDM5_REDSHIFT_PASSWORD: ${{ secrets.CDM5_REDSHIFT_PASSWORD }}
CDM5_REDSHIFT_SERVER: ${{ secrets.CDM5_REDSHIFT_SERVER }}
Expand All @@ -44,10 +44,12 @@ jobs:
CDM_SNOWFLAKE_OHDSI_SCHEMA: ${{ secrets.CDM_SNOWFLAKE_OHDSI_SCHEMA }}
CDM_SNOWFLAKE_PASSWORD: ${{ secrets.CDM_SNOWFLAKE_PASSWORD }}
CDM_SNOWFLAKE_CONNECTION_STRING: ${{ secrets.CDM_SNOWFLAKE_CONNECTION_STRING }}
CDM_SNOWFLAKE_USER: ${{ secrets.CDM_SNOWFLAKE_USER }}
CDM_SNOWFLAKE_USER: ${{ secrets.CDM_SNOWFLAKE_USER }}
CDM5_SPARK_USER: ${{ secrets.CDM5_SPARK_USER }}
CDM5_SPARK_PASSWORD: ${{ secrets.CDM5_SPARK_PASSWORD }}
CDM5_SPARK_CONNECTION_STRING: ${{ secrets.CDM5_SPARK_CONNECTION_STRING }}
CDM5_SPARK_CDM_SCHEMA: ${{ secrets.CDM5_SPARK_CDM_SCHEMA }}
CDM5_SPARK_OHDSI_SCHEMA: ${{ secrets.CDM5_SPARK_OHDSI_SCHEMA }}

steps:
- uses: actions/checkout@v3
Expand All @@ -69,4 +71,4 @@ jobs:
with:
args: 'c("--no-manual", "--as-cran")'
error-on: '"warning"'
check-dir: '"check"'
check-dir: '"check"'
6 changes: 3 additions & 3 deletions CRAN-SUBMISSION
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
Version: 6.2.4
Date: 2023-09-07 07:52:51 UTC
SHA: 0275322b097fac96668c8804dd0acd8466c1442c
Version: 6.3.0
Date: 2023-11-08 13:53:53 UTC
SHA: 105405047c12c144c4486ecfacb41d68a70be778
6 changes: 3 additions & 3 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
Package: DatabaseConnector
Type: Package
Title: Connecting to Various Database Platforms
Version: 6.2.4
Date: 2023-09-07
Version: 6.3.0
Date: 2023-11-08
Authors@R: c(
person("Martijn", "Schuemie", email = "[email protected]", role = c("aut", "cre")),
person("Marc", "Suchard", role = c("aut")),
Expand All @@ -20,7 +20,7 @@ Depends:
R (>= 4.0.0)
Imports:
rJava,
SqlRender (>= 1.15.2),
SqlRender (>= 1.16.0),
methods,
stringr,
readr,
Expand Down
16 changes: 16 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,19 @@
DatabaseConnector 6.3.0
=======================

Changes:

1. On Snowflake always using `QUOTED_IDENTIFIERS_IGNORE_CASE=TRUE` to avoid name mismatches when using quotes.

2. Updated Redshift drivers.

3. Added unit tests for all supported platforms.

Bugfixes:

1. Fix bug on BigQuery where wait time was too short to avoid rate limit error.


DatabaseConnector 6.2.4
=======================

Expand Down
24 changes: 21 additions & 3 deletions R/Connect.R
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,30 @@ checkIfDbmsIsSupported <- function(dbms) {
"synapse",
"duckdb"
)
deprecated <- c(
"hive",
"impala",
"netezza",
"pdw"
)
if (!dbms %in% supportedDbmss) {
abort(sprintf(
"DBMS '%s' not supported. Please use one of these values: '%s'",
dbms,
paste(supportedDbmss, collapse = "', '")
))
}
if (dbms %in% deprecated) {
warn(sprintf(
paste(c("DBMS '%s' has been deprecated. Current functionality is provided as is.",
"No futher support will be provided.",
"Please consider switching to a different database platform."),
collapse = " "),
dbms),
.frequency = "regularly",
.frequency_id = "deprecated_dbms"
)
}
}

checkDetailValidation <- function(connectionDetails, name) {
Expand Down Expand Up @@ -484,8 +501,8 @@ connectPostgreSql <- function(connectionDetails) {

connectRedShift <- function(connectionDetails) {
inform("Connecting using Redshift driver")
jarPath <- findPathToJar("^RedshiftJDBC.*\\.jar$", connectionDetails$pathToDriver)
if (grepl("RedshiftJDBC42", jarPath)) {
jarPath <- findPathToJar("^[Rr]edshift.*\\.jar$", connectionDetails$pathToDriver)
if (grepl("RedshiftJDBC42", jarPath) || grepl("redshift-jdbc42", jarPath)) {
driver <- getJbcDriverSingleton("com.amazon.redshift.jdbc42.Driver", jarPath)
} else {
driver <- getJbcDriverSingleton("com.amazon.redshift.jdbc4.Driver", jarPath)
Expand Down Expand Up @@ -709,7 +726,8 @@ connectSnowflake <- function(connectionDetails) {
user = connectionDetails$user(),
password = connectionDetails$password(),
dbms = connectionDetails$dbms,
"CLIENT_TIMESTAMP_TYPE_MAPPING"="TIMESTAMP_NTZ"
"CLIENT_TIMESTAMP_TYPE_MAPPING"="TIMESTAMP_NTZ",
"QUOTED_IDENTIFIERS_IGNORE_CASE"="TRUE"
)
}
return(connection)
Expand Down
2 changes: 1 addition & 1 deletion R/DBI.R
Original file line number Diff line number Diff line change
Expand Up @@ -455,7 +455,7 @@ setMethod(
)
}
rowsAffected <- executeSql(connection = conn, sql = statement)
rowsAffected <- rJava::.jnew("java/lang/Integer", as.integer(rowsAffected))
rowsAffected <- rJava::.jnew("java/lang/Double", as.double(sum(rowsAffected)))
result <- new("DatabaseConnectorJdbcResult",
content = rowsAffected,
type = "rowsAffected",
Expand Down
4 changes: 2 additions & 2 deletions R/Drivers.R
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ downloadJdbcDrivers <- function(dbms, pathToDriver = Sys.getenv("DATABASECONNECT
jdbcDriverSources <- utils::read.csv(text =
"row,dbms, fileName, baseUrl
1,postgresql,postgresqlV42.2.18.zip,https://ohdsi.github.io/DatabaseConnectorJars/
2,redshift,redShiftV2.1.0.9.zip,https://ohdsi.github.io/DatabaseConnectorJars/
2,redshift,redshift-jdbc42-2.1.0.20.zip,https://s3.amazonaws.com/redshift-downloads/drivers/jdbc/2.1.0.20/
3,sql server,sqlServerV9.2.0.zip,https://ohdsi.github.io/DatabaseConnectorJars/
4,oracle,oracleV19.8.zip,https://ohdsi.github.io/DatabaseConnectorJars/
5,spark,DatabricksJDBC42-2.6.32.1054.zip,https://databricks-bi-artifacts.s3.us-east-2.amazonaws.com/simbaspark-drivers/jdbc/2.6.32/
Expand Down Expand Up @@ -162,7 +162,7 @@ getJbcDriverSingleton <- function(driverClass = "", classPath = "") {
}

checkPathToDriver <- function(pathToDriver, dbms) {
if (!is.null(dbms) && dbms %in% c("sqlite", "sqlite extended")) {
if (!is.null(dbms) && dbms %in% c("sqlite", "sqlite extended", "duckdb")) {
return()
}
if (pathToDriver == "") {
Expand Down
54 changes: 23 additions & 31 deletions R/Sql.R
Original file line number Diff line number Diff line change
Expand Up @@ -255,44 +255,33 @@ lowLevelExecuteSql <- function(connection, sql) {
UseMethod("lowLevelExecuteSql", connection)
}

delayIfNecessary <- function(sql, regex, executionTimeList, threshold) {
ddlExecutionTimes <- new.env()
insertExecutionTimes <- new.env()

delayIfNecessary <- function(sql, regex, executionTimes, threshold) {
regexGroups <- stringr::str_match(sql, stringr::regex(regex, ignore_case = TRUE))
tableName <- regexGroups[3]
if (!is.na(tableName) && !is.null(tableName)) {
currentTime <- Sys.time()
lastExecutedTime <- executionTimeList[[tableName]]
lastExecutedTime <- executionTimes[[tableName]]
if (!is.na(lastExecutedTime) && !is.null(lastExecutedTime)) {
delta <- currentTime - lastExecutedTime
delta <- difftime(currentTime, lastExecutedTime, units = "secs")
if (delta < threshold) {
Sys.sleep(threshold - delta)
}
}

executionTimeList[[tableName]] <- currentTime
executionTimes[[tableName]] <- currentTime
}
return(executionTimeList)
}

delayIfNecessaryForDdl <- function(sql) {
ddlList <- getOption("ddlList")
if (is.null(ddlList)) {
ddlList <- list()
}

regexForDdl <- "(^CREATE\\s+TABLE\\s+IF\\s+EXISTS|^CREATE\\s+TABLE|^DROP\\s+TABLE\\s+IF\\s+EXISTS|^DROP\\s+TABLE)\\s+([a-zA-Z0-9_$#-]*\\.?\\s*(?:[a-zA-Z0-9_]+)*)"
updatedList <- delayIfNecessary(sql, regexForDdl, ddlList, 5)
options(ddlList = updatedList)
delayIfNecessary(sql, regexForDdl, ddlExecutionTimes, 5)
}

delayIfNecessaryForInsert <- function(sql) {
insetList <- getOption("insetList")
if (is.null(insetList)) {
insetList <- list()
}

regexForInsert <- "(^INSERT\\s+INTO)\\s+([a-zA-Z0-9_$#-]*\\.?\\s*(?:[a-zA-Z0-9_]+)*)"
updatedList <- delayIfNecessary(sql, regexForInsert, insetList, 5)
options(insetList = updatedList)
delayIfNecessary(sql, regexForInsert, insertExecutionTimes, 5)
}

#' @export
Expand All @@ -302,18 +291,23 @@ lowLevelExecuteSql.default <- function(connection, sql) {

statement <- rJava::.jcall(connection@jConnection, "Ljava/sql/Statement;", "createStatement")
on.exit(rJava::.jcall(statement, "V", "close"))
hasResultSet <- rJava::.jcall(statement, "Z", "execute", as.character(sql), check = FALSE)
if (dbms(connection) == "spark") {
# For some queries the DataBricks JDBC driver will throw an error saying no ROWCOUNT is returned
# when using executeLargeUpdate, so using execute instead.
rJava::.jcall(statement, "Z", "execute", as.character(sql), check = FALSE)
rowsAffected <- rJava::.jcall(statement, "I", "getUpdateCount", check = FALSE)
if (rowsAffected == -1) {
rowsAffected <- 0
}
} else {
rowsAffected <- rJava::.jcall(statement, "J", "executeLargeUpdate", as.character(sql), check = FALSE)
}

if (dbms(connection) == "bigquery") {
delayIfNecessaryForDdl(sql)
delayIfNecessaryForInsert(sql)
}

rowsAffected <- 0
if (!hasResultSet) {
rowsAffected <- rJava::.jcall(statement, "I", "getUpdateCount", check = FALSE)
}

delta <- Sys.time() - startTime
logTrace(paste("Executing SQL took", delta, attr(delta, "units")))
invisible(rowsAffected)
Expand Down Expand Up @@ -421,9 +415,9 @@ executeSql <- function(connection,

batched <- runAsBatch && supportsBatchUpdates(connection)
sqlStatements <- SqlRender::splitSql(sql)
rowsAffected <- c()
if (batched) {
batchSize <- 1000
rowsAffected <- 0
for (start in seq(1, length(sqlStatements), by = batchSize)) {
end <- min(start + batchSize - 1, length(sqlStatements))

Expand All @@ -441,7 +435,7 @@ executeSql <- function(connection,
tryCatch(
{
startQuery <- Sys.time()
rowsAffected <- c(rowsAffected, rJava::.jcall(statement, "[I", "executeBatch"))
rowsAffected <- c(rowsAffected, rJava::.jcall(statement, "[J", "executeLargeBatch"))
delta <- Sys.time() - startQuery
if (profile) {
inform(paste("Statements", start, "through", end, "took", delta, attr(delta, "units")))
Expand Down Expand Up @@ -471,7 +465,7 @@ executeSql <- function(connection,
tryCatch(
{
startQuery <- Sys.time()
lowLevelExecuteSql(connection, sqlStatement)
rowsAffected <- c(rowsAffected, lowLevelExecuteSql(connection, sqlStatement))
delta <- Sys.time() - startQuery
if (profile) {
inform(paste("Statement ", i, "took", delta, attr(delta, "units")))
Expand All @@ -498,9 +492,7 @@ executeSql <- function(connection,
delta <- Sys.time() - startTime
inform(paste("Executing SQL took", signif(delta, 3), attr(delta, "units")))
}
if (batched) {
invisible(rowsAffected)
}
}

convertFields <- function(dbms, result) {
Expand Down
4 changes: 2 additions & 2 deletions cran-comments.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
This update includes 1 changes and 2 bugfixes, including the fix requested by Prof. Hornik. (see NEWS.md).
This update includes 3 changes and 1 bugfix. (see NEWS.md).

---

Expand All @@ -14,4 +14,4 @@ There were no ERRORs or WARNINGs.

## Downstream dependencies

DatabaseConnector is used by CDMConnector, which was tested with this new version. No issues were found.
DatabaseConnector is used by Achilles, CohortAlgebra, CohortExplorer, TreatmentPatterns, and CDMConnector, which were tested with this new version. No issues were found.
2 changes: 1 addition & 1 deletion docs/404.html

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions docs/articles/Connecting.html

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading

0 comments on commit 239d686

Please sign in to comment.