Raise pool connection limit to prevent timeout on acquire()
rm-dr committed Nov 16, 2024
1 parent 64bdb93 commit 41c8340
Showing 4 changed files with 38 additions and 7 deletions.
3 changes: 2 additions & 1 deletion copperd/bin/edged/src/main.rs
@@ -39,7 +39,8 @@ async fn make_app(config: Arc<EdgedConfig>, s3_client: Arc<S3Client>) -> Router
 
 	trace!(message = "Connecting to itemdb");
 	// Connect to database
-	let itemdb_client = match ItemdbClient::open(&config.edged_itemdb_addr, true).await {
+	// TODO: configure max connections
+	let itemdb_client = match ItemdbClient::open(32, &config.edged_itemdb_addr, true).await {
 		Ok(db) => Arc::new(db),
 		Err(ItemdbOpenError::Database(e)) => {
 			error!(message = "SQL error while opening item database", err = ?e);

15 changes: 14 additions & 1 deletion copperd/bin/piper/src/main.rs
@@ -127,7 +127,20 @@ async fn main() {
 	trace!(message = "Connecting to itemdb");
 	// Connect to database
 	let itemdb_client = loop {
-		match ItemdbClient::open(&config.piper_itemdb_addr, false).await {
+		match ItemdbClient::open(
+			// We need at least one connection per job.
+			// If we use any fewer, requests to acquire new connections will time out!
+			// ...and add 4 extra connections, just to be safe.
+			//
+			// If piper exits with a "connection timed out" error, we need to raise this limit.
+			// Be careful with this, though---understand *why* you need so many connections!
+			// We really shouldn't need more than one per job.
+			u32::try_from(config.piper_parallel_jobs).unwrap() + 4,
+			&config.piper_itemdb_addr,
+			false,
+		)
+		.await
+		{
 			Ok(db) => break Arc::new(db),
 			Err(ItemdbOpenError::Database(e)) => {
 				error!(message = "SQL error while opening item database", err = ?e);

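The sizing rule in the comment above reduces to a small calculation. A minimal, standalone sketch of that rule; the function name and example job count are illustrative, not part of the commit:

// Illustrative sketch of the pool-sizing rule described in the comment above.
// Not part of the commit; names are hypothetical.
fn itemdb_pool_size(parallel_jobs: usize) -> u32 {
	// One pool connection per concurrently running job...
	let per_job = u32::try_from(parallel_jobs).expect("job count must fit in a u32");
	// ...plus a small fixed headroom so acquire() doesn't time out under brief spikes.
	per_job + 4
}

fn main() {
	// e.g. 8 parallel jobs -> a 12-connection pool
	assert_eq!(itemdb_pool_size(8), 12);
}

If a run still hits the acquire timeout, the comment's advice applies: work out why more than one connection per job is being held before raising the limit further.
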
20 changes: 16 additions & 4 deletions copperd/lib/itemdb/src/client/client/mod.rs
@@ -5,7 +5,7 @@ use sqlx::{
 	pool::PoolConnection, postgres::PgPoolOptions, Connection, PgConnection, PgPool, Postgres,
 };
 use thiserror::Error;
-use tracing::info;
+use tracing::{info, trace};
 
 use crate::client::migrate;
 
@@ -39,7 +39,11 @@ pub struct ItemdbClient
 
 impl ItemdbClient {
	/// Create a new [`LocalDataset`].
-	pub async fn open(db_addr: &str, migrate: bool) -> Result<Self, ItemdbOpenError> {
+	pub async fn open(
+		max_connections: u32,
+		db_addr: &str,
+		migrate: bool,
+	) -> Result<Self, ItemdbOpenError> {
 		info!(message = "Opening dataset", ds_type = "postgres", ?db_addr);
 
 		// Apply migrations
@@ -56,15 +60,23 @@ impl ItemdbClient
 		drop(conn);
 
 		let pool = PgPoolOptions::new()
-			// TODO: configure
-			.max_connections(5)
+			.max_connections(max_connections)
 			.connect(db_addr)
 			.await?;
 
 		Ok(Self { pool })
 	}
 
 	pub async fn new_connection(&self) -> Result<PoolConnection<Postgres>, sqlx::Error> {
+		let size = self.pool.size();
+		let idle_connections = self.pool.num_idle();
+		let active_connections = size - u32::try_from(idle_connections).unwrap();
+		trace!(
+			message = "Trying to open itemdb connection",
+			idle_connections,
+			active_connections
+		);
+
 		let conn = self.pool.acquire().await?;
 		return Ok(conn);
 	}

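For context: sqlx's pool hands out at most max_connections connections and makes further acquire() calls wait; when the wait exceeds the pool's acquire timeout, the call fails with the timeout error this commit addresses. A hedged, standalone sketch of the same pool setup, assuming a recent sqlx with the postgres feature; the explicit acquire_timeout call is shown only for illustration and is not in the commit:

use std::time::Duration;

use sqlx::postgres::{PgPool, PgPoolOptions};

// Standalone sketch; mirrors the open() change above, with the limit passed in
// by the caller instead of being hardcoded to 5.
async fn open_pool(db_addr: &str, max_connections: u32) -> Result<PgPool, sqlx::Error> {
	PgPoolOptions::new()
		.max_connections(max_connections)
		// The commit keeps sqlx's default wait; set explicitly here only to show
		// which knob produces the "timed out" error on acquire().
		.acquire_timeout(Duration::from_secs(30))
		.connect(db_addr)
		.await
}
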
7 changes: 6 additions & 1 deletion copperd/lib/util/src/logging.rs
@@ -51,6 +51,7 @@ impl LoggingPreset
 				other: LogLevel::Warn,
 				http: LogLevel::Warn,
 				s3: LogLevel::Warn,
+				sqlx: LogLevel::Error,
 
 				piper: LogLevel::Info,
 				runner: LogLevel::Info,
@@ -64,6 +65,7 @@ impl LoggingPreset
 				other: LogLevel::Warn,
 				http: LogLevel::Warn,
 				s3: LogLevel::Warn,
+				sqlx: LogLevel::Error,
 
 				piper: LogLevel::Debug,
 				runner: LogLevel::Debug,
@@ -77,6 +79,7 @@ impl LoggingPreset
 				other: LogLevel::Debug,
 				http: LogLevel::Warn,
 				s3: LogLevel::Warn,
+				sqlx: LogLevel::Warn,
 
 				piper: LogLevel::Trace,
 				runner: LogLevel::Trace,
@@ -90,6 +93,7 @@ impl LoggingPreset
 				other: LogLevel::Trace,
 				http: LogLevel::Warn,
 				s3: LogLevel::Warn,
+				sqlx: LogLevel::Warn,
 
 				piper: LogLevel::Trace,
 				runner: LogLevel::Trace,
@@ -106,6 +110,7 @@ pub struct LoggingConfig
 	other: LogLevel,
 	http: LogLevel,
 	s3: LogLevel,
+	sqlx: LogLevel,
 
 	piper: LogLevel,
 	runner: LogLevel,
@@ -122,7 +127,6 @@ impl From<LoggingConfig> for EnvFilter
 			//
 			// Non-configurable sources
 			//
-			format!("sqlx={}", LogLevel::Warn),
 			format!("aws_sdk_s3={}", LogLevel::Warn),
 			format!("aws_smithy_runtime={}", LogLevel::Warn),
 			format!("aws_smithy_runtime_api={}", LogLevel::Warn),
@@ -133,6 +137,7 @@
 			//
 			format!("tower_http={}", conf.http),
 			format!("s3={}", conf.s3),
+			format!("sqlx={}", conf.sqlx),
 			// // Piper
 			format!("piper::pipeline::runner={}", conf.runner),
 			format!("piper::pipeline::job={}", conf.job),

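The net effect of this file is that sqlx's log level moves from the hardcoded "non-configurable sources" list into the per-preset configuration. A small, hypothetical sketch of how such per-module directives combine into a tracing-subscriber EnvFilter; the function and variable names here are illustrative, not from the repository:

use tracing_subscriber::EnvFilter;

// Hypothetical illustration of assembling per-module directives, as the
// From<LoggingConfig> impl above does with conf.sqlx, conf.http, and friends.
fn build_filter(sqlx_level: &str, http_level: &str) -> EnvFilter {
	let directives = [
		format!("sqlx={}", sqlx_level),
		format!("tower_http={}", http_level),
	];
	EnvFilter::new(directives.join(","))
}

fn main() {
	// e.g. the two quieter presets above use "error" for sqlx
	let _filter = build_filter("error", "warn");
}
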
