PostgreSQL Connection Pool Best Practices - Expert Guide
Published: September 25, 2024 | Reading time: 10 minutes
Quick Solution
For most Node.js applications, connection pooling with pg-pool (the pool bundled with the node-postgres `pg` package) and settings along the following lines is a solid starting point:
const { Pool } = require('pg');

const pool = new Pool({
  host: 'localhost',
  port: 5432,
  database: 'mydb',
  user: 'postgres',
  password: 'password',
  max: 20,                       // Maximum connections in the pool
  min: 5,                        // Minimum connections kept open
  idleTimeoutMillis: 30000,      // Close idle connections after 30s
  connectionTimeoutMillis: 2000, // Max wait for a connection from the pool
  statement_timeout: 30000,      // Server-side statement timeout (30s)
  query_timeout: 30000           // Client-side query timeout (30s)
});
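With the pool in place, run queries through pool.query(), which checks a connection out and releases it automatically. A minimal sanity check might look like this:

// Quick check that the pool can reach the database
const { rows } = await pool.query('SELECT NOW() AS now');
console.log('Database time:', rows[0].now);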
Understanding PostgreSQL Connection Pooling
PostgreSQL connection pooling is crucial for high-performance applications. Without proper pooling, you'll face connection exhaustion, timeouts, and poor scalability.
Why Connection Pooling Matters
- Resource Efficiency: Reuses connections instead of opening a new one per request (see the sketch after this list)
- Performance: Eliminates connection overhead for each query
- Scalability: Manages concurrent connections effectively
- Reliability: Handles connection failures gracefully
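To make the reuse point concrete, here is a minimal sketch contrasting a fresh connection per request with a shared pool; the users table and the PG* environment variables are illustrative assumptions:

const { Client, Pool } = require('pg');

// Without pooling: every call pays for a TCP handshake, TLS negotiation, and authentication
async function withoutPooling() {
  const client = new Client(); // connection details come from PG* environment variables
  await client.connect();
  const { rows } = await client.query('SELECT COUNT(*) FROM users');
  await client.end();
  return rows[0].count;
}

// With pooling: a connection is checked out, reused, and returned to the pool
const pool = new Pool();
async function withPooling() {
  const { rows } = await pool.query('SELECT COUNT(*) FROM users');
  return rows[0].count;
}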
pg-pool Configuration Deep Dive
1. Basic Pool Setup
const { Pool } = require('pg');

const pool = new Pool({
  // Connection details
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT, 10) || 5432,
  database: process.env.DB_NAME || 'mydb',
  user: process.env.DB_USER || 'postgres',
  password: process.env.DB_PASSWORD || 'password',

  // Pool settings
  max: 20,                       // Maximum connections in pool
  min: 5,                        // Minimum connections in pool
  idleTimeoutMillis: 30000,      // Close idle connections after 30s
  connectionTimeoutMillis: 2000, // Max wait for a connection from the pool

  // Query settings
  statement_timeout: 30000,      // Server-side statement timeout (30s)
  query_timeout: 30000,          // Client-side query timeout (30s)

  // SSL settings
  // Note: rejectUnauthorized: false skips certificate verification;
  // prefer supplying a CA certificate and leaving verification on.
  ssl: process.env.NODE_ENV === 'production' ? {
    rejectUnauthorized: false
  } : false
});
2. Production-Ready Configuration
const { Pool } = require('pg');

class DatabaseService {
  constructor() {
    this.pool = new Pool({
      host: process.env.DB_HOST,
      port: parseInt(process.env.DB_PORT, 10) || 5432,
      database: process.env.DB_NAME,
      user: process.env.DB_USER,
      password: process.env.DB_PASSWORD,

      // Optimized pool settings
      max: parseInt(process.env.DB_MAX_CONNECTIONS, 10) || 20,
      min: parseInt(process.env.DB_MIN_CONNECTIONS, 10) || 5,

      // Timeout settings
      idleTimeoutMillis: 30000,
      connectionTimeoutMillis: 2000,

      // Query timeouts
      statement_timeout: 60000, // 1 minute for complex queries
      query_timeout: 60000,

      // Let the process exit once all pooled clients are idle
      allowExitOnIdle: true,

      // Application settings
      application_name: 'myapp',

      // SSL configuration
      ssl: process.env.NODE_ENV === 'production' ? {
        rejectUnauthorized: false,
        ca: process.env.DB_SSL_CA,
        cert: process.env.DB_SSL_CERT,
        key: process.env.DB_SSL_KEY
      } : false
    });

    this.setupEventHandlers();
  }

  setupEventHandlers() {
    this.pool.on('connect', (client) => {
      console.log('New PostgreSQL client connected');
    });

    this.pool.on('error', (err, client) => {
      console.error('Unexpected error on idle client', err);
    });

    this.pool.on('remove', (client) => {
      console.log('PostgreSQL client removed from pool');
    });
  }

  async query(text, params) {
    const start = Date.now();
    try {
      const res = await this.pool.query(text, params);
      const duration = Date.now() - start;
      console.log('Executed query', { text, duration, rows: res.rowCount });
      return res;
    } catch (error) {
      console.error('Database query error:', error);
      throw error;
    }
  }

  async getClient() {
    return await this.pool.connect();
  }

  async close() {
    await this.pool.end();
  }
}
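A short usage sketch for the class above; the users table, its columns, and the SIGTERM handling are illustrative assumptions:

const db = new DatabaseService();

async function findUserByEmail(email) {
  // Parameterized queries go through the shared pool
  const { rows } = await db.query('SELECT id, name FROM users WHERE email = $1', [email]);
  return rows[0] || null;
}

// Close the pool cleanly on shutdown so in-flight clients are released
process.on('SIGTERM', async () => {
  await db.close();
  process.exit(0);
});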
3. Connection Pool Sizing Strategy
Determining the right pool size is critical for performance:
// Estimate a pool size for your application (a rough heuristic, not a hard rule)
function calculatePoolSize() {
  const cpuCores = require('os').cpus().length;
  const expectedConcurrency = 100; // Estimated concurrent users
  const avgQueryTime = 50;         // Average query time in ms

  // Heuristic: (CPU cores * 2) + (concurrent users * avg query time / 1000)
  const optimalSize = Math.min(
    Math.ceil((cpuCores * 2) + (expectedConcurrency * avgQueryTime / 1000)),
    100 // Don't exceed 100 connections
  );

  return Math.max(optimalSize, 5); // Minimum 5 connections
}

const poolSize = calculatePoolSize();
console.log(`Recommended pool size: ${poolSize}`);

const pool = new Pool({
  // ... other config
  max: poolSize,
  min: Math.ceil(poolSize * 0.2) // 20% of max as minimum
});
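Whatever heuristic you use, the combined max of every pool across all application instances must stay below the server's max_connections setting. A small startup check, sketched under the assumption that the pool above is already configured:

async function assertPoolFitsServer(pool, appInstances = 1) {
  // SHOW returns the current server setting as text
  const { rows } = await pool.query('SHOW max_connections');
  const serverMax = parseInt(rows[0].max_connections, 10);
  const planned = pool.options.max * appInstances;

  if (planned >= serverMax) {
    console.warn(`Planned pool connections (${planned}) reach or exceed server max_connections (${serverMax})`);
  }
}

await assertPoolFitsServer(pool, 4); // e.g. 4 application instances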
Advanced Pooling Patterns
1. Read/Write Split with Multiple Pools
class ReadWriteDatabaseService {
  constructor() {
    // Write pool (primary database)
    this.writePool = new Pool({
      host: process.env.DB_WRITE_HOST,
      port: 5432,
      database: process.env.DB_NAME,
      user: process.env.DB_WRITE_USER,
      password: process.env.DB_WRITE_PASSWORD,
      max: 10,
      min: 2
    });

    // Read pool (read replica)
    this.readPool = new Pool({
      host: process.env.DB_READ_HOST,
      port: 5432,
      database: process.env.DB_NAME,
      user: process.env.DB_READ_USER,
      password: process.env.DB_READ_PASSWORD,
      max: 20,
      min: 5
    });
  }

  async query(text, params, { useWritePool = false } = {}) {
    const pool = useWritePool ? this.writePool : this.readPool;
    return await pool.query(text, params);
  }

  async write(text, params) {
    return await this.query(text, params, { useWritePool: true });
  }

  async read(text, params) {
    return await this.query(text, params, { useWritePool: false });
  }
}
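A short usage sketch, assuming the class above and an illustrative orders table. Every statement that modifies data must go through write(), because the read pool points at a replica (which may also lag slightly behind the primary):

const db = new ReadWriteDatabaseService();

// Writes go to the primary
await db.write('INSERT INTO orders (user_id, total) VALUES ($1, $2)', [42, 99.5]);

// Reads can be served by the replica
const { rows } = await db.read('SELECT * FROM orders WHERE user_id = $1', [42]);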
2. Transaction Management
async function executeTransaction(queries) {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    const results = [];
    for (const { text, params } of queries) {
      const result = await client.query(text, params);
      results.push(result);
    }
    await client.query('COMMIT');
    return results;
  } catch (error) {
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
}

// Usage example
await executeTransaction([
  { text: 'INSERT INTO users (name) VALUES ($1)', params: ['John'] },
  { text: 'INSERT INTO orders (user_id) VALUES ($1)', params: [1] }
]);
3. Connection Health Monitoring
class PoolMonitor {
  constructor(pool) {
    this.pool = pool;
    this.stats = {
      totalConnections: 0,
      idleConnections: 0,
      waitingClients: 0
    };
  }

  startMonitoring() {
    setInterval(() => {
      this.collectStats();
      this.logStats();
    }, 30000); // Every 30 seconds
  }

  collectStats() {
    this.stats = {
      totalConnections: this.pool.totalCount,
      idleConnections: this.pool.idleCount,
      waitingClients: this.pool.waitingCount
    };
  }

  logStats() {
    console.log('Pool Statistics:', this.stats);

    // Alert if clients are queueing because the pool is exhausted
    if (this.stats.waitingClients > 5) {
      console.warn('High number of waiting clients:', this.stats.waitingClients);
    }

    // Alert if most of the pool is sitting idle
    if (this.stats.idleConnections > this.stats.totalConnections * 0.8) {
      console.warn('Too many idle connections');
    }
  }
}

// Usage
const monitor = new PoolMonitor(pool);
monitor.startMonitoring();
Common Pitfalls and Solutions
1. Connection Leaks
Problem: Not releasing connections back to the pool
// This will exhaust your pool
async function badExample() {
  const client = await pool.connect();
  const result = await client.query('SELECT * FROM users');
  // Missing client.release() - LEAK!
  return result;
}

// Use try/finally to ensure release
async function goodExample() {
  const client = await pool.connect();
  try {
    const result = await client.query('SELECT * FROM users');
    return result;
  } finally {
    client.release();
  }
}

// Or use pool.query() which handles this automatically
async function bestExample() {
  return await pool.query('SELECT * FROM users');
}
2. Improper Error Handling
async function safeQuery(text, params) {
  let client;
  try {
    client = await pool.connect();
    const result = await client.query(text, params);
    return result;
  } catch (error) {
    console.error('Database error:', error);
    // Handle specific PostgreSQL error codes
    if (error.code === '23505') {        // unique_violation
      throw new Error('Duplicate entry');
    } else if (error.code === '23503') { // foreign_key_violation
      throw new Error('Foreign key violation');
    }
    throw error;
  } finally {
    if (client) {
      client.release();
    }
  }
}
Performance Optimization Tips
1. Connection Pre-warming
async function preWarmPool() {
  const clients = [];

  // Create minimum connections upfront
  for (let i = 0; i < pool.options.min; i++) {
    const client = await pool.connect();
    clients.push(client);
  }

  // Release them back to the pool
  clients.forEach(client => client.release());
  console.log('Pool pre-warmed with', pool.options.min, 'connections');
}

// Call this at application startup
await preWarmPool();
2. Query Optimization
// Use named prepared statements for repeated queries;
// node-postgres prepares the statement once per connection and reuses it
async function getUser(id) {
  return await pool.query({
    name: 'get-user',
    text: 'SELECT * FROM users WHERE id = $1',
    values: [id]
  });
}

// Batch operations: one multi-row INSERT instead of a round trip per row
async function batchInsert(users) {
  const values = users.map((user, index) =>
    `($${index * 2 + 1}, $${index * 2 + 2})`
  ).join(', ');
  const params = users.flatMap(user => [user.name, user.email]);
  const query = `INSERT INTO users (name, email) VALUES ${values}`;
  return await pool.query(query, params);
}
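For reference, calling the batch helper with illustrative data sends a single multi-row INSERT:

// Generates: INSERT INTO users (name, email) VALUES ($1, $2), ($3, $4)
await batchInsert([
  { name: 'Ada', email: 'ada@example.com' },
  { name: 'Grace', email: 'grace@example.com' }
]);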
Recommended Tools
pg-pool
The official PostgreSQL connection pool for Node.js with excellent performance and reliability.
PgBouncer
Lightweight connection pooler for PostgreSQL that can dramatically improve performance.
Supabase
Open source Firebase alternative with built-in PostgreSQL connection pooling.
Summary
PostgreSQL connection pooling best practices include:
- Using pg-pool with appropriate min/max connections
- Setting proper timeout values
- Implementing robust error handling
- Monitoring pool health and performance
- Using transactions properly
- Pre-warming connections for better performance
Need Help with PostgreSQL?
Our database experts can help you optimize your PostgreSQL setup for maximum performance and reliability.