Network Request Optimization - Complete Guide
Published: September 25, 2024 | Reading time: 18 minutes
Network Optimization Overview
Network request optimization improves application performance and user experience:
Network Optimization Benefits
# Network Optimization Benefits
- Faster data loading
- Reduced bandwidth usage
- Better user experience
- Lower server costs
- Improved scalability
- Better mobile performance
- Enhanced reliability
HTTP Optimization
HTTP/2 and HTTP/3
HTTP Optimization Strategies
# HTTP Optimization Strategies
# 1. HTTP/2 Server Push
# Nginx configuration for HTTP/2 push
# NOTE(review): http2_push was deprecated and then removed in nginx 1.25.1;
# on modern nginx prefer `add_header Link "...; rel=preload"` or 103 Early
# Hints instead of server push. Confirm the target nginx version.
server {
listen 443 ssl http2;
server_name example.com;
location / {
http2_push /css/style.css;
http2_push /js/app.js;
http2_push /images/logo.png;
}
}
# 2. HTTP/3 Configuration
# Nginx HTTP/3 setup
server {
    # TCP listener: HTTP/1.1 and HTTP/2 fallback for clients without QUIC.
    listen 443 ssl http2;
    # Bug fix: nginx has no `http3` listen parameter — HTTP/3 runs over
    # QUIC/UDP and is enabled with the `quic` parameter instead.
    listen 443 quic reuseport;
    server_name example.com;
    ssl_certificate /etc/ssl/certs/example.com.crt;
    ssl_certificate_key /etc/ssl/private/example.com.key;
    # QUIC requires TLS 1.3.
    ssl_protocols TLSv1.3;
    ssl_ciphers TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384;
    # Advertise HTTP/3 to clients that first connect over TCP.
    add_header Alt-Svc 'h3=":443"; ma=86400';
}
# 3. Compression Configuration
# Gzip compression
gzip on;
# Emit `Vary: Accept-Encoding` so shared caches store per-encoding variants.
gzip_vary on;
# Skip tiny responses where compression overhead outweighs the savings.
gzip_min_length 1024;
gzip_proxied any;
# Level 6 balances CPU cost against compression ratio.
gzip_comp_level 6;
gzip_types
text/plain
text/css
text/xml
text/javascript
application/json
application/javascript
application/xml+rss
application/atom+xml
image/svg+xml;
# Brotli compression
# NOTE(review): brotli_* directives require the third-party ngx_brotli
# module; stock nginx builds will reject these lines. Confirm the module
# is compiled in or loaded before deploying.
brotli on;
brotli_comp_level 6;
brotli_types
text/plain
text/css
application/json
application/javascript
text/xml
application/xml
application/xml+rss
text/javascript;
# 4. Cache Headers
# Static assets caching
location ~* \.(css|js|png|jpg|jpeg|gif|ico|svg|woff|woff2|ttf|eot)$ {
    expires 1y;
    # `immutable` is only safe for fingerprinted filenames (e.g. app.3f9c2.js);
    # otherwise clients will never revalidate within the year.
    add_header Cache-Control "public, immutable";
    add_header Vary Accept-Encoding;
}
# API responses caching
location /api/ {
    expires 1h;
    add_header Cache-Control "public, must-revalidate";
    # Bug fix: the original added `ETag $request_id`, which changes on every
    # response and therefore defeats conditional revalidation entirely
    # (If-None-Match can never hit). Let nginx or the upstream emit real
    # content-based ETags instead of overriding them here.
}
# 5. Connection Optimization
# Keep-alive connections
upstream backend {
server 127.0.0.1:3000;
# Keep up to 32 idle upstream connections open for reuse.
keepalive 32;
}
server {
location / {
proxy_pass http://backend;
# HTTP/1.1 plus an empty Connection header are both required for
# upstream keepalive to actually work.
proxy_http_version 1.1;
proxy_set_header Connection "";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
}
}
# 6. Request Optimization
# Request size limits
client_max_body_size 10M;
client_body_buffer_size 128k;
client_header_buffer_size 1k;
large_client_header_buffers 4 4k;
# Timeout settings (values are in seconds)
client_body_timeout 12;
client_header_timeout 12;
keepalive_timeout 15;
send_timeout 10;
API Optimization
REST API Optimization
API Optimization Techniques
# API Optimization Techniques
# 1. Request Batching
/**
 * Groups individual requests into batches that are executed together.
 * A batch is flushed when it reaches `batchSize`, or after `delay` ms.
 */
class RequestBatcher {
  /**
   * @param {number} batchSize - Max requests per batch.
   * @param {number} delay - Ms to wait before flushing a partial batch.
   */
  constructor(batchSize = 10, delay = 100) {
    this.batchSize = batchSize;
    this.delay = delay;
    this.queue = [];
    this.isProcessing = false;
    // Bug fix: the original scheduled a fresh setTimeout for EVERY queued
    // request below the threshold, leaking redundant timers that each
    // re-invoked processBatch. Track the single pending flush timer instead.
    this.flushTimer = null;
  }

  /**
   * Queues a request; resolves with its individual result once its batch runs.
   * @param {{url: string, options?: object}} request
   * @returns {Promise<any>}
   */
  addRequest(request) {
    return new Promise((resolve, reject) => {
      this.queue.push({ request, resolve, reject });
      if (this.queue.length >= this.batchSize) {
        this.processBatch();
      } else if (this.flushTimer === null) {
        // Exactly one deferred flush for a partial batch.
        this.flushTimer = setTimeout(() => this.processBatch(), this.delay);
      }
    });
  }

  /** Drains up to `batchSize` queued requests and settles their promises. */
  async processBatch() {
    if (this.isProcessing || this.queue.length === 0) return;
    if (this.flushTimer !== null) {
      clearTimeout(this.flushTimer);
      this.flushTimer = null;
    }
    this.isProcessing = true;
    const batch = this.queue.splice(0, this.batchSize);
    try {
      const results = await this.executeBatch(batch.map((item) => item.request));
      batch.forEach((item, index) => {
        item.resolve(results[index]);
      });
    } catch (error) {
      // A batch-level failure rejects every request in the batch.
      batch.forEach((item) => {
        item.reject(error);
      });
    } finally {
      this.isProcessing = false;
    }
    if (this.queue.length > 0) {
      this.flushTimer = setTimeout(() => this.processBatch(), this.delay);
    }
  }

  /** Fires all requests in parallel and parses each response body as JSON. */
  async executeBatch(requests) {
    const responses = await Promise.all(
      requests.map((request) => fetch(request.url, request.options))
    );
    return Promise.all(responses.map((response) => response.json()));
  }
}
# 2. Request Deduplication
/**
 * Collapses concurrent identical requests into a single in-flight promise.
 */
class RequestDeduplicator {
  constructor() {
    // key -> in-flight promise
    this.pendingRequests = new Map();
  }

  /**
   * Returns the existing in-flight promise for `key`, or starts a new one.
   * @param {string} key - Identity of the request.
   * @param {() => Promise<any>} requestFunction - Performs the actual request.
   * @returns {Promise<any>}
   */
  async deduplicate(key, requestFunction) {
    const inFlight = this.pendingRequests.get(key);
    if (inFlight !== undefined) {
      return inFlight;
    }
    const promise = requestFunction().finally(() => {
      // Once settled, allow a fresh request for this key.
      this.pendingRequests.delete(key);
    });
    this.pendingRequests.set(key, promise);
    return promise;
  }
}
# 3. Response Caching
/**
 * Bounded in-memory cache with per-entry TTL and oldest-first eviction.
 */
class ResponseCache {
  /**
   * @param {number} maxSize - Max entries before the oldest is evicted.
   * @param {number} ttl - Entry lifetime in ms (default 5 minutes).
   */
  constructor(maxSize = 100, ttl = 300000) {
    this.cache = new Map();
    this.maxSize = maxSize;
    this.ttl = ttl;
  }

  /** Returns cached data for `key`, or null if absent or expired. */
  get(key) {
    const entry = this.cache.get(key);
    if (entry === undefined) {
      return null;
    }
    const expired = Date.now() - entry.timestamp > this.ttl;
    if (expired) {
      this.cache.delete(key);
      return null;
    }
    return entry.data;
  }

  /** Stores `data` under `key`, evicting the oldest entry when full. */
  set(key, data) {
    if (this.cache.size >= this.maxSize) {
      // Map iteration order is insertion order, so this drops the oldest entry.
      const oldest = this.cache.keys().next().value;
      this.cache.delete(oldest);
    }
    this.cache.set(key, { data, timestamp: Date.now() });
  }

  /** Empties the cache entirely. */
  clear() {
    this.cache.clear();
  }
}
# 4. API Client with Optimization
/**
 * API client layering response caching, in-flight deduplication and
 * timeouts over fetch. Depends on ResponseCache, RequestBatcher and
 * RequestDeduplicator defined earlier in this file.
 */
class OptimizedAPIClient {
  /**
   * @param {string} baseURL - Prefix for all endpoint paths.
   * @param {object} [options] - Defaults merged into every request
   *   (timeout ms, retries, retryDelay ms, headers, ...).
   */
  constructor(baseURL, options = {}) {
    this.baseURL = baseURL;
    this.options = {
      timeout: 10000,
      retries: 3,
      retryDelay: 1000,
      ...options
    };
    this.cache = new ResponseCache();
    this.batcher = new RequestBatcher();
    this.deduplicator = new RequestDeduplicator();
  }

  /**
   * Performs a request with cache lookup and deduplication.
   * Only successful results are cached.
   * @returns {Promise<{success: boolean, data?: any, status?: number, error?: string}>}
   */
  async request(endpoint, options = {}) {
    const url = `${this.baseURL}${endpoint}`;
    const cacheKey = this.generateCacheKey(url, options);
    // Serve from cache when possible.
    const cached = this.cache.get(cacheKey);
    if (cached) {
      return cached;
    }
    // Collapse concurrent identical requests into one network call.
    const result = await this.deduplicator.deduplicate(cacheKey, async () => {
      return this.executeRequest(url, options);
    });
    // Cache successful responses only.
    if (result.success) {
      this.cache.set(cacheKey, result);
    }
    return result;
  }

  /** Executes one fetch and normalizes the outcome into a result object. */
  async executeRequest(url, options) {
    const requestOptions = {
      ...this.options,
      ...options,
      headers: {
        'Content-Type': 'application/json',
        ...this.options.headers,
        ...options.headers
      }
    };
    try {
      const response = await this.fetchWithTimeout(url, requestOptions);
      const data = await response.json();
      return {
        // Bug fix: the original reported success: true for ANY response that
        // parsed as JSON, so 4xx/5xx error payloads were cached and returned
        // as successes. Tie success to the HTTP status instead.
        success: response.ok,
        data,
        status: response.status
      };
    } catch (error) {
      return {
        success: false,
        error: error.message
      };
    }
  }

  /** fetch wrapped with an AbortController-based timeout. */
  async fetchWithTimeout(url, options) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), options.timeout);
    try {
      return await fetch(url, {
        ...options,
        signal: controller.signal
      });
    } finally {
      // Always clear the timer — on success and on failure alike.
      clearTimeout(timeoutId);
    }
  }

  /** Cache key combining URL and the serialized request options. */
  generateCacheKey(url, options) {
    return `${url}:${JSON.stringify(options)}`;
  }
}
# 5. GraphQL Optimization
/**
 * Lightweight GraphQL query normalizer with an in-memory query cache.
 */
class GraphQLOptimizer {
  constructor() {
    this.queryCache = new Map();    // base64 key -> normalized query
    this.fragmentCache = new Map(); // reserved for future fragment reuse
  }

  /** Normalizes a query, caches it under its derived key, and returns it. */
  optimizeQuery(query) {
    const stripped = this.removeUnusedFields(query);
    const combined = this.combineFragments(stripped);
    this.queryCache.set(this.generateQueryKey(combined), combined);
    return combined;
  }

  /** Collapses whitespace runs (placeholder for real field pruning). */
  removeUnusedFields(query) {
    return query.replace(/\s+/g, ' ').trim();
  }

  /** Placeholder: returns the query unchanged. */
  combineFragments(query) {
    return query;
  }

  /** Derives a cache key by base64-encoding the query text. */
  generateQueryKey(query) {
    return btoa(query);
  }

  /** Looks up a previously optimized query by its key. */
  getCachedQuery(key) {
    return this.queryCache.get(key);
  }
}
# 6. WebSocket Optimization
/**
 * WebSocket wrapper with auto-reconnect, heartbeats and offline queueing.
 * NOTE(review): handleMessage is invoked on incoming frames but is not
 * defined in this class — a subclass or consumer must provide it.
 */
class WebSocketOptimizer {
  constructor(url, options = {}) {
    this.url = url;
    this.options = {
      reconnectInterval: 5000,
      maxReconnectAttempts: 5,
      heartbeatInterval: 30000,
      ...options
    };
    this.ws = null;
    this.reconnectAttempts = 0;
    this.isConnected = false;
    this.messageQueue = [];  // messages buffered while offline
    this.heartbeatTimer = null;
    // Bug fix: flag intentional closes so onclose doesn't auto-reconnect.
    this.intentionallyClosed = false;
  }

  /** Opens the socket and wires the lifecycle handlers. */
  connect() {
    this.intentionallyClosed = false;
    try {
      this.ws = new WebSocket(this.url);
      this.ws.onopen = () => {
        this.isConnected = true;
        this.reconnectAttempts = 0;
        this.processMessageQueue();
        this.startHeartbeat();
      };
      this.ws.onmessage = (event) => {
        this.handleMessage(event.data);
      };
      this.ws.onclose = () => {
        this.isConnected = false;
        this.stopHeartbeat();
        this.handleReconnect();
      };
      this.ws.onerror = (error) => {
        console.error('WebSocket error:', error);
      };
    } catch (error) {
      console.error('Failed to connect WebSocket:', error);
      this.handleReconnect();
    }
  }

  /** Sends now if connected, otherwise queues for delivery on reconnect. */
  send(message) {
    if (this.isConnected) {
      this.ws.send(JSON.stringify(message));
    } else {
      this.messageQueue.push(message);
    }
  }

  /** Flushes messages queued while the socket was down. */
  processMessageQueue() {
    while (this.messageQueue.length > 0 && this.isConnected) {
      const message = this.messageQueue.shift();
      this.ws.send(JSON.stringify(message));
    }
  }

  /** Periodic ping so intermediaries keep the connection alive. */
  startHeartbeat() {
    this.heartbeatTimer = setInterval(() => {
      if (this.isConnected) {
        this.ws.send(JSON.stringify({ type: 'ping' }));
      }
    }, this.options.heartbeatInterval);
  }

  stopHeartbeat() {
    if (this.heartbeatTimer) {
      clearInterval(this.heartbeatTimer);
      this.heartbeatTimer = null;
    }
  }

  /** Schedules a reconnect, bounded by maxReconnectAttempts. */
  handleReconnect() {
    // Bug fix: the original reconnected even after an explicit disconnect(),
    // because onclose fires for intentional closes too.
    if (this.intentionallyClosed) return;
    if (this.reconnectAttempts < this.options.maxReconnectAttempts) {
      this.reconnectAttempts++;
      setTimeout(() => this.connect(), this.options.reconnectInterval);
    }
  }

  /** Closes the socket permanently — no auto-reconnect will follow. */
  disconnect() {
    this.intentionallyClosed = true;
    this.isConnected = false;
    this.stopHeartbeat();
    if (this.ws) {
      this.ws.close();
    }
  }
}
Request Strategies
Advanced Request Patterns
Request Strategy Implementation
# Request Strategy Implementation
# 1. Request Queue with Priority
/**
 * Priority queue that executes up to `maxConcurrent` requests in parallel.
 * Higher priority values are dispatched first.
 */
class PriorityRequestQueue {
  constructor(maxConcurrent = 6) {
    this.maxConcurrent = maxConcurrent;
    this.queue = [];           // pending { request, priority, resolve, reject }
    this.activeRequests = 0;   // requests currently in flight
    this.isProcessing = false; // retained for backward compatibility
  }

  /**
   * Enqueues a request; resolves with the parsed JSON response body.
   * @param {{url: string, options?: object}} request
   * @param {number} priority - Higher runs first.
   * @returns {Promise<any>}
   */
  addRequest(request, priority = 0) {
    return new Promise((resolve, reject) => {
      this.queue.push({ request, priority, resolve, reject });
      // Stable sort keeps FIFO order within a priority level.
      this.queue.sort((a, b) => b.priority - a.priority);
      this.processQueue();
    });
  }

  /**
   * Launches queued requests up to the concurrency limit.
   * Bug fix: the original awaited each request INSIDE the dispatch loop,
   * so requests ran strictly one at a time and maxConcurrent had no effect.
   * Requests are now started without awaiting; each completion re-enters
   * the dispatcher to pull the next queued item.
   */
  async processQueue() {
    while (this.queue.length > 0 && this.activeRequests < this.maxConcurrent) {
      const { request, resolve, reject } = this.queue.shift();
      this.activeRequests++;
      this.executeRequest(request)
        .then(resolve, reject)
        .finally(() => {
          this.activeRequests--;
          if (this.queue.length > 0) {
            this.processQueue();
          }
        });
    }
  }

  /** Performs the fetch and parses the JSON body. */
  async executeRequest(request) {
    const response = await fetch(request.url, request.options);
    return response.json();
  }
}
# 2. Request Retry with Exponential Backoff
/**
 * Retries a failing async operation with capped exponential backoff.
 */
class RetryRequestHandler {
  /**
   * @param {object} [options]
   * @param {number} [options.maxRetries] - Retries after the first attempt.
   * @param {number} [options.baseDelay] - Initial backoff delay in ms.
   * @param {number} [options.maxDelay] - Upper bound on any delay, ms.
   * @param {number} [options.backoffFactor] - Multiplier per attempt.
   */
  constructor(options = {}) {
    this.options = {
      maxRetries: 3,
      baseDelay: 1000,
      maxDelay: 10000,
      backoffFactor: 2,
      ...options
    };
  }

  /**
   * Runs `requestFunction`, retrying on failure with growing delays.
   * Throws the final error once all attempts are exhausted.
   */
  async executeWithRetry(requestFunction) {
    for (let attempt = 0; ; attempt++) {
      try {
        return await requestFunction();
      } catch (error) {
        // Out of retries: surface the last error to the caller.
        if (attempt >= this.options.maxRetries) {
          throw error;
        }
        await this.sleep(this.calculateDelay(attempt));
      }
    }
  }

  /** Delay for a given attempt: baseDelay * factor^attempt, capped. */
  calculateDelay(attempt) {
    const uncapped =
      this.options.baseDelay * Math.pow(this.options.backoffFactor, attempt);
    return Math.min(uncapped, this.options.maxDelay);
  }

  /** Promise-based setTimeout. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}
# 3. Request Preloading
/**
 * Warms caches/connections by issuing low-cost HEAD requests for URLs
 * the app expects to need soon. Best-effort: failures are logged only.
 */
class RequestPreloader {
  constructor() {
    this.preloadQueue = [];
    this.isPreloading = false;
  }

  /** Queues a URL for preloading and kicks off draining if idle. */
  preload(url, options = {}) {
    this.preloadQueue.push({ url, options });
    if (!this.isPreloading) {
      // Fire-and-forget: preloading is intentionally best-effort.
      this.startPreloading();
    }
  }

  /** Drains the queue sequentially; failures are warned, never thrown. */
  async startPreloading() {
    this.isPreloading = true;
    while (this.preloadQueue.length > 0) {
      const next = this.preloadQueue.shift();
      try {
        await fetch(next.url, { ...next.options, method: 'HEAD' });
      } catch (error) {
        console.warn('Preload failed:', next.url, error);
      }
    }
    this.isPreloading = false;
  }
}
# 4. Request Interceptor
/**
 * Ordered chain of request/response interceptors. Each interceptor is an
 * object with optional async `request(req)` and/or `response(res, req)` hooks.
 */
class RequestInterceptor {
  constructor() {
    this.interceptors = [];
  }

  /** Appends an interceptor; hooks run in registration order. */
  addInterceptor(interceptor) {
    this.interceptors.push(interceptor);
  }

  /** Threads `request` through every registered `request` hook in order. */
  async intercept(request) {
    let current = request;
    for (const { request: hook } of this.interceptors) {
      if (hook) {
        current = await hook(current);
      }
    }
    return current;
  }

  /** Threads `response` through every registered `response` hook in order. */
  async interceptResponse(response, request) {
    let current = response;
    for (const { response: hook } of this.interceptors) {
      if (hook) {
        current = await hook(current, request);
      }
    }
    return current;
  }
}
# 5. Request Analytics
/**
 * In-memory counters for request volume, latency and per-endpoint usage.
 */
class RequestAnalytics {
  constructor() {
    this.metrics = {
      totalRequests: 0,
      successfulRequests: 0,
      failedRequests: 0,
      averageResponseTime: 0, // ms, running mean
      totalResponseTime: 0,   // ms, running sum
      requestsByEndpoint: new Map(), // pathname -> count
      requestsByMethod: new Map()    // HTTP method -> count
    };
  }

  /**
   * Records one completed request.
   * @param {{url: string, method?: string}} request
   * @param {{ok: boolean}} response - Anything with a fetch-like `ok` flag.
   * @param {number} duration - Elapsed time in ms.
   */
  recordRequest(request, response, duration) {
    this.metrics.totalRequests++;
    if (response.ok) {
      this.metrics.successfulRequests++;
    } else {
      this.metrics.failedRequests++;
    }
    this.metrics.totalResponseTime += duration;
    this.metrics.averageResponseTime =
      this.metrics.totalResponseTime / this.metrics.totalRequests;
    // Tally by endpoint path (query string excluded).
    const endpoint = new URL(request.url).pathname;
    const endpointCount = this.metrics.requestsByEndpoint.get(endpoint) || 0;
    this.metrics.requestsByEndpoint.set(endpoint, endpointCount + 1);
    // Tally by HTTP method.
    const method = request.method;
    const methodCount = this.metrics.requestsByMethod.get(method) || 0;
    this.metrics.requestsByMethod.set(method, methodCount + 1);
  }

  /** Snapshot of counters plus derived success/failure rates (percent). */
  getMetrics() {
    const { totalRequests, successfulRequests, failedRequests } = this.metrics;
    // Bug fix: the original divided by totalRequests unconditionally, so
    // both rates were NaN before the first request was recorded.
    return {
      ...this.metrics,
      successRate: totalRequests === 0 ? 0 : (successfulRequests / totalRequests) * 100,
      failureRate: totalRequests === 0 ? 0 : (failedRequests / totalRequests) * 100
    };
  }

  /** Most-requested endpoints as [pathname, count] pairs, descending. */
  getTopEndpoints(limit = 10) {
    return Array.from(this.metrics.requestsByEndpoint.entries())
      .sort((a, b) => b[1] - a[1])
      .slice(0, limit);
  }
}
# 6. Request Manager
/**
 * Facade that wires PriorityRequestQueue, RetryRequestHandler,
 * RequestPreloader, RequestInterceptor and RequestAnalytics (all defined
 * earlier in this file) into a single request entry point.
 */
class RequestManager {
constructor(options = {}) {
this.options = {
maxConcurrent: 6,
timeout: 10000,
retries: 3,
...options
};
this.queue = new PriorityRequestQueue(this.options.maxConcurrent);
this.retryHandler = new RetryRequestHandler();
this.preloader = new RequestPreloader();
this.interceptor = new RequestInterceptor();
this.analytics = new RequestAnalytics();
}
// Sends a request through the interceptor chain, the priority queue and
// the retry handler, recording timing analytics on success and failure.
// NOTE(review): PriorityRequestQueue.executeRequest resolves with parsed
// JSON, not a fetch Response, yet recordRequest below reads `response.ok`
// — successful requests therefore likely register as failures in the
// analytics. Confirm the intended response shape before trusting metrics.
async request(url, options = {}) {
const startTime = performance.now();
try {
// Intercept request
const interceptedRequest = await this.interceptor.intercept({
url,
...options
});
// Execute with retry
const response = await this.retryHandler.executeWithRetry(async () => {
return await this.queue.addRequest(interceptedRequest, options.priority || 0);
});
// Intercept response
const interceptedResponse = await this.interceptor.interceptResponse(response, interceptedRequest);
// Record analytics
const duration = performance.now() - startTime;
this.analytics.recordRequest(interceptedRequest, interceptedResponse, duration);
return interceptedResponse;
} catch (error) {
const duration = performance.now() - startTime;
this.analytics.recordRequest({ url, ...options }, { ok: false }, duration);
throw error;
}
}
// Queues a best-effort HEAD preload for `url`.
preload(url, options = {}) {
this.preloader.preload(url, options);
}
// Returns the analytics snapshot (see RequestAnalytics.getMetrics).
getAnalytics() {
return this.analytics.getMetrics();
}
}
Performance Monitoring
Network Performance Tracking
Network Performance Monitoring
# Network Performance Monitoring
# 1. Network Performance Observer
/**
 * Aggregates resource-timing entries into simple network metrics.
 */
class NetworkPerformanceObserver {
  constructor() {
    this.observers = [];
    this.metrics = {
      totalRequests: 0,
      totalBytes: 0,
      averageResponseTime: 0, // ms, running mean
      slowRequests: 0,        // responses slower than 1 s
      failedRequests: 0       // heuristic: zero bytes AND zero responseStart
    };
  }

  /** Starts observing 'resource' entries where PerformanceObserver exists. */
  startObserving() {
    // Bug fix: the original tested `'PerformanceObserver' in window`, which
    // throws ReferenceError where `window` is undefined (workers, Node).
    if (typeof PerformanceObserver === 'undefined') return;
    const observer = new PerformanceObserver((list) => {
      for (const entry of list.getEntries()) {
        this.handleResourceEntry(entry);
      }
    });
    observer.observe({ entryTypes: ['resource'] });
    this.observers.push(observer);
  }

  /** Disconnects all active observers. */
  stopObserving() {
    this.observers.forEach((observer) => observer.disconnect());
    this.observers = [];
  }

  /** Folds one PerformanceResourceTiming-like entry into the metrics. */
  handleResourceEntry(entry) {
    this.metrics.totalRequests++;
    this.metrics.totalBytes += entry.transferSize || 0;
    const responseTime = entry.responseEnd - entry.responseStart;
    this.metrics.averageResponseTime =
      (this.metrics.averageResponseTime * (this.metrics.totalRequests - 1) + responseTime) /
      this.metrics.totalRequests;
    if (responseTime > 1000) {
      // Slow request threshold: 1 second.
      this.metrics.slowRequests++;
    }
    if (entry.transferSize === 0 && entry.responseStart === 0) {
      // Heuristic: blocked/failed requests report neither bytes nor timing.
      this.metrics.failedRequests++;
    }
  }

  /** Copy of the current metrics. */
  getMetrics() {
    return { ...this.metrics };
  }
}
# 2. Connection Quality Monitor
/**
 * Reads the Network Information API (where available) to classify the
 * current connection quality.
 */
class ConnectionQualityMonitor {
  constructor() {
    // Bug fix: referencing `navigator` unguarded throws ReferenceError in
    // environments without it (older Node versions, some worker contexts).
    const nav = typeof navigator !== 'undefined' ? navigator : undefined;
    this.connection =
      nav && (nav.connection || nav.mozConnection || nav.webkitConnection);
    this.metrics = {
      effectiveType: 'unknown',
      downlink: 0,  // Mbps estimate
      rtt: 0,       // ms estimate
      saveData: false
    };
    this.updateMetrics();
  }

  /** Refreshes cached metrics from the live connection object, if any. */
  updateMetrics() {
    if (this.connection) {
      this.metrics.effectiveType = this.connection.effectiveType;
      this.metrics.downlink = this.connection.downlink;
      this.metrics.rtt = this.connection.rtt;
      this.metrics.saveData = this.connection.saveData;
    }
  }

  /** Maps effectiveType to a coarse quality label. */
  getConnectionQuality() {
    this.updateMetrics();
    switch (this.metrics.effectiveType) {
      case '4g':
        return 'excellent';
      case '3g':
        return 'good';
      case '2g':
        return 'poor';
      default:
        return 'unknown';
    }
  }

  /** True when content should be trimmed for slow or data-saver users. */
  shouldOptimizeForSlowConnection() {
    return this.metrics.effectiveType === '2g' || this.metrics.saveData;
  }
}
# 3. Request Performance Tracker
/**
 * Per-request timing/byte tracker keyed by a caller-supplied request id.
 */
class RequestPerformanceTracker {
  constructor() {
    this.requests = new Map(); // requestId -> { url, method, startTime, startBytes }
    this.metrics = {
      totalRequests: 0,
      averageResponseTime: 0, // ms, running mean
      totalBytes: 0,
      cacheHits: 0,
      cacheMisses: 0
    };
  }

  /** Begins tracking a request; pair with endTracking(requestId). */
  startTracking(requestId, url, method) {
    this.requests.set(requestId, {
      url,
      method,
      startTime: performance.now(),
      startBytes: this.getCurrentBytes()
    });
  }

  /**
   * Finishes tracking and folds the sample into the running metrics.
   * @returns {{duration: number, bytes: number, success: boolean, fromCache: boolean}|undefined}
   *   undefined for an unknown requestId.
   */
  endTracking(requestId, success = true, fromCache = false) {
    const request = this.requests.get(requestId);
    if (!request) return;
    const endTime = performance.now();
    const endBytes = this.getCurrentBytes();
    const duration = endTime - request.startTime;
    const bytes = endBytes - request.startBytes;
    this.metrics.totalRequests++;
    this.metrics.averageResponseTime =
      (this.metrics.averageResponseTime * (this.metrics.totalRequests - 1) + duration) /
      this.metrics.totalRequests;
    this.metrics.totalBytes += bytes;
    if (fromCache) {
      this.metrics.cacheHits++;
    } else {
      this.metrics.cacheMisses++;
    }
    this.requests.delete(requestId);
    return { duration, bytes, success, fromCache };
  }

  /**
   * Approximate byte counter.
   * NOTE(review): performance.memory is a non-standard Chrome-only API and
   * measures JS heap size, not network bytes — a rough proxy at best.
   */
  getCurrentBytes() {
    return performance.memory ? performance.memory.usedJSHeapSize : 0;
  }

  /** Metrics snapshot with derived cache hit rate (percent). */
  getMetrics() {
    const lookups = this.metrics.cacheHits + this.metrics.cacheMisses;
    return {
      ...this.metrics,
      // Bug fix: the original divided unconditionally, yielding NaN before
      // the first tracked request completed.
      cacheHitRate: lookups === 0 ? 0 : (this.metrics.cacheHits / lookups) * 100
    };
  }
}
# 4. Network Error Handler
/**
 * Routes network errors to per-category retry strategies and tracks counts.
 */
class NetworkErrorHandler {
  constructor() {
    this.errorCounts = new Map();     // category -> occurrence count
    this.retryStrategies = new Map(); // category -> handler(error, request)
  }

  /** Counts the error and dispatches to its category's strategy (or default). */
  handleError(error, request) {
    const category = this.categorizeError(error);
    this.errorCounts.set(category, (this.errorCounts.get(category) || 0) + 1);
    const strategy = this.retryStrategies.get(category);
    return strategy
      ? strategy(error, request)
      : this.defaultErrorHandler(error, request);
  }

  /** Maps an error to: network_error, server_error, client_error or unknown_error. */
  categorizeError(error) {
    if (error.name === 'TypeError' && error.message.includes('fetch')) {
      return 'network_error';
    }
    if (error.status >= 500) {
      return 'server_error';
    }
    if (error.status >= 400) {
      return 'client_error';
    }
    return 'unknown_error';
  }

  /** Registers a strategy for an error category. */
  setRetryStrategy(errorType, strategy) {
    this.retryStrategies.set(errorType, strategy);
  }

  /** Fallback: log, report to analytics when available, and reject. */
  defaultErrorHandler(error, request) {
    console.error('Network error:', error);
    // Send to error tracking service
    if (window.gtag) {
      gtag('event', 'network_error', {
        error_type: this.categorizeError(error),
        url: request.url,
        method: request.method
      });
    }
    return Promise.reject(error);
  }

  /** Snapshot of error counts as [{ type, count }]. */
  getErrorStats() {
    return Array.from(this.errorCounts, ([type, count]) => ({ type, count }));
  }
}
# 5. Performance Budget Monitor
/**
 * Compares observed network metrics against a configurable budget and
 * accumulates any violations for later reporting.
 */
class NetworkPerformanceBudget {
  /**
   * @param {object} budget - Overrides merged onto the defaults below.
   */
  constructor(budget) {
    this.budget = {
      maxResponseTime: 1000,       // ms (1 second)
      maxRequestSize: 1024 * 1024, // bytes (1MB)
      maxConcurrentRequests: 10,
      maxErrorRate: 5,             // percent
      ...budget
    };
    this.violations = [];
  }

  /**
   * Checks `metrics` against the budget.
   * @returns {Array<{type: string, value: number, limit: number, severity: string}>}
   *   The NEW violations found in this check (also appended to the log).
   */
  checkBudget(metrics) {
    const found = [];
    if (metrics.averageResponseTime > this.budget.maxResponseTime) {
      found.push({
        type: 'response_time',
        value: metrics.averageResponseTime,
        limit: this.budget.maxResponseTime,
        severity: 'warning'
      });
    }
    if (metrics.totalBytes > this.budget.maxRequestSize) {
      found.push({
        type: 'request_size',
        value: metrics.totalBytes,
        limit: this.budget.maxRequestSize,
        severity: 'error'
      });
    }
    if (metrics.totalRequests > this.budget.maxConcurrentRequests) {
      found.push({
        type: 'concurrent_requests',
        value: metrics.totalRequests,
        limit: this.budget.maxConcurrentRequests,
        severity: 'warning'
      });
    }
    this.violations.push(...found);
    return found;
  }

  /** All violations accumulated across every check so far. */
  getViolations() {
    return this.violations;
  }

  /** Budget, accumulated violations, and counts per severity. */
  generateReport() {
    const errors = this.violations.filter((v) => v.severity === 'error').length;
    const warnings = this.violations.filter((v) => v.severity === 'warning').length;
    return {
      budget: this.budget,
      violations: this.violations,
      summary: {
        totalViolations: this.violations.length,
        errors,
        warnings
      }
    };
  }
}
Best Practices
Network Optimization Guidelines
Optimization Best Practices
- Use HTTP/2 and HTTP/3
- Implement proper caching
- Compress responses
- Batch requests when possible
- Use CDN for static assets
- Monitor performance metrics
- Implement retry strategies
Common Issues
- Too many concurrent requests
- Large response sizes
- No caching strategy
- Poor error handling
- Inefficient request patterns
- No performance monitoring
- Missing compression
Summary
Network request optimization involves several key components:
- HTTP Optimization: HTTP/2, HTTP/3, compression, caching
- API Optimization: Request batching, deduplication, response caching
- Request Strategies: Priority queues, retry logic, preloading
- Performance Monitoring: Network metrics, error tracking, budgets
- Best Practices: Optimization guidelines, common issues
Need More Help?
Struggling with network request optimization or need help improving your application's network performance? Our performance experts can help you optimize network requests and improve performance.
Get Network Optimization Help