Performance Optimization

Optimize your SavvyMoney integration for speed, reliability, and efficiency.

Caching Strategies

Token Caching

Access tokens are valid for one hour. Cache and reuse them instead of authenticating before every API call:

Token Cache Implementation
class TokenCache {
  private cache = new Map<string, { token: string; expiresAt: number }>();

  async getAccessToken(): Promise<string> {
    const cached = this.cache.get('access_token');
    const now = Date.now();
    const bufferMs = 5 * 60 * 1000; // 5 minute buffer

    if (cached && cached.expiresAt > now + bufferMs) {
      return cached.token;
    }

    const response = await this.fetchNewToken();

    this.cache.set('access_token', {
      token: response.access_token,
      expiresAt: now + (response.expires_in * 1000)
    });

    return response.access_token;
  }

  private async fetchNewToken() {
    const response = await fetch('https://api.savvymoney.com/oauth/token', {
      method: 'POST',
      headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
      body: new URLSearchParams({
        grant_type: 'client_credentials',
        client_id: process.env.SAVVY_CLIENT_ID!,
        client_secret: process.env.SAVVY_CLIENT_SECRET!
      })
    });

    return response.json();
  }
}

export const tokenCache = new TokenCache();
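
A minimal sketch of attaching the cached token to outgoing requests; the base URL and Bearer scheme here are assumptions, so adjust them to match your integration:

Using the Cached Token
async function savvyFetch(path: string, init: RequestInit = {}) {
  // Reuses the cached token; a new one is fetched only near expiry
  const token = await tokenCache.getAccessToken();

  const headers = new Headers(init.headers);
  headers.set('Authorization', `Bearer ${token}`);

  return fetch(`https://api.savvymoney.com${path}`, { ...init, headers });
}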

Credit Score Caching

Credit scores typically update monthly, so a cache TTL of up to 24 hours is usually safe; rely on webhooks (see Cache Invalidation below) to refresh sooner when something changes:

Credit Score Cache
import Redis from 'ioredis';

const redis = new Redis(process.env.REDIS_URL!);

interface CachedScore {
  score: number;
  scoreDate: string;
  rating: string;
  cachedAt: number;
}

async function getCreditScore(userId: string): Promise<CachedScore> {
  const cacheKey = `credit_score:${userId}`;

  // Try cache first
  const cached = await redis.get(cacheKey);
  if (cached) {
    const data: CachedScore = JSON.parse(cached);

    // Return cached data if less than 24 hours old
    if (Date.now() - data.cachedAt < 24 * 60 * 60 * 1000) {
      return data;
    }
  }

  // Fetch fresh data
  const score = await savvyApi.getCreditScore(userId);
  const entry: CachedScore = { ...score, cachedAt: Date.now() };

  // Cache for 24 hours
  await redis.setex(cacheKey, 86400, JSON.stringify(entry));

  return entry;
}

Cache Invalidation

Invalidate caches when webhooks indicate data changes:

Webhook-based Cache Invalidation
app.post('/webhooks/savvymoney', async (req, res) => {
  const { type, data } = req.body;

  switch (type) {
    case 'score.updated':
    case 'score.changed':
      // Invalidate the score cache
      await redis.del(`credit_score:${data.userId}`);
      break;

    case 'user.deleted': {
      // Clear all caches for the user
      // Note: KEYS is O(N) and blocks Redis; prefer SCAN in production (see below)
      const keys = await redis.keys(`*:${data.userId}`);
      if (keys.length > 0) {
        await redis.del(...keys);
      }
      break;
    }
  }

  res.status(200).json({ received: true });
});
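
KEYS scans the whole keyspace in a single blocking command. On larger datasets, a non-blocking sketch using ioredis's scanStream (same key pattern as above):

Non-blocking Invalidation with SCAN
async function deleteUserKeys(userId: string): Promise<void> {
  // Walk the keyspace incrementally instead of blocking Redis with KEYS
  const stream = redis.scanStream({ match: `*:${userId}`, count: 100 });

  for await (const keys of stream) {
    if (keys.length > 0) {
      await redis.del(...keys);
    }
  }
}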

Connection Management

Connection Pooling

Reuse connections with keep-alive pooling so each request does not pay for a new TCP and TLS handshake:

HTTP Agent with Connection Pooling
import https from 'https';

const agent = new https.Agent({
  keepAlive: true,
  keepAliveMsecs: 30000,
  maxSockets: 50,
  maxFreeSockets: 10,
  timeout: 30000
});

const savvyClient = {
  fetch: (url: string, options: RequestInit = {}) => {
    return fetch(url, {
      ...options,
      // `agent` is a node-fetch option; Node's built-in fetch ignores it
      // @ts-ignore
      agent
    });
  }
};
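
Node's built-in fetch (Node 18+) ignores the agent option above; it pools connections through undici instead. A minimal sketch using the undici package's dispatcher (option names are undici's):

Connection Pooling with undici
import { Agent, fetch } from 'undici';

// One pooled Agent reused for every request: up to 50 sockets per origin,
// idle sockets kept alive for 30 seconds
const dispatcher = new Agent({ connections: 50, keepAliveTimeout: 30000 });

export async function pooledGet(url: string) {
  // Routing the call through the shared dispatcher reuses pooled sockets
  return fetch(url, { dispatcher });
}
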
Java Connection Pool
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class HttpClientConfig {

    @Bean
    public CloseableHttpClient httpClient() {
        PoolingHttpClientConnectionManager connManager =
            new PoolingHttpClientConnectionManager();

        connManager.setMaxTotal(100);
        connManager.setDefaultMaxPerRoute(20);

        return HttpClients.custom()
            .setConnectionManager(connManager)
            .setKeepAliveStrategy((response, context) -> 30000)
            .build();
    }
}

Database Connection Pooling

Database Pool Configuration
import { Pool } from 'pg';

const pool = new Pool({
  host: process.env.DB_HOST,
  database: process.env.DB_NAME,
  max: 20,                      // Maximum connections
  idleTimeoutMillis: 30000,     // Close idle connections after 30s
  connectionTimeoutMillis: 2000
});

// Use the pool for all database operations
async function getUserData(userId: string) {
  const client = await pool.connect();
  try {
    return await client.query('SELECT * FROM users WHERE id = $1', [userId]);
  } finally {
    client.release();
  }
}

Request Optimization

Batch Requests

Use batch endpoints when enrolling multiple users:

Batch Enrollment
// ❌ Inefficient - multiple requests
for (const user of users) {
  await savvyApi.enrollUser(user); // N requests
}

// ✅ Efficient - single batch request
const results = await savvyApi.batchEnrollUsers(users); // 1 request
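
Batch endpoints typically cap how many records one call may contain. A sketch of chunking a large list; the 100-record chunk size is an assumption, so check the API reference for the real limit:

Chunked Batch Enrollment
async function enrollInChunks<T>(users: T[], chunkSize = 100) {
  const results: unknown[] = [];

  // One request per chunk instead of one per user
  for (let i = 0; i < users.length; i += chunkSize) {
    const chunk = users.slice(i, i + chunkSize);
    results.push(await savvyApi.batchEnrollUsers(chunk));
  }

  return results;
}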

Parallel Requests

Execute independent requests in parallel:

Parallel API Calls
async function getUserDashboard(userId: string) {
  // ❌ Sequential - slow, each call waits for the previous one
  // const score = await savvyApi.getCreditScore(userId);
  // const offers = await savvyApi.getOffers(userId);
  // const alerts = await savvyApi.getAlerts(userId);

  // ✅ Parallel - fast, the three calls run concurrently
  const [score, offers, alerts] = await Promise.all([
    savvyApi.getCreditScore(userId),
    savvyApi.getOffers(userId),
    savvyApi.getAlerts(userId)
  ]);

  return { score, offers, alerts };
}

Request Deduplication

Prevent duplicate concurrent requests:

Request Deduplication
class RequestDeduplicator {
  private pending = new Map<string, Promise<any>>();

  async dedupe<T>(key: string, fn: () => Promise<T>): Promise<T> {
    // Return the pending request if one exists
    if (this.pending.has(key)) {
      return this.pending.get(key)!;
    }

    // Create a new request and clear it once it settles
    const promise = fn().finally(() => {
      this.pending.delete(key);
    });

    this.pending.set(key, promise);
    return promise;
  }
}

const deduplicator = new RequestDeduplicator();

// Usage - concurrent calls for the same key resolve to the same request
async function getCreditScore(userId: string) {
  return deduplicator.dedupe(
    `credit_score:${userId}`,
    () => savvyApi.getCreditScore(userId)
  );
}

Response Optimization

Compression

Enable response compression:

Enable Compression
import compression from 'compression';

app.use(compression({
  filter: (req, res) => {
    if (req.headers['x-no-compression']) {
      return false;
    }
    return compression.filter(req, res);
  },
  level: 6
}));

Selective Field Fetching

Request only needed fields when available:

Selective Fields
// Fetch only what you need
const score = await savvyApi.getCreditScore(userId, {
  fields: ['score', 'scoreDate', 'rating']
});

// Instead of fetching everything
const fullScore = await savvyApi.getCreditScore(userId);

Pagination

Always paginate large result sets:

Efficient Pagination
async function* getAllAlerts(userId: string) {
  let cursor: string | undefined;

  do {
    const response = await savvyApi.getAlerts(userId, {
      limit: 100,
      cursor
    });

    for (const alert of response.data) {
      yield alert;
    }

    cursor = response.pagination.nextCursor;
  } while (cursor);
}

// Usage
for await (const alert of getAllAlerts(userId)) {
  processAlert(alert);
}

Error Handling & Retries

Exponential Backoff

Implement exponential backoff for retries:

Exponential Backoff
interface RetryConfig {
  maxRetries: number;
  baseDelayMs: number;
  maxDelayMs: number;
}

async function withRetry<T>(
  fn: () => Promise<T>,
  config: RetryConfig = { maxRetries: 3, baseDelayMs: 1000, maxDelayMs: 30000 }
): Promise<T> {
  let lastError: Error | undefined;

  for (let attempt = 0; attempt <= config.maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error as Error;

      // Don't retry on client errors (4xx except 429)
      const status = (error as any).status;
      if (status >= 400 && status < 500 && status !== 429) {
        throw error;
      }

      if (attempt < config.maxRetries) {
        // Exponential backoff: 1s, 2s, 4s, ... capped at maxDelayMs
        const delay = Math.min(
          config.baseDelayMs * Math.pow(2, attempt),
          config.maxDelayMs
        );

        // Add jitter to avoid synchronized retries
        const jitter = delay * 0.1 * Math.random();
        await new Promise(resolve => setTimeout(resolve, delay + jitter));
      }
    }
  }

  throw lastError!;
}

// Usage
const score = await withRetry(() => savvyApi.getCreditScore(userId));

Circuit Breaker

Prevent cascading failures with a circuit breaker:

Circuit Breaker Pattern
enum CircuitState {
  CLOSED,
  OPEN,
  HALF_OPEN
}

class CircuitBreaker {
  private state = CircuitState.CLOSED;
  private failures = 0;
  private lastFailure = 0;
  private readonly threshold: number;
  private readonly timeout: number;

  constructor(threshold = 5, timeoutMs = 30000) {
    this.threshold = threshold;
    this.timeout = timeoutMs;
  }

  async execute<T>(fn: () => Promise<T>): Promise<T> {
    if (this.state === CircuitState.OPEN) {
      if (Date.now() - this.lastFailure > this.timeout) {
        this.state = CircuitState.HALF_OPEN;
      } else {
        throw new Error('Circuit breaker is open');
      }
    }

    try {
      const result = await fn();
      this.onSuccess();
      return result;
    } catch (error) {
      this.onFailure();
      throw error;
    }
  }

  private onSuccess() {
    this.failures = 0;
    this.state = CircuitState.CLOSED;
  }

  private onFailure() {
    this.failures++;
    this.lastFailure = Date.now();

    if (this.failures >= this.threshold) {
      this.state = CircuitState.OPEN;
    }
  }
}

const savvyCircuit = new CircuitBreaker(5, 30000);

async function getCreditScore(userId: string) {
  return savvyCircuit.execute(() => savvyApi.getCreditScore(userId));
}

Monitoring & Metrics

Request Timing

Track API call performance:

Request Timing
class MetricsCollector {
  private metrics: Map<string, number[]> = new Map();

  recordTiming(operation: string, durationMs: number) {
    if (!this.metrics.has(operation)) {
      this.metrics.set(operation, []);
    }
    this.metrics.get(operation)!.push(durationMs);
  }

  getStats(operation: string) {
    const timings = this.metrics.get(operation) || [];
    if (timings.length === 0) return null;

    const sorted = [...timings].sort((a, b) => a - b);

    return {
      count: timings.length,
      avg: timings.reduce((a, b) => a + b, 0) / timings.length,
      p50: sorted[Math.floor(sorted.length * 0.5)],
      p95: sorted[Math.floor(sorted.length * 0.95)],
      p99: sorted[Math.floor(sorted.length * 0.99)]
    };
  }
}

const metrics = new MetricsCollector();

// Wrap API calls
async function timedFetch(operation: string, fn: () => Promise<any>) {
  const start = Date.now();
  try {
    return await fn();
  } finally {
    metrics.recordTiming(operation, Date.now() - start);
  }
}
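
Used with the client calls from earlier sections, wrap each request under a stable operation name so percentiles accumulate per endpoint:

Timed API Call
// Each call records one timing sample under the 'getCreditScore' operation
const score = await timedFetch('getCreditScore', () => savvyApi.getCreditScore(userId));

// Inspect the collected percentiles, e.g. from a metrics endpoint or a periodic log
console.log(metrics.getStats('getCreditScore'));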

Health Checks

Implement health check endpoints:

Health Check Endpoint
app.get('/health', async (req, res) => {
  const checks = {
    api: false,
    database: false,
    cache: false
  };

  // Check SavvyMoney API
  try {
    const token = await tokenCache.getAccessToken();
    checks.api = !!token;
  } catch (e) {
    checks.api = false;
  }

  // Check database
  try {
    await pool.query('SELECT 1');
    checks.database = true;
  } catch (e) {
    checks.database = false;
  }

  // Check Redis
  try {
    await redis.ping();
    checks.cache = true;
  } catch (e) {
    checks.cache = false;
  }

  const healthy = Object.values(checks).every(Boolean);

  res.status(healthy ? 200 : 503).json({
    status: healthy ? 'healthy' : 'degraded',
    checks
  });
});

Performance Checklist

  • Token caching implemented
  • Credit score caching with appropriate TTL
  • Webhook-based cache invalidation
  • Connection pooling configured
  • Batch endpoints used where applicable
  • Parallel requests for independent calls
  • Request deduplication implemented
  • Response compression enabled
  • Pagination for large datasets
  • Exponential backoff for retries
  • Circuit breaker pattern implemented
  • Request timing metrics collected
  • Health check endpoints configured

Next Steps