ZipMarketData refreshes its data once per day. Calling the API more often than that wastes quota and adds latency. A simple cache layer makes your application faster and more cost-efficient.
SQLite Cache (Python)
import sqlite3, requests, json, time
# NOTE(review): datetime/timedelta appear unused in this snippet — confirm before removing.
from datetime import datetime, timedelta
# Single shared connection to the on-disk cache database (file is created on first run).
conn = sqlite3.connect("zip_cache.db")
# One row per (zip_code, endpoint) pair; the composite primary key lets
# INSERT OR REPLACE overwrite a stale entry in place instead of accumulating rows.
conn.execute('''
CREATE TABLE IF NOT EXISTS cache (
zip_code TEXT, endpoint TEXT, data TEXT,
fetched_at REAL, PRIMARY KEY (zip_code, endpoint)
)''')
# Cache entries are considered fresh for this many seconds.
CACHE_TTL = 86400 # 24 hours
def cached_get(endpoint, zip_code, key, extra_params=None):
    """Fetch *endpoint* for *zip_code*, serving from the SQLite cache when fresh.

    Args:
        endpoint: API endpoint path appended to the base URL.
        zip_code: ZIP code to query.
        key: RapidAPI proxy secret sent in the request header.
        extra_params: Optional dict of additional query parameters.

    Returns:
        The decoded JSON response (from cache if younger than CACHE_TTL).

    Raises:
        requests.HTTPError: if the API returns a non-2xx status.
    """
    params = {"zip_code": zip_code, **(extra_params or {})}
    # Fold extra_params into the cache key. Previously two calls with the same
    # endpoint but different extra_params shared one cache row, so the second
    # call could silently return data cached for different parameters.
    # sort_keys makes the serialization canonical regardless of dict order.
    cache_key = endpoint
    if extra_params:
        cache_key += "?" + json.dumps(extra_params, sort_keys=True)
    row = conn.execute(
        "SELECT data, fetched_at FROM cache WHERE zip_code=? AND endpoint=?",
        (zip_code, cache_key)).fetchone()
    if row and (time.time() - row[1]) < CACHE_TTL:
        return json.loads(row[0])
    # Cache miss or stale entry: hit the API, then persist the fresh payload.
    r = requests.get(f"https://zipmarketdata.com/{endpoint}",
                     params=params,
                     headers={"x-rapidapi-proxy-secret": key}, timeout=10)
    r.raise_for_status()
    data = r.json()
    conn.execute("INSERT OR REPLACE INTO cache VALUES (?,?,?,?)",
                 (zip_code, cache_key, json.dumps(data), time.time()))
    conn.commit()
    return data
Redis Cache (Node.js)
const redis = require('redis');
const client = redis.createClient();
// node-redis v4 (the version that provides the promise-based get/setEx API
// used below) does NOT connect automatically: without this call every
// command rejects with "The client is closed".
client.connect();
// Cache entries expire after this many seconds.
const TTL = 86400; // 24 hours
// Look up market data for a ZIP code, consulting Redis first.
// On a miss, fetch from the API and store the result with a TTL.
async function getWithCache(zipCode) {
  const cacheKey = `mkt:${zipCode}`;
  const hit = await client.get(cacheKey);
  if (hit) {
    return JSON.parse(hit);
  }
  const fresh = await fetchFromAPI(zipCode);
  await client.setEx(cacheKey, TTL, JSON.stringify(fresh));
  return fresh;
}