Django supports multiple cache backends, each with distinct characteristics, performance profiles, and use cases. Choosing the right backend and configuring it properly is crucial for optimal caching performance. This chapter covers all available backends, their configuration options, and guidance for selecting the best backend for your specific requirements.
Django provides several built-in cache backends, and third-party packages such as django-redis add further options.
Redis is the most popular and feature-rich cache backend for Django applications.
# settings.py
#
# Basic Redis cache using Django's built-in backend (Django 4.0+).
# NOTE: 'CLIENT_CLASS' is a django-redis-specific option (see the
# django-redis examples below); the built-in backend forwards OPTIONS
# to the redis client, which does not accept that key, so OPTIONS is
# omitted here.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.redis.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
    }
}
# Alternative configuration using the django-redis package, whose client
# exposes extra knobs such as connection-pool tuning.
_pool_kwargs = {
    'max_connections': 50,
    'retry_on_timeout': True,
}

CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'CONNECTION_POOL_KWARGS': _pool_kwargs,
        },
    }
}
# settings.py - Production Redis setup
_REDIS_NODES = [
    'redis://redis-master:6379/1',
    'redis://redis-replica-1:6379/1',
    'redis://redis-replica-2:6379/1',
]

CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': _REDIS_NODES,
        'OPTIONS': {
            # ShardClient spreads keys across every listed node.
            'CLIENT_CLASS': 'django_redis.client.ShardClient',
            'CONNECTION_POOL_KWARGS': {
                'max_connections': 100,
                'retry_on_timeout': True,
                'socket_keepalive': True,
                'socket_keepalive_options': {},
            },
            # Compress and JSON-encode values before they hit Redis.
            'COMPRESSOR': 'django_redis.compressors.zlib.ZlibCompressor',
            'SERIALIZER': 'django_redis.serializers.json.JSONSerializer',
            # Treat Redis outages as cache misses instead of 500s.
            'IGNORE_EXCEPTIONS': True,
        },
        'KEY_PREFIX': 'myapp',
        'VERSION': 1,
        'TIMEOUT': 300,  # default TTL: five minutes
    },
    # Dedicated instance so session churn cannot evict page data.
    'sessions': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://redis-sessions:6379/2',
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'CONNECTION_POOL_KWARGS': {'max_connections': 20},
        },
        'TIMEOUT': 1800,  # thirty minutes
    },
    # Short-lived scratch data.
    'temporary': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://redis-temp:6379/3',
        'OPTIONS': {'CLIENT_CLASS': 'django_redis.client.DefaultClient'},
        'TIMEOUT': 60,  # one minute
    },
}

# Store Django sessions in the dedicated 'sessions' cache.
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = 'sessions'
# settings.py - Redis Cluster setup
#
# Six cluster nodes; the cluster client handles key distribution.
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': [
            f'redis://redis-cluster-node{node}:7000/1' for node in range(1, 7)
        ],
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.RedisClusterClient',
            'CONNECTION_POOL_CLASS': 'rediscluster.connection.ClusterConnectionPool',
            'CONNECTION_POOL_CLASS_KWARGS': {
                # Allow startup even if not all hash slots are covered.
                'skip_full_coverage_check': True,
                'max_connections_per_node': 50,
            },
            # LZ4 + MessagePack: fast compression, compact encoding.
            'COMPRESSOR': 'django_redis.compressors.lz4.Lz4Compressor',
            'SERIALIZER': 'django_redis.serializers.msgpack.MSGPackSerializer',
        },
    }
}
# settings.py - Secure Redis configuration
import os

# rediss:// (double "s") selects TLS; the password comes from the
# environment so it never lands in version control.  A missing
# REDIS_PASSWORD fails fast at import time, which is deliberate.
_redis_location = f"rediss://:{os.environ['REDIS_PASSWORD']}@redis.example.com:6380/1"

CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': _redis_location,
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'CONNECTION_POOL_KWARGS': {
                # Mutual TLS: verify the server and present a client cert.
                'ssl_cert_reqs': 'required',
                'ssl_ca_certs': '/path/to/ca-certificates.crt',
                'ssl_certfile': '/path/to/client-cert.pem',
                'ssl_keyfile': '/path/to/client-key.pem',
            },
        },
    }
}
# Alternative using environment variables, with sane local defaults.
REDIS_URL = os.environ.get('REDIS_URL', 'redis://localhost:6379/1')
_pool_size = int(os.environ.get('REDIS_MAX_CONNECTIONS', 50))

CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': REDIS_URL,
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'CONNECTION_POOL_KWARGS': {'max_connections': _pool_size},
        },
    }
}
Memcached is a high-performance, distributed memory caching system.
# settings.py
#
# Single Memcached instance via the pymemcache binding.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
        'LOCATION': '127.0.0.1:11211',
    }
}

# Multiple Memcached servers; entries are distributed across the pool.
_MEMCACHED_NODES = [
    '172.19.26.240:11211',
    '172.19.26.242:11211',
    '172.19.26.244:11211',
]
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
        'LOCATION': _MEMCACHED_NODES,
    }
}
# settings.py - Production Memcached setup
#
# Lower-case OPTIONS keys are passed through to the pymemcache client.
_pymemcache_options = {
    'server_max_value_length': 1024 * 1024 * 2,  # cap values at 2MB
    'no_delay': True,    # disable Nagle's algorithm for lower latency
    'ignore_exc': True,  # treat memcached errors as cache misses
    'max_pool_size': 4,
    'use_pooling': True,
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache',
        'LOCATION': [
            'memcached-1.example.com:11211',
            'memcached-2.example.com:11211',
            'memcached-3.example.com:11211',
        ],
        'OPTIONS': _pymemcache_options,
        'KEY_PREFIX': 'myapp',
        'VERSION': 1,
        'TIMEOUT': 300,
    }
}
# Using pylibmc (C extension, faster)
_pylibmc_behaviors = {
    'tcp_nodelay': True,
    # Consistent hashing keeps key remapping small when nodes change.
    'ketama': True,
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
        'LOCATION': [
            '172.19.26.240:11211',
            '172.19.26.242:11211',
        ],
        'OPTIONS': {
            'binary': True,  # use the binary protocol
            'behaviors': _pylibmc_behaviors,
        },
    }
}
The database backend stores cache data in your database.
# settings.py
#
# Database-backed cache: survives restarts and needs no extra service.
# LOCATION is the name of the backing table.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'cache_table',
        'OPTIONS': {
            'MAX_ENTRIES': 1000000,  # cap on stored rows
            'CULL_FREQUENCY': 3,     # drop 1/3 of entries once the cap is hit
        },
    }
}

# Create the backing table first:
#     python manage.py createcachetable
# settings.py - Multiple cache tables for different purposes
#
# alias -> (table name, MAX_ENTRIES, CULL_FREQUENCY)
_DB_CACHE_SPECS = {
    'default': ('general_cache', 500000, 3),
    'sessions': ('session_cache', 100000, 4),
    'api': ('api_cache', 200000, 2),
}

CACHES = {
    alias: {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': table,
        'OPTIONS': {
            'MAX_ENTRIES': max_entries,
            'CULL_FREQUENCY': cull,
        },
    }
    for alias, (table, max_entries, cull) in _DB_CACHE_SPECS.items()
}
# Custom database table for the cache.
#
# DatabaseCache takes its table name from LOCATION; there is no
# 'db_table' key in OPTIONS (the original snippet's OPTIONS entry had
# no effect).  Name the table here and create it with:
#     python manage.py createcachetable
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'custom_cache_table',
    }
}
Stores cache data as files on the filesystem.
# settings.py
import os

# File-based cache: each entry is stored as a file under LOCATION.
# The directory must exist and be writable by the application server.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': '/var/tmp/django_cache',
        'OPTIONS': {
            'MAX_ENTRIES': 1000000,
            'CULL_FREQUENCY': 3,
        },
    }
}
# Development configuration
#
# Keeps cache files inside the project tree (BASE_DIR comes from the
# surrounding settings module) with a much smaller entry budget.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': os.path.join(BASE_DIR, 'cache'),
        'TIMEOUT': 300,
        'OPTIONS': {'MAX_ENTRIES': 10000},
    }
}
Stores cache data in local process memory.
# settings.py
#
# Local-memory cache: private to each process, so every worker keeps
# its own copy.  LOCATION only distinguishes multiple locmem caches.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'unique-snowflake',
        'OPTIONS': {
            'MAX_ENTRIES': 10000,
            'CULL_FREQUENCY': 3,
        },
    }
}
# Multiple local memory caches, kept separate by their LOCATION names.
CACHES = {
    alias: {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': location,
    }
    for alias, location in (
        ('default', 'default-cache'),
        ('sessions', 'session-cache'),
    )
}
No-op backend for development and testing.
# settings.py - Development/Testing
#
# DummyCache implements the cache API but stores nothing, so code that
# uses caching still runs while nothing is actually cached.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}
# Conditional dummy cache
import os

# Setting DISABLE_CACHE in the environment swaps the real Redis cache
# for the no-op backend (handy for test runs).
if os.environ.get('DISABLE_CACHE'):
    _default_cache = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}
else:
    _default_cache = {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
    }

CACHES = {'default': _default_cache}
Using multiple cache backends for different purposes.
# settings.py - Multi-tier caching setup
CACHES = {
    # Tier 1: in-process memory -- fastest, tiny, per-worker.
    'hot': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'hot-cache',
        'TIMEOUT': 60,
        'OPTIONS': {'MAX_ENTRIES': 1000},
    },
    # Tier 2: Redis -- shared across workers, still fast.
    'warm': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
        'TIMEOUT': 300,
    },
    # Tier 3: database -- slowest, but persistent.
    'cold': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'cold_cache_table',
        'TIMEOUT': 3600,
    },
    # Fallback for code that does not pick a tier explicitly.
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
    },
}
# utils/cache_router.py
from django.core.cache import caches
class CacheRouter:
    """Route cache operations across a hot/warm/cold cache hierarchy."""

    def __init__(self):
        # Bind the three tiers configured in settings.CACHES.
        self.hot_cache = caches['hot']
        self.warm_cache = caches['warm']
        self.cold_cache = caches['cold']

    def get(self, key, default=None):
        """Look *key* up tier by tier, promoting hits one level up."""
        hit = self.hot_cache.get(key)
        if hit is not None:
            return hit
        hit = self.warm_cache.get(key)
        if hit is not None:
            # Warm hit: copy into the hot tier for faster re-reads.
            self.hot_cache.set(key, hit, 60)
            return hit
        hit = self.cold_cache.get(key)
        if hit is None:
            return default
        # Cold hit: promote one level, to the warm tier.
        self.warm_cache.set(key, hit, 300)
        return hit

    def set(self, key, value, timeout=None, cache_level='warm'):
        """Write to the named tier, or to every tier if the level is unknown."""
        tiers = {
            'hot': (self.hot_cache, 60),
            'warm': (self.warm_cache, 300),
            'cold': (self.cold_cache, 3600),
        }
        if cache_level in tiers:
            backend, default_timeout = tiers[cache_level]
            backend.set(key, value, timeout or default_timeout)
        else:
            # Unknown level: fan out with each tier's default timeout.
            for backend, default_timeout in tiers.values():
                backend.set(key, value, default_timeout)

    def delete(self, key):
        """Remove *key* from every tier."""
        for backend in (self.hot_cache, self.warm_cache, self.cold_cache):
            backend.delete(key)
# Usage
cache_router = CacheRouter()


def get_expensive_data(key):
    """Return data for *key* from the hierarchy, computing it on a miss."""
    cached = cache_router.get(key)
    if cached is not None:
        return cached
    cached = perform_expensive_operation()
    cache_router.set(key, cached, cache_level='warm')
    return cached
# utils/cache_benchmark.py
import time
import random
import string
from django.core.cache import caches
from django.test import TestCase
class CacheBenchmark:
    """Benchmark set/get/delete throughput of a configured cache backend.

    Timing uses time.perf_counter() rather than time.time(): it is
    monotonic and high-resolution, so short runs cannot report a zero
    or negative duration (the original time.time() version could
    divide by zero on fast backends).
    """

    def __init__(self, cache_alias='default'):
        """Bind to the backend named *cache_alias* and build the workload."""
        self.cache = caches[cache_alias]
        self.test_data = self.generate_test_data()

    def generate_test_data(self, count=1000):
        """Return *count* pairs of key -> random 100-letter string."""
        return {
            f"test_key_{i}": ''.join(random.choices(string.ascii_letters, k=100))
            for i in range(count)
        }

    def _populate(self):
        # Seed the cache so get/delete benchmarks measure warm operations.
        for key, value in self.test_data.items():
            self.cache.set(key, value, 300)

    def _stats(self, operation, duration):
        """Assemble the per-operation result dict; guards zero duration."""
        count = len(self.test_data)
        return {
            'operation': operation,
            'total_time': duration,
            'operations': count,
            'ops_per_second': count / duration if duration > 0 else float('inf'),
        }

    def benchmark_set_operations(self):
        """Benchmark cache set operations."""
        start = time.perf_counter()
        for key, value in self.test_data.items():
            self.cache.set(key, value, 300)
        return self._stats('set', time.perf_counter() - start)

    def benchmark_get_operations(self):
        """Benchmark cache get operations (cache is populated first)."""
        self._populate()
        start = time.perf_counter()
        for key in self.test_data:
            self.cache.get(key)
        return self._stats('get', time.perf_counter() - start)

    def benchmark_delete_operations(self):
        """Benchmark cache delete operations (cache is populated first)."""
        self._populate()
        start = time.perf_counter()
        for key in self.test_data:
            self.cache.delete(key)
        return self._stats('delete', time.perf_counter() - start)

    def run_full_benchmark(self):
        """Run the set, get and delete benchmarks and collect the results."""
        return {
            'backend': self.cache.__class__.__name__,
            'set': self.benchmark_set_operations(),
            'get': self.benchmark_get_operations(),
            'delete': self.benchmark_delete_operations(),
        }
# Management command to run benchmarks
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """Management command that benchmarks one or more cache backends."""

    help = 'Benchmark cache backends'

    def add_arguments(self, parser):
        """Register --backends: one or more cache aliases to test."""
        parser.add_argument(
            '--backends',
            nargs='+',
            default=['default'],
            help='Cache backends to benchmark',
        )

    def handle(self, *args, **options):
        """Run the full benchmark suite for every requested backend."""
        for backend in options['backends']:
            self.stdout.write(f'\nBenchmarking {backend} backend...')
            results = CacheBenchmark(backend).run_full_benchmark()
            self.stdout.write(f"Backend: {results['backend']}")
            # One line per operation, in a fixed order.
            for op in ('set', 'get', 'delete'):
                rate = results[op]['ops_per_second']
                self.stdout.write(f"{op.capitalize()} ops/sec: {rate:.2f}")
# Decision matrix for cache backend selection
class CacheBackendSelector:
    """Score and rank cache backends against a set of requirements."""

    # Qualitative profile of each backend; the scoring input.
    BACKEND_CHARACTERISTICS = {
        'redis': {
            'performance': 'excellent',
            'persistence': True,
            'features': 'rich',
            'complexity': 'medium',
            'memory_usage': 'efficient',
            'network_overhead': 'low',
            'use_cases': ['high_traffic', 'complex_data', 'persistence_needed'],
        },
        'memcached': {
            'performance': 'excellent',
            'persistence': False,
            'features': 'basic',
            'complexity': 'low',
            'memory_usage': 'very_efficient',
            'network_overhead': 'very_low',
            'use_cases': ['simple_caching', 'high_performance', 'distributed'],
        },
        'database': {
            'performance': 'good',
            'persistence': True,
            'features': 'basic',
            'complexity': 'low',
            'memory_usage': 'depends_on_db',
            'network_overhead': 'medium',
            'use_cases': ['simple_setup', 'persistence_needed', 'small_scale'],
        },
        'filesystem': {
            'performance': 'fair',
            'persistence': True,
            'features': 'basic',
            'complexity': 'low',
            'memory_usage': 'low',
            'network_overhead': 'none',
            'use_cases': ['single_server', 'simple_setup', 'development'],
        },
        'locmem': {
            'performance': 'excellent',
            'persistence': False,
            'features': 'basic',
            'complexity': 'very_low',
            'memory_usage': 'high',
            'network_overhead': 'none',
            'use_cases': ['single_process', 'development', 'testing'],
        },
    }

    @classmethod
    def recommend_backend(cls, requirements):
        """Return (backend, score) pairs sorted best-first.

        *requirements* is a dict of boolean flags: high_performance,
        persistence_needed, simple_setup, advanced_features, distributed.
        """
        ranked = {}
        for name, traits in cls.BACKEND_CHARACTERISTICS.items():
            points = 0
            # Reward fast backends when speed matters.
            if requirements.get('high_performance'):
                if traits['performance'] == 'excellent':
                    points += 3
                elif traits['performance'] == 'good':
                    points += 2
            # Persistence is worth +3; its absence costs -2.
            if requirements.get('persistence_needed'):
                points += 3 if traits['persistence'] else -2
            # Favour low-complexity backends for simple setups.
            if requirements.get('simple_setup') and traits['complexity'] in ('low', 'very_low'):
                points += 2
            if requirements.get('advanced_features') and traits['features'] == 'rich':
                points += 2
            # Only the networked backends can serve several hosts.
            if requirements.get('distributed') and name in ('redis', 'memcached'):
                points += 2
            ranked[name] = points
        return sorted(ranked.items(), key=lambda item: item[1], reverse=True)
# Usage example
selector = CacheBackendSelector()
_requirements = {
    'high_performance': True,
    'persistence_needed': True,
    'distributed': True,
    'advanced_features': True,
}
recommendations = selector.recommend_backend(_requirements)

print("Recommended backends:")
for name, points in recommendations:
    print(f"{name}: {points} points")
Choosing the right cache backend is crucial for optimal performance. Redis offers the best balance of performance, features, and reliability for most production applications. Memcached excels in pure performance scenarios, while database caching provides simplicity and persistence. Consider your specific requirements for performance, persistence, features, and operational complexity when making your selection.
Introduction to Caching
Caching is a fundamental performance optimization technique that stores frequently accessed data in fast storage locations, reducing the need to repeatedly compute or fetch the same information. Understanding caching principles, patterns, and trade-offs is essential for building high-performance Django applications that scale efficiently and provide excellent user experiences.
Per View Caching
View-level caching is one of the most effective ways to improve Django application performance by caching entire HTTP responses. This approach eliminates the need to execute view logic, database queries, and template rendering for cached responses, providing dramatic performance improvements for content that doesn't change frequently.