
Idempotency & Exactly-Once Processing

Making operations safe to retry

In distributed systems, messages and requests can be duplicated: clients retry after timeouts, networks redeliver, and message brokers typically guarantee only at-least-once delivery.


Problem: Without idempotency, retries create duplicates!

Solution: Idempotency - Make operations safe to retry!


An idempotent operation produces the same result whether it is executed once or many times.

f(f(x)) = f(x)

Calling the function twice has the same effect as calling it once.

✅ Idempotent Operations:

  • GET /users/123 - Always returns same user
  • PUT /users/123 - Replaces user (same result)
  • DELETE /users/123 - Deletes user (same result if already deleted)
  • SET balance = 100 - Sets to 100 (same result)

❌ Non-Idempotent Operations:

  • POST /orders - Creates new order each time
  • balance = balance + 50 - Adds 50 each time
  • send_email() - Sends email each time
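
The difference is easiest to see with a toy balance example (an illustrative sketch, not tied to any particular API):

# Toy example: setting a value is idempotent, incrementing is not.
balance = 0

def set_balance(value):
    global balance
    balance = value        # idempotent: same end state no matter how often it runs

def add_to_balance(amount):
    global balance
    balance += amount      # not idempotent: each retry changes the state again

set_balance(100); set_balance(100)      # balance == 100, retry was harmless
add_to_balance(50); add_to_balance(50)  # balance == 200, the retry doubled the effect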

The standard fix is an idempotency key: the client provides a unique key with each request, and the server checks whether it has already seen that key.


How it works:

  1. Client generates unique idempotency key
  2. Server checks if key exists
  3. If new → Execute operation, store result with key
  4. If exists → Return cached result (don’t execute)
"idempotency.py
from typing import Optional, Dict, Any
import hashlib
import json
from datetime import datetime, timedelta
from functools import wraps
class IdempotencyStore:
"""Stores idempotency keys and results"""
def __init__(self):
self.store: Dict[str, Dict[str, Any]] = {}
def get(self, key: str) -> Optional[Dict[str, Any]]:
"""Get cached result for key"""
entry = self.store.get(key)
if entry:
# Check if expired
if datetime.now() > entry['expires_at']:
del self.store[key]
return None
return entry['result']
return None
def set(self, key: str, result: Dict[str, Any], ttl_seconds: int = 3600):
"""Store result with key"""
self.store[key] = {
'result': result,
'expires_at': datetime.now() + timedelta(seconds=ttl_seconds)
}
class IdempotentHandler:
"""Handles idempotent operations"""
def __init__(self, idempotency_store: IdempotencyStore):
self.store = idempotency_store
def handle_request(self, idempotency_key: str, operation: callable, *args, **kwargs):
"""Handle request with idempotency"""
# Check if key exists
cached_result = self.store.get(idempotency_key)
if cached_result:
print(f"Idempotency key {idempotency_key} seen before. Returning cached result.")
return cached_result
# Execute operation
try:
result = operation(*args, **kwargs)
# Store result
self.store.set(idempotency_key, result)
return result
except Exception as e:
# Don't store failed operations
raise
# Decorator for idempotent endpoints
def idempotent(idempotency_store: IdempotencyStore):
"""Decorator for idempotent endpoints"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
# Get idempotency key from header
idempotency_key = request.headers.get('Idempotency-Key')
if not idempotency_key:
# Generate from request content
content = json.dumps(request.json or {})
idempotency_key = hashlib.sha256(content.encode()).hexdigest()
handler = IdempotentHandler(idempotency_store)
return handler.handle_request(
idempotency_key,
func,
request,
*args,
**kwargs
)
return wrapper
return decorator
# Usage
store = IdempotencyStore()
@idempotent(store)
def create_order(request):
"""Create order endpoint"""
order_data = request.json
# Create order...
return {'order_id': 123, 'status': 'created'}
# Client sends request with idempotency key
# POST /orders
# Idempotency-Key: abc123
# { "user_id": 456, "amount": 99.99 }

Exactly-once processing guarantees that each message is handled once, even when the transport delivers it more than once. It combines three building blocks:

  1. Idempotent Operations - Safe to retry
  2. Deduplication - Track processed messages
  3. Distributed Coordination - Handle duplicates across servers
"deduplication.py
from typing import Set
import hashlib
import json
from datetime import datetime, timedelta
class MessageDeduplicator:
"""Deduplicates messages"""
def __init__(self, ttl_seconds: int = 3600):
self.processed_messages: Set[str] = set()
self.message_timestamps: Dict[str, datetime] = {}
self.ttl_seconds = ttl_seconds
def generate_message_id(self, message: Dict[str, Any]) -> str:
"""Generate unique ID for message"""
# Use message content + source
content = json.dumps(message, sort_keys=True)
return hashlib.sha256(content.encode()).hexdigest()
def is_duplicate(self, message: Dict[str, Any]) -> bool:
"""Check if message is duplicate"""
message_id = self.generate_message_id(message)
# Clean expired entries
self._clean_expired()
# Check if seen
if message_id in self.processed_messages:
return True
# Mark as processed
self.processed_messages.add(message_id)
self.message_timestamps[message_id] = datetime.now()
return False
def _clean_expired(self):
"""Remove expired entries"""
now = datetime.now()
expired = [
msg_id for msg_id, timestamp in self.message_timestamps.items()
if (now - timestamp).total_seconds() > self.ttl_seconds
]
for msg_id in expired:
self.processed_messages.discard(msg_id)
del self.message_timestamps[msg_id]
class ExactlyOnceProcessor:
"""Processes messages exactly once"""
def __init__(self, deduplicator: MessageDeduplicator):
self.deduplicator = deduplicator
def process(self, message: Dict[str, Any], handler: callable):
"""Process message exactly once"""
# Check if duplicate
if self.deduplicator.is_duplicate(message):
print("Duplicate message detected. Skipping.")
return None
# Process message (idempotent handler)
try:
result = handler(message)
return result
except Exception as e:
# On error, remove from processed set (allow retry)
message_id = self.deduplicator.generate_message_id(message)
self.deduplicator.processed_messages.discard(message_id)
raise
# Usage
deduplicator = MessageDeduplicator(ttl_seconds=3600)
processor = ExactlyOnceProcessor(deduplicator)
def handle_order_message(message):
"""Idempotent message handler"""
order_id = message['order_id']
# Process order (idempotent operation)
return process_order(order_id)
# Process message
result = processor.process({
'order_id': 123,
'user_id': 456,
'amount': 99.99
}, handle_order_message)
# Retry same message (will be skipped)
result = processor.process({
'order_id': 123,
'user_id': 456,
'amount': 99.99
}, handle_order_message) # Returns None (duplicate)
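
The deduplicator pays off when the consumer sits behind an at-least-once queue. A rough sketch, assuming a hypothetical queue client with receive/ack methods (not part of the example above):

# Plugging the processor into an at-least-once consumer loop (hypothetical queue API).
def consume_forever(queue, processor, handler):
    while True:
        message = queue.receive()       # the broker may deliver the same message twice
        try:
            processor.process(message, handler)
            queue.ack(message)          # acknowledge only after successful processing
        except Exception:
            # Leave the message unacknowledged; the broker will redeliver it
            pass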

When multiple servers handle requests, an in-memory store is not enough; use shared storage (Redis or a database):

"distributed_idempotency.py
import redis
import json
import hashlib
class DistributedIdempotencyStore:
"""Distributed idempotency store using Redis"""
def __init__(self, redis_client: redis.Redis):
self.redis = redis_client
def check_and_set(self, key: str, operation: callable, ttl_seconds: int = 3600):
"""Check if key exists, execute if not"""
# Try to set key (only if not exists)
acquired = self.redis.set(
f"idempotency:{key}",
"processing",
nx=True, # Only set if not exists
ex=ttl_seconds
)
if not acquired:
# Key exists - get cached result
cached = self.redis.get(f"idempotency:result:{key}")
if cached:
return json.loads(cached)
else:
# Still processing, wait or return error
raise Exception("Operation in progress")
try:
# Execute operation
result = operation()
# Store result
self.redis.setex(
f"idempotency:result:{key}",
ttl_seconds,
json.dumps(result)
)
# Mark as completed
self.redis.setex(
f"idempotency:{key}",
ttl_seconds,
"completed"
)
return result
except Exception as e:
# On error, remove key (allow retry)
self.redis.delete(f"idempotency:{key}")
raise
# Usage
redis_client = redis.Redis(host='localhost', port=6379)
store = DistributedIdempotencyStore(redis_client)
def create_order(order_data):
# Create order logic...
return {'order_id': 123}
# Process with idempotency
idempotency_key = "order-123-abc"
result = store.check_and_set(
idempotency_key,
lambda: create_order(order_data),
ttl_seconds=3600
)

Always provide idempotency keys for mutating operations:

POST /orders
Idempotency-Key: abc123-xyz789
Content-Type: application/json
{ "user_id": 456, "amount": 99.99 }

Client generates unique key:

  • UUID
  • Timestamp + random
  • Hash of request content
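
For illustration, the three approaches look like this in Python:

# Three common ways to generate an idempotency key (illustrative only).
import uuid
import time
import random
import hashlib
import json

# 1. UUID
key_uuid = str(uuid.uuid4())

# 2. Timestamp + random
key_ts = f"{int(time.time() * 1000)}-{random.randint(0, 999999)}"

# 3. Hash of request content (same payload -> same key)
payload = {'user_id': 456, 'amount': 99.99}
key_hash = hashlib.sha256(json.dumps(payload, sort_keys=True).encode()).hexdigest()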

Cache results with keys:

  • Store successful results
  • Don’t store failed results (allow retry)
  • Set TTL (e.g., 24 hours)

Always check key before processing:

  • If exists → Return cached result
  • If new → Process and cache

Design operations to be idempotent:

  • Use “set” instead of “add”
  • Check state before modifying
  • Use conditional updates
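
Sketches of the three patterns (the account and order names are hypothetical):

# 1. "Set" instead of "add" - absolute values are safe to replay
def apply_payment(account, new_balance):
    account['balance'] = new_balance      # idempotent
    # account['balance'] += amount        # NOT idempotent

# 2. Check state before modifying
def ship_order(order):
    if order['status'] == 'shipped':
        return order                      # already done, nothing to repeat
    order['status'] = 'shipped'
    return order

# 3. Conditional update (compare-and-set), e.g. in SQL:
# UPDATE orders SET status = 'shipped'
# WHERE id = 123 AND status = 'paid';
# -- a retry matches 0 rows, so the transition happens at most once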

🔄 Idempotency is Critical

Make operations safe to retry. Essential for distributed systems, retries, and at-least-once delivery.

🔑 Idempotency Keys

Use unique keys to detect duplicates. Check before processing. Cache results. Return cached on duplicate.

✅ Exactly-Once

Exactly-once requires idempotency + deduplication. Track processed messages. Use distributed storage for multiple servers.

💾 Store Results

Cache successful results with keys. Don’t cache failures. Set TTL. Check before processing.