Comprehensive guide to Python's functools module including lru_cache, cache, reduce, partial, wraps, and other higher-order function utilities for efficient Python programming.
Expert guidance on Python's `functools` module for higher-order functions and operations on callable objects. Master caching, decorators, function wrapping, and functional programming utilities.
Provides comprehensive knowledge of the Python `functools` module, covering all major utilities including `lru_cache`, `cache`, `cached_property`, `partial`, `reduce`, `wraps`, `total_ordering`, and more. Helps you optimize code performance, create reusable decorators, and apply functional programming patterns.
When the user asks about Python functools or related topics, follow these steps:
Determine which functools feature the user needs:
#### For Caching (`@cache` and `@lru_cache`)
```python
from functools import cache, lru_cache

@cache  # unbounded memoization (Python 3.9+); all arguments must be hashable
def fibonacci(n):
    return n if n < 2 else fibonacci(n-1) + fibonacci(n-2)

@lru_cache(maxsize=128)
def expensive_computation(x, y):
    # Cache saves results for up to 128 most recent calls
    return complex_calculation(x, y)

print(expensive_computation.cache_info())  # hits, misses, maxsize, currsize
expensive_computation.cache_clear()        # discard every cached result
```
**Key Points:** `@cache` (Python 3.9+) is an unbounded cache; `@lru_cache(maxsize=N)` evicts the least-recently-used entry once full. All arguments must be hashable. Use `cache_info()` to inspect hit/miss statistics and `cache_clear()` to reset.
#### For Cached Properties (`@cached_property`)
```python
from functools import cached_property
import statistics

class DataSet:
    """Immutable numeric sample with lazily computed, cached statistics."""

    def __init__(self, sequence_of_numbers):
        self._data = tuple(sequence_of_numbers)

    @cached_property
    def stdev(self):
        # Computed once, then cached as a normal instance attribute
        return statistics.stdev(self._data)

    @cached_property
    def mean(self):
        return statistics.mean(self._data)

ds = DataSet([1, 2, 3, 4, 5])
print(ds.stdev)  # Computed and cached
print(ds.stdev)  # Returns cached value
del ds.stdev     # Clear cache, will recompute on next access
```
**Key Points:** `@cached_property` computes the value on first access and stores it in the instance's `__dict__`, so later accesses are plain attribute lookups; `del obj.attr` discards the cached value so it recomputes next time. Instances must have a mutable `__dict__`.
#### For Partial Application (`partial`)
```python
from functools import partial

def power(base, exponent):
    return base ** exponent

# Freeze the keyword argument to get narrower, reusable callables.
square = partial(power, exponent=2)
cube = partial(power, exponent=3)
print(square(5))  # 25
print(cube(5))    # 125

# partial pairs well with operator functions and map().
from operator import mul
double = partial(mul, 2)
numbers = [1, 2, 3, 4, 5]
doubled = list(map(double, numbers))  # [2, 4, 6, 8, 10]
```
**Key Points:** `partial` freezes some positional and/or keyword arguments, producing a new callable with a narrower signature — handy with `map`, callbacks, and `operator` functions. The frozen arguments can still be overridden at call time if passed by keyword.
#### For Decorator Creation (`@wraps`)
```python
from functools import wraps

def my_decorator(func):
    """Log entry and exit around every call to *func*."""
    @wraps(func)  # Preserves original function metadata
    def wrapper(*args, **kwargs):
        print(f"Calling {func.__name__}")
        result = func(*args, **kwargs)
        print(f"Finished {func.__name__}")
        return result
    return wrapper

@my_decorator
def greet(name):
    """Greet someone by name."""
    return f"Hello, {name}!"

print(greet.__name__)  # 'greet' (not 'wrapper')
print(greet.__doc__)   # 'Greet someone by name.'
```
**Key Points:** Always apply `@wraps(func)` to the inner wrapper so the decorated function keeps its `__name__`, `__doc__`, signature metadata, and `__wrapped__`; without it, introspection, help(), and debugging all see `wrapper` instead of the real function.
#### For Reduce Operations (`reduce`)
```python
from functools import reduce
from operator import add, mul

numbers = [1, 2, 3, 4, 5]
total = reduce(add, numbers)            # 15
product = reduce(mul, numbers)          # 120
with_start = reduce(add, numbers, 100)  # 115 -- the initializer seeds the fold
largest = reduce(lambda a, b: a if a > b else b, numbers)  # 5
```
**Key Points:** `reduce(fn, iterable[, initializer])` folds the iterable left to right into a single value; pass an initializer to handle empty iterables safely and to seed the accumulation. Prefer builtins (`sum`, `max`, `min`, `math.prod`) when one exists — they are clearer and faster.
#### For Comparison Methods (`@total_ordering`)
```python
from functools import total_ordering

@total_ordering
class Student:
    """Orderable by grade; total_ordering derives the remaining comparisons."""

    def __init__(self, name, grade):
        self.name = name
        self.grade = grade

    def __eq__(self, other):
        if not isinstance(other, Student):
            return NotImplemented  # let Python try the other operand
        return self.grade == other.grade

    def __lt__(self, other):
        if not isinstance(other, Student):
            return NotImplemented
        return self.grade < other.grade
    # __le__, __gt__, __ge__ automatically generated

students = [Student("Alice", 85), Student("Bob", 92), Student("Charlie", 78)]
students.sort()  # Works with just __eq__ and __lt__ defined
```
**Key Points:** `@total_ordering` fills in the missing rich comparisons from `__eq__` plus any one of `__lt__`/`__le__`/`__gt__`/`__ge__`. The generated methods are slightly slower than handwritten ones; defining `__eq__` also resets `__hash__` to None unless you set it explicitly.
#### For Single Dispatch (`@singledispatch`)
```python
from functools import singledispatch

@singledispatch
def process(value):
    """Default implementation."""
    return f"Processing {type(value).__name__}: {value}"

@process.register          # dispatch type inferred from the annotation
def _(value: int):
    return f"Integer: {value * 2}"

@process.register
def _(value: str):
    return f"String length: {len(value)}"

@process.register(list)    # or pass the type explicitly
def _(value):
    return f"List with {len(value)} items"

print(process(42))         # "Integer: 84"
print(process("hello"))    # "String length: 5"
print(process([1, 2, 3]))  # "List with 3 items"
```
**Key Points:** `@singledispatch` dispatches on the type of the *first* positional argument only; register implementations via a type annotation or `@fn.register(Type)`. Use `functools.singledispatchmethod` for instance methods, where dispatch should skip `self`.
**When to Use Caching:** cache pure functions whose results depend only on their (hashable) arguments and that are expensive to compute or fetch. Avoid caching functions with side effects, time-dependent results, or unbounded, never-repeating argument spaces.
**Cache Size Guidelines:**
```python
from functools import cache, lru_cache

@cache  # unbounded: only safe when the key space is small and fixed
def lookup_config(key):
    return expensive_config_load(key)

@lru_cache(maxsize=128)  # Good default: bounded, evicts least-recently-used
def api_request(endpoint):
    return requests.get(endpoint).json()

@lru_cache(maxsize=None)  # equivalent to @cache; fine for bounded recursion
def factorial(n):
    return n * factorial(n-1) if n else 1
```
**Common Patterns:**
```python
from functools import lru_cache, wraps

# Pattern 1: cache an expensive method, clearing it whenever the data changes.
# NOTE: @lru_cache on an instance method keys on -- and holds a reference to --
# self, keeping the instance alive for the cache's lifetime (see the
# memory-leak issue below before using this on short-lived objects).
class DataCache:
    def __init__(self):
        self._data = []

    @lru_cache(maxsize=1)
    def expensive_analysis(self):
        return analyze(self._data)

    def update_data(self, new_data):
        self._data = new_data
        self.expensive_analysis.cache_clear()  # invalidate the stale result

# Pattern 2: stack decorators -- @wraps keeps metadata, @lru_cache memoizes.
def logged(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        print(f"Calling {func.__name__}")
        return func(*args, **kwargs)
    return wrapper

@logged                    # logging sits above the cache: every call is logged
@lru_cache(maxsize=128)    # cache hits skip recomputation but not the log
def compute(x):
    return x ** 2

# Pattern 3: per-instance cached result via a method-level cache.
class Analyzer:
    def __init__(self, data):
        self._data = data

    @lru_cache(maxsize=1)
    def analyze(self):
        # self is included in cache key
        return expensive_analysis(self._data)
```
**Issue: "TypeError: unhashable type"**
```python
from functools import lru_cache

# Problem: lru_cache hashes the arguments *before* the body runs, so passing
# a list raises TypeError -- converting inside the function cannot help.
@lru_cache
def process_bad(items):
    return sum(items)

# process_bad([1, 2, 3])  # TypeError: unhashable type: 'list'

# Fix: convert to a hashable tuple before the cached function is called.
@lru_cache
def _process_cached(items):
    return sum(items)

def process(items):
    return _process_cached(tuple(items))
```
**Issue: Memory leaks with instance methods**
```python
from functools import lru_cache

# BAD: the cache keys on self, so every Worker instance (and its cached
# results) is kept alive for the lifetime of the cache -- a memory leak.
class Worker:
    @lru_cache(maxsize=128)
    def process(self, data):
        return expensive_work(data)

# GOOD: cache at module level on hashable state only; instances stay
# garbage-collectable because the cache never references self.
class Worker:
    def __init__(self, config):
        self._config = config  # must be hashable to serve as a cache key

    def process(self, data):
        return _process_cached(self._config, data)

@lru_cache(maxsize=128)
def _process_cached(config, data):
    return expensive_work(config, data)
```
Always include complete, runnable code examples. The examples below demonstrate common real-world uses of the main `functools` utilities:
**All functools utilities:**
```python
from functools import cache

def longest_increasing_subsequence(nums):
    """Return the length of the longest strictly increasing subsequence.

    Dynamic programming with automatic memoization: lis_ending_at(i) is
    cached, so each subproblem is computed once (O(n^2) overall). The input
    is frozen to a tuple because cache keys must be hashable.
    """
    nums = tuple(nums)
    if not nums:
        return 0

    @cache
    def lis_ending_at(i):
        # Longest increasing subsequence that ends exactly at index i.
        best = 1
        for j in range(i):
            if nums[j] < nums[i]:
                best = max(best, 1 + lis_ending_at(j))
        return best

    return max(lis_ending_at(i) for i in range(len(nums)))
```
```python
from functools import wraps
import time

def retry(max_attempts=3, delay=1):
    """Decorator factory: retry the wrapped function on any exception.

    Tries up to *max_attempts* times, sleeping *delay* seconds between
    attempts; the final failure's exception propagates to the caller.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(max_attempts):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    if attempt == max_attempts - 1:
                        raise  # out of attempts -- surface the real error
                    time.sleep(delay)
        return wrapper
    return decorator

@retry(max_attempts=5, delay=2)
def unreliable_api_call():
    # Will retry up to 5 times with 2s delay
    return requests.get("https://api.example.com/data")
```
```python
from functools import singledispatch
import json

@singledispatch
def serialize(value):
    """Serialize value to JSON-compatible format."""
    return str(value)  # fallback: stringify unknown types

@serialize.register
def _(value: dict):
    return {k: serialize(v) for k, v in value.items()}

@serialize.register
def _(value: list):
    return [serialize(item) for item in value]

@serialize.register(int)     # stacked registrations: one identity
@serialize.register(float)   # implementation shared by all three
@serialize.register(str)     # already-JSON-safe scalar types
def _(value):
    return value

data = {"users": [{"name": "Alice", "age": 30}]}
print(json.dumps(serialize(data)))
```
Leave a review
No reviews yet. Be the first to review this skill!
# Download SKILL.md from killerskills.ai/api/skills/python-functools-reference/raw