perf-workshop/scenario2-memoization/config_validator.py
2026-01-10 19:50:13 +05:30

65 lines
1.6 KiB
Python

#!/usr/bin/env python3
"""
Config Validator Comparison
===========================
Runs all three validation strategies and compares performance.
Run generate_events.py first to create test data.
Usage:
python3 generate_events.py 100000
python3 config_validator.py
"""
import time
from common import load_events
import config_validator_naive
import config_validator_memoized
import config_validator_precomputed
ITERATIONS = 5  # timed runs per strategy; benchmark() reports the mean over these
def benchmark(name, func, events, setup=None, iterations=None):
    """Time ``func(events)`` over several runs and print the mean duration.

    Args:
        name: Label used in the printed report line.
        func: Callable invoked as ``func(events)`` on every iteration.
        events: Dataset passed through to ``func`` unchanged.
        setup: Optional zero-argument callable run once, before the first
            timed iteration only (e.g. an ``lru_cache`` ``cache_clear`` so the
            first run starts cold while later runs stay warm).
        iterations: Number of timed runs; defaults to the module-level
            ``ITERATIONS`` constant when not given (backward compatible).

    Returns:
        float: Average wall-clock seconds per run across all iterations.
    """
    if iterations is None:
        iterations = ITERATIONS
    # Run setup once up front — equivalent to the old "only on run 0" check,
    # but keeps the timed loop body free of the conditional.
    if setup:
        setup()
    times = []
    for _ in range(iterations):
        start = time.perf_counter()
        result = func(events)
        times.append(time.perf_counter() - start)
    avg = sum(times) / len(times)
    print(f"{name:20s}: {avg:.3f}s avg (valid: {result})")
    return avg
def main():
    """Load the shared event dataset, benchmark every validation strategy,
    and print pairwise speedup ratios (values > 1.0 mean the second
    strategy named in the label is faster)."""
    events = load_events()
    print(f"Processing {len(events)} events, {ITERATIONS} iterations each...")
    print()

    naive_avg = benchmark("Naive", config_validator_naive.process_events, events)
    # Clear the memo cache first so the memoized strategy's opening run is cold.
    memo_avg = benchmark(
        "Memoized",
        config_validator_memoized.process_events,
        events,
        setup=config_validator_memoized.validate_rule_cached.cache_clear,
    )
    pre_avg = benchmark(
        "Precomputed", config_validator_precomputed.process_events, events
    )

    print()
    print(f"Speedup (memo vs naive): {naive_avg/memo_avg:.1f}x")
    print(f"Speedup (precomp vs naive): {naive_avg/pre_avg:.1f}x")
    print(f"Speedup (precomp vs memo): {memo_avg/pre_avg:.1f}x")


if __name__ == "__main__":
    main()