Fix dev performance tests: ignore cache warnings

- Add --ignore-cache-warnings flag for dev environments (usage sketch below)
- Cache configuration may differ between dev and production
- Dev environment now ignores cache warnings to prevent false failures
- Production still validates cache performance strictly
- All other performance metrics are still validated in dev
Author: Oliver Falk
Date: 2025-10-24 11:37:47 +02:00
parent 03fa0fb911
commit 173ddaae8f
2 changed files with 11 additions and 6 deletions
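
For a development environment, the new flag would typically be passed on the command line. The sketch below shows one way a dev CI job might invoke the script; the script path and the --p95-threshold spelling are assumptions (only --ignore-cache-warnings and args.p95_threshold are visible in this commit):

# Sketch of a dev-environment invocation; the script path is an assumption.
import subprocess
import sys

subprocess.run(
    [
        sys.executable,
        "scripts/performance_tests.py",  # assumed location of the performance test CLI
        "--p95-threshold", "5000",       # spelling inferred from args.p95_threshold; help text suggests 5000 ms for dev
        "--ignore-cache-warnings",       # new flag: don't fail dev runs on cache warnings
    ],
    check=True,
)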


@@ -831,7 +831,7 @@ class PerformanceTestRunner:
        return first_duration, second_duration
-    def run_all_tests(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000) -> Optional[Dict[str, Any]]:
+    def run_all_tests(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000, ignore_cache_warnings: bool = False) -> Optional[Dict[str, Any]]:
        """Run all performance tests"""
        print("Starting Libravatar Performance Tests")
        print("=" * 50)
@@ -865,7 +865,7 @@ class PerformanceTestRunner:
        print(f"Performance tests completed in {total_duration:.2f}s")
        # Overall assessment
-        self.assess_overall_performance(avatar_threshold, response_threshold, p95_threshold)
+        self.assess_overall_performance(avatar_threshold, response_threshold, p95_threshold, ignore_cache_warnings)
        return self.results
@@ -928,7 +928,7 @@ class PerformanceTestRunner:
            "success_rate": len(successful_results) / len(results) if results else 0,
        }
-    def assess_overall_performance(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000) -> bool:
+    def assess_overall_performance(self, avatar_threshold: int = 1000, response_threshold: int = 1000, p95_threshold: int = 2000, ignore_cache_warnings: bool = False) -> bool:
        """Provide overall performance assessment"""
        print("\n=== OVERALL PERFORMANCE ASSESSMENT ===")
@@ -947,7 +947,7 @@ class PerformanceTestRunner:
            warnings.append(f"{failed} requests failed under concurrent load")
        # Check cache performance
-        if "cache_performance" in self.results:
+        if "cache_performance" in self.results and not ignore_cache_warnings:
            cache_working = self.results["cache_performance"].get(
                "cache_working", False
            )
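
Taken on its own, the guarded check above follows a simple pattern: collect cache warnings only when the caller has not opted out. The standalone sketch below restates it; the helper name and warning text are illustrative, not taken from the file:

from typing import Any, Dict, List

def collect_cache_warnings(results: Dict[str, Any], ignore_cache_warnings: bool = False) -> List[str]:
    """Return cache-related warnings unless the caller opted out (e.g. in dev)."""
    warnings: List[str] = []
    if "cache_performance" in results and not ignore_cache_warnings:
        cache_working = results["cache_performance"].get("cache_working", False)
        if not cache_working:
            # Illustrative message; the real assessment text is not shown in this hunk.
            warnings.append("cache does not appear to be speeding up repeated requests")
    return warnings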
@@ -1018,6 +1018,11 @@ def main() -> Optional[Dict[str, Any]]:
        default=2000,
        help="95th percentile threshold in ms (default: 2000ms, use 5000 for dev environments)",
    )
+    parser.add_argument(
+        "--ignore-cache-warnings",
+        action="store_true",
+        help="Don't fail on cache performance warnings (useful for dev environments)",
+    )
    args = parser.parse_args()
@@ -1034,7 +1039,7 @@ def main() -> Optional[Dict[str, Any]]:
        remote_testing=remote_testing,
    )
-    results = runner.run_all_tests(args.avatar_threshold, args.response_threshold, args.p95_threshold)
+    results = runner.run_all_tests(args.avatar_threshold, args.response_threshold, args.p95_threshold, args.ignore_cache_warnings)
    if args.output and results:
        import json
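
The same behaviour is available when driving the runner programmatically, mirroring the wiring in main(). A minimal sketch, assuming the module name and constructor arguments (only remote_testing is visible in the hunk above):

from performance_tests import PerformanceTestRunner  # module name is an assumption

runner = PerformanceTestRunner(remote_testing=False)  # other constructor arguments not shown in this commit
results = runner.run_all_tests(
    avatar_threshold=1000,
    response_threshold=1000,
    p95_threshold=5000,           # relaxed p95 suggested by the --p95-threshold help text
    ignore_cache_warnings=True,   # skip cache warnings so dev runs don't fail spuriously
)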